diff --git a/.github/workflows/smoketest.yml b/.github/workflows/smoketest.yml
index c1c97e0455f..a309e43e395 100644
--- a/.github/workflows/smoketest.yml
+++ b/.github/workflows/smoketest.yml
@@ -31,6 +31,8 @@ name: smoketest
 env:
   cacheversion: 3
   VCPKG_BINARY_SOURCES: "clear;nuget,GitHub,read"
+  OS_DEPENDENCIES: "bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk
+    r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake"
 
 on:
   pull_request:
@@ -50,7 +52,7 @@ on:
 jobs:
   check-skip:
     # continue-on-error: true # Uncomment once integration is finished
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     # Map a step output to a job output
     outputs:
       platform: ${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') || steps.skip_check.outputs.platform }}
@@ -66,7 +68,7 @@ jobs:
     name: Build platform and regress setup
     needs: check-skip
     if: ${{ needs.check-skip.outputs.platform || needs.check-skip.outputs.eclwatch }}
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     timeout-minutes: 150 # the build step has its own timeout, however the job can sometimes take time to download dependencies
     outputs:
       cache-state: ${{ steps.check-cache.outputs.state }}
@@ -227,19 +229,14 @@ jobs:
            fi
          fi
 
-      - name: Dependencies20.04
+      - name: Dependencies22.04
        if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed
        run: |
          sudo apt-get update
-          sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
-            zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
-            libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
-            default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
-            libssl-dev xmlstarlet
-
-          curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
-          sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
-          sudo apt-get update -y
+          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
+          sudo apt-get install -y xmlstarlet
+
+          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
 
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
@@ -397,7 +394,7 @@ jobs:
     needs: build-and-setup
     if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.platform }}
     timeout-minutes: 60 # each matrix step has its own timeout, however the job can sometimes take time to download dependencies
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     strategy:
       fail-fast: true
       matrix:
@@ -434,15 +431,10 @@ jobs:
      - name: Prerequisites
        run: |
          sudo apt-get update
-          sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
-            zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
-            libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
-            default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
-            libssl-dev gdb
-
-          curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
-          sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
-          sudo apt-get update -y
+          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
+          sudo apt-get install -y gdb
+
+          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
 
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
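Note: the workflow hunks above (and the three below) collapse four copies of the same `apt-get` package list into the single `OS_DEPENDENCIES` variable defined once at workflow level. A minimal standalone sketch of that pattern follows; the package list and job name are illustrative only, not taken from this patch:

```yaml
# Minimal sketch of the shared-dependency-list pattern used in this patch.
# Package list and job name are illustrative, not part of the patch.
name: deps-pattern
on:
  workflow_dispatch:
env:
  OS_DEPENDENCIES: "bison flex build-essential"
jobs:
  install:
    runs-on: ubuntu-22.04
    steps:
      - name: Install shared dependency list
        run: |
          sudo apt-get update
          # ${{ env.OS_DEPENDENCIES }} expands to the space-separated package list
          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
```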
@@ -491,7 +483,7 @@ jobs:
     needs: build-and-setup
     if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' }}
     timeout-minutes: 30 # the unittests run step has its own timeout, however the job can sometimes take time to download dependencies
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
      - uses: actions/download-artifact@v2
        with:
@@ -500,15 +492,10 @@ jobs:
      - name: Prerequisites
        run: |
          sudo apt-get update
-          sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
-            zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
-            libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
-            default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
-            libssl-dev gdb
-
-          curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
-          sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
-          sudo apt-get update -y
+          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
+          sudo apt-get install -y gdb
+
+          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
 
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
@@ -527,7 +514,7 @@ jobs:
     needs: build-and-setup
     if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.eclwatch }}
     timeout-minutes: 30 # the ui-tests run step has its own timeout, however the job can sometimes take time to download dependencies
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
      - uses: actions/download-artifact@v2
        with:
@@ -558,15 +545,10 @@ jobs:
          sudo apt-get install -y git wget net-tools
          sudo apt-get install -y tzdata unzip xvfb libxi6
          sudo apt-get install -y default-jdk
-          sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
-            zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
-            libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
-            default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
-            libssl-dev gdb
-
-          curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
-          sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
-          sudo apt-get update -y
+          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
+          sudo apt-get install -y gdb
+
+          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
 
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
diff --git a/dali/base/dafdesc.cpp b/dali/base/dafdesc.cpp
index a7ecd157f7c..41ff97693de 100644
--- a/dali/base/dafdesc.cpp
+++ b/dali/base/dafdesc.cpp
@@ -3659,6 +3659,20 @@ bool getDefaultSpillPlane(StringBuffer &ret)
 #endif
 }
 
+bool getDefaultIndexBuildStoragePlane(StringBuffer &ret)
+{
+#ifdef _CONTAINERIZED
+    if (getComponentConfigSP()->getProp("@indexBuildPlane", ret))
+        return true;
+    else if (getGlobalConfigSP()->getProp("storage/@indexBuildPlane", ret))
+        return true;
+    else
+        return getDefaultStoragePlane(ret);
+#else
+    return false;
+#endif
+}
+
 //---------------------------------------------------------------------------------------------------------------------
 
 static bool isAccessible(const IPropertyTree * xml)
diff --git a/dali/base/dafdesc.hpp b/dali/base/dafdesc.hpp
index 345bb2efd09..2238875d489 100644
--- a/dali/base/dafdesc.hpp
+++ b/dali/base/dafdesc.hpp
@@ -403,6 +403,7 @@ extern da_decl bool setReplicateDir(const char *name,StringBuffer &out, bool isr
 extern da_decl void initializeStorageGroups(bool createPlanesFromGroups);
 extern da_decl bool getDefaultStoragePlane(StringBuffer &ret);
 extern da_decl bool getDefaultSpillPlane(StringBuffer &ret);
+extern da_decl bool getDefaultIndexBuildStoragePlane(StringBuffer &ret);
 extern da_decl IStoragePlane * getDataStoragePlane(const char * name, bool required);
 extern da_decl IStoragePlane * getRemoteStoragePlane(const char * name, bool required);
 extern da_decl IStoragePlane * createStoragePlane(IPropertyTree *meta);
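Note: the new getDefaultIndexBuildStoragePlane() resolves the index-build plane in three steps — the component's own @indexBuildPlane, then the global storage/@indexBuildPlane, then the ordinary default storage plane. A sketch of the global form in a values.yaml follows; the plane names and prefixes are illustrative, not taken from this patch:

```yaml
# Illustrative values.yaml fragment: a global default plane for index builds.
# Plane names ("data", "indexdata") and prefixes are examples only.
storage:
  indexBuildPlane: indexdata   # read by the storage/@indexBuildPlane lookup above
  planes:
    - name: data
      prefix: "/var/lib/HPCCSystems/hpcc-data"
      category: data
    - name: indexdata
      prefix: "/var/lib/HPCCSystems/index-data"
      category: data
```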
diff --git a/ecl/hthor/hthor.cpp b/ecl/hthor/hthor.cpp
index e06be9865dc..7dd3b43417b 100644
--- a/ecl/hthor/hthor.cpp
+++ b/ecl/hthor/hthor.cpp
@@ -351,11 +351,14 @@ class CHThorClusterWriteHandler : public ClusterWriteHandler
     }
 };
 
-ClusterWriteHandler *createClusterWriteHandler(IAgentContext &agent, IHThorIndexWriteArg *iwHelper, IHThorDiskWriteArg *dwHelper, const char * lfn, StringAttr &fn, bool extend)
+ClusterWriteHandler *createClusterWriteHandler(IAgentContext &agent, IHThorIndexWriteArg *iwHelper, IHThorDiskWriteArg *dwHelper, const char * lfn, StringAttr &fn, bool extend, bool isIndex)
 {
     //In the containerized system, the default data plane for this component is in the configuration
     StringBuffer defaultCluster;
-    getDefaultStoragePlane(defaultCluster);
+    if (isIndex)
+        getDefaultIndexBuildStoragePlane(defaultCluster);
+    else
+        getDefaultStoragePlane(defaultCluster);
     Owned<ClusterWriteHandler> clusterHandler;
     unsigned clusterIdx = 0;
     while(true)
@@ -536,7 +539,7 @@ void CHThorDiskWriteActivity::resolve()
                 throw MakeStringException(99, "Could not resolve DFS Logical file %s", lfn.str());
         }
 
-        clusterHandler.setown(createClusterWriteHandler(agent, NULL, &helper, dfsLogicalName.get(), filename, extend));
+        clusterHandler.setown(createClusterWriteHandler(agent, NULL, &helper, dfsLogicalName.get(), filename, extend, false));
         }
     }
     else
@@ -1117,7 +1120,7 @@ CHThorIndexWriteActivity::CHThorIndexWriteActivity(IAgentContext &_agent, unsign
                 throw MakeStringException(99, "Cannot write %s, file already exists (missing OVERWRITE attribute?)", lfn.str());
         }
     }
-    clusterHandler.setown(createClusterWriteHandler(agent, &helper, NULL, lfn, filename, false));
+    clusterHandler.setown(createClusterWriteHandler(agent, &helper, NULL, lfn, filename, false, true));
    sizeLimit = agent.queryWorkUnit()->getDebugValueInt64("hthorDiskWriteSizeLimit", defaultHThorDiskWriteSizeLimit);
    defaultNoSeek = agent.queryWorkUnit()->getDebugValueBool("noSeekBuildIndex", isContainerized());
 }
"L'heure d'annulation", + AbortSelectedWorkunits: "Annuler les Workunits selectionées? 'Votre identifiant de connexion sera enregistré pour cette action dans les WU.'", About: "À propos", AboutGraphControl: "À propos de Graph Control", AboutHPCCSystems: "À propos de HPCC Systems®", @@ -111,6 +112,7 @@ export = { Columns: "Colonnes", Command: "Commander", Comment: "Commenter", + CompileCost: "Compiler le coût", Compiled: "Compilé", Compiling: "Compilant", Completed: "Terminé", @@ -233,6 +235,7 @@ export = { Downloads: "Téléchargements", DownloadSelectionAsCSV: "Télécharger la sélection au format CSV", DownloadToCSV: "Télécharger à CSV", + DownloadToCSVNonFlatWarning: "Remarque : le téléchargement de fichiers contenant des ensembles de données imbriqués, comme des données séparées par des virgules, ne peuvent pas être formatées comme prévu", DropZone: "Zone de largage", DueToInctivity: "Vous serez déconnecté de toutes les sessions ECL Watch en 3 minutes en raison de l'inactivité.", Duration: "Duration", @@ -638,6 +641,7 @@ export = { PleaseSelectAUserToAdd: "Veuillez sélectionner un utilisateur à ajouter", Plugins: "Plugins", Pods: "Capsules", + PodsAccessError: "Impossible de récupérer la liste des pods", Port: "Port", Prefix: "Préfixe", PrefixPlaceholder: "nom de fichier {: longueur}, taille de fichier {: [B | L] [1-8]}", @@ -870,6 +874,7 @@ export = { ThorProcess: "Thor Processus", ThreadID: "Identitfiant de fil", Time: "Temps", + Timeline: "Chronologie", TimeMaxTotalExecuteMinutes: "Temps Maximum Total Exécuter Minutes", TimeMeanTotalExecuteMinutes: "Temps Moyen Total Exécuter Minutes", TimeMinTotalExecuteMinutes: "Temps Minimum Total Exécuter Minutes", diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index e26448ad760..604c5eda07b 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -209,6 +209,9 @@ storage: {{ toYaml $storage.remote | indent 2 }} {{- end }} dataPlane: {{ include "hpcc.getDefaultDataPlane" . }} +{{- if hasKey $storage "indexBuildPlane" }} + indexBuildPlane: {{ $storage.indexBuildPlane }} +{{- end }} planes: {{- /*Generate entries for each data plane (removing the pvc). 
diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl
index e26448ad760..604c5eda07b 100644
--- a/helm/hpcc/templates/_helpers.tpl
+++ b/helm/hpcc/templates/_helpers.tpl
@@ -209,6 +209,9 @@ storage:
 {{ toYaml $storage.remote | indent 2 }}
 {{- end }}
   dataPlane: {{ include "hpcc.getDefaultDataPlane" . }}
+{{- if hasKey $storage "indexBuildPlane" }}
+  indexBuildPlane: {{ $storage.indexBuildPlane }}
+{{- end }}
   planes:
 {{- /*Generate entries for each data plane (removing the pvc). Exclude the planes used for dlls and dali.*/ -}}
 {{- range $plane := $planes }}
@@ -453,12 +456,13 @@ The plane will generate a volume if it matches either an includeLabel or an incl
 
 {{/*
 Check that the data plane name is valid, and report an error if not
-Pass in dict with root, planeName
+Pass in dict with root, planeName and optional contextPrefix
 */}}
 {{- define "hpcc.checkPlaneExists" -}}
 {{- $storage := (.root.Values.storage | default dict) -}}
 {{- $planes := ($storage.planes | default list) -}}
 {{- $name := .planeName -}}
+{{- $ctxMsg := .contextPrefix | default "" -}}
 {{- $matched := dict -}}
 {{- range $plane := $planes -}}
 {{- if not $plane.disabled -}}
@@ -468,7 +472,7 @@ Pass in dict with root, planeName
 {{- end -}}
 {{- end -}}
 {{- if not $matched.ok -}}
-  {{- $_ := fail (printf "Storage plane %s does not exist" $name) -}}
+  {{- $_ := fail (printf "%sStorage plane %s does not exist" $ctxMsg $name) -}}
 {{- end -}}
 {{- end -}}
 
@@ -641,6 +645,10 @@ Check that the storage and spill planes for a component exist
   {{- $search := .me.spillPlane -}}
   {{- include "hpcc.checkValidStoragePlane" (dict "search" $search "root" .root "category" "spill" "type" "storage spill" "for" .me.name) -}}
 {{- end }}
+{{- if (hasKey .me "indexBuildPlane") }}
+  {{- $search := .me.indexBuildPlane -}}
+  {{- include "hpcc.checkValidStoragePlane" (dict "search" $search "root" .root "category" "data" "type" "storage data" "for" .me.name) -}}
+{{- end }}
 {{- end -}}
 
 {{/*
diff --git a/helm/hpcc/templates/_warnings.tpl b/helm/hpcc/templates/_warnings.tpl
index 9cbdf664f94..6ce514aef97 100644
--- a/helm/hpcc/templates/_warnings.tpl
+++ b/helm/hpcc/templates/_warnings.tpl
@@ -33,6 +33,9 @@ Pass in dict with root and warnings
   {{- end -}}
   {{- /* Gather a list of ephemeral and persistant planes */ -}}
   {{- $storage := (.root.Values.storage | default dict) -}}
+  {{- if hasKey $storage "indexBuildPlane" -}}
+    {{- include "hpcc.checkPlaneExists" (dict "root" .root "planeName" $storage.indexBuildPlane "contextPrefix" "indexBuildPlane: ") -}}
+  {{- end -}}
   {{- $match := dict "ephemeral" (list) "persistant" (list) -}}
   {{- $planes := ($storage.planes | default list) -}}
   {{- $searchLabels := list "data" "dali" "sasha" "dll" "lz" -}}
@@ -124,4 +127,16 @@ Pass in dict with root and warnings
     {{- $_ := set $warning "msg" (printf "Default cpu cost rate is being used for %s: %s" ((len $ctx.defaultCpuRateComponents)| plural "component" "components") ($ctx.defaultCpuRateComponents|toStrings)) -}}
     {{- $_ := set $ctx "warnings" (append $ctx.warnings $warning) -}}
   {{- end -}}
+  {{- /* Warn if insecure embed, pipe or extern enabled */ -}}
+  {{- $_ := set $ctx "insecureEclFeature" list -}}
+  {{- range $opt, $value := (pick .root.Values.security.eclSecurity "embedded" "pipe" "extern") -}}
+    {{- if eq $value "allow" -}}
+      {{- $_ := set $ctx "insecureEclFeature" (append $ctx.insecureEclFeature $opt) -}}
+    {{- end -}}
+  {{- end -}}
+  {{- if $ctx.insecureEclFeature -}}
+    {{- $warning := dict "source" "helm" "severity" "warning" -}}
+    {{- $_ := set $warning "msg" (printf "Insecure feature enabled in ecl: %s" $ctx.insecureEclFeature) -}}
+    {{- $_ := set $ctx "warnings" (append $ctx.warnings $warning) -}}
+  {{- end -}}
 {{- end -}}
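Note: the new _warnings.tpl block reports any of the embedded, pipe, or extern options set to "allow" under security.eclSecurity. An illustrative values.yaml fragment that would trigger the warning follows; it assumes "allow"/"deny" as the option values and is not taken from this patch:

```yaml
# Illustrative values.yaml fragment: options set to "allow" are collected by
# the new "Insecure feature enabled in ecl" warning at deploy time.
security:
  eclSecurity:
    embedded: "allow"   # reported by the new warning
    pipe: "allow"       # reported by the new warning
    extern: "deny"      # not "allow", so not reported
```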
"Default plane for index builds" } }, "additionalProperties": false @@ -1415,6 +1419,10 @@ "description": "The default storage plane to write data files to", "type": "string" }, + "indexBuildPlane": { + "description": "The default storage plane to write index files to", + "type": "string" + }, "annotations": { "type": "object", "additionalProperties": { "type": "string" } @@ -1494,6 +1502,10 @@ "description": "The storage plane to write spill files to", "type": "string" }, + "indexBuildPlane": { + "description": "The default storage plane to write index files to", + "type": "string" + }, "resources": { "$ref": "#/definitions/resources" }, @@ -2277,6 +2289,10 @@ "description": "The storage plane to write spill files to", "type": "string" }, + "indexBuildPlane": { + "description": "The default storage plane to write index files to", + "type": "string" + }, "annotations": { "type": "object", "additionalProperties": { "type": "string" } diff --git a/roxie/ccd/ccd.hpp b/roxie/ccd/ccd.hpp index ed68816b5fd..9505636f259 100644 --- a/roxie/ccd/ccd.hpp +++ b/roxie/ccd/ccd.hpp @@ -368,6 +368,7 @@ extern StringBuffer fileNameServiceDali; extern StringBuffer roxieName; #ifdef _CONTAINERIZED extern StringBuffer defaultPlane; +extern StringBuffer defaultIndexBuildPlane; #endif extern bool trapTooManyActiveQueries; extern unsigned maxEmptyLoopIterations; diff --git a/roxie/ccd/ccdfile.cpp b/roxie/ccd/ccdfile.cpp index 3a018ed2f84..feafa9e635e 100644 --- a/roxie/ccd/ccdfile.cpp +++ b/roxie/ccd/ccdfile.cpp @@ -2137,7 +2137,7 @@ static bool getDirectAccessStoragePlanes(StringArray &planes) ILazyFileIO *createPhysicalFile(const char *id, IPartDescriptor *pdesc, IPartDescriptor *remotePDesc, RoxieFileType fileType, int numParts, bool startCopy, unsigned channel) { #ifdef _CONTAINERIZED - const char *myCluster = defaultPlane.str(); + const char *myCluster = (ROXIE_KEY == fileType) ? 
diff --git a/roxie/ccd/ccd.hpp b/roxie/ccd/ccd.hpp
index ed68816b5fd..9505636f259 100644
--- a/roxie/ccd/ccd.hpp
+++ b/roxie/ccd/ccd.hpp
@@ -368,6 +368,7 @@ extern StringBuffer fileNameServiceDali;
 extern StringBuffer roxieName;
 #ifdef _CONTAINERIZED
 extern StringBuffer defaultPlane;
+extern StringBuffer defaultIndexBuildPlane;
 #endif
 extern bool trapTooManyActiveQueries;
 extern unsigned maxEmptyLoopIterations;
diff --git a/roxie/ccd/ccdfile.cpp b/roxie/ccd/ccdfile.cpp
index 3a018ed2f84..feafa9e635e 100644
--- a/roxie/ccd/ccdfile.cpp
+++ b/roxie/ccd/ccdfile.cpp
@@ -2137,7 +2137,7 @@ static bool getDirectAccessStoragePlanes(StringArray &planes)
 ILazyFileIO *createPhysicalFile(const char *id, IPartDescriptor *pdesc, IPartDescriptor *remotePDesc, RoxieFileType fileType, int numParts, bool startCopy, unsigned channel)
 {
 #ifdef _CONTAINERIZED
-    const char *myCluster = defaultPlane.str();
+    const char *myCluster = (ROXIE_KEY == fileType) ? defaultIndexBuildPlane.str() : defaultPlane.str();
 #else
     const char *myCluster = roxieName.str();
 #endif
diff --git a/roxie/ccd/ccdmain.cpp b/roxie/ccd/ccdmain.cpp
index e7f9715c69f..698570d3b2e 100644
--- a/roxie/ccd/ccdmain.cpp
+++ b/roxie/ccd/ccdmain.cpp
@@ -185,6 +185,7 @@ StringBuffer fileNameServiceDali;
 StringBuffer roxieName;
 #ifdef _CONTAINERIZED
 StringBuffer defaultPlane;
+StringBuffer defaultIndexBuildPlane;
 #endif
 bool trapTooManyActiveQueries;
 unsigned maxEmptyLoopIterations;
@@ -735,6 +736,7 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml)
     setStatisticsComponentName(SCTroxie, "roxie", true);
 #ifdef _CONTAINERIZED
     getDefaultStoragePlane(defaultPlane);
+    getDefaultIndexBuildStoragePlane(defaultIndexBuildPlane);
 #endif
 
     installDefaultFileHooks(topology);
diff --git a/thorlcr/activities/indexwrite/thindexwrite.cpp b/thorlcr/activities/indexwrite/thindexwrite.cpp
index 5f62ab04da4..8235ee8e3cd 100644
--- a/thorlcr/activities/indexwrite/thindexwrite.cpp
+++ b/thorlcr/activities/indexwrite/thindexwrite.cpp
@@ -101,7 +101,7 @@ class IndexWriteActivityMaster : public CMasterActivity
             if (idx == 0)
             {
                 StringBuffer defaultCluster;
-                if (getDefaultStoragePlane(defaultCluster))
+                if (getDefaultIndexBuildStoragePlane(defaultCluster))
                     clusters.append(defaultCluster);
             }
 
diff --git a/thorlcr/activities/keydiff/thkeydiff.cpp b/thorlcr/activities/keydiff/thkeydiff.cpp
index c1690afe05c..c45b9388b03 100644
--- a/thorlcr/activities/keydiff/thkeydiff.cpp
+++ b/thorlcr/activities/keydiff/thkeydiff.cpp
@@ -81,7 +81,7 @@ class CKeyDiffMaster : public CMasterActivity
             throw MakeActivityException(this, 0, "Unsupported: keydiff(%s, %s) - Cannot diff a key that's wider(%d) than the target cluster size(%d)", originalIndexFile->queryLogicalName(), newIndexFile->queryLogicalName(), width, container.queryJob().querySlaves());
 
         StringBuffer defaultCluster;
-        if (getDefaultStoragePlane(defaultCluster))
+        if (getDefaultIndexBuildStoragePlane(defaultCluster))
             clusters.append(defaultCluster);
         IArrayOf<IGroup> groups;
         fillClusterArray(container.queryJob(), outputName, clusters, groups);
diff --git a/thorlcr/activities/keypatch/thkeypatch.cpp b/thorlcr/activities/keypatch/thkeypatch.cpp
index 50fc44ddd64..66c0bfed593 100644
--- a/thorlcr/activities/keypatch/thkeypatch.cpp
+++ b/thorlcr/activities/keypatch/thkeypatch.cpp
@@ -82,7 +82,7 @@ class CKeyPatchMaster : public CMasterActivity
             throw MakeActivityException(this, 0, "Unsupported: keypatch(%s, %s) - Cannot patch a key that's wider(%d) than the target cluster size(%d)", originalIndexFile->queryLogicalName(), patchFile->queryLogicalName(), width, container.queryJob().querySlaves());
 
         StringBuffer defaultCluster;
-        if (getDefaultStoragePlane(defaultCluster))
+        if (getDefaultIndexBuildStoragePlane(defaultCluster))
             clusters.append(defaultCluster);
         IArrayOf<IGroup> groups;
         fillClusterArray(container.queryJob(), outputName, clusters, groups);