Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/candidate-8.12.x' into candidate…
Browse files Browse the repository at this point in the history
…-8.12.0

Signed-off-by: Gavin Halliday <[email protected]>
  • Loading branch information
ghalliday committed Jan 25, 2023
2 parents be5ff59 + fd7797f commit 1f5c66f
Show file tree
Hide file tree
Showing 14 changed files with 99 additions and 52 deletions.
66 changes: 24 additions & 42 deletions .github/workflows/smoketest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ name: smoketest
env:
cacheversion: 3
VCPKG_BINARY_SOURCES: "clear;nuget,GitHub,read"
OS_DEPENDENCIES: "bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk
r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake"

on:
pull_request:
Expand All @@ -50,7 +52,7 @@ on:
jobs:
check-skip:
# continue-on-error: true # Uncomment once integration is finished
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
# Map a step output to a job output
outputs:
platform: ${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') || steps.skip_check.outputs.platform }}
Expand All @@ -66,7 +68,7 @@ jobs:
name: Build platform and regress setup
needs: check-skip
if: ${{ needs.check-skip.outputs.platform || needs.check-skip.outputs.eclwatch }}
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
timeout-minutes: 150 # the build step has its own timeout, however the job can sometimes take time to download dependencies
outputs:
cache-state: ${{ steps.check-cache.outputs.state }}
Expand Down Expand Up @@ -227,19 +229,14 @@ jobs:
fi
fi
- name: Dependencies20.04
- name: Dependencies22.04
if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed
run: |
sudo apt-get update
sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
libssl-dev xmlstarlet
curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
sudo apt-get update -y
sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
sudo apt-get install -y xmlstarlet
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
sudo apt-get install -y nodejs
echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
Expand Down Expand Up @@ -397,7 +394,7 @@ jobs:
needs: build-and-setup
if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.platform }}
timeout-minutes: 60 # each matrix step has its own timeout, however the job can sometimes take time to download dependencies
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
strategy:
fail-fast: true
matrix:
Expand Down Expand Up @@ -434,15 +431,10 @@ jobs:
- name: Prerequisites
run: |
sudo apt-get update
sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
libssl-dev gdb
curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
sudo apt-get update -y
sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
sudo apt-get install -y gdb
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
sudo apt-get install -y nodejs
echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
Expand Down Expand Up @@ -491,7 +483,7 @@ jobs:
needs: build-and-setup
if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' }}
timeout-minutes: 30 # the unittests run step has its own timeout, however the job can sometimes take time to download dependencies
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
steps:
- uses: actions/download-artifact@v2
with:
Expand All @@ -500,15 +492,10 @@ jobs:
- name: Prerequisites
run: |
sudo apt-get update
sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
libssl-dev gdb
curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
sudo apt-get update -y
sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
sudo apt-get install -y gdb
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
sudo apt-get install -y nodejs
echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
Expand All @@ -527,7 +514,7 @@ jobs:
needs: build-and-setup
if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.eclwatch }}
timeout-minutes: 30 # the ui-tests run step has its own timeout, however the job can sometimes take time to download dependencies
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
steps:
- uses: actions/download-artifact@v2
with:
Expand Down Expand Up @@ -558,15 +545,10 @@ jobs:
sudo apt-get install -y git wget net-tools
sudo apt-get install -y tzdata unzip xvfb libxi6
sudo apt-get install -y default-jdk
sudo apt-get -yq install bison flex build-essential binutils-dev libldap2-dev libcppunit-dev libicu-dev libxslt1-dev \
zlib1g-dev libboost-regex-dev libarchive-dev libv8-dev default-jdk libapr1-dev libaprutil1-dev libiberty-dev \
libhiredis-dev libtbb-dev libxalan-c-dev libnuma-dev libevent-dev libatlas-base-dev libblas-dev python3-dev \
default-libmysqlclient-dev libsqlite3-dev libmemcached-dev libcurl4-openssl-dev pkg-config libtool autotools-dev automake \
libssl-dev gdb
curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
sudo apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main"
sudo apt-get update -y
sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
sudo apt-get install -y gdb
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
sudo apt-get install -y nodejs
echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
Expand Down
14 changes: 14 additions & 0 deletions dali/base/dafdesc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3659,6 +3659,20 @@ bool getDefaultSpillPlane(StringBuffer &ret)
#endif
}

// Resolve the storage plane that index-build output should default to.
// Lookup precedence (containerized builds only):
//   1. component-level config attribute  @indexBuildPlane
//   2. global config attribute           storage/@indexBuildPlane
//   3. fall back to the default data storage plane
// Returns true and fills 'ret' when a plane name was resolved; in
// non-containerized (bare-metal) builds always returns false.
bool getDefaultIndexBuildStoragePlane(StringBuffer &ret)
{
#ifdef _CONTAINERIZED
    bool found = getComponentConfigSP()->getProp("@indexBuildPlane", ret)
              || getGlobalConfigSP()->getProp("storage/@indexBuildPlane", ret);
    if (found)
        return true;
    // No explicit index-build plane configured anywhere: use the same
    // plane that ordinary data writes default to.
    return getDefaultStoragePlane(ret);
#else
    return false;
#endif
}

//---------------------------------------------------------------------------------------------------------------------

static bool isAccessible(const IPropertyTree * xml)
Expand Down
1 change: 1 addition & 0 deletions dali/base/dafdesc.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -403,6 +403,7 @@ extern da_decl bool setReplicateDir(const char *name,StringBuffer &out, bool isr
extern da_decl void initializeStorageGroups(bool createPlanesFromGroups);
extern da_decl bool getDefaultStoragePlane(StringBuffer &ret);
extern da_decl bool getDefaultSpillPlane(StringBuffer &ret);
extern da_decl bool getDefaultIndexBuildStoragePlane(StringBuffer &ret);
extern da_decl IStoragePlane * getDataStoragePlane(const char * name, bool required);
extern da_decl IStoragePlane * getRemoteStoragePlane(const char * name, bool required);
extern da_decl IStoragePlane * createStoragePlane(IPropertyTree *meta);
Expand Down
11 changes: 7 additions & 4 deletions ecl/hthor/hthor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -351,11 +351,14 @@ class CHThorClusterWriteHandler : public ClusterWriteHandler
}
};

ClusterWriteHandler *createClusterWriteHandler(IAgentContext &agent, IHThorIndexWriteArg *iwHelper, IHThorDiskWriteArg *dwHelper, const char * lfn, StringAttr &fn, bool extend)
ClusterWriteHandler *createClusterWriteHandler(IAgentContext &agent, IHThorIndexWriteArg *iwHelper, IHThorDiskWriteArg *dwHelper, const char * lfn, StringAttr &fn, bool extend, bool isIndex)
{
//In the containerized system, the default data plane for this component is in the configuration
StringBuffer defaultCluster;
getDefaultStoragePlane(defaultCluster);
if (isIndex)
getDefaultIndexBuildStoragePlane(defaultCluster);
else
getDefaultStoragePlane(defaultCluster);
Owned<CHThorClusterWriteHandler> clusterHandler;
unsigned clusterIdx = 0;
while(true)
Expand Down Expand Up @@ -536,7 +539,7 @@ void CHThorDiskWriteActivity::resolve()
throw MakeStringException(99, "Could not resolve DFS Logical file %s", lfn.str());
}

clusterHandler.setown(createClusterWriteHandler(agent, NULL, &helper, dfsLogicalName.get(), filename, extend));
clusterHandler.setown(createClusterWriteHandler(agent, NULL, &helper, dfsLogicalName.get(), filename, extend, false));
}
}
else
Expand Down Expand Up @@ -1117,7 +1120,7 @@ CHThorIndexWriteActivity::CHThorIndexWriteActivity(IAgentContext &_agent, unsign
throw MakeStringException(99, "Cannot write %s, file already exists (missing OVERWRITE attribute?)", lfn.str());
}
}
clusterHandler.setown(createClusterWriteHandler(agent, &helper, NULL, lfn, filename, false));
clusterHandler.setown(createClusterWriteHandler(agent, &helper, NULL, lfn, filename, false, true));
sizeLimit = agent.queryWorkUnit()->getDebugValueInt64("hthorDiskWriteSizeLimit", defaultHThorDiskWriteSizeLimit);
defaultNoSeek = agent.queryWorkUnit()->getDebugValueBool("noSeekBuildIndex", isContainerized());
}
Expand Down
5 changes: 5 additions & 0 deletions esp/src/src/nls/fr/hpcc.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ export = {
Abort: "Annuler",
AbortedBy: "Annulé par",
AbortedTime: "L'heure d'annulation",
AbortSelectedWorkunits: "Annuler les Workunits selectionées? 'Votre identifiant de connexion sera enregistré pour cette action dans les WU.'",
About: "À propos",
AboutGraphControl: "À propos de Graph Control",
AboutHPCCSystems: "À propos de HPCC Systems®",
Expand Down Expand Up @@ -111,6 +112,7 @@ export = {
Columns: "Colonnes",
Command: "Commander",
Comment: "Commenter",
CompileCost: "Compiler le coût",
Compiled: "Compilé",
Compiling: "Compilant",
Completed: "Terminé",
Expand Down Expand Up @@ -233,6 +235,7 @@ export = {
Downloads: "Téléchargements",
DownloadSelectionAsCSV: "Télécharger la sélection au format CSV",
DownloadToCSV: "Télécharger à CSV",
DownloadToCSVNonFlatWarning: "Remarque : le téléchargement de fichiers contenant des ensembles de données imbriqués, comme des données séparées par des virgules, ne peuvent pas être formatées comme prévu",
DropZone: "Zone de largage",
DueToInctivity: "Vous serez déconnecté de toutes les sessions ECL Watch en 3 minutes en raison de l'inactivité.",
Duration: "Duration",
Expand Down Expand Up @@ -638,6 +641,7 @@ export = {
PleaseSelectAUserToAdd: "Veuillez sélectionner un utilisateur à ajouter",
Plugins: "Plugins",
Pods: "Capsules",
PodsAccessError: "Impossible de récupérer la liste des pods",
Port: "Port",
Prefix: "Préfixe",
PrefixPlaceholder: "nom de fichier {: longueur}, taille de fichier {: [B | L] [1-8]}",
Expand Down Expand Up @@ -870,6 +874,7 @@ export = {
ThorProcess: "Thor Processus",
ThreadID: "Identitfiant de fil",
Time: "Temps",
Timeline: "Chronologie",
TimeMaxTotalExecuteMinutes: "Temps Maximum Total Exécuter Minutes",
TimeMeanTotalExecuteMinutes: "Temps Moyen Total Exécuter Minutes",
TimeMinTotalExecuteMinutes: "Temps Minimum Total Exécuter Minutes",
Expand Down
12 changes: 10 additions & 2 deletions helm/hpcc/templates/_helpers.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,9 @@ storage:
{{ toYaml $storage.remote | indent 2 }}
{{- end }}
dataPlane: {{ include "hpcc.getDefaultDataPlane" . }}
{{- if hasKey $storage "indexBuildPlane" }}
indexBuildPlane: {{ $storage.indexBuildPlane }}
{{- end }}
planes:
{{- /*Generate entries for each data plane (removing the pvc). Exclude the planes used for dlls and dali.*/ -}}
{{- range $plane := $planes }}
Expand Down Expand Up @@ -453,12 +456,13 @@ The plane will generate a volume if it matches either an includeLabel or an incl

{{/*
Check that the data plane name is valid, and report an error if not
Pass in dict with root, planeName
Pass in dict with root, planeName and optional contextPrefix
*/}}
{{- define "hpcc.checkPlaneExists" -}}
{{- $storage := (.root.Values.storage | default dict) -}}
{{- $planes := ($storage.planes | default list) -}}
{{- $name := .planeName -}}
{{- $ctxMsg := .contextPrefix | default "" -}}
{{- $matched := dict -}}
{{- range $plane := $planes -}}
{{- if not $plane.disabled -}}
Expand All @@ -468,7 +472,7 @@ Pass in dict with root, planeName
{{- end -}}
{{- end -}}
{{- if not $matched.ok -}}
{{- $_ := fail (printf "Storage plane %s does not exist" $name) -}}
{{- $_ := fail (printf "%sStorage plane %s does not exist" $ctxMsg $name) -}}
{{- end -}}
{{- end -}}

Expand Down Expand Up @@ -641,6 +645,10 @@ Check that the storage and spill planes for a component exist
{{- $search := .me.spillPlane -}}
{{- include "hpcc.checkValidStoragePlane" (dict "search" $search "root" .root "category" "spill" "type" "storage spill" "for" .me.name) -}}
{{- end }}
{{- if (hasKey .me "indexBuildPlane") }}
{{- $search := .me.indexBuildPlane -}}
{{- include "hpcc.checkValidStoragePlane" (dict "search" $search "root" .root "category" "data" "type" "storage data" "for" .me.name) -}}
{{- end }}
{{- end -}}

{{/*
Expand Down
15 changes: 15 additions & 0 deletions helm/hpcc/templates/_warnings.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,9 @@ Pass in dict with root and warnings
{{- end -}}
{{- /* Gather a list of ephemeral and persistant planes */ -}}
{{- $storage := (.root.Values.storage | default dict) -}}
{{- if hasKey $storage "indexBuildPlane" -}}
{{- include "hpcc.checkPlaneExists" (dict "root" .root "planeName" $storage.indexBuildPlane "contextPrefix" "indexBuildPlane: ") -}}
{{- end -}}
{{- $match := dict "ephemeral" (list) "persistant" (list) -}}
{{- $planes := ($storage.planes | default list) -}}
{{- $searchLabels := list "data" "dali" "sasha" "dll" "lz" -}}
Expand Down Expand Up @@ -124,4 +127,16 @@ Pass in dict with root and warnings
{{- $_ := set $warning "msg" (printf "Default cpu cost rate is being used for %s: %s" ((len $ctx.defaultCpuRateComponents)| plural "component" "components") ($ctx.defaultCpuRateComponents|toStrings)) -}}
{{- $_ := set $ctx "warnings" (append $ctx.warnings $warning) -}}
{{- end -}}
{{- /* Warn if insecure embed, pipe or extern enabled */ -}}
{{- $_ := set $ctx "insecureEclFeature" list -}}
{{- range $opt, $value := (pick .root.Values.security.eclSecurity "embedded" "pipe" "extern") -}}
{{- if eq $value "allow" -}}
{{- $_ := set $ctx "insecureEclFeature" (append $ctx.insecureEclFeature $opt) -}}
{{- end -}}
{{- end -}}
{{- if $ctx.insecureEclFeature -}}
{{- $warning := dict "source" "helm" "severity" "warning" -}}
{{- $_ := set $warning "msg" (printf "Insecure feature enabled in ecl: %s " $ctx.insecureEclFeature) -}}
{{- $_ := set $ctx "warnings" (append $ctx.warnings $warning) -}}
{{- end -}}
{{- end -}}
16 changes: 16 additions & 0 deletions helm/hpcc/values.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@
},
"remote": {
"$ref": "#/definitions/remoteStorage"
},
"indexBuildPlane": {
"type": "string",
"description": "Default plane for index builds"
}
},
"additionalProperties": false
Expand Down Expand Up @@ -1415,6 +1419,10 @@
"description": "The default storage plane to write data files to",
"type": "string"
},
"indexBuildPlane": {
"description": "The default storage plane to write index files to",
"type": "string"
},
"annotations": {
"type": "object",
"additionalProperties": { "type": "string" }
Expand Down Expand Up @@ -1494,6 +1502,10 @@
"description": "The storage plane to write spill files to",
"type": "string"
},
"indexBuildPlane": {
"description": "The default storage plane to write index files to",
"type": "string"
},
"resources": {
"$ref": "#/definitions/resources"
},
Expand Down Expand Up @@ -2277,6 +2289,10 @@
"description": "The storage plane to write spill files to",
"type": "string"
},
"indexBuildPlane": {
"description": "The default storage plane to write index files to",
"type": "string"
},
"annotations": {
"type": "object",
"additionalProperties": { "type": "string" }
Expand Down
1 change: 1 addition & 0 deletions roxie/ccd/ccd.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -368,6 +368,7 @@ extern StringBuffer fileNameServiceDali;
extern StringBuffer roxieName;
#ifdef _CONTAINERIZED
extern StringBuffer defaultPlane;
extern StringBuffer defaultIndexBuildPlane;
#endif
extern bool trapTooManyActiveQueries;
extern unsigned maxEmptyLoopIterations;
Expand Down
2 changes: 1 addition & 1 deletion roxie/ccd/ccdfile.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2137,7 +2137,7 @@ static bool getDirectAccessStoragePlanes(StringArray &planes)
ILazyFileIO *createPhysicalFile(const char *id, IPartDescriptor *pdesc, IPartDescriptor *remotePDesc, RoxieFileType fileType, int numParts, bool startCopy, unsigned channel)
{
#ifdef _CONTAINERIZED
const char *myCluster = defaultPlane.str();
const char *myCluster = (ROXIE_KEY == fileType) ? defaultIndexBuildPlane.str() : defaultPlane.str();
#else
const char *myCluster = roxieName.str();
#endif
Expand Down
2 changes: 2 additions & 0 deletions roxie/ccd/ccdmain.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -185,6 +185,7 @@ StringBuffer fileNameServiceDali;
StringBuffer roxieName;
#ifdef _CONTAINERIZED
StringBuffer defaultPlane;
StringBuffer defaultIndexBuildPlane;
#endif
bool trapTooManyActiveQueries;
unsigned maxEmptyLoopIterations;
Expand Down Expand Up @@ -735,6 +736,7 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml)
setStatisticsComponentName(SCTroxie, "roxie", true);
#ifdef _CONTAINERIZED
getDefaultStoragePlane(defaultPlane);
getDefaultIndexBuildStoragePlane(defaultIndexBuildPlane);
#endif
installDefaultFileHooks(topology);

Expand Down
Loading

0 comments on commit 1f5c66f

Please sign in to comment.