From 695dee55467fad57a6a95d6b9bc8bc9563a02990 Mon Sep 17 00:00:00 2001 From: g-pan Date: Mon, 25 Sep 2023 12:13:25 -0400 Subject: [PATCH 01/35] HPCC-30334 Fix Pagination of PT_BR Containerized doc Signed-off-by: g-pan --- .../ContainerizedMods/LocalDeployment.xml | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/PT_BR/ContainerizedHPCC/ContainerizedMods/LocalDeployment.xml b/docs/PT_BR/ContainerizedHPCC/ContainerizedMods/LocalDeployment.xml index 684cb870a7e..f6766ee81e2 100644 --- a/docs/PT_BR/ContainerizedHPCC/ContainerizedMods/LocalDeployment.xml +++ b/docs/PT_BR/ContainerizedHPCC/ContainerizedMods/LocalDeployment.xml @@ -11,7 +11,7 @@ Pré-requisitos - + Todas ferramentas de terceiros devem ser 64-bits. @@ -269,7 +269,11 @@ mkdir c:\hpccdata\dropzone + + + + Instale o hpcc-localfile Helm chart. @@ -367,9 +371,9 @@ sasha: Isto não é necessário em ambientes MacOS ou WSL 2. - + - + @@ -422,7 +426,7 @@ OUTPUT(allPeople,,'MyData::allPeople',THOR,OVERWRITE); - + @@ -481,10 +485,10 @@ OUTPUT(allPeople,,'MyData::allPeople',THOR,OVERWRITE); - + - + - + From 8155e1a9352851e8c8d2fe4e8d6a7b4e54060e47 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Thu, 5 Oct 2023 11:56:22 +0100 Subject: [PATCH 02/35] HPCC-30434 Remove meaningless global stats Signed-off-by: Gavin Halliday --- roxie/ccd/ccdserver.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/roxie/ccd/ccdserver.cpp b/roxie/ccd/ccdserver.cpp index 0c74a4c8106..3eeee9656d7 100644 --- a/roxie/ccd/ccdserver.cpp +++ b/roxie/ccd/ccdserver.cpp @@ -430,9 +430,8 @@ static const StatisticsMapping indexWriteStatistics({ StNumDuplicateKeys, StNumL // These ones get accumulated and reported in COMPLETE: line (and workunit). // Excludes ones that are not sensible to sum across activities, other than StTimeTotalExecute which must be explicitly overwritten at global level before we report it -extern const StatisticsMapping accumulatedStatistics({StWhenFirstRow, StTimeLocalExecute, StTimeTotalExecute, StSizeMaxRowSize, - StNumRowsProcessed, StNumSlaves, StNumStarts, StNumStops, StNumStrands, - StNumScansPerRow, StNumAllocations, StNumAllocationScans, +extern const StatisticsMapping accumulatedStatistics({StWhenFirstRow, StTimeLocalExecute, StSizeMaxRowSize, + StNumAllocations, StNumAllocationScans, StCycleLocalExecuteCycles, StNumAtmostTriggered, StNumServerCacheHits, StNumIndexSeeks, StNumIndexScans, StNumIndexWildSeeks, From 25c853459d903dfdd7ca7b1cc329c04fee13ef07 Mon Sep 17 00:00:00 2001 From: Ken Rowland Date: Fri, 6 Oct 2023 15:34:32 -0400 Subject: [PATCH 03/35] HPCC-29854 Enable logging of scope search results in non debug builds Added PROG and WARN log statements to handle access denial cases. 
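For reference, a minimal standalone sketch of the intended behaviour (not part of this patch; the helper name reportScopeAccess is hypothetical, and it assumes the printf-style jlib logging macros PROGLOG/DBGLOG already used in caching.cpp): access-denial results are reported in all builds, while the verbose per-scope traces stay behind _DEBUG.

    // Sketch only: always-on denial logging vs. debug-only tracing,
    // using the jlib logging macros from jlog.hpp.
    #include "jlog.hpp"

    static void reportScopeAccess(const char *scope, const char *user,
                                  int accessFlags, unsigned elapsedMs, bool denied)
    {
        if (denied)
            PROGLOG("FileScope %s for %s access denied (%d), took %ums",
                    scope, user, accessFlags, elapsedMs);
    #ifdef _DEBUG
        else
            DBGLOG("FileScope %s for %s allowed (%d), took %ums",
                   scope, user, accessFlags, elapsedMs);
    #endif
    }

The change below applies this pattern directly at the call sites in CPermissionsCache::queryPermsManagedFileScope.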
Signed-off-by: Kenneth.Rowland@lexisnexisrisk.com --- system/security/shared/caching.cpp | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/system/security/shared/caching.cpp b/system/security/shared/caching.cpp index 4f1430b4ac8..81127330199 100644 --- a/system/security/shared/caching.cpp +++ b/system/security/shared/caching.cpp @@ -556,6 +556,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha if (!fullScope || !*fullScope) { *accessFlags = queryDefaultPermission(sec_user); + WARNLOG("FileScope unspecified for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); return true; } @@ -579,6 +580,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha if (m_managedFileScopesMap.empty()) { *accessFlags = queryDefaultPermission(sec_user); + WARNLOG("Filescope managed scopes empty for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); return true; } @@ -624,7 +626,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha { *accessFlags = res->getAccessFlags(); managedScope.append(const_cast(res->getName())); - DBGLOG("FileScope %s for %s(%s) access denied %d at scope %s, took %dms",fullScope, sec_user.getName(), res->getName(), *accessFlags, scope, msTick()-start); + PROGLOG("FileScope %s for %s(%s) access denied %s(%d) at scope %s, took %dms", fullScope, sec_user.getName(), res->getName(), getSecAccessFlagName(*accessFlags), *accessFlags, scope, msTick()-start); return true; } else @@ -653,7 +655,6 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha else { managedScope.append(const_cast(res->getName()));//return deepest managed scope - #ifdef _DEBUG DBGLOG("FileScope %s for %s(%s) managed but not cached, took %dms", fullScope, sec_user.getName(), res->getName(), msTick()-start); #endif @@ -663,9 +664,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha else { *accessFlags = queryDefaultPermission(sec_user); -#ifdef _DEBUG - DBGLOG("FileScope %s for %s not managed, using default %d, took %dms", fullScope, sec_user.getName(),*accessFlags, msTick()-start); -#endif + WARNLOG("FileScope %s for %s not managed, using default %s(%d), took %dms", fullScope, sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); rc = true; } return rc; From 0dfaefadc76e5d1c1a548e078c257fc23f1d82b4 Mon Sep 17 00:00:00 2001 From: g-pan Date: Mon, 9 Oct 2023 12:16:56 -0400 Subject: [PATCH 04/35] HPCC-30448 Improve Pagination for Security Manager Doc Signed-off-by: g-pan --- .../HPCCSystemAdmin/SA-Mods/SecMgrMod.xml | 18 ++++++++---------- .../SA-Mods/SecMgrModConfDeploy.xml | 6 ++++-- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrMod.xml b/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrMod.xml index fa39e78c519..d0cdf92f782 100644 --- a/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrMod.xml +++ b/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrMod.xml @@ -7,7 +7,7 @@ The Security Manager Plugin framework provides a mechanism for the creation and deployment of custom security manager plugins. 
- + Plugin Development A custom Security Manager Plugin consists of a Buildset definition - The plugin - declares itself as an HPCC Systems® Security Manager Plugin component, and declares - the location of the plugin files and the configuration definition - schema. + declares itself as an HPCC Systems® Security + Manager Plugin component, and declares the location of the plugin files + and the configuration definition schema. EXAMPLE: @@ -67,8 +67,8 @@ Configuration Definition - The plugin must provide a definition of the configuration elements and the structure it expects to receive at the time it is instantiated. The XSD - file is consumed by the HPCC Systems Configuration Manager component and is - rendered as a GUI form. The configuration definition is defined as an + file is consumed by the HPCC Systems Configuration Manager component and + is rendered as a GUI form. The configuration definition is defined as an element of the component name (as declared in the buildset) followed by attributes and/or complex elements. @@ -157,8 +157,6 @@ </xs:element> </xs:schema> - - genenvrules.conf - (optional) This file allows the plugin to add itself to the "do_not(automatically)_generate" list. While this is an optional file, @@ -170,8 +168,8 @@ Configuration transformation rules - (optional) specified as an xsl template, this set of rules can be - applied to the configuration XML. Refer to XSL templates in the HPCC Systems - source tree. + applied to the configuration XML. Refer to XSL templates in the HPCC + Systems source tree. Concrete Example diff --git a/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrModConfDeploy.xml b/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrModConfDeploy.xml index f35d8aeb9cc..4f0e2736012 100644 --- a/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrModConfDeploy.xml +++ b/docs/EN_US/HPCCSystemAdmin/SA-Mods/SecMgrModConfDeploy.xml @@ -5,9 +5,11 @@ Configure and Deploy the Security Manager Plugin The following sections detail the process of configuring your HPCC - Systems® platform to use the Security Manager Plugin. + Systems® platform to use the Security Manager + Plugin. 
- + How to Configure a Security Manager Plugin Once the plugin has been installed, the plugin can be configured From adeca2e38bbd1d7c239524bf6706c106fb6627a3 Mon Sep 17 00:00:00 2001 From: Ken Rowland Date: Wed, 11 Oct 2023 14:45:27 -0400 Subject: [PATCH 05/35] Addressed review comments --- system/security/shared/caching.cpp | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/system/security/shared/caching.cpp b/system/security/shared/caching.cpp index 81127330199..4fef9e91c0f 100644 --- a/system/security/shared/caching.cpp +++ b/system/security/shared/caching.cpp @@ -556,7 +556,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha if (!fullScope || !*fullScope) { *accessFlags = queryDefaultPermission(sec_user); - WARNLOG("FileScope unspecified for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); + OWARNLOG("FileScope missing, using root for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); return true; } @@ -580,7 +580,9 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha if (m_managedFileScopesMap.empty()) { *accessFlags = queryDefaultPermission(sec_user); - WARNLOG("Filescope managed scopes empty for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); + if (m_secMgr) { + OWARNLOG("Filescope managed scopes empty for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick() - start); + } return true; } @@ -626,7 +628,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha { *accessFlags = res->getAccessFlags(); managedScope.append(const_cast(res->getName())); - PROGLOG("FileScope %s for %s(%s) access denied %s(%d) at scope %s, took %dms", fullScope, sec_user.getName(), res->getName(), getSecAccessFlagName(*accessFlags), *accessFlags, scope, msTick()-start); + LOG(MCoperatorProgress, "FileScope %s for %s(%s) access denied %s(%d) at scope %s, took %dms", fullScope, sec_user.getName(), res->getName(), getSecAccessFlagName(*accessFlags), *accessFlags, scope, msTick()-start); return true; } else @@ -664,7 +666,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha else { *accessFlags = queryDefaultPermission(sec_user); - WARNLOG("FileScope %s for %s not managed, using default %s(%d), took %dms", fullScope, sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); + OWARNLOG("FileScope %s for %s not managed, using default %s(%d), took %dms", fullScope, sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); rc = true; } return rc; From d27e138b3676333fa59bd9f79aac8c48d3acdb65 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Fri, 13 Oct 2023 12:54:02 +0100 Subject: [PATCH 06/35] HPCC-30517 Add lfn external unit tests Signed-off-by: Jake Smith --- testing/unittests/dalitests.cpp | 61 +++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/testing/unittests/dalitests.cpp b/testing/unittests/dalitests.cpp index b74c13dbf9e..44b471d0ae5 100644 --- a/testing/unittests/dalitests.cpp +++ b/testing/unittests/dalitests.cpp @@ -2782,6 +2782,14 @@ class CDaliUtils : public CppUnit::TestFixture { "~remote::dfs1::ascope::afile", 
"remote::dfs1::ascope"}, { nullptr, nullptr } // terminator }; + + const char *externalUrlChecks[][2] = { + { "~file::127.0.0.1::var::lib::^H^P^C^C^Systems::mydropzone::ascope::afile", "/var/lib/HPCCSystems/mydropzone/ascope/afile"}, + { "~file::10.3.2.1::var::lib::^H^P^C^C^Systems::mydropzone::ascope::afile", "//10.3.2.1/var/lib/HPCCSystems/mydropzone/ascope/afile"}, + { "~plane::mydropzone::ascope::afile", "/var/lib/HPCCSystems/mydropzone/ascope/afile"}, + { "~plane::dropzone2::ascope::afile", "//10.4.3.2/var/lib/HPCCSystems/mydropzone/ascope/afile"}, + { nullptr, nullptr } // terminator + }; PROGLOG("Checking valid logical filenames"); unsigned nlfn=0; for (;;) @@ -2866,6 +2874,59 @@ class CDaliUtils : public CppUnit::TestFixture CPPUNIT_FAIL(err.str()); } } + + constexpr const char * globalConfigYaml = R"!!( + storage: + planes: + - name: data + category: data + prefix: /var/lib/HPCCSystems/hpcc-data + - name: mydropzone + category: lz + prefix: /var/lib/HPCCSystems/mydropzone + - name: dropzone2 + category: lz + prefix: /var/lib/HPCCSystems/mydropzone + hosts: + - 10.4.3.2 + )!!"; + Owned globalConfig = createPTreeFromYAMLString(globalConfigYaml); + replaceComponentConfig(getComponentConfigSP(), globalConfig); + + PROGLOG("Checking physical file paths"); + nlfn = 0; + for (;;) + { + const char **entry = externalUrlChecks[nlfn++]; + if (nullptr == entry[0]) + break; + const char *lfn = entry[0]; + const char *expected = entry[1]; + PROGLOG("lfn = %s, expect = %s", lfn, expected); + CDfsLogicalFileName dlfn; + StringBuffer err; + try + { + dlfn.set(lfn); + RemoteFilename rfn; + dlfn.getExternalFilename(rfn); + StringBuffer filePath; + rfn.getPath(filePath); + if (!streq(filePath, expected)) + err.appendf("Logical filename '%s' external url should be '%s', but result was '%s'.", lfn, expected, filePath.str()); + } + catch (IException *e) + { + err.appendf("Logical filename '%s' failed: ", lfn); + e->errorMessage(err); + e->Release(); + } + if (err.length()) + { + ERRLOG("%s", err.str()); + CPPUNIT_FAIL(err.str()); + } + } } }; From ffbd06cf099524b6bbd23d4a77d66711939ad4bd Mon Sep 17 00:00:00 2001 From: wangkx Date: Tue, 3 Oct 2023 08:59:07 -0400 Subject: [PATCH 07/35] HPCC-30231 Handle HTTP headers/path case insensitively Also fix the partial string matching Signed-off-by: wangkx --- esp/bindings/http/client/httpclient.cpp | 1 + esp/bindings/http/platform/httptransport.cpp | 46 +++++++++++++++++++- 2 files changed, 45 insertions(+), 2 deletions(-) diff --git a/esp/bindings/http/client/httpclient.cpp b/esp/bindings/http/client/httpclient.cpp index 6f9968ea850..38061770de5 100644 --- a/esp/bindings/http/client/httpclient.cpp +++ b/esp/bindings/http/client/httpclient.cpp @@ -516,6 +516,7 @@ void copyHeaders(CHttpMessage ©To, CHttpMessage ©From, bool resetForward } break; case 'X': + case 'x': if (strieq(name, "X-Forwarded-For")) { if (resetForwardedFor) diff --git a/esp/bindings/http/platform/httptransport.cpp b/esp/bindings/http/platform/httptransport.cpp index 258c853fcdd..42e112d284c 100644 --- a/esp/bindings/http/platform/httptransport.cpp +++ b/esp/bindings/http/platform/httptransport.cpp @@ -798,6 +798,49 @@ void CHttpMessage::logSOAPMessage(const char* message, const char* prefix) return; } +static const char* POST_METHOD_STR = "POST "; +static bool skipLogContent(const char* httpHeader) +{ + if (!startsWith(httpHeader, POST_METHOD_STR)) + return false; + + const char* servicePtr = httpHeader + 5; + if (servicePtr[0] != '/') + return false; + + const char* methodPtr = 
strchr(++servicePtr, '/'); + if (!methodPtr) + return false; + + unsigned serviceType = 0; + if (startsWithIgnoreCase(servicePtr, "ws_access/")) + serviceType = 1; + else if (startsWithIgnoreCase(servicePtr, "ws_account/")) + serviceType = 2; + if (serviceType == 0) + return false; + + StringBuffer espMethod; + const char* tail = strchr(++methodPtr, '.'); + if (tail && (startsWithIgnoreCase(tail, ".xml") || startsWithIgnoreCase(tail, ".json"))) + espMethod.append(tail - methodPtr, methodPtr); + else + { + tail = strchr(methodPtr, '?'); + if (!tail) + tail = strchr(methodPtr, ' '); + if (tail) + espMethod.append(tail - methodPtr, methodPtr); + else + espMethod.append(methodPtr); + } + + if (serviceType == 1) + return (strieq(espMethod, "AddUser") || strieq(espMethod, "UserResetPass")); + + return strieq(espMethod, "UpdateUser"); +} + void CHttpMessage::logMessage(MessageLogFlag messageLogFlag, const char *prefix) { logMessage(messageLogFlag, m_content, prefix); @@ -812,8 +855,7 @@ void CHttpMessage::logMessage(MessageLogFlag messageLogFlag, StringBuffer& conte if (((messageLogFlag == LOGCONTENT) || (messageLogFlag == LOGALL)) && (content.length() > 0)) {//log content - if ((m_header.length() > 0) && (startsWith(m_header.str(), "POST /ws_access/AddUser") - || startsWith(m_header.str(), "POST /ws_access/UserResetPass") || startsWith(m_header.str(), "POST /ws_account/UpdateUser"))) + if (skipLogContent(m_header)) DBGLOG("%s", prefix); else if (isSoapMessage()) logSOAPMessage(content.str(), prefix); From 12967e293f89e98388ebe3738dd306ff25987473 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 13 Oct 2023 18:21:45 +0100 Subject: [PATCH 08/35] Split off 8.12.62 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index c761cf79056..0d3dbcdeead 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 8.12.61-closedown0 +version: 8.12.63-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 8.12.61-closedown0 +appVersion: 8.12.63-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 1ee77588afc..58813040843 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1240,7 +1240,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index d264eaec1f8..25849b87011 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index 124319b4293..a687b1c2494 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -82,7 +82,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 4f7f8226f85..3012f0204aa 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 50a22974158..d703a6e304c 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -137,7 +137,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index db2ac991bc2..25f8ea15d3d 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index 8af8f388bb2..faa44b6b3d5 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index c094f5a32b0..ca81a7446fa 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -117,7 +117,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 9b8188ee5e3..5d55edbbe9e 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -70,7 +70,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index e330f619b78..a25040cc110 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -120,7 +120,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -180,7 +180,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -242,7 +242,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -347,7 +347,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index 28317f02449..73831a68e9e 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 1c683c5b9f0..5d89c56d6bd 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -149,7 +149,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -218,7 +218,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf 
"%s-thorworker-job" .me.name)) | indent 12 }} @@ -353,7 +353,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -418,7 +418,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 8.12.61-closedown0 + helmVersion: 8.12.63-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index 12dfc21152b..00ad8913fcf 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 8 ) set ( HPCC_MINOR 12 ) -set ( HPCC_POINT 61 ) +set ( HPCC_POINT 63 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From 6366758760f8747a5d1d47d63ca1bc2265b756f8 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 13 Oct 2023 18:25:15 +0100 Subject: [PATCH 09/35] Split off 9.0.52 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index 5ee1ccba7d0..6a24bfd9cd5 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 9.0.51-closedown0 +version: 9.0.53-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 9.0.51-closedown0 +appVersion: 9.0.53-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index c6f671b0410..7e67d5f887b 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1314,7 +1314,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index 6de4260e5cc..de9a6e37fb0 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index e515daa28f9..8b696ddb4f8 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -82,7 +82,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 2d9bccac0fc..4ed40e2e2b5 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 63569b3a1fb..5cbcd11ae98 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -135,7 +135,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 361613dae95..0e78d8ad823 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index fce586467ff..b466ac78f61 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index dfc9324a1e4..1d714b5ff9c 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -117,7 +117,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 847d40ef15d..199d6633930 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -70,7 +70,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index 7c15584d75c..21ebea9829c 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -120,7 +120,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -180,7 +180,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -242,7 +242,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -347,7 +347,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index ac4b870a889..e39414d21c0 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 0e6d308a7e3..174fbd574de 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -147,7 +147,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -214,7 +214,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf "%s-thorworker-job" .me.name)) 
| indent 12 }} @@ -347,7 +347,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -412,7 +412,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 9.0.51-closedown0 + helmVersion: 9.0.53-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index 1672f107e59..c7d82c665cf 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 9 ) set ( HPCC_MINOR 0 ) -set ( HPCC_POINT 51 ) +set ( HPCC_POINT 53 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From 71a73b616173344c7b71194da33a07204834c0cd Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 13 Oct 2023 18:27:01 +0100 Subject: [PATCH 10/35] Split off 9.2.30 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index ee6c3d84244..57517cb36b9 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 9.2.29-closedown0 +version: 9.2.31-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 9.2.29-closedown0 +appVersion: 9.2.31-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 28b168824a6..fde95ff039f 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1384,7 +1384,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index ae3d5e73608..128a8f0de56 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index 5244ab69ef7..b3a336ad701 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -82,7 +82,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 81a7c5401ec..f932435b704 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 0e2024dd602..51f09faeb00 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -135,7 +135,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index d417587a175..f0755e59df8 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index 19facff5f34..70a36e0d205 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index b677736cd0a..4f124ba61ff 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -117,7 +117,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 25e56f84d81..1f9fdf27b84 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -70,7 +70,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index 8181a466e08..c5b8a96ae3e 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -120,7 +120,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -180,7 +180,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -242,7 +242,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -347,7 +347,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index bb25e531867..54ff10416ff 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 528275aa74f..28f5cc9f625 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -147,7 +147,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -214,7 +214,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf "%s-thorworker-job" .me.name)) 
| indent 12 }} @@ -347,7 +347,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -412,7 +412,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 9.2.29-closedown0 + helmVersion: 9.2.31-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index a45d49e42e7..b0a307c4ba4 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 9 ) set ( HPCC_MINOR 2 ) -set ( HPCC_POINT 29 ) +set ( HPCC_POINT 31 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From a05215b09d427e7f4f8dd1d3ff46767ce11a969e Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 13 Oct 2023 18:30:13 +0100 Subject: [PATCH 11/35] Split off 9.4.4 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index 41c8ff6df9f..2074b20b1ed 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 9.4.3-closedown0 +version: 9.4.5-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 9.4.3-closedown0 +appVersion: 9.4.5-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 80797335827..9bafda8e487 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1415,7 +1415,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index 1fb62de76ae..736c381f54f 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index c463d1e7f04..75d31139ed1 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -82,7 +82,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index eade2aece14..6d54ab1e8b7 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 3648fa1b1b5..c41bdc498bb 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -137,7 +137,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 31982186c4b..47d8df9c820 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -144,7 +144,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index 5bda6684c86..2e40f6fa545 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index 144bc4bb9e7..fe100cef312 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -117,7 +117,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 6bafb44e067..50bbdf471b8 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -70,7 +70,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index cafb8bd2c74..844058bff8d 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -123,7 +123,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -183,7 +183,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -245,7 +245,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -352,7 +352,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index bbd3929b673..cd14c41e209 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 1591e0260b2..515caff0272 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -149,7 +149,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -218,7 +218,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf "%s-thorworker-job" .me.name)) | indent 12 }} 
@@ -353,7 +353,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -420,7 +420,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 9.4.3-closedown0 + helmVersion: 9.4.5-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index 1ec50b2d761..95178893955 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 9 ) set ( HPCC_MINOR 4 ) -set ( HPCC_POINT 3 ) +set ( HPCC_POINT 5 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From 1f948f40a89ad0b368dff4c40240b9a6aae29fe2 Mon Sep 17 00:00:00 2001 From: Jack Del Vecchio Date: Fri, 13 Oct 2023 17:31:41 +0000 Subject: [PATCH 12/35] HPCC-30456 ParquetIO.Write() overwrites files with no warning or error --- plugins/parquet/README.md | 6 ++--- plugins/parquet/examples/decimal_test.ecl | 9 +++---- plugins/parquet/parquet.ecllib | 8 +++--- plugins/parquet/parquetembed.cpp | 32 ++++++++++++++++++++--- plugins/parquet/parquetembed.hpp | 3 ++- 5 files changed, 41 insertions(+), 17 deletions(-) diff --git a/plugins/parquet/README.md b/plugins/parquet/README.md index 241d89d7a51..39e4358ab69 100644 --- a/plugins/parquet/README.md +++ b/plugins/parquet/README.md @@ -38,17 +38,17 @@ dataset := ParquetIO.Read(layout, '/source/directory/data.parquet'); #### 2. Writing Parquet Files -The Write function empowers ECL programmers to write ECL datasets to Parquet files. By leveraging the Parquet format's columnar storage capabilities, this function provides efficient compression and optimized storage for data. +The Write function empowers ECL programmers to write ECL datasets to Parquet files. By leveraging the Parquet format's columnar storage capabilities, this function provides efficient compression and optimized storage for data. There is an optional argument that sets the overwrite behavior of the plugin. The default value is false meaning it will throw an error if the target file already exists. ``` -ParquetIO.Write(inDataset, '/output/directory/data.parquet'); +ParquetIO.Write(inDataset, '/output/directory/data.parquet', overwriteOption); ``` ### Partitioned Files (Tabular Datasets) #### 1. Reading Partitioned Files -The Read Partition function extends the Read functionality by enabling ECL programmers to read from partitioned Parquet files. +The Read Partition function extends the Read functionality by enabling ECL programmers to read from partitioned Parquet files. 
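A rough sketch tying the two writer calls to the new overwrite flag may be useful here; the record layout, sample rows, and output paths below are invented for illustration only, and the final TRUE argument simply maps to the `overwriteOpt` option introduced in `parquet.ecllib` by this patch:

```
IMPORT PARQUET;

layout := RECORD
    STRING25 name;
    INTEGER4 age;
END;

inDataset := DATASET([{'Alice', 30}, {'Bob', 42}], layout);

// Replace any existing data.parquet instead of failing with an error
ParquetIO.Write(inDataset, '/output/directory/data.parquet', TRUE);

// WritePartition accepts the same flag as its final argument (outDS, outRows, basePath, overwriteOption)
ParquetIO.WritePartition(inDataset, 100000, '/output/directory/partitioned_dataset', TRUE);
```

Reading a partitioned dataset back needs only the layout and the directory it was written to, for example: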
``` github_dataset := ParquetIO.ReadPartition(layout, '/source/directory/partioned_dataset'); diff --git a/plugins/parquet/examples/decimal_test.ecl b/plugins/parquet/examples/decimal_test.ecl index 39a7cb0cf84..f7dc79e20b8 100644 --- a/plugins/parquet/examples/decimal_test.ecl +++ b/plugins/parquet/examples/decimal_test.ecl @@ -4,14 +4,11 @@ IMPORT PARQUET; layout := RECORD DECIMAL5_2 height; -END; +END; decimal_data := DATASET([{152.25}, {125.56}], layout); -#IF(1) -ParquetIO.Write(decimal_data, '/datadrive/dev/test_data/decimal.parquet'); -#END +overwriteOption := TRUE; +ParquetIO.Write(decimal_data, '/datadrive/dev/test_data/decimal.parquet', overwriteOption); -#IF(1) ParquetIO.Read(layout, '/datadrive/dev/test_data/decimal.parquet'); -#END diff --git a/plugins/parquet/parquet.ecllib b/plugins/parquet/parquet.ecllib index ff15be29f66..5056b4f2ced 100644 --- a/plugins/parquet/parquet.ecllib +++ b/plugins/parquet/parquet.ecllib @@ -33,14 +33,14 @@ EXPORT ParquetIO := MODULE RETURN _DoParquetReadPartition(); ENDMACRO; - EXPORT Write(outDS, filePath) := FUNCTIONMACRO - LOCAL _DoParquetWrite(STREAMED DATASET(RECORDOF(outDS)) _ds) := EMBED(parquet : activity, option('write'), destination(filePath)) + EXPORT Write(outDS, filePath, overwriteOption = false) := FUNCTIONMACRO + LOCAL _DoParquetWrite(STREAMED DATASET(RECORDOF(outDS)) _ds) := EMBED(parquet : activity, option('write'), destination(filePath), overwriteOpt(overwriteOption)) ENDEMBED; RETURN _doParquetWrite(outDS); ENDMACRO; - EXPORT WritePartition(outDS, outRows = 100000, basePath) := FUNCTIONMACRO - LOCAL _DoParquetWritePartition(STREAMED DATASET(RECORDOF(outDS)) _ds) := EMBED(parquet : activity, option('writepartition'), destination(basePath), MaxRowSize(outRows)) + EXPORT WritePartition(outDS, outRows = 100000, basePath, overwriteOption = false) := FUNCTIONMACRO + LOCAL _DoParquetWritePartition(STREAMED DATASET(RECORDOF(outDS)) _ds) := EMBED(parquet : activity, option('writepartition'), destination(basePath), MaxRowSize(outRows), overwriteOpt(overwriteOption)) ENDEMBED; RETURN _DoParquetWritePartition(outDS); ENDMACRO; diff --git a/plugins/parquet/parquetembed.cpp b/plugins/parquet/parquetembed.cpp index 366b76279c3..3ba3eea9af8 100644 --- a/plugins/parquet/parquetembed.cpp +++ b/plugins/parquet/parquetembed.cpp @@ -110,11 +110,12 @@ extern void fail(const char *message) * @param _batchSize The size of the batches when converting parquet columns to rows. */ ParquetHelper::ParquetHelper(const char *option, const char *_location, const char *destination, - int _rowSize, int _batchSize, const IThorActivityContext *_activityCtx) + int _rowSize, int _batchSize, bool _overwrite, const IThorActivityContext *_activityCtx) : partOption(option), location(_location), destination(destination) { rowSize = _rowSize; batchSize = _batchSize; + overwrite = _overwrite; activityCtx = _activityCtx; pool = arrow::default_memory_pool(); @@ -162,10 +163,31 @@ arrow::Status ParquetHelper::openWriteFile() writeOptions.filesystem = filesystem; writeOptions.base_dir = destination; writeOptions.partitioning = partitioning; - writeOptions.existing_data_behavior = arrow::dataset::ExistingDataBehavior::kOverwriteOrIgnore; + writeOptions.existing_data_behavior = overwrite ? 
arrow::dataset::ExistingDataBehavior::kOverwriteOrIgnore : arrow::dataset::ExistingDataBehavior::kError; } else { + StringBuffer filename; + StringBuffer path; + StringBuffer ext; + splitFilename(destination.c_str(), nullptr, &path, &filename, &ext, false); + + if(!strieq(ext, ".parquet")) + failx("Error opening file: Invalid file extension %s", ext.str()); + + Owned itr = createDirectoryIterator(path.str(), filename.append("*.parquet")); + + ForEach(*itr) + { + if (overwrite) + { + IFile &file = itr->query(); + if(!file.remove()) + failx("File %s could not be overwritten.", file.queryFilename()); + } + else + failx("Cannot write to file %s because it already exists. To delete it set the overwrite option to true.", destination.c_str()); + } // Currently under the assumption that all channels and workers are given a worker id and no matter // the configuration will show up in activityCtx->numSlaves() if (activityCtx->numSlaves() > 1) @@ -1633,6 +1655,8 @@ ParquetEmbedFunctionContext::ParquetEmbedFunctionContext(const IContextLogger &_ const char *destination = ""; // file name and location of where to read parquet file from __int64 rowsize = 40000; // Size of the row groups when writing to parquet files __int64 batchSize = 40000; // Size of the batches when converting parquet columns to rows + bool overwrite = false; // If true overwrite file with no error. The default is false and will throw an error if the file already exists. + // Iterate through user options and save them StringArray inputOptions; inputOptions.appendList(options, ","); @@ -1654,6 +1678,8 @@ ParquetEmbedFunctionContext::ParquetEmbedFunctionContext(const IContextLogger &_ rowsize = atoi(val); else if (stricmp(optName, "BatchSize") == 0) batchSize = atoi(val); + else if (stricmp(optName, "overwriteOpt") == 0) + overwrite = clipStrToBool(val); else failx("Unknown option %s", optName.str()); } @@ -1664,7 +1690,7 @@ ParquetEmbedFunctionContext::ParquetEmbedFunctionContext(const IContextLogger &_ } else { - m_parquet = std::make_shared(option, location, destination, rowsize, batchSize, activityCtx); + m_parquet = std::make_shared(option, location, destination, rowsize, batchSize, overwrite, activityCtx); } } diff --git a/plugins/parquet/parquetembed.hpp b/plugins/parquet/parquetembed.hpp index 8da6377116b..18bb1b363c4 100644 --- a/plugins/parquet/parquetembed.hpp +++ b/plugins/parquet/parquetembed.hpp @@ -708,7 +708,7 @@ class JsonValueConverter class ParquetHelper { public: - ParquetHelper(const char *option, const char *location, const char *destination, int rowsize, int _batchSize, const IThorActivityContext *_activityCtx); + ParquetHelper(const char *option, const char *_location, const char *destination, int rowsize, int _batchSize, bool _overwrite, const IThorActivityContext *_activityCtx); ~ParquetHelper(); std::shared_ptr getSchema(); arrow::Status openWriteFile(); @@ -747,6 +747,7 @@ class ParquetHelper __int64 rowsCount = 0; // The number of result rows in a given RowGroup read from the parquet file. size_t batchSize = 0; // batchSize for converting Parquet Columns to ECL rows. It is more efficient to break the data into small batches for converting to rows than to convert all at once. bool partition; // Boolean variable to track whether we are writing partitioned files or not. + bool overwrite = false; // Overwrite option specified by the user. If true the plugin will overwrite files that are already exisiting. Default is false. std::string partOption; // Read, r, Write, w, option for specifying parquet operation. 
std::string location; // Location to read parquet file from. std::string destination; // Destination to write parquet file to. From 617722de13a1f6ff988d3969301fd3aa04f0d08f Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Tue, 17 Oct 2023 09:45:33 -0400 Subject: [PATCH 13/35] HPCC-30556 ECL Watch v9 fix WU Execution Cost sort param Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/src-react/components/Workunits.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esp/src/src-react/components/Workunits.tsx b/esp/src/src-react/components/Workunits.tsx index c4592d5835a..06182233675 100644 --- a/esp/src/src-react/components/Workunits.tsx +++ b/esp/src/src-react/components/Workunits.tsx @@ -157,7 +157,7 @@ export const Workunits: React.FunctionComponent = ({ return `${formatCost(row.CompileCost)}`; } }, - "Execute Cost": { + "Execution Cost": { label: nlsHPCC.ExecuteCost, width: 100, justify: "right", formatter: (cost, row) => { From e74b187f39dd9bca36143f91fa4e73c669d97572 Mon Sep 17 00:00:00 2001 From: Jack Del Vecchio Date: Tue, 17 Oct 2023 16:05:55 +0000 Subject: [PATCH 14/35] HPCC-30561 ECL DATA datatype not properly written to Parquet files. --- plugins/parquet/parquetembed.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugins/parquet/parquetembed.cpp b/plugins/parquet/parquetembed.cpp index 749b14d35b6..46b96f45b15 100644 --- a/plugins/parquet/parquetembed.cpp +++ b/plugins/parquet/parquetembed.cpp @@ -1437,8 +1437,7 @@ void bindDataParam(unsigned len, const char *value, const RtlFieldInfo *field, s rapidjson::Value key; key.SetString(field->name, jsonAlloc); rapidjson::Value val; - size32_t utf8size = rtlUtf8Size(len, value); - val.SetString(value, utf8size, jsonAlloc); + val.SetString(value, len, jsonAlloc); addMember(r_parquet, key, val); } From d9b36a2679568ff1c99ad40c34ac88eacb6c958c Mon Sep 17 00:00:00 2001 From: Jim DeFabia Date: Tue, 17 Oct 2023 12:48:16 -0400 Subject: [PATCH 15/35] HPCC-30420 Document ecl url-secret-name command Signed-off-by: Jim DeFabia --- .../HPCCClientTools/CT_Mods/CT_ECL_CLI.xml | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/docs/EN_US/HPCCClientTools/CT_Mods/CT_ECL_CLI.xml b/docs/EN_US/HPCCClientTools/CT_Mods/CT_ECL_CLI.xml index 3e7fd5f9e8b..8bee5c570fd 100644 --- a/docs/EN_US/HPCCClientTools/CT_Mods/CT_ECL_CLI.xml +++ b/docs/EN_US/HPCCClientTools/CT_Mods/CT_ECL_CLI.xml @@ -165,6 +165,13 @@ returns a list of all the key user IDs that can be used by the sign command. + + + url-secret-name + + generate a secret name from a URL for automatic URL + mapping + @@ -8609,6 +8616,64 @@ ecl zapgen W20171018-091399 --path ~ --inc-thor-slave-logs --description "Unexpe + + ecl url-secret-name + + ecl url-secret-name url + [--username=<username>] + + + + + + + + + + ecl url-secret-name + + Generates a secret name from a URL for automatic URL + mapping. + + + + url + + The URL to convert into a secret name. + + + + --username + + Optional. The username to associate with the URL. + This will override any username embedded in the URL. + + + + The ecl url-secret-name command + generates a secret name from a URL that can be used to support ECL + SOAPCALL/HTTPCALL automated URL to Secret mapping. + + A username can either be embedded in the URL, such as + https://username@example.com, or passed in as a parameter using the + --username=username option. 
If a username is passed in as a parameter + it overrides a username in the URL. + + Passwords embedded in the URL are not needed and will be + ignored. + + When ECL SOAPCALL URL secret mapping is enabled SOAPCALL will + convert the URL provided into a name of this format. ECL will then + attempt to lookup the secret, and if found, will use the contents of + the secret, rather then the original URL. + + Examples: + + ecl url-secret-name https://example.com --username jimi +ecl url-secret-name http://example.com --username jimi + + + ecl roxie memlock From 364c2e847b45a2a5dbdbc8a2f35f821ee68ca411 Mon Sep 17 00:00:00 2001 From: Godji Fortil Date: Tue, 17 Oct 2023 11:21:47 -0400 Subject: [PATCH 16/35] HPCC-30227 Fix failed Docker builds on JFrog Signed-off-by: Godji Fortil HPCC-30227 Fix failed Docker builds on JFrog Signed-off-by: Godji Fortil HPCC-30227 Fix failed Docker builds on JFrog Signed-off-by: Godji Fortil Fix failed docker builds on JFrog Signed-off-by: Godji Fortil --- .github/workflows/build-assets.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 63743cc84f2..d814a7a4f52 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -330,8 +330,7 @@ jobs: build-args: | PKG_FILE=${{ steps.ln-container.outputs.k8s_pkg_file }} tags: | - ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/ln/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.internal_tag }} - ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/ln/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} + ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.internal_tag }} cache-from: | type=registry,ref=hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} From 9605b7f2e053d9c1470baba916cf054c060b195e Mon Sep 17 00:00:00 2001 From: wangkx Date: Tue, 17 Oct 2023 16:58:02 -0400 Subject: [PATCH 17/35] HPCC-30455 Fix a bug when trying to validate empty host When File Upload dialog box is displayed in a containerized environment, it sends a FileList request with an empty Netaddr to ESP FileSpray service. The service should not validate the Netaddr. Signed-off-by: wangkx --- esp/smc/SMCLib/TpCommon.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/esp/smc/SMCLib/TpCommon.cpp b/esp/smc/SMCLib/TpCommon.cpp index 1d1fc12ba09..241401930d6 100644 --- a/esp/smc/SMCLib/TpCommon.cpp +++ b/esp/smc/SMCLib/TpCommon.cpp @@ -175,9 +175,10 @@ extern TPWRAPPER_API IPropertyTree* getDropZoneAndValidateHostAndPath(const char dropZone.setown(getDropZonePlane(dropZoneName)); if (!dropZone) throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "DropZone '%s' not found.", dropZoneName); - if (!validateDropZone(dropZone, pathToCheck, hostToCheck, isIPAddress(hostToCheck))) - throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "The host '%s' or path '%s' is not valid for dropzone %s.", - isEmptyString(host) ? "unspecified" : host, isEmptyString(path) ? 
"unspecified" : path, dropZoneName); + if (!isEmptyString(hostToCheck) && !isHostInPlane(dropZone, hostToCheck, isIPAddress(hostToCheck))) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "The host '%s' is not valid for dropzone %s.", host, dropZoneName); + if (!isEmptyString(pathToCheck) && !isPathInPlane(dropZone, pathToCheck)) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "The path '%s' is not valid for dropzone %s.", path, dropZoneName); } return dropZone.getClear(); } From 1c56efda717a92e30e4b713b9e2e556c701b9f40 Mon Sep 17 00:00:00 2001 From: wangkx Date: Wed, 18 Oct 2023 08:50:12 -0400 Subject: [PATCH 18/35] HPCC-30526 Allow wildcards file name for lfntype_plane in normalizeExternal Signed-off-by: wangkx --- dali/base/dautils.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dali/base/dautils.cpp b/dali/base/dautils.cpp index 40816a5d1ed..b69e5a1e917 100644 --- a/dali/base/dautils.cpp +++ b/dali/base/dautils.cpp @@ -844,6 +844,8 @@ bool CDfsLogicalFileName::normalizeExternal(const char * name, StringAttr &res, normalizeScope(s1, s1, ns1-s1, planeName, strict, false); str.append("::").append(planeName); + //Allow wildcards in plane path + allowWild = true; break; } case lfntype_remote: From a331f0846a1b9c1838d97a089bd0028c53ee738b Mon Sep 17 00:00:00 2001 From: Rodrigo Pastrana Date: Fri, 22 Sep 2023 11:13:20 -0400 Subject: [PATCH 19/35] HPCC-30295 LogAccess support AzureLogAnalytics V2 - Adds support for ALA ContainerLogV2 schema - Retains V1 shema support - Adds scripted enablement of V2 on Azure - Updates ALA logaccess README documentation - Avoids expensive KQL join opperations to fetch containerName Signed-off-by: Rodrigo Pastrana --- helm/examples/azure/log-analytics/README.md | 26 ++- .../container-azm-ms-agentconfig.yaml | 217 ++++++++++++++++++ .../log-analytics/dataCollectionSettings.json | 7 + .../log-analytics/enable-loganalytics.sh | 9 + .../azure/log-analytics/env-loganalytics | 5 +- .../loganalytics-hpcc-logaccessV2.yaml | 88 +++++++ .../AzureLogAnalyticsCurlClient.cpp | 88 ++++++- .../AzureLogAnalyticsCurlClient.hpp | 6 +- 8 files changed, 428 insertions(+), 18 deletions(-) create mode 100644 helm/examples/azure/log-analytics/container-azm-ms-agentconfig.yaml create mode 100644 helm/examples/azure/log-analytics/dataCollectionSettings.json create mode 100644 helm/examples/azure/log-analytics/loganalytics-hpcc-logaccessV2.yaml diff --git a/helm/examples/azure/log-analytics/README.md b/helm/examples/azure/log-analytics/README.md index 3d566d7225c..6fe91fb4255 100644 --- a/helm/examples/azure/log-analytics/README.md +++ b/helm/examples/azure/log-analytics/README.md @@ -22,6 +22,16 @@ The user should populate the following values in order to create a new Azure Log For example: "admin=MyName email=my.email@mycompany.com environment=myenv justification=testing" - AZURE_SUBSCRIPTION (Optional - Ensures this subscription is set before creating the new workspace) +- AKS_RESOURCE_LOCATION (Optional e.g. eastus) + +- ENABLE_CONTAINER_LOG_V2 (true|false) Enables the ContainerLog V2 schema. + If set to true, the stdout/stderr Logs are forwarded to ContainerLogV2 table, otherwise the container logs continue to be forwarded to ContainerLog table. + Utilizes ./helm/examples/azure/log-analytics/dataCollectionSettings.json, and + ./helm/examples/azure/log-analytics/container-azm-ms-agentconfig.yaml which creates and applies a new configmap 'kube-system/container-azm-ms-agentconfig'. 
+ + Details on benefits of V2 schema: https://learn.microsoft.com/en-us/azure/azure-monitor/containers/container-insights-logging-v2?tabs=configure-portal + + #### b - Execute enable-loganalytics.sh This helper script attempts to create new Azure LogAnalytics workspace (user can provide pre-existing), associates the workspace with the target AKS cluster, and enables the Azure Log Analytics feature. This script is dependant on the values provided in the previous step. @@ -37,7 +47,7 @@ Depending on your Azure subscription structure, it might be necessary to request The Registered Application must provide a 'client secret' which is used to gain access to the Log Analytics API. -#### b - Provide AAD registered application inforation and target ALA Workspace +#### b - Provide AAD registered application information and target ALA Workspace HPCC logAccess requires access to the AAD Tenant ID, client ID, and secret which are provided by the registered app from section '2.a' above. The target workspace ID is also required, and can be retrieved after the step in section '1.b' is successfully completed. Those four values must be provided via a secure secret object. The secret is expected to be in the 'esp' category, and be named 'azure-logaccess'. @@ -62,9 +72,19 @@ Example manual secret creation command (assuming ./secrets-templates contains a ``` #### c - Configure HPCC logAccess -The target HPCC deployment should be directed to use the above Azure Log Analytics workspace, and the newly created secret by providing appropriate logAccess values (such as ./loganalytics-hpcc-logaccess.yaml). +The target HPCC deployment should be directed to use the above Azure Log Analytics workspace, and the newly created secret by providing appropriate logAccess values (such as ./loganalytics-hpcc-logaccess.yaml or ./loganalytics-hpcc-logaccessV2.yaml if targeting Azure Log Analytics ContainerLogV2 - recommended ). Example use: ```console - helm install myhpcc hpcc/hpcc -f HPCC-Platform/helm/examples/azure/log-analytics/loganalytics-hpcc-logaccess.yaml + helm install myhpcc hpcc/hpcc -f HPCC-Platform/helm/examples/azure/log-analytics/loganalytics-hpcc-logaccessV2.yaml ``` +## Directory Contents + +- 'create-azure-logaccess-secret.sh' - Script for creating 'azure-logaccess' secret needed for accessing logs stored in Azure Log Analytics +- 'secrets-templates' - Contains placeholders for information required to create 'azure-logaccess' secret via 'create-azure-logaccess-secret.sh' script +- 'enable-loganalytics.sh' - Script for enabling Azure LogAnalytics upon a given AKS cluster +- 'env-loganalytics' - Environment information required to enable ALA upon target AKS cluster. +- 'dataCollectionSettings.json' - Provided to enable ContainerLogV2 schema on target AKS cluster +- 'container-azm-ms-agentconfig.yaml' - Defines ConfigMap used to configure ALA log collection. Provided to re-direct ALA log collection to ContainerLogV2 schema. +- 'loganalytics-hpcc-logaccess.yaml' - Used to configure ALA -> HPCC LogAccess. Provides mapping between ALA log tables to HPCC's known log categories +- 'loganalytics-hpcc-logaccessV2.yaml' - Used to configure ALA -> HPCC LogAccess. 
Provides mapping between ALA ContainerLogV2 log table to HPCC's known log categories diff --git a/helm/examples/azure/log-analytics/container-azm-ms-agentconfig.yaml b/helm/examples/azure/log-analytics/container-azm-ms-agentconfig.yaml new file mode 100644 index 00000000000..0ed5b561d81 --- /dev/null +++ b/helm/examples/azure/log-analytics/container-azm-ms-agentconfig.yaml @@ -0,0 +1,217 @@ +# Azure Log Analytics collects stdout, stderr, and environmental variables from container +# workloads deployed to managed Kubernetes clusters from the containerized agent. +# Users can customize agent data collection settings by creating a custom Kubernetes ConfigMap +# This ConfiMap can be used to enable/disable log collection, exclude namespaces, and enable/disable +# It is included here specifically to enable Azure Log Analytics ContainerLogV2 schema +# See this document for details: +# https://learn.microsoft.com/en-us/azure/azure-monitor/containers/container-insights-agent-config#configure-and-deploy-configmaps + +kind: ConfigMap +apiVersion: v1 +data: + schema-version: + #string.used by agent to parse config. supported versions are {v1}. Configs with other schema versions will be rejected by the agent. + v1 + config-version: + #string.used by customer to keep track of this config file's version in their source control/repository (max allowed 10 chars, other chars will be truncated) + ver1 + log-data-collection-settings: |- + # Log data collection settings + # Any errors related to config map settings can be found in the KubeMonAgentEvents table in the Log Analytics workspace that the cluster is sending data to. + + [log_collection_settings] + [log_collection_settings.stdout] + # In the absense of this configmap, default value for enabled is true + enabled = true + # exclude_namespaces setting holds good only if enabled is set to true + # kube-system,gatekeeper-system log collection are disabled by default in the absence of 'log_collection_settings.stdout' setting. If you want to enable kube-system,gatekeeper-system, remove them from the following setting. + # If you want to continue to disable kube-system,gatekeeper-system log collection keep the namespaces in the following setting and add any other namespace you want to disable log collection to the array. + # In the absense of this configmap, default value for exclude_namespaces = ["kube-system","gatekeeper-system"] + exclude_namespaces = ["kube-system","gatekeeper-system"] + + [log_collection_settings.stderr] + # Default value for enabled is true + enabled = true + # exclude_namespaces setting holds good only if enabled is set to true + # kube-system,gatekeeper-system log collection are disabled by default in the absence of 'log_collection_settings.stderr' setting. If you want to enable kube-system,gatekeeper-system, remove them from the following setting. + # If you want to continue to disable kube-system,gatekeeper-system log collection keep the namespaces in the following setting and add any other namespace you want to disable log collection to the array. 
+ # In the absense of this configmap, default value for exclude_namespaces = ["kube-system","gatekeeper-system"] + exclude_namespaces = ["kube-system","gatekeeper-system"] + + [log_collection_settings.env_var] + # In the absense of this configmap, default value for enabled is true + enabled = true + [log_collection_settings.enrich_container_logs] + # In the absense of this configmap, default value for enrich_container_logs is false + enabled = false + # When this is enabled (enabled = true), every container log entry (both stdout & stderr) will be enriched with container Name & container Image + [log_collection_settings.collect_all_kube_events] + # In the absense of this configmap, default value for collect_all_kube_events is false + # When the setting is set to false, only the kube events with !normal event type will be collected + enabled = false + # When this is enabled (enabled = true), all kube events including normal events will be collected + [log_collection_settings.schema] + # In the absence of this configmap, default value for containerlog_schema_version is "v1" + # Supported values for this setting are "v1","v2" + # See documentation at https://aka.ms/ContainerLogv2 for benefits of v2 schema over v1 schema before opting for "v2" schema + containerlog_schema_version = "v2" + #[log_collection_settings.enable_multiline_logs] + # fluent-bit based multiline log collection for go (stacktrace), dotnet (stacktrace) + # if enabled will also stitch together container logs split by docker/cri due to size limits(16KB per log line) + # enabled = "false" + + + prometheus-data-collection-settings: |- + # Custom Prometheus metrics data collection settings + [prometheus_data_collection_settings.cluster] + # Cluster level scrape endpoint(s). These metrics will be scraped from agent's Replicaset (singleton) + # Any errors related to prometheus scraping can be found in the KubeMonAgentEvents table in the Log Analytics workspace that the cluster is sending data to. + + #Interval specifying how often to scrape for metrics. This is duration of time and can be specified for supporting settings by combining an integer value and time unit as a string value. Valid time units are ns, us (or µs), ms, s, m, h. + interval = "1m" + + ## Uncomment the following settings with valid string arrays for prometheus scraping + #fieldpass = ["metric_to_pass1", "metric_to_pass12"] + + #fielddrop = ["metric_to_drop"] + + # An array of urls to scrape metrics from. + # urls = ["http://myurl:9101/metrics"] + + # An array of Kubernetes services to scrape metrics from. + # kubernetes_services = ["http://my-service-dns.my-namespace:9102/metrics"] + + # When monitor_kubernetes_pods = true, replicaset will scrape Kubernetes pods for the following prometheus annotations: + # - prometheus.io/scrape: Enable scraping for this pod + # - prometheus.io/scheme: Default is http + # - prometheus.io/path: If the metrics path is not /metrics, define it with this annotation. + # - prometheus.io/port: If port is not 9102 use this annotation + monitor_kubernetes_pods = false + + ## Restricts Kubernetes monitoring to namespaces for pods that have annotations set and are scraped using the monitor_kubernetes_pods setting. 
+ ## This will take effect when monitor_kubernetes_pods is set to true + ## ex: monitor_kubernetes_pods_namespaces = ["default1", "default2", "default3"] + # monitor_kubernetes_pods_namespaces = ["default1"] + + ## Label selector to target pods which have the specified label + ## This will take effect when monitor_kubernetes_pods is set to true + ## Reference the docs at https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors + # kubernetes_label_selector = "env=dev,app=nginx" + + ## Field selector to target pods which have the specified field + ## This will take effect when monitor_kubernetes_pods is set to true + ## Reference the docs at https://kubernetes.io/docs/concepts/overview/working-with-objects/field-selectors/ + ## eg. To scrape pods on a specific node + # kubernetes_field_selector = "spec.nodeName=$HOSTNAME" + + [prometheus_data_collection_settings.node] + # Node level scrape endpoint(s). These metrics will be scraped from agent's DaemonSet running in every node in the cluster + # Any errors related to prometheus scraping can be found in the KubeMonAgentEvents table in the Log Analytics workspace that the cluster is sending data to. + + #Interval specifying how often to scrape for metrics. This is duration of time and can be specified for supporting settings by combining an integer value and time unit as a string value. Valid time units are ns, us (or µs), ms, s, m, h. + interval = "1m" + + ## Uncomment the following settings with valid string arrays for prometheus scraping + + # An array of urls to scrape metrics from. $NODE_IP (all upper case) will substitute of running Node's IP address + # urls = ["http://$NODE_IP:9103/metrics"] + + #fieldpass = ["metric_to_pass1", "metric_to_pass12"] + + #fielddrop = ["metric_to_drop"] + + metric_collection_settings: |- + # Metrics collection settings for metrics sent to Log Analytics and MDM + [metric_collection_settings.collect_kube_system_pv_metrics] + # In the absense of this configmap, default value for collect_kube_system_pv_metrics is false + # When the setting is set to false, only the persistent volume metrics outside the kube-system namespace will be collected + enabled = false + # When this is enabled (enabled = true), persistent volume metrics including those in the kube-system namespace will be collected + + alertable-metrics-configuration-settings: |- + # Alertable metrics configuration settings for container resource utilization + [alertable_metrics_configuration_settings.container_resource_utilization_thresholds] + # The threshold(Type Float) will be rounded off to 2 decimal points + # Threshold for container cpu, metric will be sent only when cpu utilization exceeds or becomes equal to the following percentage + container_cpu_threshold_percentage = 95.0 + # Threshold for container memoryRss, metric will be sent only when memory rss exceeds or becomes equal to the following percentage + container_memory_rss_threshold_percentage = 95.0 + # Threshold for container memoryWorkingSet, metric will be sent only when memory working set exceeds or becomes equal to the following percentage + container_memory_working_set_threshold_percentage = 95.0 + + # Alertable metrics configuration settings for persistent volume utilization + [alertable_metrics_configuration_settings.pv_utilization_thresholds] + # Threshold for persistent volume usage bytes, metric will be sent only when persistent volume utilization exceeds or becomes equal to the following percentage + pv_usage_threshold_percentage = 60.0 + + # 
Alertable metrics configuration settings for completed jobs count + [alertable_metrics_configuration_settings.job_completion_threshold] + # Threshold for completed job count , metric will be sent only for those jobs which were completed earlier than the following threshold + job_completion_threshold_time_minutes = 360 + integrations: |- + [integrations.azure_network_policy_manager] + collect_basic_metrics = false + collect_advanced_metrics = false + [integrations.azure_subnet_ip_usage] + enabled = false + +# Doc - https://github.com/microsoft/Docker-Provider/blob/ci_prod/Documentation/AgentSettings/ReadMe.md + agent-settings: |- + # prometheus scrape fluent bit settings for high scale + # buffer size should be greater than or equal to chunk size else we set it to chunk size. + # settings scoped to prometheus sidecar container. all values in mb + [agent_settings.prometheus_fbit_settings] + tcp_listener_chunk_size = 10 + tcp_listener_buffer_size = 10 + tcp_listener_mem_buf_limit = 200 + + # prometheus scrape fluent bit settings for high scale + # buffer size should be greater than or equal to chunk size else we set it to chunk size. + # settings scoped to daemonset container. all values in mb + # [agent_settings.node_prometheus_fbit_settings] + # tcp_listener_chunk_size = 1 + # tcp_listener_buffer_size = 1 + # tcp_listener_mem_buf_limit = 10 + + # prometheus scrape fluent bit settings for high scale + # buffer size should be greater than or equal to chunk size else we set it to chunk size. + # settings scoped to replicaset container. all values in mb + # [agent_settings.cluster_prometheus_fbit_settings] + # tcp_listener_chunk_size = 1 + # tcp_listener_buffer_size = 1 + # tcp_listener_mem_buf_limit = 10 + + # The following settings are "undocumented", we don't recommend uncommenting them unless directed by Microsoft. + # They increase the maximum stdout/stderr log collection rate but will also cause higher cpu/memory usage. + ## Ref for more details about Ignore_Older - https://docs.fluentbit.io/manual/v/1.7/pipeline/inputs/tail + # [agent_settings.fbit_config] + # log_flush_interval_secs = "1" # default value is 15 + # tail_mem_buf_limit_megabytes = "10" # default value is 10 + # tail_buf_chunksize_megabytes = "1" # default value is 32kb (comment out this line for default) + # tail_buf_maxsize_megabytes = "1" # default value is 32kb (comment out this line for default) + # tail_ignore_older = "5m" # default value same as fluent-bit default i.e.0m + + # On both AKS & Arc K8s enviornments, if Cluster has configured with Forward Proxy then Proxy settings automatically applied and used for the agent + # Certain configurations, proxy config should be ignored for example Cluster with AMPLS + Proxy + # in such scenarios, use the following config to ignore proxy settings + # [agent_settings.proxy_config] + # ignore_proxy_settings = "true" # if this is not applied, default value is false + + # The following settings are "undocumented", we don't recommend uncommenting them unless directed by Microsoft. + # Configuration settings for the waittime for the network listeners to be available + # [agent_settings.network_listener_waittime] + # tcp_port_25226 = 45 # Port 25226 is used for telegraf to fluent-bit data in ReplicaSet + # tcp_port_25228 = 60 # Port 25228 is used for telegraf to fluentd data + # tcp_port_25229 = 45 # Port 25229 is used for telegraf to fluent-bit data in DaemonSet + + # The following settings are "undocumented", we don't recommend uncommenting them unless directed by Microsoft. 
+ # [agent_settings.mdsd_config] + # monitoring_max_event_rate = "50000" # default 20K eps + # backpressure_memory_threshold_in_mb = "1500" # default 3500MB + # upload_max_size_in_mb = "20" # default 2MB + # upload_frequency_seconds = "1" # default 60 upload_frequency_seconds + # compression_level = "0" # supported levels 0 to 9 and 0 means no compression + +metadata: + name: container-azm-ms-agentconfig + namespace: kube-system diff --git a/helm/examples/azure/log-analytics/dataCollectionSettings.json b/helm/examples/azure/log-analytics/dataCollectionSettings.json new file mode 100644 index 00000000000..4b6eb3bcc10 --- /dev/null +++ b/helm/examples/azure/log-analytics/dataCollectionSettings.json @@ -0,0 +1,7 @@ +{ + "interval": "1m", + "namespaceFilteringMode": "Include", + "namespaces": ["kube-system"], + "enableContainerLogV2": true, + "streams": ["Microsoft-Perf", "Microsoft-ContainerLogV2"] + } diff --git a/helm/examples/azure/log-analytics/enable-loganalytics.sh b/helm/examples/azure/log-analytics/enable-loganalytics.sh index 4ad63ecea92..1e47cf353b9 100755 --- a/helm/examples/azure/log-analytics/enable-loganalytics.sh +++ b/helm/examples/azure/log-analytics/enable-loganalytics.sh @@ -83,6 +83,10 @@ else fi echo "Enabling workspace on target AKS cluster '$AKS_CLUSTER_NAME'..." + +if $ENABLE_CONTAINER_LOG_V2 ; then DATA_COLLECTION_SETTINGS="--workspace-resource-id $wsid --data-collection-settings dataCollectionSettings.json"; fi + +echo "aks enable-addons -g $AKS_RESOURCE_GROUP -n $AKS_CLUSTER_NAME -a monitoring $DATA_COLLECTION_SETTINGS" az aks enable-addons -g $AKS_RESOURCE_GROUP -n $AKS_CLUSTER_NAME -a monitoring --workspace-resource-id $wsid if [[ $? -ne 0 ]] then @@ -91,3 +95,8 @@ then else echo "Success, workspace id: '$wsid' enabled on AKS $AKS_CLUSTER_NAME" fi + +if $ENABLE_CONTAINER_LOG_V2 ; then + echo "Setting ContainerLogV2 schema via container-azm-ms-agentconfig" + kubectl apply -f ${WORK_DIR}/container-azm-ms-agentconfig.yaml; +fi diff --git a/helm/examples/azure/log-analytics/env-loganalytics b/helm/examples/azure/log-analytics/env-loganalytics index b3e01320ea5..30ccfb8f344 100755 --- a/helm/examples/azure/log-analytics/env-loganalytics +++ b/helm/examples/azure/log-analytics/env-loganalytics @@ -21,4 +21,7 @@ AKS_RESOURCE_GROUP= AZURE_SUBSCRIPTION= # Azure resource location -AKS_RESOURCE_LOCATION=eastus \ No newline at end of file +AKS_RESOURCE_LOCATION=eastus + +# Enable enableContainerLogV2 +ENABLE_CONTAINER_LOG_V2=true diff --git a/helm/examples/azure/log-analytics/loganalytics-hpcc-logaccessV2.yaml b/helm/examples/azure/log-analytics/loganalytics-hpcc-logaccessV2.yaml new file mode 100644 index 00000000000..f680737b823 --- /dev/null +++ b/helm/examples/azure/log-analytics/loganalytics-hpcc-logaccessV2.yaml @@ -0,0 +1,88 @@ +# Configures HPCC logAccess to target Azure Log Analytics Workspace +global: + logAccess: + name: "Azure LogAnalytics LogAccess" + type: "AzureLogAnalyticsCurl" + #connection: + #All connection attributes are optional. 
+ #It is preferable to provide connection values as secret values category 'esp', secret name 'azure_logaccess' + # NOTE: secret 'azure_logaccess' must include 'aad-client-secret' and it cannot be provided in configuration + # + #workspaceID: "XYZ" #ID of the Azure LogAnalytics workspace to query logs from + # Secret value equivalent: 'ala-workspace-id' + #clientID: "DEF" #ID of Azure Active Directory registered application with api.loganalytics.io access - format: 00000000-0000-0000-0000-000000000000 + # Secret value equivalent: 'aad-client-id' + #tenantID: "ABC" #The Azure Active Directory Tenant ID, required for KQL API access + # Secret value equivalent: 'aad-tenant-id' + logMaps: + - type: "global" + storeName: "ContainerLogV2" + searchColumn: "LogMessage" + timeStampColumn: "hpcc_log_timestamp" + columnType: "dynamic" + columnMode: "ALL" + - type: "workunits" + searchColumn: "hpcc_log_jobid" + columnMode: "DEFAULT" + columnType: "string" + - type: "components" + storeName: "ContainerLogV2" + searchColumn: "ContainerName" # Container name happens to coincide with component name + keyColumn: "ContainerName" + columnMode: "DEFAULT" + columnType: "string" + - type: "audience" + searchColumn: "hpcc_log_audience" + enumValues: + - code: OPR + - code: USR + - code: PRO + - code: ADT + - code: MON + columnMode: "DEFAULT" + columnType: "enum" + - type: "class" + searchColumn: "hpcc_log_class" + enumValues: + - code: DIS + - code: ERR + - code: WRN + - code: INF + - code: PRO + - code: MET + - code: EVT + columnMode: "DEFAULT" + columnType: "enum" + - type: "node" + columnMode: "DEFAULT" + searchColumn: "Computer" + columnMode: "ALL" + columnType: "string" + - type: "message" + searchColumn: "hpcc_log_message" + columnMode: "MIN" + columnType: "string" + - type: "logid" + searchColumn: "hpcc_log_sequence" + columnMode: "DEFAULT" + columnType: "numeric" + - type: "processid" + searchColumn: "hpcc_log_procid" + columnMode: "ALL" + columnType: "numeric" + - type: "threadid" + searchColumn: "hpcc_log_threadid" + columnMode: "DEFAULT" + columnType: "numeric" + - type: "timestamp" + searchColumn: "hpcc_log_timestamp" + columnMode: "MIN" + columnType: "datetime" +secrets: + esp: + azure-logaccess: "azure-logaccess" +vaults: + esp: + - name: my-azure-logaccess-vault + url: http://${env.VAULT_SERVICE_HOST}:${env.VAULT_SERVICE_PORT}/v1/secret/data/esp/${secret} + kind: kv-v2 diff --git a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp index 9aa987a51f8..6cd4c6bf59a 100644 --- a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp +++ b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp @@ -46,7 +46,6 @@ static constexpr const char * logMapTimeStampColAtt = "@timeStampColumn"; static constexpr const char * logMapKeyColAtt = "@keyColumn"; static constexpr const char * logMapDisableJoinsAtt = "@disableJoins"; - static constexpr std::size_t defaultMaxRecordsPerFetch = 100; static size_t captureIncomingCURLReply(void* contents, size_t size, size_t nmemb, void* userp) @@ -344,7 +343,10 @@ AzureLogAnalyticsCurlClient::AzureLogAnalyticsCurlClient(IPropertyTree & logAcce if (streq(logMapType, "global")) { if (logMap.hasProp(logMapIndexPatternAtt)) + { m_globalIndexSearchPattern = logMap.queryProp(logMapIndexPatternAtt); + targetIsContainerLogV2 = strcmp("ContainerLogV2", m_globalIndexSearchPattern)==0; + } if (logMap.hasProp(logMapSearchColAtt)) 
m_globalSearchColName = logMap.queryProp(logMapSearchColAtt); if (logMap.hasProp(logMapTimeStampColAtt)) @@ -371,6 +373,15 @@ AzureLogAnalyticsCurlClient::AzureLogAnalyticsCurlClient(IPropertyTree & logAcce m_componentsTimestampField = defaultHPCCLogComponentTSCol; m_disableComponentNameJoins = logMap.getPropBool(logMapDisableJoinsAtt, false); + if (targetIsContainerLogV2) + m_disableComponentNameJoins = true; //Don't attempt a join on ContainerLogV2 + else + { + if (strcmp("ContainerLogV2", m_componentsIndexSearchPattern)==0) + targetIsContainerLogV2 = true; + + m_disableComponentNameJoins = !m_disableComponentNameJoins && logMap.getPropBool(logMapDisableJoinsAtt, false); + } } else if (streq(logMapType, "class")) { @@ -392,6 +403,8 @@ AzureLogAnalyticsCurlClient::AzureLogAnalyticsCurlClient(IPropertyTree & logAcce m_instanceIndexSearchPattern = logMap.queryProp(logMapIndexPatternAtt); if (logMap.hasProp(logMapSearchColAtt)) m_instanceSearchColName = logMap.queryProp(logMapSearchColAtt); + if (logMap.hasProp(logMapKeyColAtt)) + m_instanceLookupKeyColumn = logMap.queryProp(logMapKeyColAtt); } else if (streq(logMapType, "node")) { @@ -420,26 +433,58 @@ AzureLogAnalyticsCurlClient::AzureLogAnalyticsCurlClient(IPropertyTree & logAcce } } -void AzureLogAnalyticsCurlClient::getMinReturnColumns(StringBuffer & columns, bool & includeComponentName) +void AzureLogAnalyticsCurlClient::getMinReturnColumns(StringBuffer & columns, const bool includeComponentName) { columns.append("\n| project "); if (includeComponentName) - columns.appendf("%s, ", defaultHPCCLogComponentCol); + { + if (targetIsContainerLogV2 && m_componentsSearchColName.length() > 0) + { + columns.append(m_componentsSearchColName.str()); + if (m_componentsLookupKeyColumn.length() > 0 && !strsame(m_componentsSearchColName.str(), m_componentsLookupKeyColumn.str())) + columns.appendf("=%s", m_componentsLookupKeyColumn.str()); + } + else + columns.append(defaultHPCCLogComponentCol); + columns.append(", "); + } columns.appendf("%s, %s", m_globalIndexTimestampField.str(), defaultHPCCLogMessageCol); } -void AzureLogAnalyticsCurlClient::getDefaultReturnColumns(StringBuffer & columns, bool & includeComponentName) +void AzureLogAnalyticsCurlClient::getDefaultReturnColumns(StringBuffer & columns, const bool includeComponentName) { columns.append("\n| project "); + if (includeComponentName) - columns.appendf("%s, ", defaultHPCCLogComponentCol); + { + if (targetIsContainerLogV2 && m_componentsSearchColName.length() > 0) + { + columns.append(m_componentsSearchColName.str()); + if (m_componentsLookupKeyColumn.length() > 0 && !strsame(m_componentsSearchColName.str(), m_componentsLookupKeyColumn.str())) + columns.appendf("=%s", m_componentsLookupKeyColumn.str()); + } + else + columns.append(defaultHPCCLogComponentCol); + + columns.append(", "); + } + + if (!isEmptyString(m_instanceSearchColName.str())) + { + columns.appendf("%s", m_instanceSearchColName.str()); + + if (m_instanceLookupKeyColumn.length()>0 && !strsame(m_instanceLookupKeyColumn.str(),m_instanceSearchColName.str())) + columns.appendf("=%s", m_instanceLookupKeyColumn.str()); + + columns.append(", "); + } columns.appendf("%s, %s, %s, %s, %s, %s, %s", m_globalIndexTimestampField.str(), defaultHPCCLogMessageCol, m_classSearchColName.str(), m_audienceSearchColName.str(), m_workunitSearchColName.str(), defaultHPCCLogSeqCol, defaultHPCCLogThreadIDCol); } -bool generateHPCCLogColumnstAllColumns(StringBuffer & kql, const char * colName) +bool generateHPCCLogColumnstAllColumns(StringBuffer & kql, 
const char * colName, bool targetsV2) { if (isEmptyString(colName)) { @@ -447,7 +492,15 @@ bool generateHPCCLogColumnstAllColumns(StringBuffer & kql, const char * colName) return false; } - kql.appendf("\n| extend hpcclogfields = extract_all(@\'^([0-9A-Fa-f]+)\\s+(OPR|USR|PRG|AUD|UNK)\\s+(DIS|ERR|WRN|INF|PRO|MET|UNK)\\s+(\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(UNK|[A-Z]\\d{8}-\\d{6}(?:-\\d+)?)\\s+\\\"(.*)\\\"$', %s)[0]", colName); + StringBuffer sourceCol; + if (targetsV2 && strcmp(colName, "LogMessage")==0) + sourceCol.set("tostring(LogMessage)"); + else if (!targetsV2 && strcmp(colName, "LogEntry")==0) + sourceCol.append(colName); + else + throw makeStringExceptionV(-1, "%s: Invalid Azure Log Analytics log message column name detected: '%s'. Review logAccess configuration.", COMPONENT_NAME, colName); + + kql.appendf("\n| extend hpcclogfields = extract_all(@\'^([0-9A-Fa-f]+)\\s+(OPR|USR|PRG|AUD|UNK)\\s+(DIS|ERR|WRN|INF|PRO|MET|UNK)\\s+(\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(UNK|[A-Z]\\d{8}-\\d{6}(?:-\\d+)?)\\s+\\\"(.*)\\\"$', %s)[0]", sourceCol.str()); kql.appendf("\n| extend %s = tostring(hpcclogfields.[0])", defaultHPCCLogSeqCol); kql.appendf("\n| extend %s = tostring(hpcclogfields.[1])", defaultHPCCLogAudCol); kql.appendf("\n| extend %s = tostring(hpcclogfields.[2])", defaultHPCCLogTypeCol); @@ -456,6 +509,13 @@ bool generateHPCCLogColumnstAllColumns(StringBuffer & kql, const char * colName) kql.appendf("\n| extend %s = toint(hpcclogfields.[5])", defaultHPCCLogThreadIDCol); kql.appendf("\n| extend %s = tostring(hpcclogfields.[6])", defaultHPCCLogJobIDCol); kql.appendf("\n| extend %s = tostring(hpcclogfields.[7])", defaultHPCCLogMessageCol); + kql.appendf("\n| project-away hpcclogfields, Type, TenantId, _ResourceId, %s, ", colName); + + if (targetsV2) + kql.append("LogSource, SourceSystem"); + else + kql.append("LogEntrySource, TimeOfCommand, SourceSystem"); + return true; } @@ -630,7 +690,10 @@ void AzureLogAnalyticsCurlClient::populateKQLQueryString(StringBuffer & queryStr if (m_instanceSearchColName.isEmpty()) throw makeStringExceptionV(-1, "%s: 'Instance' log entry field not configured", COMPONENT_NAME); - queryField = m_instanceSearchColName.str(); + if (m_instanceLookupKeyColumn.length()>0 && !strsame(m_instanceLookupKeyColumn.str(),m_instanceSearchColName.str())) + queryField = m_instanceLookupKeyColumn.str(); + else + queryField = m_instanceSearchColName.str(); if (!m_instanceIndexSearchPattern.isEmpty()) { @@ -708,13 +771,13 @@ void AzureLogAnalyticsCurlClient::populateKQLQueryString(StringBuffer & queryStr queryIndex.set(m_globalIndexSearchPattern.str()); StringBuffer searchColumns; - bool includeComponentName = !m_disableComponentNameJoins; + bool includeComponentName = !m_disableComponentNameJoins || targetIsContainerLogV2; searchMetaData(searchColumns, options.getReturnColsMode(), options.getLogFieldNames(), includeComponentName, options.getLimit(), options.getStartFrom()); - if (includeComponentName) + if (!m_disableComponentNameJoins && !targetIsContainerLogV2) declareContainerIndexJoinTable(queryString, options); queryString.append(queryIndex); - generateHPCCLogColumnstAllColumns(queryString, m_globalSearchColName.str()); + generateHPCCLogColumnstAllColumns(queryString, m_globalSearchColName.str(), targetIsContainerLogV2); if (options.queryFilter() == nullptr || options.queryFilter()->filterType() == LOGACCESS_FILTER_wildcard) // No filter { @@ -730,7 +793,8 @@ void 
AzureLogAnalyticsCurlClient::populateKQLQueryString(StringBuffer & queryStr StringBuffer range; azureLogAnalyticsTimestampQueryRangeString(range, m_globalIndexTimestampField.str(), trange.getStartt().getSimple(),trange.getEndt().isNull() ? -1 : trange.getEndt().getSimple()); queryString.append("\n| where ").append(range.str()); - if (includeComponentName) + //if (includeComponentName) + if (!m_disableComponentNameJoins && !targetIsContainerLogV2) queryString.append("\n) on ").append(m_componentsLookupKeyColumn); queryString.append(searchColumns); diff --git a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp index f64af4817fd..b622db4d749 100644 --- a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp +++ b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp @@ -65,12 +65,14 @@ class AzureLogAnalyticsCurlClient : public CInterfaceOf StringBuffer m_aadClientSecret; StringBuffer m_componentsLookupKeyColumn; + StringBuffer m_instanceLookupKeyColumn; + bool targetIsContainerLogV2 = false; public: AzureLogAnalyticsCurlClient(IPropertyTree & logAccessPluginConfig); - void getMinReturnColumns(StringBuffer & columns, bool & includeComponentName); - void getDefaultReturnColumns(StringBuffer & columns, bool & includeComponentName); + void getMinReturnColumns(StringBuffer & columns, const bool includeComponentName); + void getDefaultReturnColumns(StringBuffer & columns, const bool includeComponentName); void searchMetaData(StringBuffer & search, const LogAccessReturnColsMode retcolmode, const StringArray & selectcols, bool & includeComponentName, unsigned size = defaultEntryLimit, offset_t from = defaultEntryStart); void populateKQLQueryString(StringBuffer & queryString, StringBuffer& queryIndex, const LogAccessConditions & options); void populateKQLQueryString(StringBuffer & queryString, StringBuffer& queryIndex, const ILogAccessFilter * filter); From 154ab5d3035bc807338b15ef96e9d4890dc4b6f1 Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Tue, 17 Oct 2023 11:30:40 -0400 Subject: [PATCH 20/35] HPCC-30560 ECL Watch v9 grid columns should visually indicate sortability Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/src-react/components/controls/Grid.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esp/src/src-react/components/controls/Grid.tsx b/esp/src/src-react/components/controls/Grid.tsx index 3cbba4a0506..73b979e7b3c 100644 --- a/esp/src/src-react/components/controls/Grid.tsx +++ b/esp/src/src-react/components/controls/Grid.tsx @@ -83,7 +83,7 @@ function columnsAdapter(columns: FluentColumns, columnWidths: Map): iconName: column.headerIcon, isIconOnly: !!column.headerIcon, data: column, - styles: { root: { width } }, + styles: { root: { width, ":hover": { cursor: column?.sortable !== false ? "pointer" : "default" } } }, onRender: (item: any, index: number, col: IColumn) => { col.minWidth = column.width ?? 
70; col.maxWidth = column.width; From bd5c7daf4dec644252037d59c5a1b11d107d40e8 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Wed, 18 Oct 2023 18:30:16 +0100 Subject: [PATCH 21/35] HPCC-30569 Bump kubectl and git-lfs Fix security issues with underlying go libraries Signed-off-by: Gordon Smith --- dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile b/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile index 7f55eb63a7d..16523211116 100644 --- a/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile +++ b/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile @@ -33,7 +33,6 @@ RUN apt-get clean -y && \ expect \ g++ \ git \ - git-lfs \ locales \ jq \ openssh-client \ @@ -59,10 +58,14 @@ RUN apt-get install -y \ gdb \ nano -RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.27.6/bin/linux/amd64/kubectl && \ +RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.28.3/bin/linux/amd64/kubectl && \ chmod +x ./kubectl && \ mv ./kubectl /usr/local/bin +RUN curl -LO https://packagecloud.io/github/git-lfs/packages/debian/bullseye/git-lfs_3.4.0_amd64.deb/download && \ + dpkg -i download && \ + rm download + # Set the locale RUN locale-gen en_US.UTF-8 ENV LANG en_US.UTF-8 From 760ad0f44bde7b4b5557ff7bfc2363e2638ef79d Mon Sep 17 00:00:00 2001 From: Ken Rowland Date: Wed, 18 Oct 2023 14:06:58 -0400 Subject: [PATCH 22/35] Updates log statements based on reveiw comments --- system/security/shared/caching.cpp | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/system/security/shared/caching.cpp b/system/security/shared/caching.cpp index 4fef9e91c0f..bc131920b4c 100644 --- a/system/security/shared/caching.cpp +++ b/system/security/shared/caching.cpp @@ -556,7 +556,7 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha if (!fullScope || !*fullScope) { *accessFlags = queryDefaultPermission(sec_user); - OWARNLOG("FileScope missing, using root for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); + OWARNLOG("FileScope empty for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick()-start); return true; } @@ -580,9 +580,6 @@ bool CPermissionsCache::queryPermsManagedFileScope(ISecUser& sec_user, const cha if (m_managedFileScopesMap.empty()) { *accessFlags = queryDefaultPermission(sec_user); - if (m_secMgr) { - OWARNLOG("Filescope managed scopes empty for %s, applying default permissions %s(%d), took %dms", sec_user.getName(), getSecAccessFlagName(*accessFlags), *accessFlags, msTick() - start); - } return true; } From 237a33fb16a1d2ce7e8c0c83e619d9bc903edbc3 Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Thu, 19 Oct 2023 09:16:07 -0400 Subject: [PATCH 23/35] HPCC-30356 ECL Watch remote copy dialog do not require dali param Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/eclwatch/templates/DFUQueryWidget.html | 2 +- esp/src/src-react/components/forms/RemoteCopy.tsx | 5 ----- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/esp/src/eclwatch/templates/DFUQueryWidget.html b/esp/src/eclwatch/templates/DFUQueryWidget.html index e8bfa3315ce..b45187e2be7 100644 --- a/esp/src/eclwatch/templates/DFUQueryWidget.html +++ 
b/esp/src/eclwatch/templates/DFUQueryWidget.html @@ -15,7 +15,7 @@
${i18n.Source}
- + diff --git a/esp/src/src-react/components/forms/RemoteCopy.tsx b/esp/src/src-react/components/forms/RemoteCopy.tsx index f50bb4885a6..113c819bd8c 100644 --- a/esp/src/src-react/components/forms/RemoteCopy.tsx +++ b/esp/src/src-react/components/forms/RemoteCopy.tsx @@ -130,14 +130,9 @@ export const RemoteCopy: React.FunctionComponent = ({ }) => } - rules={{ - required: nlsHPCC.ValidationErrorRequired - }} /> Date: Wed, 18 Oct 2023 16:49:21 -0400 Subject: [PATCH 24/35] HPCC-30575 Github Action jfrog cli push to artifactory and promote build Signed-off-by: Michael Gardner --- .github/workflows/build-assets.yml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index d814a7a4f52..395df3ec30e 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -34,6 +34,7 @@ jobs: internal_ref: ${{ steps.vars.outputs.internal_ref }} community_tag: ${{ steps.vars.outputs.community_tag }} internal_tag: ${{ steps.vars.outputs.internal_tag }} + hpcc_version: ${{ steps.vars.outputs.hpcc_version }} candidate_base_branch: ${{ steps.vars.outputs.candidate_base_branch }} candidate_branch: ${{ steps.vars.outputs.candidate_branch }} cmake_docker_config: ${{ steps.vars.outputs.cmake_docker_config }} @@ -54,6 +55,7 @@ jobs: community_tag=$(echo $community_ref | cut -d'/' -f3) echo "community_tag=$community_tag" >> $GITHUB_OUTPUT echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT + echo "hpcc_version=$(echo $community_tag | sed 's/community_//')" >> $GITHUB_OUTPUT community_base_ref=${{ github.event.base_ref || github.ref }} candidate_branch=$(echo $community_base_ref | cut -d'/' -f3) echo "candidate_branch=$candidate_branch" >> $GITHUB_OUTPUT @@ -266,6 +268,14 @@ jobs: username: ${{ secrets.JFROG_USERNAME }} password: ${{ secrets.JFROG_PASSWORD }} + - name: Setup JFrog CLI (internal) + if: ${{ matrix.ln && matrix.container && github.repository_owner == 'hpcc-systems' }} + uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: https://${{ secrets.JFROG_REGISTRY }} + JF_USER: ${{ secrets.JFROG_USERNAME }} + JF_PASSWORD: ${{ secrets.JFROG_PASSWORD }} + - name: CMake Packages (internal) if: ${{ matrix.ln && !matrix.container && !matrix.documentation }} run: | @@ -326,14 +336,20 @@ jobs: builder: ${{ steps.buildx.outputs.name }} file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/platform-core-${{ matrix.os }}/Dockerfile context: ${{ needs.preamble.outputs.folder_build }} - push: ${{ github.repository_owner == 'hpcc-systems' }} + push: false build-args: | PKG_FILE=${{ steps.ln-container.outputs.k8s_pkg_file }} tags: | - ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.internal_tag }} + ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} cache-from: | type=registry,ref=hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} + - name: JFrog Docker Push and Publish + if: ${{ matrix.ln && matrix.container && !matrix.documentation && github.repository_owner == 'hpcc-systems' }} + run: | + jf rt dp ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} hpccpl-docker-local --build-name=platform-core-ln 
--build-number=${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl + jf rt bp platform-core-ln ${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl + # Common --- - name: Cleanup Environment if: always() From b03b83db3ee70f9c6a82edf2923f60fb62f0ec14 Mon Sep 17 00:00:00 2001 From: Jack Del Vecchio Date: Thu, 19 Oct 2023 16:59:58 +0000 Subject: [PATCH 25/35] HPCC-30524 Parquet Strings cannot be converted to REAL --- plugins/parquet/parquetembed.cpp | 39 ++++++++++++++++++++++++++++---- plugins/parquet/parquetembed.hpp | 1 + 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/plugins/parquet/parquetembed.cpp b/plugins/parquet/parquetembed.cpp index 749b14d35b6..290766733a9 100644 --- a/plugins/parquet/parquetembed.cpp +++ b/plugins/parquet/parquetembed.cpp @@ -956,12 +956,44 @@ __int64 ParquetRowBuilder::getCurrIntValue(const RtlFieldInfo *field) { __int64 myint64 = 0; auto scalar = getCurrView(field); - handleDeserializeOutcome(tokenDeserializer.deserialize(scalar.data(), myint64), "signed", scalar.data()); + std::string scalarStr(scalar.data(), scalar.size()); + handleDeserializeOutcome(tokenDeserializer.deserialize(scalarStr.c_str(), myint64), "signed", scalarStr.c_str()); return myint64; } } } +double ParquetRowBuilder::getCurrRealValue(const RtlFieldInfo *field) +{ + switch ((*array_visitor)->type) + { + case BoolType: + return (*array_visitor)->bool_arr->Value(currArrayIndex()); + case IntType: + return getSigned(array_visitor, currArrayIndex()); + case UIntType: + return getUnsigned(array_visitor, currArrayIndex()); + case RealType: + return getReal(array_visitor, currArrayIndex()); + case DateType: + return (*array_visitor)->size == 32 ? (*array_visitor)->date32_arr->Value(currArrayIndex()) : (*array_visitor)->date64_arr->Value(currArrayIndex()); + case TimestampType: + return (*array_visitor)->timestamp_arr->Value(currArrayIndex()); + case TimeType: + return (*array_visitor)->size == 32 ? 
(*array_visitor)->time32_arr->Value(currArrayIndex()) : (*array_visitor)->time64_arr->Value(currArrayIndex()); + case DurationType: + return (*array_visitor)->duration_arr->Value(currArrayIndex()); + default: + { + double mydouble = 0.0; + auto scalar = getCurrView(field); + std::string scalarStr(scalar.data(), scalar.size()); + handleDeserializeOutcome(tokenDeserializer.deserialize(scalarStr.c_str(), mydouble), "real", scalarStr.c_str()); + return mydouble; + } + } +} + /** * @brief Gets a Boolean result for an ECL Row * @@ -1020,10 +1052,7 @@ double ParquetRowBuilder::getRealResult(const RtlFieldInfo *field) return p.doubleResult; } - if ((*array_visitor)->type == RealType) - return getReal(array_visitor, currArrayIndex()); - else - return getCurrIntValue(field); + return getCurrRealValue(field); } /** diff --git a/plugins/parquet/parquetembed.hpp b/plugins/parquet/parquetembed.hpp index 9f1bb2015e0..7e233f53986 100644 --- a/plugins/parquet/parquetembed.hpp +++ b/plugins/parquet/parquetembed.hpp @@ -899,6 +899,7 @@ class ParquetRowBuilder : public CInterfaceOf const std::shared_ptr &getChunk(std::shared_ptr *column); std::string_view getCurrView(const RtlFieldInfo *field); __int64 getCurrIntValue(const RtlFieldInfo *field); + double getCurrRealValue(const RtlFieldInfo *field); void nextField(const RtlFieldInfo *field); void nextFromStruct(const RtlFieldInfo *field); void xpathOrName(StringBuffer &outXPath, const RtlFieldInfo *field) const; From 9efab561fa0bfcfd0b2f9f5c2e24459064e8113e Mon Sep 17 00:00:00 2001 From: Michael Gardner Date: Thu, 19 Oct 2023 13:18:04 -0400 Subject: [PATCH 26/35] HPCC-30575 Fix versioning for golds and new jf docker push syntax Signed-off-by: Michael Gardner --- .github/workflows/build-assets.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 395df3ec30e..510fb15c55b 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -55,7 +55,7 @@ jobs: community_tag=$(echo $community_ref | cut -d'/' -f3) echo "community_tag=$community_tag" >> $GITHUB_OUTPUT echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT - echo "hpcc_version=$(echo $community_tag | sed 's/community_//')" >> $GITHUB_OUTPUT + echo "hpcc_version=$(echo $community_tag | sed 's/community_//' | sed 's/-[0-9]$//')" >> $GITHUB_OUTPUT community_base_ref=${{ github.event.base_ref || github.ref }} candidate_branch=$(echo $community_base_ref | cut -d'/' -f3) echo "candidate_branch=$candidate_branch" >> $GITHUB_OUTPUT @@ -347,7 +347,7 @@ jobs: - name: JFrog Docker Push and Publish if: ${{ matrix.ln && matrix.container && !matrix.documentation && github.repository_owner == 'hpcc-systems' }} run: | - jf rt dp ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} hpccpl-docker-local --build-name=platform-core-ln --build-number=${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl + jf docker push ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} --build-name=platform-core-ln --build-number=${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl jf rt bp platform-core-ln ${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl # Common --- From 7dda53526ff15df76473b6f5a73ef7082143ae7a Mon Sep 17 00:00:00 2001 From: Ken Rowland 
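
A note on the HPCC-30524 Parquet change shown above (plugins/parquet/parquetembed.cpp): getCurrIntValue and the new getCurrRealValue copy the field's std::string_view into a std::string before handing it to the token deserializer, because a string_view is not guaranteed to be NUL-terminated while the deserializer expects a C string. The minimal sketch below illustrates that pitfall under stated assumptions: viewToReal and the strtod stand-in are hypothetical and only mirror the shape of the fix, they are not plugin code.

#include <cstdio>
#include <cstdlib>
#include <string>
#include <string_view>

// NUL-terminated copy before parsing, mirroring the fix in getCurrRealValue()/getCurrIntValue()
static double viewToReal(std::string_view view)
{
    std::string tmp(view.data(), view.size());
    return strtod(tmp.c_str(), nullptr);
}

int main()
{
    const char buffer[] = "3.14159";               // backing store holds more digits...
    std::string_view field(buffer, 4);             // ...but the logical field is just "3.14"
    printf("%g\n", viewToReal(field));             // prints 3.14
    printf("%g\n", strtod(field.data(), nullptr)); // ignores the view's length: prints 3.14159
    return 0;
}
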
Date: Mon, 9 Oct 2023 17:12:18 -0400 Subject: [PATCH 27/35] HPCC-30446 esp components failing to start in cloud due to invalid metrics Added code to metrics manager to remove illegal characters from metric name Signed-Off-By: Kenneth Rowland kenneth.rowland@lexisnexisrisk.com --- esp/espcommon/espcommon.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/esp/espcommon/espcommon.cpp b/esp/espcommon/espcommon.cpp index 24bdf7c6788..2599434910b 100644 --- a/esp/espcommon/espcommon.cpp +++ b/esp/espcommon/espcommon.cpp @@ -59,8 +59,13 @@ ESPCOMMON_API std::shared_ptr registerServic { std::string metricName(processName); metricName.append(".").append(serviceName).append(".").append(methodName); - auto no_ = std::remove(metricName.begin(), metricName.end(), '_'); - metricName.erase(no_, metricName.end()); + + // Remove unwanted characters from new metric name + constexpr char removeChars[] = "_-* "; + for (unsigned i=0; i Date: Thu, 19 Oct 2023 15:23:00 -0400 Subject: [PATCH 28/35] HPCC-30575 Fix export from docker/build-push-action for jf cli to access image Signed-off-by: Michael Gardner --- .github/workflows/build-assets.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 510fb15c55b..1fc07de8f64 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -330,13 +330,14 @@ jobs: echo "$k8s_pkg_file" - name: Create Docker Image (internal) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 if: ${{ matrix.ln && matrix.container && !matrix.documentation }} with: builder: ${{ steps.buildx.outputs.name }} file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/platform-core-${{ matrix.os }}/Dockerfile context: ${{ needs.preamble.outputs.folder_build }} push: false + load: true build-args: | PKG_FILE=${{ steps.ln-container.outputs.k8s_pkg_file }} tags: | From a81a6e475a2a8e376247c88ab5ee44f0e09730c5 Mon Sep 17 00:00:00 2001 From: Michael Gardner Date: Fri, 20 Oct 2023 10:56:28 -0400 Subject: [PATCH 29/35] HPCC-30575 Modify ln docker containers final label Signed-off-by: Michael Gardner --- .github/workflows/build-assets.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 1fc07de8f64..b58d268d4df 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -341,14 +341,14 @@ jobs: build-args: | PKG_FILE=${{ steps.ln-container.outputs.k8s_pkg_file }} tags: | - ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} + ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} cache-from: | type=registry,ref=hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} - name: JFrog Docker Push and Publish if: ${{ matrix.ln && matrix.container && !matrix.documentation && github.repository_owner == 'hpcc-systems' }} run: | - jf docker push ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} --build-name=platform-core-ln --build-number=${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl + jf docker push ${{ secrets.JFROG_REGISTRY || 'dummy.io' 
}}/hpccpl-docker-local/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} --build-name=platform-core-ln --build-number=${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl
 jf rt bp platform-core-ln ${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl

 # Common ---

From c5220ca9f08e69b80d2d11e6ce0c2e4c0561233b Mon Sep 17 00:00:00 2001
From: Jake Smith
Date: Sun, 22 Oct 2023 22:07:41 +0100
Subject: [PATCH 30/35] HPCC-30616 Ensure check_executes handles signals
 properly

Signed-off-by: Jake Smith
---
 initfiles/bin/check_executes | 28 ++++++++++++++++++++++++----
 1 file changed, 24 insertions(+), 4 deletions(-)

diff --git a/initfiles/bin/check_executes b/initfiles/bin/check_executes
index 53311d464ba..b4980e8339b 100755
--- a/initfiles/bin/check_executes
+++ b/initfiles/bin/check_executes
@@ -61,14 +61,34 @@ done

 ulimit -c unlimited

-#Ensure any signals to the script kill the child process
-trap 'echo EXIT via signal ; kill 0; wait; ' EXIT
+function cleanup {
+    echo "EXIT via signal for $progPid"
+    if [ -n "$progPid" ]; then
+        kill $progPid
+        wait $progPid
+        retVal=$?
+    fi
+}
+
+# Ensure any signals to the script kill the child process
+# NB: do not include SIGEXIT since when handled, it will cause the script to exit prematurely.
+trap cleanup SIGTERM SIGINT SIGABRT SIGQUIT SIGHUP

 # Execute the main program, defaulting postmortem logging on (can be overriden by program's config file)
-${PMD_PROGNAME} --logging.postMortem=1000 "$@"
+${PMD_PROGNAME} --logging.postMortem=1000 "$@" &
+progPid=$!
+
+echo "Waiting for child process $progPid"
+# If the signal handler (cleanup) was called, it will wait and capture retVal and cause this 'wait $progPid' to exit on completion.
+# NB: If the signal handler itself doesn't wait, then it will still cause this statement to complete before the child process has exited.
+wait $progPid
+retVal2=$?
+if [ ! -v retVal ]; then
+    retVal=$retVal2
+fi
+echo "Child process $progPid has exited with exit code $retVal"

 # If it did not exit cleanly, copy some post-mortem info
-retVal=$?
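
The reworked check_executes hunk above follows a common supervisor pattern: start the payload as a background child, forward termination signals to it, then reap it and propagate its exit code. For readers more at home in C++ than shell, a minimal sketch of the same pattern follows; it is illustrative only, the names and the 128+signal exit convention are assumptions rather than platform code.

// Minimal supervisor sketch (not part of the platform): launch a child,
// forward termination signals to it, then report and propagate its exit status.
#include <cerrno>
#include <cstdio>
#include <initializer_list>
#include <signal.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

static pid_t childPid = -1;

static void forwardSignal(int sig)
{
    if (childPid > 0)
        kill(childPid, sig);       // equivalent of the script's cleanup() trap
}

int main(int argc, char *argv[])
{
    if (argc < 2)
        return 1;
    childPid = fork();
    if (childPid == 0)
    {
        execvp(argv[1], &argv[1]); // child: run the real program
        _exit(127);                // exec failed
    }

    struct sigaction sa = {};
    sa.sa_handler = forwardSignal; // registered after fork; a sketch, so the tiny window is ignored
    for (int sig : {SIGTERM, SIGINT, SIGABRT, SIGQUIT, SIGHUP})
        sigaction(sig, &sa, nullptr);

    int status = 0;
    while (waitpid(childPid, &status, 0) < 0 && errno == EINTR)
        ;                          // retry if interrupted by a forwarded signal
    int retVal = WIFEXITED(status) ? WEXITSTATUS(status) : 128 + WTERMSIG(status);
    fprintf(stderr, "Child process %d has exited with exit code %d\n", (int)childPid, retVal);
    return retVal;                 // propagate the child's exit code, like $retVal in the script
}
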
if [ $PMD_ALWAYS = true ] || [ $retVal -ne 0 ]; then POST_MORTEM_DIR=${PMD_DIRECTORYBASE}/$(hostname)/$(date -Iseconds) mkdir -p ${POST_MORTEM_DIR} From f742b266f1df03e3748ede03471926fbe7aa0c45 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Mon, 23 Oct 2023 15:10:35 +0100 Subject: [PATCH 31/35] HPCC-30569 Bump kubectl and git-lfs Back port latest build-assets.yml and build-vcpkg.yml Signed-off-by: Gordon Smith --- .github/workflows/build-assets.yml | 186 ++++++++++++++---- .github/workflows/build-vcpkg.yml | 88 ++++++--- .../platform-core-ubuntu-22.04/Dockerfile | 93 +++++++++ 3 files changed, 300 insertions(+), 67 deletions(-) create mode 100644 dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 559c6a2f03c..aebc22cc54a 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -34,7 +34,8 @@ jobs: internal_ref: ${{ steps.vars.outputs.internal_ref }} community_tag: ${{ steps.vars.outputs.community_tag }} internal_tag: ${{ steps.vars.outputs.internal_tag }} - community_branch: ${{ steps.vars.outputs.community_branch }} + candidate_base_branch: ${{ steps.vars.outputs.candidate_base_branch }} + candidate_branch: ${{ steps.vars.outputs.candidate_branch }} cmake_docker_config: ${{ steps.vars.outputs.cmake_docker_config }} gpg_import: ${{ steps.vars.outputs.gpg_import }} steps: @@ -54,8 +55,10 @@ jobs: echo "community_tag=$community_tag" >> $GITHUB_OUTPUT echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT community_base_ref=${{ github.event.base_ref || github.ref }} - echo "community_branch=$(echo $community_base_ref | cut -d'/' -f3)" >> $GITHUB_OUTPUT - echo "cmake_docker_config=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DSIGN_MODULES=ON" >> $GITHUB_OUTPUT + candidate_branch=$(echo $community_base_ref | cut -d'/' -f3) + echo "candidate_branch=$candidate_branch" >> $GITHUB_OUTPUT + echo "candidate_base_branch=$(echo $candidate_branch | awk -F'.' -v OFS='.' 
'{ $3="x"; print }')" >> $GITHUB_OUTPUT + echo "cmake_docker_config=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DSIGN_MODULES=${{ github.repository_owner == 'hpcc-systems' && 'ON' || 'OFF' }}" >> $GITHUB_OUTPUT echo 'gpg_import=gpg --batch --import /hpcc-dev/build/private.key' >> $GITHUB_OUTPUT - name: Print vars @@ -82,7 +85,6 @@ jobs: build-docker: name: Build Docker - # if: github.repository == 'hpcc-systems/HPCC-Platform' needs: preamble runs-on: ubuntu-22.04 strategy: @@ -90,14 +92,26 @@ jobs: include: - os: ubuntu-22.10 - os: ubuntu-22.04 - container: true - os: ubuntu-22.04 + name: k8s + container: true - os: ubuntu-20.04 - os: ubuntu-18.04 - os: centos-8 - os: centos-7 + cmake_options_extra: "" - os: amazonlinux + cmake_options_extra: "" + - os: ubuntu-22.04 + name: LN k8s + ln: true + container: true + - os: ubuntu-20.04 + name: LN + ln: true - os: centos-7 + name: LN + cmake_options_extra: "" ln: true fail-fast: false @@ -127,7 +141,12 @@ jobs: id: vars working-directory: ${{ needs.preamble.outputs.folder_platform }}/vcpkg run: | - echo "vcpkg_sha_short=$(git rev-parse --short=8 HEAD)" >> $GITHUB_OUTPUT + vcpkg_sha_short=$(git rev-parse --short=8 HEAD) + echo "vcpkg_sha_short=$vcpkg_sha_short" >> $GITHUB_OUTPUT + docker_build_label=hpccsystems/platform-build-${{ matrix.os }} + echo "docker_build_label=$docker_build_label" >> $GITHUB_OUTPUT + echo "docker_tag=$docker_build_label:$vcpkg_sha_short" >> $GITHUB_OUTPUT + echo "docker_tag_candidate_base=$docker_build_label:${{ needs.preamble.outputs.candidate_base_branch }}" >> $GITHUB_OUTPUT - name: Print vars run: | @@ -145,19 +164,25 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: Create Build Image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: builder: ${{ steps.buildx.outputs.name }} file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/${{ matrix.os }}.dockerfile context: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg - load: true + push: ${{ github.repository_owner == 'hpcc-systems' }} + load: ${{ github.repository_owner != 'hpcc-systems' }} build-args: | VCPKG_REF=${{ steps.vars.outputs.vcpkg_sha_short }} tags: | - build-${{ matrix.os }}:latest - - - name: CMake Packages - if: ${{ !matrix.container && !matrix.ln }} + ${{ steps.vars.outputs.docker_tag_candidate_base }} + cache-from: | + type=registry,ref=${{ steps.vars.outputs.docker_tag_candidate_base }} + type=registry,ref=${{ steps.vars.outputs.docker_tag }} + cache-to: type=inline + + # Communtiy Build + - name: CMake Packages (community) + if: ${{ !matrix.ln && !matrix.container && !matrix.documentation }} run: | mkdir -p ${{ needs.preamble.outputs.folder_build }} echo "${{ secrets.SIGNING_SECRET }}" > ${{ needs.preamble.outputs.folder_build }}/private.key @@ -165,38 +190,85 @@ jobs: for plugin in "${plugins[@]}"; do sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker_label=build-${{ matrix.os }}:latest - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF" - 
docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "${{ needs.preamble.outputs.gpg_import }} && \ + cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" # Disabled as not currently needed --- - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" done - - name: CMake Containerized Packages - if: ${{ matrix.container }} + - name: CMake Containerized Packages (community) + if: ${{ !matrix.ln && matrix.container && !matrix.documentation }} run: | mkdir -p ${{ needs.preamble.outputs.folder_build }} echo "${{ secrets.SIGNING_SECRET }}" > ${{ needs.preamble.outputs.folder_build }}/private.key sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker_label=build-${{ matrix.os }}:latest - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ 
needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "${{ needs.preamble.outputs.gpg_import }} && \ + cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "${{ needs.preamble.outputs.gpg_import }} && \ + cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + + - name: CMake documentation (community) + if: ${{ !matrix.ln && !matrix.container && matrix.documentation }} + run: | + mkdir -p {${{needs.preamble.outputs.folder_build }},EN_US,PT_BR} + sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt + sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ + cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build -DMAKE_DOCS_ONLY=ON -DUSE_NATIVE_LIBRARIES=ON -DDOCS_AUTO=ON -DDOC_LANGS=ALL && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target all" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cd /hpcc-dev/build/Release/docs/EN_US && zip ALL_HPCC_DOCS_EN_US-${{ needs.preamble.outputs.community_tag }}.zip *.pdf" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cd /hpcc-dev/build/Release/docs/PT_BR && zip ALL_HPCC_DOCS_PT_BR-${{ needs.preamble.outputs.community_tag }}.zip *.pdf" - - name: Upload Assets - uses: ncipollo/release-action@v1.12.0 + - name: Upload Assets (community) if: ${{ !matrix.ln }} + uses: ncipollo/release-action@v1.12.0 with: allowUpdates: true generateReleaseNotes: false prerelease: ${{ contains(github.ref, '-rc') }} - artifacts: "${{ needs.preamble.outputs.folder_build }}/*.deb,${{ 
needs.preamble.outputs.folder_build }}/*.rpm" + artifacts: "${{ needs.preamble.outputs.folder_build }}/*.deb,${{ needs.preamble.outputs.folder_build }}/*.rpm,${{ needs.preamble.outputs.folder_build }}/Release/docs/*.zip,${{ needs.preamble.outputs.folder_build }}/Release/docs/EN_US/*.zip,${{ needs.preamble.outputs.folder_build }}/Release/docs/PT_BR/*.zip,${{ needs.preamble.outputs.folder_build }}/docs/EN_US/EclipseHelp/*.zip,${{ needs.preamble.outputs.folder_build }}/docs/EN_US/HTMLHelp/*.zip,${{ needs.preamble.outputs.folder_build }}/docs/PT_BR/HTMLHelp/*.zip" - - name: CMake LN Packages - if: ${{ matrix.ln }} + - name: Locate k8s deb file (community) + if: ${{ !matrix.ln && matrix.container && !matrix.documentation }} + id: container + run: | + k8s_pkg_path=$(ls -t ${{ needs.preamble.outputs.folder_build }}/*64_k8s.deb 2>/dev/null | head -1) + k8s_pkg_file=$(basename "$k8s_pkg_path") + echo "k8s_pkg_file=$k8s_pkg_file" >> $GITHUB_OUTPUT + echo "$k8s_pkg_file" + + - name: Create Docker Image (community) + uses: docker/build-push-action@v4 + if: ${{ !matrix.ln && matrix.container && !matrix.documentation }} + with: + builder: ${{ steps.buildx.outputs.name }} + file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/platform-core-${{ matrix.os }}/Dockerfile + context: ${{ needs.preamble.outputs.folder_build }} + push: ${{ github.repository_owner == 'hpcc-systems' }} + build-args: | + PKG_FILE=${{ steps.container.outputs.k8s_pkg_file }} + tags: | + hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.community_tag }} + hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} + cache-from: | + type=registry,ref=hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} + + # Internal Build --- + - name: Login to JFrog (internal) + if: ${{ matrix.ln && matrix.container && github.repository_owner == 'hpcc-systems' }} + uses: docker/login-action@v2 + with: + registry: ${{ secrets.JFROG_REGISTRY }} + username: ${{ secrets.JFROG_USERNAME }} + password: ${{ secrets.JFROG_PASSWORD }} + + - name: CMake Packages (internal) + if: ${{ matrix.ln && !matrix.container && !matrix.documentation }} run: | mkdir -p ${{ needs.preamble.outputs.folder_build }} echo "${{ secrets.SIGNING_SECRET }}" > ${{ needs.preamble.outputs.folder_build }}/private.key @@ -204,14 +276,29 @@ jobs: for plugin in "${plugins[@]}"; do sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker_label=build-${{ matrix.os }}:latest - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -D$plugin=ON -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "${{ needs.preamble.outputs.gpg_import }} && cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - # docker run --rm 
--mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "${{ needs.preamble.outputs.gpg_import }} && \ + cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -D$plugin=ON -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + # Disabled as not currently needed --- + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" done - - name: Upload LN Assets + - name: CMake Containerized Packages (internal) + if: ${{ matrix.ln && matrix.container && !matrix.documentation }} + run: | + mkdir -p ${{ needs.preamble.outputs.folder_build }} + echo "${{ secrets.SIGNING_SECRET }}" > ${{ needs.preamble.outputs.folder_build }}/private.key + sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt + sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "${{ needs.preamble.outputs.gpg_import }} && \ + cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount 
${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "${{ needs.preamble.outputs.gpg_import }} && \ + cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DSIGN_MODULES_PASSPHRASE=${{ secrets.SIGN_MODULES_PASSPHRASE }} -DSIGN_MODULES_KEYID=${{ secrets.SIGN_MODULES_KEYID }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + + - name: Upload Assets (internal) if: ${{ matrix.ln }} uses: ncipollo/release-action@v1.12.0 with: @@ -224,6 +311,31 @@ jobs: tag: ${{ needs.preamble.outputs.internal_tag }} artifacts: "${{ needs.preamble.outputs.folder_build }}/hpccsystems-*-internal*.deb,${{ needs.preamble.outputs.folder_build }}/hpccsystems-*-internal*.rpm" + - name: Locate k8s deb file (internal) + if: ${{ matrix.ln && matrix.container && !matrix.documentation }} + id: ln-container + run: | + k8s_pkg_path=$(ls -t ${{ needs.preamble.outputs.folder_build }}/*64_k8s.deb 2>/dev/null | head -1) + k8s_pkg_file=$(basename "$k8s_pkg_path") + echo "k8s_pkg_file=$k8s_pkg_file" >> $GITHUB_OUTPUT + echo "$k8s_pkg_file" + + - name: Create Docker Image (internal) + uses: docker/build-push-action@v4 + if: ${{ matrix.ln && matrix.container && !matrix.documentation }} + with: + builder: ${{ steps.buildx.outputs.name }} + file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/platform-core-${{ matrix.os }}/Dockerfile + context: ${{ needs.preamble.outputs.folder_build }} + push: ${{ github.repository_owner == 'hpcc-systems' }} + build-args: | + PKG_FILE=${{ steps.ln-container.outputs.k8s_pkg_file }} + tags: | + ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.internal_tag }} + cache-from: | + type=registry,ref=hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} + + # Common --- - name: Cleanup Environment if: always() run: rm -f ${{ needs.preamble.outputs.folder_build }}/private.key @@ -232,7 +344,7 @@ jobs: if: ${{ failure() || cancelled() }} uses: actions/upload-artifact@v3 with: - name: ${{ matrix.os }}-${{ matrix.package }}-logs + name: ${{ matrix.os }}-${{ matrix.ln }}-${{ matrix.container }}-${{ matrix.documentation }}-logs path: ${{ needs.preamble.outputs.folder_build }}/**/*.log build-bare-metal: @@ -345,5 +457,5 @@ jobs: if: ${{ failure() || cancelled() }} uses: actions/upload-artifact@v3 with: - name: ${{ matrix.os }}-${{ matrix.package }}-logs + name: ${{ matrix.os }}-logs path: ./build/**/*.log diff --git a/.github/workflows/build-vcpkg.yml b/.github/workflows/build-vcpkg.yml index 5aab20df5ed..c379998a398 100644 --- a/.github/workflows/build-vcpkg.yml +++ b/.github/workflows/build-vcpkg.yml @@ -35,10 +35,11 @@ jobs: internal_ref: ${{ steps.vars.outputs.internal_ref }} community_tag: ${{ steps.vars.outputs.community_tag }} internal_tag: ${{ steps.vars.outputs.internal_tag }} - community_branch: ${{ steps.vars.outputs.community_branch }} + candidate_base_branch: ${{ steps.vars.outputs.candidate_base_branch }} cmake_docker_config: ${{ steps.vars.outputs.cmake_docker_config }} platform: "1" platform_testing_do_not_release: ${{ steps.skip_check.outputs.platform }} + include_plugins: ${{ 
(steps.skip_check.outputs.plugins && 'ON') || 'OFF' }} steps: - name: Calculate vars id: vars @@ -51,12 +52,11 @@ jobs: echo 'mount_build=source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=cached' >> $GITHUB_OUTPUT community_ref=${{ github.ref }} echo "community_ref=$community_ref" >> $GITHUB_OUTPUT - echo "internal_ref=$(echo $community_ref | sed 's/community/internal/')" >> $GITHUB_OUTPUT + echo "internal_ref=${{ github.base_ref }}" >> $GITHUB_OUTPUT community_tag=$(echo $community_ref | cut -d'/' -f3) echo "community_tag=$community_tag" >> $GITHUB_OUTPUT echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT - community_base_ref=${{ github.event.base_ref || github.ref }} - echo "community_branch=$(echo $community_base_ref | cut -d'/' -f3)" >> $GITHUB_OUTPUT + echo "candidate_base_branch=${{ github.base_ref }}" >> $GITHUB_OUTPUT echo "cmake_docker_config=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF" >> $GITHUB_OUTPUT - id: skip_check @@ -71,7 +71,6 @@ jobs: build-docker: name: Build Docker - # if: github.repository == 'hpcc-systems/HPCC-Platform' needs: preamble runs-on: ubuntu-22.04 strategy: @@ -80,6 +79,7 @@ jobs: - os: ubuntu-22.10 event_name: "pull_request" - os: ubuntu-22.04 + name: k8s container: true event_name: "schedule" - os: ubuntu-22.04 @@ -90,8 +90,15 @@ jobs: event_name: "schedule" - os: centos-7 event_name: "pull_request" + cmake_options_extra: "" - os: amazonlinux event_name: "schedule" + cmake_options_extra: "" + - os: centos-7 + name: LN + cmake_options_extra: "" + ln: true + event_name: "schedule" fail-fast: false steps: @@ -108,7 +115,7 @@ jobs: path: ${{ needs.preamble.outputs.folder_platform }} - name: Checkout LN - if: ${{ matrix.ln }} + if: ${{ matrix.ln && contains(matrix.event_name, github.event_name) }} uses: actions/checkout@v3 with: repository: ${{ github.repository_owner }}/LN @@ -122,7 +129,12 @@ jobs: id: vars working-directory: ${{ needs.preamble.outputs.folder_platform }}/vcpkg run: | - echo "vcpkg_sha_short=$(git rev-parse --short=8 HEAD)" >> $GITHUB_OUTPUT + vcpkg_sha_short=$(git rev-parse --short=8 HEAD) + echo "vcpkg_sha_short=$vcpkg_sha_short" >> $GITHUB_OUTPUT + docker_build_label=hpccsystems/platform-build-${{ matrix.os }} + echo "docker_build_label=$docker_build_label" >> $GITHUB_OUTPUT + echo "docker_tag=$docker_build_label:$vcpkg_sha_short" >> $GITHUB_OUTPUT + echo "docker_tag_candidate_base=$docker_build_label:${{ needs.preamble.outputs.candidate_base_branch }}" >> $GITHUB_OUTPUT - name: Print vars run: | @@ -134,9 +146,14 @@ jobs: id: buildx uses: docker/setup-buildx-action@v2 + - name: Pull previous images + run: | + docker pull ${{ steps.vars.outputs.docker_tag_candidate_base }} || true + docker pull ${{ steps.vars.outputs.docker_tag }} || true + - name: Create Build Image if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: builder: ${{ steps.buildx.outputs.name }} file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/${{ matrix.os }}.dockerfile @@ -145,22 +162,32 @@ jobs: build-args: | VCPKG_REF=${{ steps.vars.outputs.vcpkg_sha_short }} tags: | - build-${{ matrix.os }}:latest + ${{ steps.vars.outputs.docker_tag_candidate_base }} + cache-from: | + type=registry,ref=${{ steps.vars.outputs.docker_tag_candidate_base }} + type=registry,ref=${{ steps.vars.outputs.docker_tag }} + cache-to: 
type=inline + # Communtiy Build - name: CMake Packages - if: ${{ !matrix.container && !matrix.ln && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} + if: ${{ !matrix.ln && !matrix.container && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} run: | mkdir -p ${{ needs.preamble.outputs.folder_build }} - plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MYSQLEMBED" "NLP" "REDIS" "SQLITE3EMBED" "SQS" "PLATFORM") + declare -a plugins + if [ ${{ needs.preamble.outputs.include_plugins }} == "ON" ]; then + plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MYSQLEMBED" "NLP" "REDIS" "SQLITE3EMBED" "SQS" "PLATFORM") + else + plugins=("PLATFORM") + fi for plugin in "${plugins[@]}"; do sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker_label=build-${{ matrix.os }}:latest - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ + cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" # Disabled as not currently needed --- - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" done - name: CMake Containerized Packages @@ -169,29 +196,30 @@ jobs: mkdir -p ${{ needs.preamble.outputs.folder_build }} sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker_label=build-${{ matrix.os }}:latest - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform 
-B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ + cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + # Internal Build --- - name: CMake LN Packages - if: ${{ matrix.ln && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} + if: ${{ matrix.ln && !matrix.container && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} run: | mkdir -p ${{ needs.preamble.outputs.folder_build }} sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker_label=build-${{ matrix.os }}:latest - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ + cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ + cmake --build /hpcc-dev/build --parallel $(nproc) --target package" # Disabled as not currently needed --- - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} $docker_label "cmake --build /hpcc-dev/build --parallel $(nproc) --target 
package" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" + # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" + # Common --- - name: Upload error logs if: ${{ failure() || cancelled() }} uses: actions/upload-artifact@v3 with: - name: ${{ matrix.os }}-${{ matrix.package }}-logs + name: ${{ matrix.os }}-${{ matrix.ln }}-${{ matrix.container }}-logs path: ${{ needs.preamble.outputs.folder_build }}/**/*.log build-bare-metal: @@ -204,13 +232,13 @@ jobs: - os: "ubuntu-20.04" mono: "mono" sudo: "sudo" - cmake_config_options: "-DCMAKE_BUILD_TYPE=RelWithDebInfo -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON" + cmake_config_options: "-DCMAKE_BUILD_TYPE=RelWithDebInfo -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON" cmake_build_options: "-- -j$(nproc) -k" event_name: "schedule" - os: "ubuntu-22.04" mono: "mono" sudo: "sudo" - cmake_config_options: "-DCMAKE_BUILD_TYPE=RelWithDebInfo -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON" + cmake_config_options: "-DCMAKE_BUILD_TYPE=RelWithDebInfo -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON" cmake_build_options: "-- -j$(nproc) -k" event_name: "pull_request" - os: "windows-2019" @@ -336,5 +364,5 @@ jobs: if: ${{ failure() || cancelled() }} uses: actions/upload-artifact@v3 with: - name: ${{ matrix.os }}-${{ matrix.package }}-logs + name: ${{ matrix.os }}--${{ matrix.ln }}-${{ matrix.container }}-logs path: ./build/**/*.log diff --git a/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile b/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile new file mode 100644 index 00000000000..16523211116 --- /dev/null +++ b/dockerfiles/vcpkg/platform-core-ubuntu-22.04/Dockerfile @@ -0,0 +1,93 @@ +############################################################################## +# +# HPCC SYSTEMS software Copyright (C) 2020 HPCC Systems®. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+############################################################################## + +# Create base container image to be used by all HPCC processes + +ARG BASE_IMAGE=ubuntu:22.04 +FROM ${BASE_IMAGE} + +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get clean -y && \ + apt-get autoclean -y && \ + apt-get install -y -f && \ + apt-get autoremove -y && \ + apt-get update -y && \ + apt-get install --no-install-recommends -y \ + default-jdk \ + elfutils \ + expect \ + g++ \ + git \ + locales \ + jq \ + openssh-client \ + openssh-server \ + python3 \ + python3-dev \ + psmisc \ + r-base-core \ + r-cran-rcpp \ + r-cran-inline \ + rsync \ + zip \ + curl \ + clang + +ARG USE_CPPUNIT=1 +RUN if [ ${USE_CPPUNIT} -eq 1 ] ; then apt-get install -y libcppunit-1.15-0 ; fi + +# these are developer tools - we may want to move them elsewhere so that they are only in the incremental builds? + +RUN apt-get install -y \ + dnsutils \ + gdb \ + nano + +RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.28.3/bin/linux/amd64/kubectl && \ + chmod +x ./kubectl && \ + mv ./kubectl /usr/local/bin + +RUN curl -LO https://packagecloud.io/github/git-lfs/packages/debian/bullseye/git-lfs_3.4.0_amd64.deb/download && \ + dpkg -i download && \ + rm download + +# Set the locale +RUN locale-gen en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 + +RUN groupadd -g 10001 hpcc +RUN useradd -s /bin/bash -m -r -N -c "hpcc runtime User" -u 10000 -g hpcc hpcc +RUN passwd -l hpcc + +RUN mkdir /var/lib/HPCCSystems && chown hpcc:hpcc /var/lib/HPCCSystems +RUN mkdir /var/log/HPCCSystems && chown hpcc:hpcc /var/log/HPCCSystems +RUN mkdir /var/lock/HPCCSystems && chown hpcc:hpcc /var/lock/HPCCSystems +RUN mkdir /var/run/HPCCSystems && chown hpcc:hpcc /var/run/HPCCSystems + +ARG PKG_FILE=hpccsystems-platform-community_9.2.4-1jammy_amd64_k8s.deb +COPY ./${PKG_FILE} /tmp/${PKG_FILE} +RUN dpkg -i /tmp/${PKG_FILE} && \ + apt-get install -f && \ + rm /tmp/${PKG_FILE} + +USER hpcc +ENV PATH="/opt/HPCCSystems/bin:${PATH}" +ENV HPCC_containerized=1 +WORKDIR /var/lib/HPCCSystems From 4ef9c8476b385d129f7cb966f118a94578dc534d Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Tue, 24 Oct 2023 12:00:39 +0100 Subject: [PATCH 32/35] Split off 8.12.64 Signed-off-by: Gordon Smith --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index 0d3dbcdeead..f8e16c03d7c 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 8.12.63-closedown0 +version: 8.12.65-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 8.12.63-closedown0 +appVersion: 8.12.65-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 58813040843..ea88746a5bd 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1240,7 +1240,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index 25849b87011..73437ad5179 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index a687b1c2494..5d01007d814 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -82,7 +82,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 3012f0204aa..d677a1f538a 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index d703a6e304c..3b500cf121e 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -137,7 +137,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 25f8ea15d3d..23371f83f9c 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index faa44b6b3d5..c10360b89e4 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index ca81a7446fa..7e8f8bbf3a8 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -117,7 +117,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 5d55edbbe9e..3f550ba431c 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -70,7 +70,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index a25040cc110..593f32167bd 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -120,7 +120,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -180,7 +180,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -242,7 +242,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -347,7 +347,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index 73831a68e9e..a3c95154a81 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 5d89c56d6bd..2034014cda3 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -149,7 +149,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -218,7 +218,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf 
"%s-thorworker-job" .me.name)) | indent 12 }} @@ -353,7 +353,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -418,7 +418,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 8.12.63-closedown0 + helmVersion: 8.12.65-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index 00ad8913fcf..db2534f1fab 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 8 ) set ( HPCC_MINOR 12 ) -set ( HPCC_POINT 63 ) +set ( HPCC_POINT 65 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From e1b7d22d0a6d74323c7d40e4ef051698225a8b1b Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Tue, 24 Oct 2023 13:23:14 +0100 Subject: [PATCH 33/35] HPCC-30569 Bump kubectl and git-lfs Back port latest build-assets.yml and build-vcpkg.yml Signed-off-by: Gordon Smith --- .github/workflows/build-assets.yml | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index aebc22cc54a..bf15d449aee 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -34,6 +34,7 @@ jobs: internal_ref: ${{ steps.vars.outputs.internal_ref }} community_tag: ${{ steps.vars.outputs.community_tag }} internal_tag: ${{ steps.vars.outputs.internal_tag }} + hpcc_version: ${{ steps.vars.outputs.hpcc_version }} candidate_base_branch: ${{ steps.vars.outputs.candidate_base_branch }} candidate_branch: ${{ steps.vars.outputs.candidate_branch }} cmake_docker_config: ${{ steps.vars.outputs.cmake_docker_config }} @@ -54,6 +55,7 @@ jobs: community_tag=$(echo $community_ref | cut -d'/' -f3) echo "community_tag=$community_tag" >> $GITHUB_OUTPUT echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT + echo "hpcc_version=$(echo $community_tag | sed 's/community_//' | sed 's/-[0-9]$//')" >> $GITHUB_OUTPUT community_base_ref=${{ github.event.base_ref || github.ref }} candidate_branch=$(echo $community_base_ref | cut -d'/' -f3) echo "candidate_branch=$candidate_branch" >> $GITHUB_OUTPUT @@ -90,13 +92,11 @@ jobs: strategy: matrix: include: - - os: ubuntu-22.10 - os: ubuntu-22.04 - os: ubuntu-22.04 name: k8s container: true - os: ubuntu-20.04 - - os: ubuntu-18.04 - os: centos-8 - os: centos-7 cmake_options_extra: "" @@ -267,6 +267,14 @@ jobs: username: ${{ secrets.JFROG_USERNAME }} password: ${{ secrets.JFROG_PASSWORD }} + - name: Setup JFrog CLI (internal) + if: ${{ matrix.ln && matrix.container && github.repository_owner == 'hpcc-systems' }} + uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: https://${{ secrets.JFROG_REGISTRY }} + JF_USER: ${{ secrets.JFROG_USERNAME }} + JF_PASSWORD: ${{ secrets.JFROG_PASSWORD }} + - name: CMake Packages (internal) if: ${{ matrix.ln && !matrix.container && !matrix.documentation }} run: | @@ -321,20 +329,27 @@ jobs: echo 
"$k8s_pkg_file" - name: Create Docker Image (internal) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 if: ${{ matrix.ln && matrix.container && !matrix.documentation }} with: builder: ${{ steps.buildx.outputs.name }} file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/platform-core-${{ matrix.os }}/Dockerfile context: ${{ needs.preamble.outputs.folder_build }} - push: ${{ github.repository_owner == 'hpcc-systems' }} + push: false + load: true build-args: | PKG_FILE=${{ steps.ln-container.outputs.k8s_pkg_file }} tags: | - ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/hpcc-platform/${{ matrix.os }}/platform-core-ln:${{ needs.preamble.outputs.internal_tag }} + ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} cache-from: | type=registry,ref=hpccsystems/platform-core-${{ matrix.os }}:${{ needs.preamble.outputs.candidate_base_branch }} + - name: JFrog Docker Push and Publish + if: ${{ matrix.ln && matrix.container && !matrix.documentation && github.repository_owner == 'hpcc-systems' }} + run: | + jf docker push ${{ secrets.JFROG_REGISTRY || 'dummy.io' }}/hpccpl-docker-local/platform-core-ln:${{ needs.preamble.outputs.hpcc_version }} --build-name=platform-core-ln --build-number=${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl + jf rt bp platform-core-ln ${{ needs.preamble.outputs.hpcc_version }} --project=hpccpl + # Common --- - name: Cleanup Environment if: always() From d671c72ee8cb7fd4e43e4de61345f21dcd88ddb1 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Sun, 22 Oct 2023 22:07:41 +0100 Subject: [PATCH 34/35] HPCC-30616 Ensure check_executes handles signals properly Signed-off-by: Jake Smith --- initfiles/bin/check_executes | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/initfiles/bin/check_executes b/initfiles/bin/check_executes index 53311d464ba..b4980e8339b 100755 --- a/initfiles/bin/check_executes +++ b/initfiles/bin/check_executes @@ -61,14 +61,34 @@ done ulimit -c unlimited -#Ensure any signals to the script kill the child process -trap 'echo EXIT via signal ; kill 0; wait; ' EXIT +function cleanup { + echo "EXIT via signal for $progPid" + if [ -n "$progPid" ]; then + kill $progPid + wait $progPid + retVal=$? + fi +} + +# Ensure any signals to the script kill the child process +# NB: do not include SIGEXIT since when handled, it will cause the script to exit prematurely. +trap cleanup SIGTERM SIGINT SIGABRT SIGQUIT SIGHUP # Execute the main program, defaulting postmortem logging on (can be overriden by program's config file) -${PMD_PROGNAME} --logging.postMortem=1000 "$@" +${PMD_PROGNAME} --logging.postMortem=1000 "$@" & +progPid=$! + +echo "Waiting for child process $progPid" +# If the signal handler (cleanup) was called, it will wait and catpure retVal and cause this 'wait $progPid' to exit on completion. +# NB: If the signal handler itself doesn't wait, then it will still cause this statement to complete before the child process has exited. +wait $progPid +retVal2=$? +if [ ! -v retVal ]; then + retVal=$retVal2 +fi +echo "Child process $progPid has exited with exit code $retVal" # If it did not exit cleanly, copy some post-mortem info -retVal=$? 
if [ $PMD_ALWAYS = true ] || [ $retVal -ne 0 ]; then POST_MORTEM_DIR=${PMD_DIRECTORYBASE}/$(hostname)/$(date -Iseconds) mkdir -p ${POST_MORTEM_DIR} From 5cc7b8567fcad108f4bb51dc032d04b7217fba96 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Wed, 25 Oct 2023 07:59:55 +0100 Subject: [PATCH 35/35] Split off 9.0.54 Signed-off-by: Gordon Smith --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index 6a24bfd9cd5..19bcda62647 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 9.0.53-closedown0 +version: 9.0.55-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. -appVersion: 9.0.53-closedown0 +appVersion: 9.0.55-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 7e67d5f887b..5e7182f11e3 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1314,7 +1314,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index de9a6e37fb0..ccde99e5e5c 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index 8b696ddb4f8..2152b076bb4 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -82,7 +82,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 4ed40e2e2b5..2a9e2baa4d2 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 
{{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 5cbcd11ae98..f6d53b1f45c 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -135,7 +135,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 0e78d8ad823..74e4014ec2c 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index b466ac78f61..392737189d2 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index 1d714b5ff9c..cf8f667d362 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -117,7 +117,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 199d6633930..3aacbe3888e 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -70,7 +70,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index 21ebea9829c..0e20c2135b1 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -120,7 +120,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -180,7 +180,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -242,7 +242,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -347,7 +347,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index e39414d21c0..dea54ac9533 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 174fbd574de..0622c055c7b 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -147,7 +147,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -214,7 +214,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf "%s-thorworker-job" .me.name)) 
| indent 12 }} @@ -347,7 +347,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -412,7 +412,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 9.0.53-closedown0 + helmVersion: 9.0.55-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index c7d82c665cf..d76df017b4c 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 9 ) set ( HPCC_MINOR 0 ) -set ( HPCC_POINT 53 ) +set ( HPCC_POINT 55 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ###
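
Note on the split-off commits above: each one bumps HPCC_POINT in version.cmake together with the version/appVersion keys in helm/hpcc/Chart.yaml and the hard-coded helmVersion labels in the chart templates, so the package version and the chart version move in lock-step. Below is a minimal, hypothetical shell sketch of how that consistency could be checked locally before tagging. It is not part of the HPCC-Platform build; the script name and the parsing are assumptions, relying only on the "set ( HPCC_* ... )" layout of version.cmake and the top-level version:/appVersion: keys visible in the diffs above.

#!/usr/bin/env bash
# check-version-sync.sh (hypothetical helper, not part of the repository):
# verify that version.cmake and helm/hpcc/Chart.yaml describe the same version,
# e.g. 9.0.55-closedown0 = ${MAJOR}.${MINOR}.${POINT}-${MATURITY}${SEQUENCE}.
set -euo pipefail

get_field () {
    # Extract one value from a line such as:  set ( HPCC_POINT 55 )
    grep " $1 " version.cmake | awk '{ print $4 }' | tr -d '"'
}

major=$(get_field HPCC_MAJOR)
minor=$(get_field HPCC_MINOR)
point=$(get_field HPCC_POINT)
maturity=$(get_field HPCC_MATURITY)
sequence=$(get_field HPCC_SEQUENCE)
expected="${major}.${minor}.${point}-${maturity}${sequence}"

chart_version=$(sed -n 's/^version: //p' helm/hpcc/Chart.yaml)
app_version=$(sed -n 's/^appVersion: //p' helm/hpcc/Chart.yaml)

if [ "$chart_version" = "$expected" ] && [ "$app_version" = "$expected" ]; then
    echo "OK: Chart.yaml matches version.cmake ($expected)"
else
    echo "Mismatch: version.cmake=$expected chart=$chart_version appVersion=$app_version" >&2
    exit 1
fi

Run from the repository root, this would print "OK: Chart.yaml matches version.cmake (9.0.55-closedown0)" for the tree produced by the final patch. The helmVersion labels baked into the templates are bumped to the same string by these commits, but this sketch does not inspect them.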