diff --git a/build-config.h.cmake b/build-config.h.cmake index b9300ed1217..6976d3eddf3 100644 --- a/build-config.h.cmake +++ b/build-config.h.cmake @@ -94,6 +94,14 @@ #define BUILD_VERSION_POINT ${HPCC_POINT} #endif +#ifndef BUILD_MATURITY + #define BUILD_MATURITY "${HPCC_MATURITY}" +#endif + +#ifndef BUILD_TAG_TIMESTAMP + #define BUILD_TAG_TIMESTAMP "${HPCC_TAG_TIMESTAMP}" +#endif + #ifndef BASE_BUILD_TAG #cmakedefine BASE_BUILD_TAG "${BASE_BUILD_TAG}" #endif diff --git a/common/thorhelper/thorcommon.hpp b/common/thorhelper/thorcommon.hpp index 5ddc7e762a8..9edd2ea3741 100644 --- a/common/thorhelper/thorcommon.hpp +++ b/common/thorhelper/thorcommon.hpp @@ -678,7 +678,7 @@ class CStatsContextLogger : public CSimpleInterfaceOf protected: const LogMsgJobInfo job; unsigned traceLevel = 1; - Owned activeSpan; + Owned activeSpan = getNullSpan(); mutable CRuntimeStatisticCollection stats; public: CStatsContextLogger(const CRuntimeStatisticCollection &_mapping, const LogMsgJobInfo & _job=unknownJob) : job(_job), stats(_mapping) {} @@ -723,26 +723,18 @@ class CStatsContextLogger : public CSimpleInterfaceOf } virtual IProperties * getClientHeaders() const override { - if (!activeSpan) - return nullptr; return ::getClientHeaders(activeSpan); } virtual const char *queryGlobalId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryGlobalId(); } virtual const char *queryLocalId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryLocalId(); } virtual const char *queryCallerId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryCallerId(); } virtual const CRuntimeStatisticCollection &queryStats() const override diff --git a/common/wuanalysis/anacommon.cpp b/common/wuanalysis/anacommon.cpp index d13c84d11f6..89d451607f6 100644 --- a/common/wuanalysis/anacommon.cpp +++ b/common/wuanalysis/anacommon.cpp @@ -71,7 +71,7 @@ void PerformanceIssue::createException(IWorkUnit * wu, double costRate) 
s.appendf(" cost %.2f", timePenaltyPerHour*costRate); } we->setExceptionMessage(s.str()); - we->setExceptionSource("Workunit Analyzer"); + we->setExceptionSource(CostOptimizerName); } void PerformanceIssue::set(AnalyzerErrorCode _errorCode, stat_type _timePenalty, const char * msg, ...) diff --git a/common/wuanalysis/anacommon.hpp b/common/wuanalysis/anacommon.hpp index 796540da08d..3c55e5cf737 100644 --- a/common/wuanalysis/anacommon.hpp +++ b/common/wuanalysis/anacommon.hpp @@ -23,6 +23,7 @@ #include "eclhelper.hpp" #include "anaerrorcodes.hpp" +constexpr char CostOptimizerName[] = "Cost Optimizer"; interface IWuScope { diff --git a/common/wuanalysis/anawu.cpp b/common/wuanalysis/anawu.cpp index 73662958b77..a8b7aac1c96 100644 --- a/common/wuanalysis/anawu.cpp +++ b/common/wuanalysis/anawu.cpp @@ -2098,7 +2098,7 @@ void WUANALYSIS_API analyseAndPrintIssues(IConstWorkUnit * wu, double costRate, if (updatewu) { Owned lockedwu = &(wu->lock()); - lockedwu->clearExceptions("Workunit Analyzer"); + lockedwu->clearExceptions(CostOptimizerName); analyser.update(lockedwu, costRate); } } diff --git a/dali/base/dadfs.cpp b/dali/base/dadfs.cpp index cbe2f957fc8..6ad138b9e3e 100644 --- a/dali/base/dadfs.cpp +++ b/dali/base/dadfs.cpp @@ -316,6 +316,8 @@ class DECL_EXCEPTION CDFS_Exception: implements IDFS_Exception, public CInterfac return str.append(": Storage plane missing: ").append(errstr); case DFSERR_PhysicalCompressedPartInvalid: return str.append(": Compressed part is not in the valid format: ").append(errstr); + case DFSERR_InvalidRemoteFileContext: + return str.append(": Lookup of remote files must use wsdfs::lookup - file: ").append(errstr); } return str.append("Unknown DFS Exception"); } @@ -8247,6 +8249,11 @@ IDistributedFile *CDistributedFileDirectory::dolookup(CDfsLogicalFileName &_logi IDistributedFile *CDistributedFileDirectory::lookup(CDfsLogicalFileName &logicalname, IUserDescriptor *user, AccessMode accessMode, bool hold, bool lockSuperOwner, 
IDistributedFileTransaction *transaction, bool privilegedUser, unsigned timeout) { + if (logicalname.isRemote()) + { + PrintStackReport(); // to help locate contexts it was called in + throw new CDFS_Exception(DFSERR_InvalidRemoteFileContext, logicalname.get()); + } Owned distributedFile = dolookup(logicalname, user, accessMode, hold, lockSuperOwner, transaction, timeout); // Restricted access is currently designed to stop users viewing sensitive information. It is not designed to stop users deleting or overwriting existing restricted files if (!isWrite(accessMode) && distributedFile && distributedFile->isRestrictedAccess() && !privilegedUser) @@ -10194,25 +10201,27 @@ class CInitGroups switch (groupType) { case grp_thor: - getClusterGroupName(cluster, gname); - if (!streq(cluster.queryProp("@name"), gname.str())) + getClusterGroupName(cluster, gname); // NB: ensures lowercases + if (!strieq(cluster.queryProp("@name"), gname.str())) realCluster = false; if (oldEnvCluster) { - getClusterGroupName(*oldEnvCluster, oldGname); - if (!streq(oldEnvCluster->queryProp("@name"), oldGname.str())) + getClusterGroupName(*oldEnvCluster, oldGname); // NB: ensures lowercases + if (!strieq(oldEnvCluster->queryProp("@name"), oldGname.str())) oldRealCluster = false; } break; case grp_thorspares: - getClusterSpareGroupName(cluster, gname); + getClusterSpareGroupName(cluster, gname); // ensures lowercase oldRealCluster = realCluster = false; break; case grp_roxie: gname.append(cluster.queryProp("@name")); + gname.toLowerCase(); break; case grp_dropzone: gname.append(cluster.queryProp("@name")); + gname.toLowerCase(); oldRealCluster = realCluster = false; defDir = cluster.queryProp("@directory"); break; @@ -10584,8 +10593,12 @@ class CInitGroups void ensureStorageGroup(bool force, const char * name, unsigned numDevices, const char * path, StringBuffer & messages) { - Owned newClusterGroup = createStorageGroup(name, numDevices, path); - ensureConsistentStorageGroup(force, name, 
newClusterGroup, messages); + //Lower case the group name - see CNamedGroupStore::dolookup which lower cases before resolving. + StringBuffer gname; + gname.append(name).toLowerCase(); + + Owned newClusterGroup = createStorageGroup(gname, numDevices, path); + ensureConsistentStorageGroup(force, gname, newClusterGroup, messages); } void constructStorageGroups(bool force, StringBuffer &messages) @@ -10624,7 +10637,7 @@ class CInitGroups } else if (plane.hasProp("hostGroup")) { - throw makeStringExceptionV(-1, "Use 'hosts' rather than 'hostGroup' for inline list of hosts for plane %s", name); + throw makeStringExceptionV(-1, "Use 'hosts' rather than 'hostGroup' for inline list of hosts for plane %s", gname.str()); } else if (plane.hasProp("hosts")) { @@ -10639,9 +10652,9 @@ class CInitGroups { //Locally mounted, or url accessed storage plane - no associated hosts, localhost used as a placeholder unsigned numDevices = plane.getPropInt("@numDevices", 1); - newClusterGroup.setown(createStorageGroup(name, numDevices, prefix)); + newClusterGroup.setown(createStorageGroup(gname, numDevices, prefix)); } - ensureConsistentStorageGroup(force, name, newClusterGroup, messages); + ensureConsistentStorageGroup(force, gname, newClusterGroup, messages); } } } diff --git a/dali/base/dadfs.hpp b/dali/base/dadfs.hpp index 24fdb806f63..c1fa5f44a5c 100644 --- a/dali/base/dadfs.hpp +++ b/dali/base/dadfs.hpp @@ -803,7 +803,8 @@ enum DistributedFileSystemError DFSERR_RestrictedFileAccessDenied, DFSERR_EmptyStoragePlane, DFSERR_MissingStoragePlane, - DFSERR_PhysicalCompressedPartInvalid + DFSERR_PhysicalCompressedPartInvalid, + DFSERR_InvalidRemoteFileContext }; diff --git a/esp/scm/ws_workunits_req_resp.ecm b/esp/scm/ws_workunits_req_resp.ecm index 22673090203..0b8bbf28c8a 100644 --- a/esp/scm/ws_workunits_req_resp.ecm +++ b/esp/scm/ws_workunits_req_resp.ecm @@ -939,6 +939,7 @@ ESPresponse [exceptions_inline] WUCreateZAPInfoResponse ESPrequest [nil_remove] WUCheckFeaturesRequest { + boolean 
IncludeFullVersion(false); }; ESPresponse [exceptions_inline] WUCheckFeaturesResponse @@ -948,6 +949,12 @@ ESPresponse [exceptions_inline] WUCheckFeaturesResponse int BuildVersionPoint; unsigned maxRequestEntityLength; ESPStruct DeploymentFeatures Deployment; + //The following fields are only generated if IncludeFullVersion is set. Normally the fields would be versioned, + //but this change is applied to a much earlier version and the version number has been incremented several times, + //so that approach cannot be used. + string BuildVersion; + string BuildMaturity; + string BuildTagTimestamp; }; ESPrequest [nil_remove] WUGetStatsRequest diff --git a/esp/services/esdl_svc_engine/esdl_binding.cpp b/esp/services/esdl_svc_engine/esdl_binding.cpp index 8dfc6cfede3..5c63e363737 100755 --- a/esp/services/esdl_svc_engine/esdl_binding.cpp +++ b/esp/services/esdl_svc_engine/esdl_binding.cpp @@ -1573,8 +1573,7 @@ void EsdlServiceImpl::sendTargetSOAP(IEspContext & context, Owned clientSpan; ISpan * activeSpan = context.queryActiveSpan(); - if (activeSpan) - clientSpan.setown(activeSpan->createClientSpan("soapcall")); + clientSpan.setown(activeSpan->createClientSpan("soapcall")); Owned headers = ::getClientHeaders(clientSpan); StringBuffer status; diff --git a/esp/services/ws_ecl/ws_ecl_service.cpp b/esp/services/ws_ecl/ws_ecl_service.cpp index a444ce2d13c..34556ce2448 100644 --- a/esp/services/ws_ecl/ws_ecl_service.cpp +++ b/esp/services/ws_ecl/ws_ecl_service.cpp @@ -1996,12 +1996,9 @@ int CWsEclBinding::submitWsEclWorkunit(IEspContext & context, WsEclWuInfo &wsinf Owned clientSpan; ISpan * activeSpan = context.queryActiveSpan(); - if (activeSpan) - { - clientSpan.setown(activeSpan->createClientSpan("wsecl/SubmitWorkunit")); - Owned httpHeaders = ::getClientHeaders(clientSpan); - recordTraceDebugOptions(workunit, httpHeaders); - } + clientSpan.setown(activeSpan->createClientSpan("wsecl/SubmitWorkunit")); + Owned httpHeaders = ::getClientHeaders(clientSpan); + 
recordTraceDebugOptions(workunit, httpHeaders); if (httpreq) { diff --git a/esp/services/ws_workunits/ws_workunitsService.cpp b/esp/services/ws_workunits/ws_workunitsService.cpp index 6af6c3d23fd..efd3b32d4e4 100644 --- a/esp/services/ws_workunits/ws_workunitsService.cpp +++ b/esp/services/ws_workunits/ws_workunitsService.cpp @@ -5085,6 +5085,13 @@ bool CWsWorkunitsEx::onWUCheckFeatures(IEspContext &context, IEspWUCheckFeatures resp.setBuildVersionMajor(hpccBuildInfo.buildVersionMajor); resp.setBuildVersionMinor(hpccBuildInfo.buildVersionMinor); resp.setBuildVersionPoint(hpccBuildInfo.buildVersionPoint); + //This does not check version because a consistent version could not be used for all the places it was included + if (req.getIncludeFullVersion()) + { + resp.setBuildVersion(hpccBuildInfo.buildTag); + resp.setBuildMaturity(hpccBuildInfo.buildMaturity); + resp.setBuildTagTimestamp(hpccBuildInfo.buildTagTimestamp); + } resp.setMaxRequestEntityLength(maxRequestEntityLength); resp.updateDeployment().setUseCompression(true); return true; diff --git a/esp/src/eclwatch/stub.js b/esp/src/eclwatch/stub.js index cd91622159f..f56fe35a010 100644 --- a/esp/src/eclwatch/stub.js +++ b/esp/src/eclwatch/stub.js @@ -30,8 +30,7 @@ define([ const params = ioQuery.queryToObject(dojo.doc.location.search.substr((dojo.doc.location.search.substr(0, 1) === "?" ? 1 : 0))); const hpccWidget = params.Widget ? 
params.Widget : "HPCCPlatformWidget"; - const store = KeyValStore.userKeyValStore(); - store.getEx(BuildInfo.ModernMode, { defaultValue: String(true) }).then(modernMode => { + Session.fetchModernMode().then(modernMode => { if (modernMode === String(true) && hpccWidget !== "IFrameWidget") { switch (hpccWidget) { case "WUDetailsWidget": diff --git a/esp/src/src-react/components/InfoGrid.tsx b/esp/src/src-react/components/InfoGrid.tsx index 65d763cb4da..9c78f88e221 100644 --- a/esp/src/src-react/components/InfoGrid.tsx +++ b/esp/src/src-react/components/InfoGrid.tsx @@ -84,6 +84,12 @@ export const InfoGrid: React.FunctionComponent = ({ }, Column: { label: nlsHPCC.Col, field: "", width: 36, sortable: false }, LineNo: { label: nlsHPCC.Line, field: "", width: 36, sortable: false }, + Activity: { + label: nlsHPCC.Activity, field: "", width: 56, sortable: false, + formatter: (activityId, row) => { + return activityId ? a{activityId} : ""; + } + }, FileName: { label: nlsHPCC.FileName, field: "", width: 360, sortable: false } }; }, [wuid]); diff --git a/esp/src/src-react/components/Logs.tsx b/esp/src/src-react/components/Logs.tsx index 6ae979e3b6e..c2c37b119a0 100644 --- a/esp/src/src-react/components/Logs.tsx +++ b/esp/src/src-react/components/Logs.tsx @@ -18,7 +18,7 @@ const startTimeOffset = 6 * 60 * 60 * 1000; const defaultStartDate = new Date(new Date().getTime() - startTimeOffset); const FilterFields: Fields = { - containerName: { type: "cloud-containername", label: nlsHPCC.ContainerName }, + components: { type: "cloud-containername", label: nlsHPCC.ContainerName }, audience: { type: "dropdown", label: nlsHPCC.Audience, options: [ { key: TargetAudience.Operator, text: "Operator" }, @@ -37,9 +37,9 @@ const FilterFields: Fields = { { key: LogType.Metric, text: "Metric" }, ] }, - jobId: { type: "string", label: nlsHPCC.JobID }, - procId: { type: "string", label: nlsHPCC.ProcessID }, - threadId: { type: "string", label: nlsHPCC.ThreadID }, + workunits: { type: "string", 
label: nlsHPCC.JobID }, + processid: { type: "string", label: nlsHPCC.ProcessID }, + threadid: { type: "string", label: nlsHPCC.ThreadID }, message: { type: "string", label: nlsHPCC.Message }, StartDate: { type: "datetime", label: nlsHPCC.FromDate }, EndDate: { type: "datetime", label: nlsHPCC.ToDate }, @@ -117,7 +117,7 @@ export const Logs: React.FunctionComponent = ({ return { timestamp: { label: nlsHPCC.TimeStamp, width: 140, sortable: false, }, message: { label: nlsHPCC.Message, sortable: false, }, - containerName: { label: nlsHPCC.ContainerName, width: 100, sortable: false }, + components: { label: nlsHPCC.ContainerName, width: 150, sortable: false }, audience: { label: nlsHPCC.Audience, width: 60, sortable: false, }, class: { label: nlsHPCC.Class, width: 40, sortable: false, @@ -127,10 +127,10 @@ export const Logs: React.FunctionComponent = ({ return {level}; } }, - jobId: { label: nlsHPCC.JobID, width: 140, sortable: false, hidden: wuid !== undefined, }, - procId: { label: nlsHPCC.ProcessID, width: 46, sortable: false, }, - sequence: { label: nlsHPCC.Sequence, width: 70, sortable: false, }, - threadId: { label: nlsHPCC.ThreadID, width: 60, sortable: false, }, + workunits: { label: nlsHPCC.JobID, width: 50, sortable: false, hidden: wuid !== undefined, }, + processid: { label: nlsHPCC.ProcessID, width: 75, sortable: false, }, + logid: { label: nlsHPCC.Sequence, width: 70, sortable: false, }, + threadid: { label: nlsHPCC.ThreadID, width: 60, sortable: false, }, }; }, [wuid]); diff --git a/esp/src/src-react/components/Title.tsx b/esp/src/src-react/components/Title.tsx index b482d08fadd..fe0ee520aea 100644 --- a/esp/src/src-react/components/Title.tsx +++ b/esp/src/src-react/components/Title.tsx @@ -8,12 +8,11 @@ import { cookie } from "dojo/main"; import nlsHPCC from "src/nlsHPCC"; import * as Utility from "src/Utility"; -import { ModernMode } from "src/BuildInfo"; import { useBanner } from "../hooks/banner"; import { useECLWatchLogger } from "../hooks/logging"; 
-import { useBuildInfo } from "../hooks/platform"; -import { useGlobalStore, useUserStore } from "../hooks/store"; +import { useBuildInfo, useModernMode } from "../hooks/platform"; +import { useGlobalStore } from "../hooks/store"; import { useMyAccount, useUserSession } from "../hooks/user"; import { replaceUrl } from "../util/history"; @@ -71,7 +70,7 @@ export const DevTitle: React.FunctionComponent = ({ const [log, logLastUpdated] = useECLWatchLogger(); - const [_modernMode, setModernMode] = useUserStore(ModernMode, String(true)); + const { setModernMode } = useModernMode(); const onTechPreviewClick = React.useCallback( (ev?: React.MouseEvent, item?: IContextualMenuItem): void => { setModernMode(String(false)); diff --git a/esp/src/src-react/components/controls/ComingSoon.tsx b/esp/src/src-react/components/controls/ComingSoon.tsx index 2ebc88d4562..211c667d70a 100644 --- a/esp/src/src-react/components/controls/ComingSoon.tsx +++ b/esp/src/src-react/components/controls/ComingSoon.tsx @@ -1,9 +1,7 @@ import * as React from "react"; import { IStyle, Toggle } from "@fluentui/react"; import nlsHPCC from "src/nlsHPCC"; -import { ModernMode } from "src/BuildInfo"; -import { useUserStore } from "../../hooks/store"; -import { useBuildInfo } from "../../hooks/platform"; +import { useBuildInfo, useModernMode } from "../../hooks/platform"; const legacyIndex = {}; const modernIndex = {}; @@ -75,7 +73,7 @@ export const ComingSoon: React.FunctionComponent = ({ }) => { const [, { opsCategory }] = useBuildInfo(); - const [modernMode, setModernMode] = useUserStore(ModernMode, String(defaultValue)); + const { modernMode, setModernMode } = useModernMode(); React.useEffect(() => { if (value !== undefined) { diff --git a/esp/src/src-react/hooks/platform.ts b/esp/src/src-react/hooks/platform.ts index 9a278eaff34..d98f74f545e 100644 --- a/esp/src/src-react/hooks/platform.ts +++ b/esp/src/src-react/hooks/platform.ts @@ -1,8 +1,10 @@ import * as React from "react"; +import { useConst } 
from "@fluentui/react-hooks"; import { scopedLogger } from "@hpcc-js/util"; import { Topology, WsTopology } from "@hpcc-js/comms"; -import { getBuildInfo, BuildInfo } from "src/Session"; -import { cmake_build_type, containerized } from "src/BuildInfo"; +import { getBuildInfo, BuildInfo, fetchModernMode } from "src/Session"; +import { cmake_build_type, containerized, ModernMode } from "src/BuildInfo"; +import { sessionKeyValStore, userKeyValStore } from "src/KeyValStore"; const logger = scopedLogger("src-react/hooks/platform.ts"); @@ -66,3 +68,27 @@ export function useLogicalClusters(): [WsTopology.TpLogicalCluster[] | undefined return [targetClusters, defaultCluster]; } + +export function useModernMode(): { + modernMode: string; + setModernMode: (value: string) => void; +} { + const userStore = useConst(() => userKeyValStore()); + const sessionStore = useConst(() => sessionKeyValStore()); + + const [modernMode, setModernMode] = React.useState(""); + + React.useEffect(() => { + fetchModernMode().then(mode => { + setModernMode(String(mode)); + }); + }, [sessionStore, userStore]); + + React.useEffect(() => { + if (modernMode === "") return; + sessionStore.set(ModernMode, modernMode); + userStore.set(ModernMode, modernMode); + }, [modernMode, sessionStore, userStore]); + + return { modernMode, setModernMode }; +} \ No newline at end of file diff --git a/esp/src/src-react/index.tsx b/esp/src/src-react/index.tsx index 3203e3f27ed..99a2d0b09c7 100644 --- a/esp/src/src-react/index.tsx +++ b/esp/src/src-react/index.tsx @@ -2,8 +2,8 @@ import * as React from "react"; import * as ReactDOM from "react-dom"; import { initializeIcons } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; -import { cookieKeyValStore, userKeyValStore } from "src/KeyValStore"; -import { ModernMode } from "src/BuildInfo"; +import { cookieKeyValStore } from "src/KeyValStore"; +import { fetchModernMode } from "src/Session"; import { ECLWatchLogger } from "./hooks/logging"; import 
"css!dijit-themes/flat/flat.css"; @@ -31,8 +31,7 @@ dojoConfig.urlInfo = { }; dojoConfig.disableLegacyHashing = true; -const store = userKeyValStore(); -store.getEx(ModernMode, { defaultValue: String(true) }).then(async modernMode => { +fetchModernMode().then(async modernMode => { if (modernMode === String(false)) { window.location.replace("/esp/files/stub.htm"); } else { diff --git a/esp/src/src/Session.ts b/esp/src/src/Session.ts index 6d86536a919..06de1672db0 100644 --- a/esp/src/src/Session.ts +++ b/esp/src/src/Session.ts @@ -3,8 +3,9 @@ import * as xhr from "dojo/request/xhr"; import * as topic from "dojo/topic"; import { format as d3Format } from "@hpcc-js/common"; import { SMCService } from "@hpcc-js/comms"; -import { cookieKeyValStore } from "src/KeyValStore"; +import { cookieKeyValStore, sessionKeyValStore, userKeyValStore } from "src/KeyValStore"; import { singletonDebounce } from "../src-react/util/throttle"; +import { ModernMode } from "./BuildInfo"; import * as ESPUtil from "./ESPUtil"; import { scopedLogger } from "@hpcc-js/util"; @@ -21,7 +22,18 @@ let _prevReset = Date.now(); declare const dojoConfig; -const userStore = cookieKeyValStore(); +const cookieStore = cookieKeyValStore(); +const sessionStore = sessionKeyValStore(); +const userStore = userKeyValStore(); + +export async function fetchModernMode(): Promise { + return Promise.all([ + sessionStore.get(ModernMode), + userStore.getEx(ModernMode, { defaultValue: String(true) }) + ]).then(([sessionModernMode, userModernMode]) => { + return sessionModernMode ?? 
userModernMode; + }); +} const smc = new SMCService({ baseUrl: "" }); @@ -96,7 +108,7 @@ export function fireIdle() { } async function resetESPTime() { - const userSession = userStore.getAll(); + const userSession = cookieStore.getAll(); if (!userSession || !userSession["ECLWatchUser"] || !userSession["Status"] || userSession["Status"] === "Locked") return; if (Date.now() - _prevReset > SESSION_RESET_FREQ) { _prevReset = Date.now(); diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index f2d02eefa54..6728c57788c 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -2733,3 +2733,16 @@ spec: {{- end -}} {{- end -}} {{- end -}} + +{{/* +A template to generate component annotations, merges static default annotations with global and component annotations +Pass in dict with .root, .me +*/}} +{{- define "hpcc.generateAnnotations" -}} +{{- $annotations := dict -}} +{{- if hasKey .root.Values.global "componentAnnotations" -}}{{- $annotations = merge $annotations .root.Values.global.componentAnnotations -}}{{- end -}} +{{- if hasKey .me "annotations" -}}{{- $annotations = merge $annotations .me.annotations -}}{{- end -}} +{{- range $key, $value := $annotations }} +{{ $key }}: {{ $value | quote }} +{{- end -}} +{{- end -}} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index 21dd84b9cc8..5b6d317eff8 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -53,6 +53,7 @@ spec: helmVersion: 9.5.0-trunk0 annotations: checksum/config: {{ $configSHA }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" .name "type" "dafilesrv") | indent 6 }} serviceAccountName: "hpcc-default" diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index 5566710a105..02d1d97b479 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml 
@@ -94,9 +94,7 @@ spec: {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} {{- end }} - {{- if hasKey $dali "annotations" }} - {{- toYaml $dali.annotations | nindent 8 }} - {{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} {{- end }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $dali.name "type" "dali") | indent 6 }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 9f3570bf761..2ba359a965b 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -62,9 +62,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey . "annotations" }} -{{ toYaml .annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" .name "target" .name "type" "dfuserver") | indent 6 }} serviceAccountName: "hpcc-default" diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 273c7b5831a..f18741c4511 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -62,10 +62,8 @@ data: {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} -{{- if hasKey .me "annotations" }} annotations: -{{ toYaml .me.annotations | indent 12 }} -{{- end }} + {{- include "hpcc.generateAnnotations" . | indent 12 }} spec: {{- include "hpcc.placementsByJobTargetType" (dict "root" .root "job" $appJobName "target" .me.name "type" "eclagent") | indent 10 }} serviceAccountName: "hpcc-agent" @@ -143,9 +141,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey . 
"annotations" }} -{{ toYaml .annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" .name "target" .name "type" "eclagent") | indent 6 }} serviceAccountName: "hpcc-agent" diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 6dff5b286b0..40d65ae7415 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -61,10 +61,8 @@ data: {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} -{{- if hasKey .me "annotations" }} annotations: -{{ toYaml .me.annotations | indent 12 }} -{{- end }} + {{- include "hpcc.generateAnnotations" . | indent 12 }} spec: {{- include "hpcc.placementsByJobTargetType" (dict "root" .root "job" $compileJobName "target" .me.name "type" "eclccserver") | indent 10 }} serviceAccountName: "hpcc-default" @@ -150,9 +148,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey . "annotations" }} -{{ toYaml .annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" .name "target" .name "type" "eclccserver") | indent 6 }} serviceAccountName: {{ .useChildProcesses | default false | ternary "hpcc-default" "hpcc-agent" }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index d63b254257b..5802bc6dfea 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -70,9 +70,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey . 
"annotations" }} -{{ toYaml .annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" .name "target" .name "type" "eclscheduler") | indent 6 }} serviceAccountName: "hpcc-default" diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index 35ed3be3da4..f420b6ce546 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -130,11 +130,9 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} -{{- end }} -{{- if hasKey . "annotations" }} -{{ toYaml .annotations | indent 8 }} {{- end }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" .name "type" "esp") | indent 6 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 376647d11ec..5e5d5cfdb6a 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -77,9 +77,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey . 
"annotations" }} -{{ toYaml .annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $roxie.name "target" $roxie.name "type" "roxie") | indent 6 }} serviceAccountName: "hpcc-default" diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index b35b630c521..4df18ff9ac8 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -132,11 +132,9 @@ spec: {{- end }} annotations: checksum/config: {{ $topoconfigSHA }} + {{- include "hpcc.generateAnnotations" (dict "root" $commonCtx.root "me" $toposerver) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} -{{- end }} -{{- if hasKey $toposerver "annotations" }} -{{ toYaml $toposerver.annotations | indent 8 }} {{- end }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $commonCtx.toponame "target" $roxie.name "type" "roxie") | indent 6 }} @@ -255,6 +253,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} {{- end }} @@ -361,11 +360,9 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} -{{- end }} -{{- if hasKey $roxie "annotations" }} -{{ toYaml $roxie.annotations | indent 8 }} {{- end }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $name "target" $roxie.name "type" "roxie") | indent 6 }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index 2141941bd82..d677bef133d 100644 
--- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -59,9 +59,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey $sasha "annotations" }} -{{ toYaml $sasha.annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $serviceName "type" "sasha") | indent 6 }} serviceAccountName: "hpcc-default" diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index cfff7906a5f..4d891d72654 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -87,10 +87,8 @@ data: {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} -{{- if hasKey .me "annotations" }} annotations: -{{ toYaml .me.annotations | indent 12 }} -{{- end }} + {{- include "hpcc.generateAnnotations" . | indent 12 }} spec: {{- include "hpcc.placementsByJobTargetType" (dict "root" .root "job" $eclAgentJobName "target" .me.name "type" "thor") | indent 10 }} serviceAccountName: "hpcc-agent" @@ -156,10 +154,8 @@ data: {{- if hasKey $thorScope "labels" }} {{ toYaml $thorScope.labels | indent 12 }} {{- end }} -{{- if hasKey $thorScope "annotations" }} annotations: -{{ toYaml $thorScope.annotations | indent 12 }} -{{- end }} + {{- include "hpcc.generateAnnotations" . | indent 12 }} spec: {{- include "hpcc.placementsByJobTargetType" (dict "root" .root "job" $thorManagerJobName "target" .me.name "type" "thor") | indent 10 }} serviceAccountName: hpcc-thoragent @@ -225,10 +221,8 @@ data: {{- if hasKey $thorScope "labels" }} {{ toYaml $thorScope.labels | indent 12 }} {{- end }} -{{- if hasKey $thorScope "annotations" }} annotations: -{{ toYaml $thorScope.annotations | indent 12 }} -{{- end }} + {{- include "hpcc.generateAnnotations" . 
| indent 12 }} spec: {{- include "hpcc.placementsByJobTargetType" (dict "root" .root "job" $thorWorkerJobName "target" .me.name "type" "thor") | indent 10 }} serviceAccountName: hpcc-default @@ -361,9 +355,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey $commonCtx.me "annotations" }} -{{ toYaml $commonCtx.me.annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $commonCtx.eclAgentName "target" .name "type" "thor") | indent 6 }} {{- include "hpcc.addImagePullSecrets" $commonCtx | nindent 6 -}} @@ -428,9 +420,7 @@ spec: {{- end }} annotations: checksum/config: {{ $configSHA }} -{{- if hasKey $commonCtx.me "annotations" }} -{{ toYaml $commonCtx.me.annotations | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $commonCtx.thorAgentName "target" .name "type" "thor") | indent 6 }} {{- include "hpcc.addImagePullSecrets" $commonCtx | nindent 6 -}} diff --git a/helm/hpcc/values.schema.json b/helm/hpcc/values.schema.json index 426bd86bc52..3ec37c7dcbd 100644 --- a/helm/hpcc/values.schema.json +++ b/helm/hpcc/values.schema.json @@ -404,6 +404,11 @@ }, "additionalProperties": false } + }, + "componentAnnotations": { + "type": "object", + "additionalProperties": { "type": "string" }, + "description": "Global component annotations, generated into all components" } }, "additionalProperties": false @@ -1102,6 +1107,10 @@ "alwaysCreateGlobalIds": { "type": "boolean", "description": "If true, allocate global ids to any requests that do not supply one" + }, + "disabled": { + "type": "boolean", + "description": "If true, disable OTel based trace/span generation" } }, "additionalProperties": { "type": ["integer", "string", "boolean"] } @@ -2849,6 +2858,14 @@ "image": { "$ref": "#/definitions/image" }, + "annotations": { 
+ "type": "object", + "additionalProperties": { "type": "string" } + }, + "labels": { + "type": "object", + "additionalProperties": { "type": "string" } + }, "logging": { "$ref": "#/definitions/logging" }, diff --git a/helm/hpcc/values.yaml b/helm/hpcc/values.yaml index 33143fa08b7..8cf1695b94f 100644 --- a/helm/hpcc/values.yaml +++ b/helm/hpcc/values.yaml @@ -25,6 +25,10 @@ global: logging: detail: 80 + # tracing sets the default tracing information for all components. Can be overridden locally + tracing: + disabled: false + ## resource settings for stub components #stubInstanceResources: # memory: "200Mi" diff --git a/plugins/parquet/README.md b/plugins/parquet/README.md index 095499a212f..159cff61d59 100644 --- a/plugins/parquet/README.md +++ b/plugins/parquet/README.md @@ -32,7 +32,14 @@ The Parquet Plugin offers the following main functions: The Read function allows ECL programmers to create an ECL dataset from both regular and partitioned Parquet files. It leverages the Apache Arrow interface for Parquet to efficiently stream data from ECL to the plugin, ensuring optimized data transfer. +In order to read a Parquet file it is necessary to define the record structure of the file you intend to read with the names of the fields as stored in the Parquet file and the type that you wish to read them as. It is possible for a Parquet file to have field names that contain characters that are incompatible with the ECL field name definition. For example, ECL field names are case insensitive causing an issue when trying to read Parquet fields with uppercase letters. 
To read field names of this type an XPATH can be passed as seen in the following example: + ``` +layout := RECORD + STRING name; + STRING job_id {XPATH('jobID')}; +END; + dataset := ParquetIO.Read(layout, '/source/directory/data.parquet'); ``` diff --git a/plugins/parquet/examples/taxi_data.ecl b/plugins/parquet/examples/taxi_data.ecl index b2ed9deef06..65b78b389c9 100644 --- a/plugins/parquet/examples/taxi_data.ecl +++ b/plugins/parquet/examples/taxi_data.ecl @@ -1,15 +1,15 @@ IMPORT PARQUET; EXPORT Layout := RECORD - STRING VendorID; + STRING VendorID {XPATH('VendorID')}; STRING tpep_pickup_datetime; STRING tpep_dropoff_datetime; STRING passenger_count; STRING trip_distance; - STRING RatecodeID; + STRING RatecodeID {XPATH('RatecodeID')}; STRING store_and_fwd_flag; - STRING PULocationID; - STRING DOLocationID; + STRING PULocationID {XPATH('PULocationID')}; + STRING DOLocationID {XPATH('DOLocationID')}; STRING payment_type; STRING fare_amount; STRING extra; @@ -18,9 +18,10 @@ EXPORT Layout := RECORD STRING tolls_amount; STRING improvement_surcharge; STRING total_amount; + STRING congestion_surcharge; + STRING airport_fee; END; -tripData := '/datadrive/dev/test_data/yellow_tripdata_2017-01.parquet'; +tripData := '/datadrive/dev/test_data/yellow_tripdata_2023-01.parquet'; read_in := ParquetIO.Read(Layout, tripData); -COUNT(read_in); OUTPUT(CHOOSEN(read_in, 100)); \ No newline at end of file diff --git a/plugins/parquet/parquetembed.cpp b/plugins/parquet/parquetembed.cpp index c605bb522f2..6139539656a 100644 --- a/plugins/parquet/parquetembed.cpp +++ b/plugins/parquet/parquetembed.cpp @@ -792,7 +792,6 @@ ParquetRowStream::ParquetRowStream(IEngineRowAllocator *_resultAllocator, std::s : m_resultAllocator(_resultAllocator), s_parquet(_parquet) { rowsCount = _parquet->queryRowsCount(); - array_visitor = std::make_shared<ParquetArrayVisitor>(); } const void *ParquetRowStream::nextRow()
return; } - auto column = result_rows->find(field->name); + (*array_visitor) = std::make_shared<ParquetArrayVisitor>(); + auto column = result_rows->find(field->xpath ? field->xpath : field->name); if (column != result_rows->end()) { reportIfFailure(column->second->Accept((*array_visitor).get())); diff --git a/roxie/ccd/ccd.hpp b/roxie/ccd/ccd.hpp index 422153cdfb8..ff954e4e619 100644 --- a/roxie/ccd/ccd.hpp +++ b/roxie/ccd/ccd.hpp @@ -589,7 +589,7 @@ class ContextLogger : implements IRoxieContextLogger, public CInterface mutable bool aborted; mutable CIArrayOf<LogItem> log; private: - Owned<ISpan> activeSpan; + Owned<ISpan> activeSpan = getNullSpan(); ContextLogger(const ContextLogger &); // Disable copy constructor public: IMPLEMENT_IINTERFACE; @@ -748,26 +748,18 @@ class ContextLogger : implements IRoxieContextLogger, public CInterface } virtual IProperties * getClientHeaders() const override { - if (!activeSpan) - return nullptr; return ::getClientHeaders(activeSpan); } virtual const char *queryGlobalId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryGlobalId(); } virtual const char *queryCallerId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryCallerId(); } virtual const char *queryLocalId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryLocalId(); } virtual const CRuntimeStatisticCollection & queryStats() const override diff --git a/system/jlib/jlog.cpp b/system/jlib/jlog.cpp index d7485432d06..2ac17b56f35 100644 --- a/system/jlib/jlog.cpp +++ b/system/jlib/jlog.cpp @@ -2829,7 +2829,7 @@ class CRuntimeStatisticCollection; class DummyLogCtx : implements IContextLogger { private: - Owned<ISpan> activeSpan; + Owned<ISpan> activeSpan = getNullSpan(); public: DummyLogCtx() {} @@ -2874,26 +2874,18 @@ class DummyLogCtx : implements IContextLogger } virtual IProperties * getClientHeaders() const override { - if (!activeSpan) - return nullptr; return ::getClientHeaders(activeSpan); } virtual const char *queryGlobalId() const
override { - if (!activeSpan) - return nullptr; return activeSpan->queryGlobalId(); } virtual const char *queryCallerId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryCallerId(); } virtual const char *queryLocalId() const override { - if (!activeSpan) - return nullptr; return activeSpan->queryLocalId(); } virtual const CRuntimeStatisticCollection &queryStats() const override diff --git a/system/jlib/jtrace.cpp b/system/jlib/jtrace.cpp index 2eaa65d7c7f..dba4d9138ee 100644 --- a/system/jlib/jtrace.cpp +++ b/system/jlib/jtrace.cpp @@ -536,6 +536,33 @@ class CSpan : public CInterfaceOf<ISpan> CSpan * localParentSpan = nullptr; }; +class CNullSpan : public CInterfaceOf<ISpan> +{ +public: + CNullSpan() = default; + + virtual void setSpanAttribute(const char * key, const char * val) override {} + virtual void setSpanAttributes(const IProperties * attributes) override {} + virtual void addSpanEvent(const char * eventName) override {} + virtual bool getSpanContext(IProperties * ctxProps, bool otelFormatted) const override { return false; } + + virtual void toString(StringBuffer & out) const override {} + virtual void toLog(StringBuffer & out) const override {} + virtual void getLogPrefix(StringBuffer & out) const override {} + + virtual const char* queryGlobalId() const override { return nullptr; } + virtual const char* queryCallerId() const override { return nullptr; } + virtual const char* queryLocalId() const override { return nullptr; } + + virtual ISpan * createClientSpan(const char * name) override { return getNullSpan(); } + virtual ISpan * createInternalSpan(const char * name) override { return getNullSpan(); } + +private: + CNullSpan(const CNullSpan&) = delete; + CNullSpan& operator=(const CNullSpan&) = delete; +}; + + class CInternalSpan : public CSpan { public: @@ -817,7 +844,7 @@ class CTraceManager : implements ITraceManager, public CInterface Expected Configuration format: global: tracing: #optional - tracing enabled by default disable:
true #optional - disable OTel tracing + disabled: true #optional - disable OTel tracing alwaysCreateGlobalIds : false #optional - should global ids always be created? exporter: #optional - Controls how trace data is exported/reported type: OTLP #OS|OTLP|Prometheus|HPCC (default: no export, jlog entry) @@ -854,7 +881,7 @@ class CTraceManager : implements ITraceManager, public CInterface toXML(traceConfig, xml); DBGLOG("traceConfig tree: %s", xml.str()); #endif - bool disableTracing = traceConfig && traceConfig->getPropBool("@disable", false); + bool disableTracing = traceConfig && traceConfig->getPropBool("@disabled", false); using namespace opentelemetry::trace; if (disableTracing) @@ -920,13 +947,13 @@ class CTraceManager : implements ITraceManager, public CInterface throw makeStringExceptionV(-1, "TraceManager must be intialized!"); } - ISpan * createServerSpan(const char * name, StringArray & httpHeaders, SpanFlags flags) override + ISpan * createServerSpan(const char * name, StringArray & httpHeaders, SpanFlags flags) const override { Owned<IProperties> headerProperties = getHeadersAsProperties(httpHeaders); return new CServerSpan(name, moduleName.get(), headerProperties, flags); } - ISpan * createServerSpan(const char * name, const IProperties * httpHeaders, SpanFlags flags) override + ISpan * createServerSpan(const char * name, const IProperties * httpHeaders, SpanFlags flags) const override { return new CServerSpan(name, moduleName.get(), httpHeaders, flags); } @@ -959,6 +986,13 @@ MODULE_EXIT() theTraceManager.destroy(); } +static Owned<ISpan> nullSpan = new CNullSpan(); + +ISpan * getNullSpan() +{ + return nullSpan.getLink(); +} + void initTraceManager(const char * componentName, const IPropertyTree * componentConfig, const IPropertyTree * globalConfig) { theTraceManager.query([=] () { return new CTraceManager(componentName, componentConfig, globalConfig); }); diff --git a/system/jlib/jtrace.hpp b/system/jlib/jtrace.hpp index a9ec14becca..71fe9ccc41f 100644 ---
a/system/jlib/jtrace.hpp +++ b/system/jlib/jtrace.hpp @@ -58,13 +58,14 @@ extern jlib_decl IProperties * getClientHeaders(const ISpan * span); interface ITraceManager : extends IInterface { - virtual ISpan * createServerSpan(const char * name, StringArray & httpHeaders, SpanFlags flags = SpanFlags::None) = 0; - virtual ISpan * createServerSpan(const char * name, const IProperties * httpHeaders, SpanFlags flags = SpanFlags::None) = 0; + virtual ISpan * createServerSpan(const char * name, StringArray & httpHeaders, SpanFlags flags = SpanFlags::None) const = 0; + virtual ISpan * createServerSpan(const char * name, const IProperties * httpHeaders, SpanFlags flags = SpanFlags::None) const = 0; virtual bool isTracingEnabled() const = 0; virtual bool alwaysCreateGlobalIds() const = 0; virtual const char * getTracedComponentName() const = 0; }; +extern jlib_decl ISpan * getNullSpan(); extern jlib_decl void initTraceManager(const char * componentName, const IPropertyTree * componentConfig, const IPropertyTree * globalConfig); extern jlib_decl ITraceManager & queryTraceManager(); diff --git a/system/jlib/jutil.cpp b/system/jlib/jutil.cpp index 90779f3b702..3aabce9f92e 100644 --- a/system/jlib/jutil.cpp +++ b/system/jlib/jutil.cpp @@ -109,6 +109,8 @@ static void initBuildVars() hpccBuildInfo.buildVersionMinor = BUILD_VERSION_MINOR; hpccBuildInfo.buildVersionPoint = BUILD_VERSION_POINT; hpccBuildInfo.buildVersion = estringify(BUILD_VERSION_MAJOR) "." estringify(BUILD_VERSION_MINOR) "." 
estringify(BUILD_VERSION_POINT); + hpccBuildInfo.buildMaturity = BUILD_MATURITY; + hpccBuildInfo.buildTagTimestamp = BUILD_TAG_TIMESTAMP; hpccBuildInfo.dirName = DIR_NAME; hpccBuildInfo.prefix = PREFIX; diff --git a/system/jlib/jutil.hpp b/system/jlib/jutil.hpp index e90c44f36fe..a6fa94f2a46 100644 --- a/system/jlib/jutil.hpp +++ b/system/jlib/jutil.hpp @@ -640,6 +640,8 @@ struct HPCCBuildInfo unsigned buildVersionMinor; unsigned buildVersionPoint; const char *buildVersion; + const char *buildMaturity; + const char *buildTagTimestamp; }; extern jlib_decl HPCCBuildInfo hpccBuildInfo; diff --git a/testing/unittests/jlibtests.cpp b/testing/unittests/jlibtests.cpp index 0f3a99b29fe..83925b32c49 100644 --- a/testing/unittests/jlibtests.cpp +++ b/testing/unittests/jlibtests.cpp @@ -54,6 +54,7 @@ class JlibTraceTest : public CppUnit::TestFixture CPPUNIT_TEST(testInvalidPropegatedServerSpan); CPPUNIT_TEST(testInternalSpan); CPPUNIT_TEST(testMultiNestedSpanTraceOutput); + CPPUNIT_TEST(testNullSpan); CPPUNIT_TEST_SUITE_END(); const char * simulatedGlobalYaml = R"!!(global: @@ -153,6 +154,26 @@ class JlibTraceTest : public CppUnit::TestFixture initTraceManager("somecomponent", traceConfig, nullptr); } + void testNullSpan() + { + if (!queryTraceManager().isTracingEnabled()) + { + DBGLOG("Skipping testNullSpan, tracing is not enabled"); + return; + } + + Owned<ISpan> nullSpan = getNullSpan(); + CPPUNIT_ASSERT_EQUAL_MESSAGE("Unexpected nullptr nullspan detected", true, nullSpan != nullptr); + + { + Owned<IProperties> headers = createProperties(true); + nullSpan->getSpanContext(headers, true); + } + + Owned<ISpan> nullSpanChild = nullSpan->createClientSpan("nullSpanChild"); + CPPUNIT_ASSERT_EQUAL_MESSAGE("Unexpected nullptr nullSpanChild detected", true, nullSpanChild != nullptr); + } + void testClientSpan() { Owned<IProperties> emptyMockHTTPHeaders = createProperties(); diff --git a/tools/wutool/wutool.cpp b/tools/wutool/wutool.cpp index 39b9522c8bc..6c4ca5f8ac1 100644 --- a/tools/wutool/wutool.cpp +++
b/tools/wutool/wutool.cpp @@ -88,7 +88,7 @@ void usage(const char * action = nullptr) " results - Dump results from specified workunits\n" " info \n" " - Display information from a workunit\n" - " analyze - Analyse the workunit to highlight performance issues\n" + " analyze - Analyse the workunit to highlight potential cost savings\n" "\n" " archive - Archive to xml files [TO=] [DEL=1] [DELETERESULTS=1] [INCLUDEFILES=1]\n" " restore - Restore from xml files [INCLUDEFILES=1]\n"