From 1b3a238ff02deb43829efa76eff0315b304b1a10 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Tue, 9 Apr 2024 11:52:46 +0100 Subject: [PATCH 01/18] HPCC-31580 Avoid spurious 'eclcc killed' warning if compile is aborted Signed-off-by: Gavin Halliday --- ecl/eclccserver/eclccserver.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ecl/eclccserver/eclccserver.cpp b/ecl/eclccserver/eclccserver.cpp index 858eb2dc67d..6974d750ad0 100644 --- a/ecl/eclccserver/eclccserver.cpp +++ b/ecl/eclccserver/eclccserver.cpp @@ -778,7 +778,7 @@ class EclccCompileThread : implements IPooledThread, implements IErrorReporter, } else { - if (processKilled) + if (processKilled && !workunit->aborting()) addExceptionToWorkunit(workunit, SeverityError, "eclccserver", 9999, "eclcc killed - likely to be out of memory - see compile log for details", nullptr, 0, 0, 0); #ifndef _CONTAINERIZED Owned query = workunit->updateQuery(); From 436bad2b178aa508c8ce4e3cbb60d3664966b29d Mon Sep 17 00:00:00 2001 From: Ken Rowland Date: Fri, 5 Apr 2024 13:58:31 -0400 Subject: [PATCH 02/18] HPCC-31574 Add option in Dali LDAP support to ignore default file user Added option to disable use of default user. Deny access if no user provided and default user is not defined. Do not automatically set user to authenticated. 
Signed-Off-By: Kenneth Rowland kenneth.rowland@lexisnexisrisk.com --- dali/server/daldap.cpp | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/dali/server/daldap.cpp b/dali/server/daldap.cpp index 747c62fb32b..b2be8460896 100644 --- a/dali/server/daldap.cpp +++ b/dali/server/daldap.cpp @@ -54,6 +54,7 @@ class CDaliLdapConnection: implements IDaliLdapConnection, public CInterface Owned ldapsecurity; StringAttr filesdefaultuser; StringAttr filesdefaultpassword; + bool disableFilesDefaultUser; unsigned ldapflags; IDigitalSignatureManager * pDSM = nullptr; @@ -82,6 +83,7 @@ class CDaliLdapConnection: implements IDaliLdapConnection, public CInterface { filesdefaultuser.set(ldapprops->queryProp("@filesDefaultUser")); filesdefaultpassword.set(ldapprops->queryProp("@filesDefaultPassword")); + disableFilesDefaultUser = ldapprops->getPropBool("@disableDefaultUser", false); try { ignoreSigPipe(); // LDAP can generate @@ -115,29 +117,36 @@ class CDaliLdapConnection: implements IDaliLdapConnection, public CInterface return SecAccess_Full; + Owned user; StringBuffer username; StringBuffer password; if (udesc) { udesc->getUserName(username); udesc->getPassword(password); + user.setown(ldapsecurity->createUser(username)); + user->setAuthenticateStatus(AS_AUTHENTICATED); // treat caller passing user as trusted } else { DBGLOG("NULL UserDescriptor in daldap.cpp getPermissions('%s')", key); - } + logNullUser(nullptr); + + // If no user was provided, try to use the default user + if (disableFilesDefaultUser || filesdefaultuser.isEmpty()) + { + OWARNLOG("Default user missing or disabled, access denied for request %s %s", key, nullText(obj)); + return SecAccess_None; // no access if no default user or disabled + } - if (0 == username.length()) - { username.append(filesdefaultuser); decrypt(password, filesdefaultpassword); - OWARNLOG("Missing credentials, injecting deprecated filesdefaultuser for request %s %s", key, nullText(obj)); - 
logNullUser(nullptr); + OWARNLOG("Missing credentials, injecting deprecated filesdefaultuser (%s) for request %s %s", filesdefaultuser.str(), key, + nullText(obj)); + user.setown(ldapsecurity->createUser(username)); + user->credentials().setPassword(password); // Force authentication of default user when used } - Owned user = ldapsecurity->createUser(username); - user->setAuthenticateStatus(AS_AUTHENTICATED); - SecAccessFlags perm = SecAccess_None; unsigned start = msTick(); if (filescope) From c8594ef26c48ac9da1ab4ce5fb0eca862fff3bcb Mon Sep 17 00:00:00 2001 From: James McMullan Date: Fri, 12 Apr 2024 15:04:56 -0400 Subject: [PATCH 03/18] HPCC-31596 Link to ESP regression suite information wiki - Added additional information link about running tests - Fixed unit test command - Added missing python import - Added link to the github action run URL Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/actions/hpcc4j-esp-regression-suite/action.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/actions/hpcc4j-esp-regression-suite/action.yaml b/.github/actions/hpcc4j-esp-regression-suite/action.yaml index ad93e170258..b45ad9efaad 100644 --- a/.github/actions/hpcc4j-esp-regression-suite/action.yaml +++ b/.github/actions/hpcc4j-esp-regression-suite/action.yaml @@ -38,17 +38,20 @@ runs: github_token: ${{ inputs.github-token }} branch_name: ${{ inputs.branch-name }} repository: ${{ inputs.repository }} + actions_run_url: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} shell: python run: | import os import csv import textwrap import json + import sys branch_name = os.getenv('branch_name') comments_url = os.getenv('comments_url') github_token = os.getenv('github_token') repository = os.getenv('repository') + actions_run_url = os.getenv('actions_run_url') file_path = "./HPCC4j/wsclient/FailedTests.csv" if os.path.exists(file_path): @@ -60,17 +63,19 @@ runs: testFailureMessages += 
textwrap.dedent(f"""\ ## {row[0]}.{row[1]} Failed **Error:** ```{row[2]}``` - **Test Command:** ```mvn -B -Dhpccconn=https://eclwatch.default:8010 -Dtest={row[0]}#{row[1]} test``` + **Test Command:** ```mvn -B --activate-profiles jenkins-on-demand -pl wsclient -Dhpccconn=https://eclwatch.default:8010 -Dtest={row[0]}#{row[1]} test``` """) if testFailureMessages: message = textwrap.dedent(f"""\ # ESP Regression Suite Test Failures: + **Action Run:** {actions_run_url} The following tests failed; checkout a copy of the HPCC4j project with the following command and run the individual test commands below to debug the failures. ``` git clone https://github.com/{repository}.git hpcc4j cd hpcc4j && git checkout {branch_name} ``` + **See:** https://github.com/hpcc-systems/hpcc4j/wiki/Running-Individual-Unit-Tests for detailed information on running the tests below. """) message += testFailureMessages From dd44863a317c453878d7de786930426b55d2969e Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Fri, 12 Apr 2024 16:44:29 -0400 Subject: [PATCH 04/18] HPCC-31442 ECL Watch v9 ZAP Dialog log inclusion options Expose the "IncludeRelatedLogs" and "IncludePerComponentLogs" request parameters for /WsWorkunits/WUCreateAndDownloadZAPInfo, which were added in HPCC-31441 Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- .../src-react/components/forms/ZAPDialog.tsx | 20 +++++++++++++++++++ esp/src/src/nls/hpcc.ts | 2 ++ 2 files changed, 22 insertions(+) diff --git a/esp/src/src-react/components/forms/ZAPDialog.tsx b/esp/src/src-react/components/forms/ZAPDialog.tsx index 750e997efba..2ee5225dd60 100644 --- a/esp/src/src-react/components/forms/ZAPDialog.tsx +++ b/esp/src/src-react/components/forms/ZAPDialog.tsx @@ -56,6 +56,8 @@ interface ZAPDialogValues { WhereSlow: string; Password: string; IncludeThorSlaveLog: boolean; + IncludeRelatedLogs: boolean; + IncludePerComponentLogs: boolean; SendEmail: boolean; EmailTo: 
string; EmailFrom: string; @@ -92,6 +94,8 @@ const defaultValues: ZAPDialogValues = { WhereSlow: "", Password: "", IncludeThorSlaveLog: true, + IncludeRelatedLogs: true, + IncludePerComponentLogs: false, SendEmail: false, EmailTo: "", EmailFrom: "", @@ -380,6 +384,22 @@ export const ZAPDialog: React.FunctionComponent = ({ }) => } /> +
+ } + /> +
+
+ } + /> +
Date: Fri, 12 Apr 2024 18:11:07 -0400 Subject: [PATCH 05/18] HPCC-27280 Alert users elastic4hpcclogs does not implement security Signed-off-by: g-pan --- .../ContainerizedMods/ContainerLogging.xml | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml b/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml index be7460be659..c296b6b329f 100644 --- a/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml +++ b/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml @@ -108,6 +108,30 @@ The next sections will show you how to install and set up the Elastic stack logging solution for HPCC Systems. + + + + + + + + + + + + + NOTE: The + elastic4hpcclogs chart does not enable any security. The + responsibility of determining the need for security and + enabling security on any deployed Elastic Stack instance or + components is entirely your organization and your + responsibility. + + + + + Add the HPCC Systems Repository From c6cbbf83d60302a72c46309ec1cb70f19bcbd18f Mon Sep 17 00:00:00 2001 From: Rodrigo Pastrana Date: Mon, 8 Apr 2024 22:31:46 -0400 Subject: [PATCH 06/18] HPCC-31535 ALA declare KQL timespan - Declares request level timespan to minimize query scope - Adds method to serialize time range as ISO 8601 Time intervals - Supply query time interval as KQL timespan - Replace api.loganalytics.io endpoint with api.loganalytics.azure.com. 
Signed-off-by: Rodrigo Pastrana --- .../AzureLogAnalyticsCurlClient.cpp | 39 ++++++++++++++++--- .../AzureLogAnalyticsCurlClient.hpp | 1 + 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp index cd02bcceb9a..92a92d3931f 100644 --- a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp +++ b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.cpp @@ -170,7 +170,7 @@ size_t stringCallback(char *contents, size_t size, size_t nmemb, void *userp) return size * nmemb; } -static void submitKQLQuery(std::string & readBuffer, const char * token, const char * kql, const char * workspaceID) +static void submitKQLQuery(std::string & readBuffer, const char * token, const char * kql, const char * workspaceID, const char * timeSpan) { if (isEmptyString(token)) throw makeStringExceptionV(-1, "%s KQL request: Empty LogAnalytics Workspace Token detected!", COMPONENT_NAME); @@ -181,6 +181,9 @@ static void submitKQLQuery(std::string & readBuffer, const char * token, const c if (isEmptyString(workspaceID)) throw makeStringExceptionV(-1, "%s KQL request: Empty WorkspaceID detected!", COMPONENT_NAME); + if (isEmptyString(timeSpan)) + throw makeStringExceptionV(-1, "%s KQL request: Empty timeSpan detected!", COMPONENT_NAME); + OwnedPtrCustomFree curlHandle = curl_easy_init(); if (curlHandle) { @@ -190,14 +193,18 @@ static void submitKQLQuery(std::string & readBuffer, const char * token, const c curlErrBuffer[0] = '\0'; char * encodedKQL = curl_easy_escape(curlHandle, kql, strlen(kql)); - VStringBuffer tokenRequestURL("https://api.loganalytics.io/v1/workspaces/%s/query?query=%s", workspaceID, encodedKQL); + char * encodedTimeSpan = curl_easy_escape(curlHandle, timeSpan, strlen(timeSpan)); + VStringBuffer 
kqlQueryString("https://api.loganalytics.azure.com/v1/workspaces/%s/query?query=%s×pan=%s", workspaceID, encodedKQL, encodedTimeSpan); + curl_free(encodedTimeSpan); curl_free(encodedKQL); + DBGLOG("%s: Full ALA API query request: '%s'", COMPONENT_NAME, kqlQueryString.str()); + VStringBuffer bearerHeader("Authorization: Bearer %s", token); /*curl -X GET -H "Authorization: Bearer " - "https://api.loganalytics.io/v1/workspaces//query?query=ContainerLog20%7C%20limit%20100" + "https://api.loganalytics.azure.com/v1/workspaces//query?query=ContainerLog20%7C%20limit%20100×pan=2022-05-11T06:45:00.000Z%2F2022-05-11T13:00:00.000Z" */ headers = curl_slist_append(headers, bearerHeader.str()); @@ -205,8 +212,8 @@ static void submitKQLQuery(std::string & readBuffer, const char * token, const c if (curl_easy_setopt(curlHandle, CURLOPT_HTTPHEADER, headers.getClear()) != CURLE_OK) throw makeStringExceptionV(-1, "%s: Log query request: Could not set 'CURLOPT_HTTPHEADER'", COMPONENT_NAME); - if (curl_easy_setopt(curlHandle, CURLOPT_URL, tokenRequestURL.str()) != CURLE_OK) - throw makeStringExceptionV(-1, "%s: Log query request: Could not set 'CURLOPT_URL' (%s)!", COMPONENT_NAME, tokenRequestURL.str()); + if (curl_easy_setopt(curlHandle, CURLOPT_URL, kqlQueryString.str()) != CURLE_OK) + throw makeStringExceptionV(-1, "%s: Log query request: Could not set 'CURLOPT_URL' (%s)!", COMPONENT_NAME, kqlQueryString.str()); if (curl_easy_setopt(curlHandle, CURLOPT_POST, 0) != CURLE_OK) throw makeStringExceptionV(-1, "%s: Log query request: Could not disable 'CURLOPT_POST' option!", COMPONENT_NAME); @@ -583,6 +590,23 @@ void AzureLogAnalyticsCurlClient::searchMetaData(StringBuffer & search, const Lo search.appendf("\n| limit %s", std::to_string(size).c_str()); } +void AzureLogAnalyticsCurlClient::azureLogAnalyticsQueryTimeSpanString(StringBuffer & queryTimeSpan, std::time_t from, std::time_t to) +{ + if (from == -1) + throw makeStringExceptionV(-1, "%s: Invalid 'from' timestamp detected", 
COMPONENT_NAME); + + char fromTimeStr[40]; + std::strftime(fromTimeStr, sizeof(fromTimeStr), "%Y-%m-%dT%H:%M:%S", std::gmtime(&from)); + queryTimeSpan.set(fromTimeStr); + + if (to != -1) + { + char toTimeStr[40]; + std::strftime(toTimeStr, sizeof(toTimeStr), "%Y-%m-%dT%H:%M:%S", std::gmtime(&to)); + queryTimeSpan.appendf("/%s", toTimeStr); + } +} + void AzureLogAnalyticsCurlClient::azureLogAnalyticsTimestampQueryRangeString(StringBuffer & range, const char * timeStampField, std::time_t from, std::time_t to) { if (isEmptyString(timeStampField)) @@ -978,7 +1002,10 @@ bool AzureLogAnalyticsCurlClient::fetchLog(LogQueryResultDetails & resultDetails populateKQLQueryString(queryString, queryIndex, options); std::string readBuffer; - submitKQLQuery(readBuffer, token.str(), queryString.str(), m_logAnalyticsWorkspaceID.str()); + StringBuffer queryTimeSpan; + const LogAccessTimeRange & trange = options.getTimeRange(); + azureLogAnalyticsQueryTimeSpanString(queryTimeSpan, trange.getStartt().getSimple(), trange.getEndt().isNull() ? 
-1 : trange.getEndt().getSimple()); + submitKQLQuery(readBuffer, token.str(), queryString.str(), m_logAnalyticsWorkspaceID.str(), queryTimeSpan.str()); return processSearchJsonResp(resultDetails, readBuffer, returnbuf, format, true); } diff --git a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp index bedcff652a1..edc0c3d84cb 100644 --- a/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp +++ b/system/logaccess/Azure/LogAnalytics/CurlClient/AzureLogAnalyticsCurlClient.hpp @@ -81,6 +81,7 @@ class AzureLogAnalyticsCurlClient : public CInterfaceOf void populateKQLQueryString(StringBuffer & queryString, StringBuffer& queryIndex, const ILogAccessFilter * filter); void declareContainerIndexJoinTable(StringBuffer & queryString, const LogAccessConditions & options); static void azureLogAnalyticsTimestampQueryRangeString(StringBuffer & range, const char * timestampfield, std::time_t from, std::time_t to); + static void azureLogAnalyticsQueryTimeSpanString(StringBuffer & queryTimeSpan, std::time_t from, std::time_t to); static unsigned processHitsJsonResp(IPropertyTreeIterator * lines, IPropertyTreeIterator * columns, StringBuffer & returnbuf, LogAccessLogFormat format, bool wrapped, bool reportHeader); static bool processSearchJsonResp(LogQueryResultDetails & resultDetails, const std::string & retrievedDocument, StringBuffer & returnbuf, LogAccessLogFormat format, bool reportHeader); From 88046bb52572e706044ef90f0cd23484f995f4df Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Mon, 15 Apr 2024 20:21:26 +0100 Subject: [PATCH 07/18] HPCC-31622 Improve Dali unused file message Signed-off-by: Jake Smith --- dali/base/dasds.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dali/base/dasds.cpp b/dali/base/dasds.cpp index ce63df31a7a..31838962e9d 100644 --- a/dali/base/dasds.cpp +++ b/dali/base/dasds.cpp @@ -5527,7 +5527,7 
@@ class CStoreHelper : implements IStoreHelper, public CInterface if (newName.charAt(i)==':') newName.setCharAt(i,'_'); newName.append(".unused"); - PROGLOG("Detected spurious data file : '%s' - renaming to %s", file.queryFilename(), newName.str()); + PROGLOG("Detected spurious data file : '%s' - renaming to %s (current edition=%u, file edition=%u)", file.queryFilename(), newName.str(), edition, fileEdition); try { file.rename(newName.str()); From a2d919028b469add8decd16db030dc44b64a31b8 Mon Sep 17 00:00:00 2001 From: g-pan Date: Mon, 15 Apr 2024 16:14:30 -0400 Subject: [PATCH 08/18] HPCC-27280 Alert users elastic4hpcclogs does not implement security Signed-off-by: g-pan --- .../ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml b/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml index c296b6b329f..f623a8668fe 100644 --- a/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml +++ b/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml @@ -125,8 +125,7 @@ elastic4hpcclogs chart does not enable any security. The responsibility of determining the need for security and enabling security on any deployed Elastic Stack instance or - components is entirely your organization and your - responsibility. + components is up to you and your organization. 
From 92468a2b00b05e4a664d3c151e272883ad0d77b2 Mon Sep 17 00:00:00 2001 From: Jack Del Vecchio Date: Tue, 16 Apr 2024 12:35:12 +0000 Subject: [PATCH 09/18] HPCC-31625 Fix whitespace issues in MongoDB plugin --- plugins/mongodb/CMakeLists.txt | 2 +- plugins/mongodb/README.md | 15 +- plugins/mongodb/examples/mongodb-test.ecl | 32 +- plugins/mongodb/mongodbembed.cpp | 784 +++++++++++----------- plugins/mongodb/mongodbembed.hpp | 106 +-- 5 files changed, 469 insertions(+), 470 deletions(-) diff --git a/plugins/mongodb/CMakeLists.txt b/plugins/mongodb/CMakeLists.txt index b165fa35cc4..ba50a3128f4 100644 --- a/plugins/mongodb/CMakeLists.txt +++ b/plugins/mongodb/CMakeLists.txt @@ -63,7 +63,7 @@ IF(MONGODBEMBED) FILES ${LIBMONGOCXX_LIB} ${LIBMONGOCXX_LIB_ABI} DESTINATION ${LIB_DIR} COMPONENT Runtime) - + install( FILES ${LIBBSONCXX_LIB_REAL} DESTINATION ${LIB_DIR} diff --git a/plugins/mongodb/README.md b/plugins/mongodb/README.md index 9458fd06510..6c9747080d9 100755 --- a/plugins/mongodb/README.md +++ b/plugins/mongodb/README.md @@ -1,11 +1,11 @@ # MongoDB plugin for ECL The MongoDB plugin allows an ECL user to embed MongoDB function calls into their code and run it -on the HPCC Platform. The plugin supports inserting a dataset into a database using `insert_many`, and can +on the HPCC Platform. The plugin supports inserting a dataset into a database using `insert_many`, and can build ECL datasets from MongoDB result documents returned by the `find`, `update`, `delete`, `aggregate`, and `runCommand` methods. The embedded script that gets passed to the plugin can be used to create complex documents to support almost every -MongoDB command. +MongoDB command. It is important to use the same keys as the ones in MongoDB when declaring a return type or when creating a BSON document. Otherwise, the plugin will look for a field that might not exist to return when building the resulting dataset. 
@@ -56,7 +56,7 @@ To create the uri for the MongoDB connection instance the ECL user needs to pass | limit| Limit the number of documents returned from the find command (To limit the documents returned from an aggregation use the [$limit stage](https://www.mongodb.com/docs/manual/reference/operator/aggregation/limit/)). The default is no limit. For more information on how limit works visit the [Manual](https://www.mongodb.com/docs/manual/reference/method/cursor.limit/#behavior). | connectionOptions | A string of connection options used to make the connection to the cluster. Currently only one set of connection options will be used per workunit. | -#### Connection Options +#### Connection Options To specify connection options to the MongoDB cluster use the connectionOptions option in the embed statement. The format for the connection options is ampersand separated options like so: \&\ @@ -78,14 +78,14 @@ Multiple options are seperated by '&', and more information about additional ope ``` getConnection() := EMBED(mongodb : user(user), password(pwd), server(server), database(dbname), collection(collname), batchSize(100)) -ENDEMBED; +ENDEMBED; ``` For connecting to a local MongoDB instance you just need to pass in the port number that the server is listening on. 
``` getConnection() := EMBED(mongodb : port(port), database(dbname), collection(collname)) -ENDEMBED; +ENDEMBED; ``` ### Parameters @@ -95,7 +95,7 @@ To use function parameters within the MongoDB statement, prefix them with a '\$' ``` dataset({STRING _id}) getCount(REAL salary) := EMBED(mongodb : user(user), password(pwd), server(server), database(dbname), collection(collname), batchSize(100)) find({ salary: { $gt: $salary}}); -ENDEMBED; +ENDEMBED; ``` ### Limitations @@ -141,7 +141,7 @@ ENDEMBED; insertMany(ds); insertOne(STRING first, STRING last, REAL salary) := EMBED(mongodb : user(user), password(pwd), server(server), database(db), collection(coll)) - insert({first: $first, last: $last, salary: $salary}); + insert({first: $first, last: $last, salary: $salary}); ENDEMBED; insertOne(); ``` @@ -264,4 +264,3 @@ createIndex(1, true); ``` In this example an index is created with the keys first and last and in ascending order. - diff --git a/plugins/mongodb/examples/mongodb-test.ecl b/plugins/mongodb/examples/mongodb-test.ecl index 63df23a3c77..08af1447304 100644 --- a/plugins/mongodb/examples/mongodb-test.ecl +++ b/plugins/mongodb/examples/mongodb-test.ecl @@ -12,22 +12,22 @@ collectionName := 'test1'; // Records for defining the layout of example datasets reviewsRec := RECORD - INTEGER review_scores_cleanliness; - INTEGER review_scores_checkin; - INTEGER review_scores_communication; - INTEGER review_scores_location; - INTEGER review_scores_value; - INTEGER review_scores_rating; + INTEGER review_scores_cleanliness; + INTEGER review_scores_checkin; + INTEGER review_scores_communication; + INTEGER review_scores_location; + INTEGER review_scores_value; + INTEGER review_scores_rating; INTEGER review_scores_accuracy; END; layoutairbnb := RECORD - UTF8 name; - UTF8 space; - UTF8 description; - INTEGER beds; + UTF8 name; + UTF8 space; + UTF8 description; + INTEGER beds; INTEGER accommodates; - SET OF STRING amenities; + SET OF STRING amenities; DATASET(reviewsRec) 
review_scores; END; @@ -62,7 +62,7 @@ layoutperson := {String username, String address, String email}; // Returns the unique _id and name every document in the listingsAndReviews collection dataset({STRING _id, STRING name}) getAll() := EMBED(mongodb : user(user), password(pwd), server(server), database('sample_airbnb'), collection('listingsAndReviews')) find({}); -ENDEMBED; +ENDEMBED; INTEGER beds := 3; INTEGER accommodates := 5; @@ -128,13 +128,13 @@ ENDEMBED; // Matches all the documents where the price is greater than or equal to the min argument and less than the max argument. Then sorts the results by price first then extra_people then security_deposit. dataset(layoutFees) findAndSort(REAL4 max, REAL4 min, INTEGER asc) := Embed(mongodb : user(user), password(pwd), server(server), database('sample_airbnb'), collection('listingsAndReviews')) aggregate([{$match: { price: { $gte: $min, $lt: $max}}}, {$sort: {price: $asc, extra_people: $asc, security_deposit: $asc}}]); -ENDEMBED; +ENDEMBED; // Inserts a dataset using insert_many and returns the count of documents that were inserted. dataset(mongodb.insertManyResultRecord) insertMany(dataset(layoutEmployee) employees) := Embed(mongodb : user(user), password(pwd), server(server), database('mydb'), collection('test2')) insert({$employees}); ENDEMBED; -employeeDS := DATASET ([{1, 'John', 'Andrews', 101000.5}, {2, 'Anne', 'Smith', 100000.7}, {3, 'Amy', 'Isaac', 103000.1}, {4, 'Kirk', 'Captain', 109000.9}, {5, 'Steve', 'Rogers', 99000.6}, +employeeDS := DATASET ([{1, 'John', 'Andrews', 101000.5}, {2, 'Anne', 'Smith', 100000.7}, {3, 'Amy', 'Isaac', 103000.1}, {4, 'Kirk', 'Captain', 109000.9}, {5, 'Steve', 'Rogers', 99000.6}, {6, 'Evan', 'Bosch', 104000.5}, {7, 'Jack', 'Adams', 101000.5}, {8, 'Vince', 'Carter', 306000.5}, {9, 'Beth', 'Stevens', 102000.2}, {10, 'Samantha', 'Rogers', 107000.5}], layoutEmployee); // Creates an Index on the fields "first" and "last" and sorts them in ascending order. 
@@ -165,8 +165,8 @@ ENDEMBED; INTEGER ppl := 8; // Matches all the documents that match either expression. Then it groups them by the number of beds they have and counts the number of documents in each group. dataset({String _id, Integer count}) findCountOR(INTEGER min_nights, INTEGER people) := EMBED(mongodb : user(user), password(pwd), server(server), database('sample_airbnb'), collection('listingsAndReviews')) - aggregate([{ $match: - { $or: [ + aggregate([{ $match: + { $or: [ {"$expr" : {"$gt" : [{"$toInt" : "$minimum_nights"} , $min_nights]}}, {"$expr" : {"$gte" : [$accommodates, $people]}} ] diff --git a/plugins/mongodb/mongodbembed.cpp b/plugins/mongodb/mongodbembed.cpp index 7b99f0247c4..9b7c3a548c6 100755 --- a/plugins/mongodb/mongodbembed.cpp +++ b/plugins/mongodb/mongodbembed.cpp @@ -71,11 +71,11 @@ static const NullFieldProcessor NULLFIELD(NULL); /** * @brief Takes a pointer to an ECLPluginDefinitionBlock and passes in all the important info - * about the plugin. + * about the plugin. 
*/ extern "C" MONGODBEMBED_PLUGIN_API bool getECLPluginDefinition(ECLPluginDefinitionBlock *pb) { - if (pb->size == sizeof(ECLPluginDefinitionBlockEx)) + if (pb->size == sizeof(ECLPluginDefinitionBlockEx)) { ECLPluginDefinitionBlockEx * pbx = (ECLPluginDefinitionBlockEx *) pb; pbx->compatibleVersions = COMPATIBLE_VERSIONS; @@ -107,20 +107,20 @@ namespace mongodbembed /** * @brief Takes an exception object and outputs the error to the user - * + * * @param e mongocxx::exception has some values that are useful to look for */ void reportQueryFailure(const mongocxx::exception &e) { - if (e.code() == mongocxx::error_code::k_invalid_collection_object) + if (e.code() == mongocxx::error_code::k_invalid_collection_object) { failx("Collection not found: %s",e.what()); } - if (e.code().value() == 26) + if (e.code().value() == 26) { failx("NamespaceNotFound: %s",e.what()); } - if (e.code().value() == 11000) + if (e.code().value() == 11000) { failx("Duplicate Key: %s",e.what()); } @@ -132,104 +132,104 @@ namespace mongodbembed /** * @brief Helper method for converting an Extended JSON structure into standard JSON - * + * * @param result Result string for appending results * @param start Pointer to beginning of structure * @param row Pointer to the last place where there was standard JSON * @param lastBrkt Pointer to the begining of the EJSON structure * @param depth Depth of the structure */ - void convertEJSONTypes(std::string &result, const char * &start, const char * &row, const char * &lastBrkt, int &depth) + void convertEJSONTypes(std::string &result, const char * &start, const char * &row, const char * &lastBrkt, int &depth) { - while (*start && *start != '$') + while (*start && *start != '$') start++; const char * end = start; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; std::string key = std::string(start, end - start); // Get datatype result += std::string(row, lastBrkt - row); // Add everything before we went into nested document // Some data 
types are unsupported as they are not straightforward to deserialize - if (key == "$regularExpression") + if (key == "$regularExpression") { UNSUPPORTED("Regular Expressions"); // TO DO handle unsupported types by not throwing an exception. - } - else if (key == "$timestamp") + } + else if (key == "$timestamp") { - while (*end && *end != '}') + while (*end && *end != '}') end++; // Skip over timestamp row = ++end; start = end; result += "\"\""; - } + } // Both of these get deserialized to strings and are surround by quotation marks - else if (key == "$date" || key == "$oid") + else if (key == "$date" || key == "$oid") { end++; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; // Move to opposite quotation mark // The $date datatype can have a nested $numberLong and this checks for that - if (*(end+1) == '$') + if (*(end+1) == '$') { - while (*end) + while (*end) { - if (*end == '\"') + if (*end == '\"') { - if (*(end+1) == ' ' || *(end+1) == ':' || *(end+1) == '$') + if (*(end+1) == ' ' || *(end+1) == ':' || *(end+1) == '$') { end++; - } - else + } + else break; } end++; } start = ++end; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; result += std::string(start, end - start); // Only add the data inside the quotation marks to result string - while (*end && *end != '}') + while (*end && *end != '}') end++; // Get out of both nested documents end++; - while (*end && *end != '}') + while (*end && *end != '}') end++; end++; depth--; row = end; // Set row to just after the nested document start = end; // move start to the next place for parsing - } - else + } + else { start = end++; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; result += std::string(start, ++end - start); // Only add the data inside the quotation marks to result string - while (*end && *end != '}') + while (*end && *end != '}') end++; // Only have to get out of one nested document end++; depth--; row = end; // Set row to just after the 
nested document start = end; // move start to the next place for parsing } - } - else if (key == "$numberDouble" || key == "$numberDecimal" || key == "$numberLong") + } + else if (key == "$numberDouble" || key == "$numberDecimal" || key == "$numberLong") { // Since these types all represent numbers we don't want to include quotation marks when adding to string result end++; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; start = ++end; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; result += std::string(start, end++ - start); // Only add the data inside the quotation marks to result string - while (*end && *end != '}') + while (*end && *end != '}') end++; // Only have to get out of one nested document end++; depth--; @@ -248,7 +248,7 @@ namespace mongodbembed * "dateField": {"$date":{"$numberLong":"1565546054692"}} * "doubleField": {"$numberDouble":"10.5"} * For more documentation on EJSON. https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/ - * + * * @param result Reference to a result string where the standard JSON should be written. * @param row Pointer to the beginning of the result row. 
*/ @@ -258,7 +258,7 @@ namespace mongodbembed int depth = 0; // Keeps track of depth so we don't exit on wrong '}' const char * lastBrkt; - while (*start && (*start != '}' || depth > 1)) + while (*start && (*start != '}' || depth > 1)) { if (*start == '\"') { @@ -269,13 +269,13 @@ namespace mongodbembed start++; } // If we see a document increase the depth - if (*start == '{') + if (*start == '{') { depth++; lastBrkt = start; // Keep track of last bracket in case we need to backtrack // Look for "{ \"$" to mark the start of a datatype - if (*(start + 1) == ' ' && *(start + 2) == '\"' && *(start + 3) == '$') - { + if (*(start + 1) == ' ' && *(start + 2) == '\"' && *(start + 3) == '$') + { convertEJSONTypes(result, start, row, lastBrkt, depth); // Since we are looking at an Extended JSON structure convert it to normal JSON } } @@ -296,19 +296,19 @@ namespace mongodbembed m_shouldRead = true; } - MongoDBRowStream::~MongoDBRowStream() + MongoDBRowStream::~MongoDBRowStream() { } /** * @brief Builds a result row from the query operation using the MongoDBRowBuilder. - * + * * @return const void* If a row was built returns the complete row, and if the result rows * are empty or it has reached the end it will return a null pointer. */ const void * MongoDBRowStream::nextRow() { - if (m_shouldRead && m_currentRow < m_query->result()->length()) + if (m_shouldRead && m_currentRow < m_query->result()->length()) { auto json = m_query->result()->item(m_currentRow++); Owned contentTree = createPTreeFromJSONString(json,ipt_caseInsensitive); @@ -329,7 +329,7 @@ namespace mongodbembed } /** - * @brief Stops the MongoDBRowStream from reading any more rows. Called by the engine. + * @brief Stops the MongoDBRowStream from reading any more rows. Called by the engine. */ void MongoDBRowStream::stop() { @@ -339,7 +339,7 @@ namespace mongodbembed /** * @brief Throws an exception and gets called when an operation that is unsupported is attempted. 
- * + * * @param feature Name of the feature that is currently unsupported. */ extern void UNSUPPORTED(const char *feature) @@ -349,7 +349,7 @@ namespace mongodbembed /** * @brief Exits the program with a failure code and a message to display. - * + * * @param message Message to display. * @param ... Takes any number of arguments that can be inserted into the string using %. */ @@ -365,7 +365,7 @@ namespace mongodbembed /** * @brief Exits the program with a failure code and a message to display. - * + * * @param message Message to display. */ extern void fail(const char *message) @@ -378,7 +378,7 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param len Number of chars in value. * @param value pointer to value of parameter. * @param field RtlFielInfo holds a lot of information about the embed context and here we grab @@ -393,9 +393,9 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param value pointer to value of parameter. - * @param field RtlFielInfo holds a lot of information about the embed context. + * @param field RtlFielInfo holds a lot of information about the embed context. * @param query Object holding the bsoncxx::builder::basic::document. */ void bindBoolParam(bool value, const RtlFieldInfo * field, std::shared_ptr query) @@ -406,10 +406,10 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param len Number of chars in value. * @param value pointer to value of parameter. - * @param field RtlFielInfo holds a lot of information about the embed context. + * @param field RtlFielInfo holds a lot of information about the embed context. 
* @param query Object holding the bsoncxx::builder::basic::document. */ void bindDataParam(unsigned len, const void *value, const RtlFieldInfo * field, std::shared_ptr query) @@ -424,9 +424,9 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param value pointer to value of parameter. - * @param field RtlFielInfo holds a lot of information about the embed context. + * @param field RtlFielInfo holds a lot of information about the embed context. * @param query Object holding the bsoncxx::builder::basic::document. */ void bindIntParam(__int64 value, const RtlFieldInfo * field, std::shared_ptr query) @@ -437,7 +437,7 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param value pointer to value of parameter. * @param field RtlFielInfo holds a lot of information about the embed context. * @param query Object holding the bsoncxx::builder::basic::document. @@ -451,7 +451,7 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param value pointer to value of parameter. * @param field RtlFielInfo holds a lot of information about the embed context. * @param query Object holding the bsoncxx::builder::basic::document. @@ -464,7 +464,7 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param chars Number of chars in value. * @param value pointer to value of parameter. * @param field RtlFielInfo holds a lot of information about the embed context. 
@@ -481,7 +481,7 @@ namespace mongodbembed /** * @brief Appends the key value pair to the document. This is used for inserting values into the * query string that gets parsed into a bson document. - * + * * @param value Decimal value represented as a string. * @param field RtlFielInfo holds a lot of information about the embed context. * @param query Object holding the bsoncxx::builder::basic::document. @@ -494,7 +494,7 @@ namespace mongodbembed /** * @brief Counts the number of fields in the typeInfo object. This method is used to help bind a Dataset * param to a bson document. - * + * * @return int Count of fields in Record Structure. */ int MongoDBRecordBinder::numFields() @@ -502,14 +502,14 @@ namespace mongodbembed int count = 0; const RtlFieldInfo * const *fields = typeInfo->queryFields(); assertex(fields); - while (*fields++) + while (*fields++) count++; return count; } /** * @brief Calls the typeInfo member function process to bind an ECL row to bson. - * + * * @param row Pointer to ECL row. */ void MongoDBRecordBinder::processRow(const byte *row) @@ -520,7 +520,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param len Number of chars in value. * @param value Data to be bound to bson. * @param field Object with information about the current field. @@ -533,7 +533,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param value Data to be bound to bson. * @param field Object with information about the current field. */ @@ -544,7 +544,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param len Number of chars in value. * @param value Data to be bound to bson. * @param field Object with information about the current field. @@ -556,7 +556,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param value Data to be bound to bson. 
* @param field Object with information about the current field. */ @@ -567,7 +567,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param value Data to be bound to bson. * @param field Object with information about the current field. */ @@ -578,7 +578,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param value Data to be bound to bson. * @param field Object with information about the current field. */ @@ -589,7 +589,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param value Data to be bound to bson. * @param digits Number of digits in decimal. * @param precision Number of digits of precision. @@ -607,7 +607,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param chars Number of chars in the value. * @param value Data to be bound to bson. * @param field Object with information about the current field. @@ -619,7 +619,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param len Length of QString * @param value Data to be bound to bson. * @param field Object with information about the current field. @@ -634,7 +634,7 @@ namespace mongodbembed /** * @brief Calls the bind function for the data type of the value. - * + * * @param chars Number of chars in the value. * @param value Data to be bound to bson. * @param field Object with information about the current field. @@ -646,7 +646,7 @@ namespace mongodbembed /** * @brief Checks the next param in the Record. - * + * * @param field Object with information about the current field. * @return unsigned Index keeping track of current parameter. */ @@ -658,7 +658,7 @@ namespace mongodbembed /** * @brief Creates a MongoDBRecordBinder objects and starts processing the row - * + * * @param name Name of the Row. Not necessarily useful here. 
* @param metaVal Information about the types in the row. * @param val Pointer to the row for processing. @@ -672,14 +672,14 @@ namespace mongodbembed /** * @brief Creates a MongoDBDatasetBinder object and starts processing the dataset. - * + * * @param name Name of the Dataset. Not necessarily useful here. * @param metaVal Information about the types in the dataset. * @param val Pointer to dataset stream for processing. */ void MongoDBEmbedFunctionContext::bindDatasetParam(const char *name, IOutputMetaData & metaVal, IRowStream * val) { - if (m_oInputStream) + if (m_oInputStream) { fail("At most one dataset parameter supported"); } @@ -689,7 +689,7 @@ namespace mongodbembed /** * @brief Binds an ECL Data param to a bsoncxx::types::b_utf8 - * + * * @param name Name of the parameter. * @param len Length of the value. * @param val Pointer to data for binding. @@ -706,7 +706,7 @@ namespace mongodbembed /** * @brief Binds an ECL Boolean param to a bsoncxx::types::b_bool. - * + * * @param name Name of the parameter. * @param val Boolean value. */ @@ -720,7 +720,7 @@ namespace mongodbembed /** * @brief Binds an ECL Size param to a bsoncxx::types::b_int64. - * + * * @param name Name of the parameter. * @param size Size of the value. * @param val Integer value. @@ -729,10 +729,10 @@ namespace mongodbembed { bindSignedParam(name, val); } - + /** * @brief Binds an ECL Unsigned Size param to a bsoncxx::types::b_int64. - * + * * @param name Name of the parameter. * @param size Size of the value. * @param val Integer value. @@ -741,10 +741,10 @@ namespace mongodbembed { bindUnsignedParam(name, val); } - + /** * @brief Binds an ECL Real4 param to a bsoncxx::types::b_double. - * + * * @param name Name of the parameter. * @param val float value. */ @@ -753,10 +753,10 @@ namespace mongodbembed checkNextParam(name); query->build()->append(kvp(std::string(name), bsoncxx::types::b_double{val})); } - + /** * @brief Binds an ECL Real param to a bsoncxx::types::b_double. 
- * + * * @param name Name of the parameter. * @param val Double value. */ @@ -765,10 +765,10 @@ namespace mongodbembed checkNextParam(name); query->build()->append(kvp(std::string(name), bsoncxx::types::b_double{val})); } - + /** * @brief Binds an ECL Integer param to a bsoncxx::types::b_int64. - * + * * @param name Name of the parameter. * @param val Signed Integer value. */ @@ -783,7 +783,7 @@ namespace mongodbembed /** * @brief Binds an ECL Unsigned Integer param to a bsoncxx::types::b_int64. - * + * * @param name Name of the parameter. * @param val Unsigned Integer value. */ @@ -794,10 +794,10 @@ namespace mongodbembed query->build()->append(kvp(std::string(name), bsoncxx::types::b_int64{mongoVal})); } - + /** * @brief Binds an ECL String param to a bsoncxx::types::b_utf8. - * + * * @param name Name of the parameter. * @param len Number of chars in string. * @param val String value. @@ -811,10 +811,10 @@ namespace mongodbembed query->build()->append(kvp(std::string(name), bsoncxx::types::b_utf8{std::string(utf8.getstr(), rtlUtf8Size(utf8Chars, utf8.getdata()))})); } - + /** * @brief Binds an ECL VString param to a bsoncxx::types::b_utf8. - * + * * @param name Name of the parameter. * @param val VString value. */ @@ -830,7 +830,7 @@ namespace mongodbembed /** * @brief Binds an ECL UTF8 param to a bsoncxx::types::b_utf8. - * + * * @param name Name of the parameter. * @param chars Number of chars in string. * @param val UTF8 value. @@ -844,12 +844,12 @@ namespace mongodbembed /** * @brief Binds an ECL Unicode param to a bsoncxx::types::b_utf8. - * + * * @param name Name of the parameter. * @param chars Number of chars in string. * @param val Unicode value. 
*/ - void MongoDBEmbedFunctionContext::bindUnicodeParam(const char *name, size32_t chars, const UChar *val) + void MongoDBEmbedFunctionContext::bindUnicodeParam(const char *name, size32_t chars, const UChar *val) { checkNextParam(name); size32_t utf8chars; @@ -863,9 +863,9 @@ namespace mongodbembed * @brief Configures a mongocxx::instance allowing for multiple threads to use it for making connections. * The instance is accessed through the MongoDBConnection class. */ - static void configure() + static void configure() { - class noop_logger : public mongocxx::logger + class noop_logger : public mongocxx::logger { public: virtual void operator()(mongocxx::log_level, @@ -880,7 +880,7 @@ namespace mongodbembed /** * @brief Construct a new MongoDBEmbedFunctionContext object - * + * * @param _logctx Context logger for use with the MongoDBRecordBinder * MongoDBDatasetBinder classes. * @param options Pointer to the list of options that are passed into the Embed function. @@ -905,7 +905,7 @@ namespace mongodbembed // Iterate over the options from the user StringArray inputOptions; inputOptions.appendList(options, ","); - ForEachItemIn(idx, inputOptions) + ForEachItemIn(idx, inputOptions) { const char *opt = inputOptions.item(idx); const char *val = strchr(opt, '='); @@ -960,7 +960,7 @@ namespace mongodbembed if(!isEmptyString(connectionOptions)) connectionString.appendf("&%s", connectionOptions); } - else + else failx("Username or Password not supplied. 
Use the user() or password() options in the EMBED declaration."); } else @@ -970,7 +970,7 @@ namespace mongodbembed std::shared_ptr ptr(new MongoDBQuery(databaseName, collectionName, connectionString, batchSize, limit)); query = ptr; - std::call_once(CONNECTION_CACHE_INIT_FLAG, configure); + std::call_once(CONNECTION_CACHE_INIT_FLAG, configure); } /** @@ -982,7 +982,7 @@ namespace mongodbembed /** * @brief Builds an ECL dataset from the result documents of MongoDB query - * + * * @param _resultAllocator Used for building the ECL dataset by the engine. * @return IRowStream* Stream to ECL dataset handed back to the engine. */ @@ -995,7 +995,7 @@ namespace mongodbembed /** * @brief Builds an ECL row from the result documents of MongoDB query - * + * * @param _resultAllocator Used for building the ECL row by the engine. * @return byte* Pointer to ECL row handed back to the engine. */ @@ -1014,69 +1014,69 @@ namespace mongodbembed return 0; } - bool MongoDBEmbedFunctionContext::getBooleanResult() + bool MongoDBEmbedFunctionContext::getBooleanResult() { UNIMPLEMENTED_X("MongoDB Scalar Return type BOOLEAN"); return false; } - - __int64 MongoDBEmbedFunctionContext::getSignedResult() + + __int64 MongoDBEmbedFunctionContext::getSignedResult() { UNIMPLEMENTED_X("MongoDB Scalar Return type SIGNED INTEGER"); return 0; } - - unsigned __int64 MongoDBEmbedFunctionContext::getUnsignedResult() + + unsigned __int64 MongoDBEmbedFunctionContext::getUnsignedResult() { UNIMPLEMENTED_X("MongoDB Scalar Return type UNSIGNED INTEGER"); return 0; } - void MongoDBEmbedFunctionContext::getDataResult(size32_t &len, void * &result) + void MongoDBEmbedFunctionContext::getDataResult(size32_t &len, void * &result) { UNIMPLEMENTED_X("MongoDB Scalar Return type DATA"); } - - double MongoDBEmbedFunctionContext::getRealResult() + + double MongoDBEmbedFunctionContext::getRealResult() { UNIMPLEMENTED_X("MongoDB Scalar Return type REAL"); return 0.0; } - - void 
MongoDBEmbedFunctionContext::getStringResult(size32_t &chars, char * &result) + + void MongoDBEmbedFunctionContext::getStringResult(size32_t &chars, char * &result) { UNIMPLEMENTED_X("MongoDB Scalar Return type STRING"); } - - void MongoDBEmbedFunctionContext::getUTF8Result(size32_t &chars, char * &result) + + void MongoDBEmbedFunctionContext::getUTF8Result(size32_t &chars, char * &result) { UNIMPLEMENTED_X("MongoDB Scalar Return type UTF8"); } - - void MongoDBEmbedFunctionContext::getUnicodeResult(size32_t &chars, UChar * &result) + + void MongoDBEmbedFunctionContext::getUnicodeResult(size32_t &chars, UChar * &result) { UNIMPLEMENTED_X("MongoDB Scalar Return type UNICODE"); } - - void MongoDBEmbedFunctionContext::getDecimalResult(Decimal &value) + + void MongoDBEmbedFunctionContext::getDecimalResult(Decimal &value) { UNIMPLEMENTED_X("MongoDB Scalar Return type DECIMAL"); } /** * @brief Compiles the embedded script and stores it in the MongoDBQuery object. - * + * * @param chars Length of the embedded script. * @param script Pointer to the script. */ void MongoDBEmbedFunctionContext::compileEmbeddedScript(size32_t chars, const char *script) { - if (script && *script) + if (script && *script) { // Incoming script is not necessarily null terminated. Note that the chars refers to utf8 characters and not bytes. size32_t size = rtlUtf8Size(chars, script); - if (size > 0) + if (size > 0) { StringAttr queryScript; queryScript.set(script, size); @@ -1096,7 +1096,7 @@ namespace mongodbembed /** * @brief Checks the type of a MongoDB element and inserts the key and value using the document builder. - * + * * @tparam T bsoncxx::stream::builder::document or bsoncxx::stream::builder::key_context * @param builder Context for streaming elements into * @param param Key of the pair. 
@@ -1105,35 +1105,35 @@ namespace mongodbembed template void insertValue(T &builder, std::string param, const bsoncxx::document::element& ele) { - if (ele.type() == bsoncxx::type::k_int64) + if (ele.type() == bsoncxx::type::k_int64) { builder << param << ele.get_int64().value; - } - else if (ele.type() == bsoncxx::type::k_bool) + } + else if (ele.type() == bsoncxx::type::k_bool) { builder << param << ele.get_bool().value; - } - else if (ele.type() == bsoncxx::type::k_decimal128) + } + else if (ele.type() == bsoncxx::type::k_decimal128) { builder << param << ele.get_decimal128().value; - } - else if (ele.type() == bsoncxx::type::k_double) + } + else if (ele.type() == bsoncxx::type::k_double) { builder << param << ele.get_double().value; - } - else if (ele.type() == bsoncxx::type::k_utf8) + } + else if (ele.type() == bsoncxx::type::k_utf8) { builder << param << ele.get_string().value; - } - else + } + else { failx("Error retrieving bound value. Result not built."); - } + } } /** * @brief Checks the type of a MongoDB element and inserts the value into the array_context. - * + * * @tparam T bsoncxx::stream::builder::array or bsoncxx::stream::builder::array_context * @param ctx Context for streaming elements into * @param ele Value that needs to be checked for type before inserting. 
@@ -1141,35 +1141,35 @@ namespace mongodbembed template void insertValueArr(T &ctx, const bsoncxx::document::element& ele) { - if (ele.type() == bsoncxx::type::k_int64) + if (ele.type() == bsoncxx::type::k_int64) { ctx << ele.get_int64().value; - } - else if (ele.type() == bsoncxx::type::k_bool) + } + else if (ele.type() == bsoncxx::type::k_bool) { ctx << ele.get_bool().value; - } - else if (ele.type() == bsoncxx::type::k_decimal128) + } + else if (ele.type() == bsoncxx::type::k_decimal128) { ctx << ele.get_decimal128().value; - } - else if (ele.type() == bsoncxx::type::k_double) + } + else if (ele.type() == bsoncxx::type::k_double) { ctx << ele.get_double().value; - } - else if (ele.type() == bsoncxx::type::k_utf8) + } + else if (ele.type() == bsoncxx::type::k_utf8) { ctx << ele.get_string().value; - } - else + } + else { failx("Error retrieving bound value. Array not appended."); - } + } } /** * @brief Checks the document for a particular key. - * + * * @param param The param is coming from the script and will be prefixed * with a '$', but it will not be stored in the document with the '$'. * @param view View of the document that is to be searched. @@ -1178,7 +1178,7 @@ namespace mongodbembed */ bool checkDoc(std::string& param, const bsoncxx::document::view &view) { - if (param[0] == '$') + if (param[0] == '$') { return view.find(param.substr(1)) != view.end(); } @@ -1187,7 +1187,7 @@ namespace mongodbembed /** * @brief Helper method for checking whether the param or value are stored in view - * + * * @tparam T bsoncxx::stream::builder::document or bsoncxx::stream::builder::key_context * @param builder Context for streaming elements into * @param view Document View for looking up bound parameters from the function definition. 
@@ -1198,27 +1198,27 @@ namespace mongodbembed template void insertPair(T &builder, const bsoncxx::document::view &view, const std::string &key, bool isRsvd, std::string value) { - if (checkDoc(value, view)) + if (checkDoc(value, view)) { if (isRsvd) { insertValue(builder, key, view[value.substr(1)]); - } - else + } + else { - if (view[key].type() == bsoncxx::type::k_utf8) + if (view[key].type() == bsoncxx::type::k_utf8) { insertValue(builder, std::string(view[key].get_string().value), view[value.substr(1)]); } } - } - else + } + else { - if (isRsvd) + if (isRsvd) { builder << key << value; - } - else + } + else { builder << view[key].get_string().value << value; } @@ -1227,9 +1227,9 @@ namespace mongodbembed /** * @brief Builds an array object that can hold any number of elements including documents and other arrays. - * + * * @tparam T Either a bsoncxx::stream::builder::array or a bsoncxx::stream::builder::array_context - * @param ctx Context object for streaming array elements into. + * @param ctx Context object for streaming array elements into. * @param view Document View for looking up bound parameters from the function definition. * @param start The point in the embedded script to start parsing. 
*/ @@ -1239,44 +1239,44 @@ namespace mongodbembed std::string findStr = " ,(){}[]\t\n\""; const char *end; - while (*start) + while (*start) { - if (findStr.find(*start) == std::string::npos) + if (findStr.find(*start) == std::string::npos) { end = start + 1; - while (*end && *end != ',' && *end != ' ' && *end != ']') + while (*end && *end != ',' && *end != ' ' && *end != ']') end++; auto param = std::string(start, end - start); bool isRsvd = !checkDoc(param, view); - if (!isRsvd) + if (!isRsvd) { param = param.substr(1); insertValueArr(ctx, view[param]); } - else + else ctx << param; start = end; } // Open subdocument - if (*start == '{') + if (*start == '{') { ctx << open_document << [&](key_context<> kctx) { buildDocument(kctx, view, ++start); } << close_document; - } + } // Open subarray - if (*start == '[') + if (*start == '[') { ctx << open_array << [&](array_context<> actx) { buildArray(actx, view, ++start); } << close_array; } - if (*start == ']') + if (*start == ']') { start++; break; - } - else + } + else start++; } } @@ -1284,7 +1284,7 @@ namespace mongodbembed /** * @brief Helper function for streaming Key Value pairs into a builder object. It is called on document and * key_context objects. Array_context objects don't need Key Value pairs and can take any number of elements. - * + * * @tparam T Either a bsoncxx::stream::builder::document or bsoncxx::stream::builder::key_context * @param builder Builder object for streaming elements into. * @param view Document View for looking up bound parameters from the function definition. 
@@ -1294,74 +1294,74 @@ namespace mongodbembed void MongoDBEmbedFunctionContext::bindKVP(T &builder, const bsoncxx::document::view &view, const char *&start) { const char *end = start + 1; - while (*end && *end != ':' && *end != '\"' && *end != ' ') + while (*end && *end != ':' && *end != '\"' && *end != ' ') end++; // pointer to end of key auto key = std::string(start, end - start); bool isRsvd = !checkDoc(key, view); // If this is a parameter of the function we need to set a flag // Remove "$" for the key if it's in view - if (!isRsvd) - key = key.substr(1); + if (!isRsvd) + key = key.substr(1); start = end + 1; - while (*start && (*start == ' ' || *start == ':')) + while (*start && (*start == ' ' || *start == ':')) start++; - if (*start == '\"') + if (*start == '\"') { end = ++start; - while (*end && *end != '\"') + while (*end && *end != '\"') end++; insertPair(builder, view, key, isRsvd, std::string(start, end - start)); start = end + 1; - } - else if (*start == '$') + } + else if (*start == '$') { end = start + 1; - while (*end && *end != ',' && *end != '}' && *end != '\n' && *end != ' ') + while (*end && *end != ',' && *end != '}' && *end != '\n' && *end != ' ') end++; insertPair(builder, view, key, isRsvd, std::string(start, end - start)); start = end; - } + } // Open subdocument - else if (*start == '{') + else if (*start == '{') { - if (isRsvd) + if (isRsvd) { builder << key << open_document << [&](key_context<> ctx) { buildDocument(ctx, view, ++start); } << close_document; - } - else + } + else { - if (view[key].type() == bsoncxx::type::k_utf8) + if (view[key].type() == bsoncxx::type::k_utf8) { builder << key << open_document << [&](key_context<> ctx) { buildDocument(ctx, view, ++start); } << close_document; - } - else + } + else failx("Key must be type String."); } } // Open subarray - else if (*start == '[') - { - if (!isRsvd) + else if (*start == '[') + { + if (!isRsvd) { if (view[key].type() == bsoncxx::type::k_utf8) key = 
std::string{view[key].get_string().value}; - else + else failx("Key must be type String."); } builder << key << open_array << [&](array_context<> ctx) { buildArray(ctx, view, ++start); } << close_array; - } + } } /** * @brief Builds a MongoDB document by parsing the embedded script. - * + * * @tparam T Can take any object of the type bsoncxx::stream::builder. Only documents, * key_contexts, and array_contexts are passed in. * @param builder Object to stream elements into. @@ -1376,14 +1376,14 @@ namespace mongodbembed while (*start) { // If key is found bind pair - if (findStr.find(*start) == std::string::npos) + if (findStr.find(*start) == std::string::npos) bindKVP(builder, view, start); - if (*start == '}' || *start == ';') + if (*start == '}' || *start == ';') { start++; break; - } - else + } + else start++; } } @@ -1391,20 +1391,20 @@ namespace mongodbembed /** * @brief Creates a MongoDB pipeline from the document builder. A pipeline must be of the form * [, ...] where each stage is a MongoDB document. - * + * * @param view Document view that holds the bound parameters from the function definition. * @param stages pipeline that holds the stages for the aggregation. * @param start Pointer reference to the current place in the embedded script. */ void MongoDBEmbedFunctionContext::buildPipeline(const bsoncxx::document::view &view, mongocxx::pipeline &stages, const char *&start) { - while (*start && *start != '[') + while (*start && *start != '[') start++; auto builder = document{}; - while (*start && *start != ';') + while (*start && *start != ';') { - if (*start == '{') + if (*start == '{') { buildDocument(builder, view, ++start); // buildDocument will bring start to the ending "}" of the document bsoncxx::document::value doc_value = builder << finalize; @@ -1417,7 +1417,7 @@ namespace mongodbembed /** * @brief Builds insert arguments and inserts a document. - * + * * @param coll MongoDB collection to do insert into. 
* @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. @@ -1429,11 +1429,11 @@ namespace mongodbembed doc_value = builder << finalize; // finalize returns a document::value view = doc_value.view(); - try + try { coll.insert_one(view); // Inserts one MongoDB document into coll, the collection chosen by the URI - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); } @@ -1441,7 +1441,7 @@ namespace mongodbembed /** * @brief Builds find arguments and finds all documents matching a filter. If find_one is called only one document will be returned. - * + * * @param coll MongoDB collection to do insert into. * @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. @@ -1455,37 +1455,37 @@ namespace mongodbembed auto filter_value = builder << finalize; auto filter_view = filter_value.view(); - if (com == "find_one") + if (com == "find_one") { bsoncxx::stdx::optional doc; - try + try { doc = coll.find_one(filter_view); // Returns single document - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); } - if (doc) + if (doc) { std::string deserialized; deserializeEJSON(deserialized, bsoncxx::to_json(doc->view(), bsoncxx::ExtendedJsonMode::k_relaxed).c_str()); // Deserialize result row query->result()->append(deserialized.c_str()); } - } - else + } + else { - try + try { mongocxx::options::find opts{}; if (query->size() != 0) opts.batch_size(query->size()); // Batch size default is 100 and is set by user in MongoDBEmbedFunctionContext constructor opts.limit(query->queryLimit()); - - while (*start && *start == ' ') + + while (*start && *start == ' ') start++; // Move past whitespace if there is any // if there is a comma then we have a projection to build - if (*start == ',') + if (*start == ',') { 
auto projection = document{}; buildDocument(projection, view, start); @@ -1495,13 +1495,13 @@ namespace mongodbembed } mongocxx::cursor cursor = coll.find(filter_view, opts); // Get result documents and append them to the row StringArray - for (auto&& doc : cursor) + for (auto&& doc : cursor) { std::string deserialized; - deserializeEJSON(deserialized, bsoncxx::to_json(doc, bsoncxx::ExtendedJsonMode::k_relaxed).c_str()); + deserializeEJSON(deserialized, bsoncxx::to_json(doc, bsoncxx::ExtendedJsonMode::k_relaxed).c_str()); query->result()->append(deserialized.c_str()); - } - } + } + } catch (const mongocxx::exception& e) { reportQueryFailure(e); @@ -1511,32 +1511,32 @@ namespace mongodbembed /** * @brief Builds aggregate arguments and runs the aggregate on a collection. - * + * * @param coll MongoDB collection to do insert into. * @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. */ void MongoDBEmbedFunctionContext::mdbaggregate(mongocxx::collection &coll, bsoncxx::document::view &view, bsoncxx::document::value &doc_value) { - mongocxx::pipeline stages; + mongocxx::pipeline stages; auto start = query->script(); buildPipeline(view, stages, start); // Builds a document for each stage and appends it to the pipeline - try + try { mongocxx::options::aggregate opts{}; if (query->size() != 0) opts.batch_size(query->size()); // Batch size from user input. 
mongocxx::cursor cursor = coll.aggregate(stages, opts); // Returns a cursor object of documents - for (auto&& doc : cursor) + for (auto&& doc : cursor) { std::string deserialized; deserializeEJSON(deserialized, bsoncxx::to_json(doc, bsoncxx::ExtendedJsonMode::k_relaxed).c_str()); query->result()->append(deserialized.c_str()); } - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); } @@ -1544,7 +1544,7 @@ namespace mongodbembed /** * @brief Builds a document and runs a command on a MongoDB database. - * + * * @param db MongoDB Database for running the command on. * @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. @@ -1552,22 +1552,22 @@ namespace mongodbembed void MongoDBEmbedFunctionContext::mdbrunCommand(mongocxx::database &db, bsoncxx::document::view &view, bsoncxx::document::value &doc_value) { auto builder = document{}; - auto start = query->script(); + auto start = query->script(); buildDocument(builder, view, start); doc_value = builder << finalize; view = doc_value.view(); - try + try { bsoncxx::document::value doc = db.run_command(view); // Returns a single document with operation specific output - - if (doc.view()["ok"].get_double() == double{1}) + + if (doc.view()["ok"].get_double() == double{1}) { std::string deserialized; deserializeEJSON(deserialized, bsoncxx::to_json(doc.view()["value"].get_document().view(), bsoncxx::ExtendedJsonMode::k_relaxed).c_str()); query->result()->append(deserialized.c_str()); } - } + } catch (const mongocxx::operation_exception& e) { failx("runcommand Error: %s",e.what()); @@ -1576,7 +1576,7 @@ namespace mongodbembed /** * @brief Creates a mongodb Index for searching a collection. Takes either one or two documents. - * + * * @param coll MongoDB collection to do insert into. * @param view View of document where stored params are. 
* @param doc_value Document value gets passed in so we don't have to make a new one. @@ -1587,10 +1587,10 @@ namespace mongodbembed auto start = query->script(); buildDocument(builder, view, start); - while (*start && *start == ' ') + while (*start && *start == ' ') start++; // Remove Whitespace // If there is a comma then we have an options document to build - if (*start == ',') + if (*start == ',') { auto options = document{}; buildDocument(options, view, start); @@ -1598,33 +1598,33 @@ namespace mongodbembed auto keys_val = builder << finalize; auto options_val = options << finalize; - try + try { coll.create_index(keys_val.view(), options_val.view()); - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); - } - } - else + } + } + else { auto keys_val = builder << finalize; - try + try { coll.create_index(keys_val.view()); - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); - } + } } } /** * @brief Builds a document and runs either delete_one or delete_many on a collection. - * + * * @param coll MongoDB collection to do insert into. * @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. 
@@ -1639,29 +1639,29 @@ namespace mongodbembed doc_value = builder << finalize; view = doc_value.view(); - if (com == "delete_one") + if (com == "delete_one") { - try + try { result = coll.delete_one(view); } - catch (const mongocxx::exception& e) + catch (const mongocxx::exception& e) { reportQueryFailure(e); - } - } - else + } + } + else { - try + try { result = coll.delete_many(view); } - catch (const mongocxx::exception& e) + catch (const mongocxx::exception& e) { reportQueryFailure(e); } } - if (result) + if (result) { StringBuffer json; json.appendf("{ \"deleted_count\" : %i }", result->deleted_count()); @@ -1671,7 +1671,7 @@ namespace mongodbembed /** * @brief Builds a document and updates one or many documents based on the filter. - * + * * @param coll MongoDB collection to do insert into. * @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. @@ -1682,7 +1682,7 @@ namespace mongodbembed // takes either (bsoncxx::document, bsoncxx::document) or (bsoncxx::document, mongocxx::pipeline) auto builder = document{}; bsoncxx::stdx::optional result; - auto start = query->script(); + auto start = query->script(); buildDocument(builder, view, start); // Build filter document auto filter_value = builder << finalize; auto filter_view = filter_value.view(); @@ -1690,67 +1690,67 @@ namespace mongodbembed while (*start && (*start == ',' || *start == ' ')) start++; // Move to next argument // Look for document ('{'), a pipeline ('['), or an error - if (*start == '{') + if (*start == '{') { builder.clear(); buildDocument(builder, view, ++start); auto update_value = builder << finalize; auto update_view = update_value.view(); - if (com == "update_one") + if (com == "update_one") { - try + try { result = coll.update_one(filter_view, update_view); // Returns an update object with counts of affected documents - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) 
{ reportQueryFailure(e); } - } - else - { - try + } + else + { + try { result = coll.update_many(filter_view, update_view); - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); } } - } - else if (*start == '[') + } + else if (*start == '[') { - mongocxx::pipeline stages; + mongocxx::pipeline stages; buildPipeline(view, stages, start); // Builds a document for each stage and appends it to the pipeline - if (com == "update_one") + if (com == "update_one") { - try + try { result = coll.update_one(filter_view, stages); - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); - } - } - else + } + } + else { - try + try { result = coll.update_many(filter_view, stages); - } - catch (const mongocxx::exception& e) + } + catch (const mongocxx::exception& e) { reportQueryFailure(e); } } - } - else + } + else failx("Incorrect Arguments given to update(). Expected: (bsoncxx::document, bsoncxx::document), (bsoncxx::document, mongocxx::pipeline)."); - // Check if there is a result document and extract useful fields - if (result) + // Check if there is a result document and extract useful fields + if (result) { StringBuffer json; json.appendf("{ \"matched_count\" : %i, \"modified_count\" : %i }", result->matched_count(), result->modified_count()); @@ -1760,59 +1760,59 @@ namespace mongodbembed /** * @brief Helper function for deciding which query to run based on the command. - * + * * @param db MongoDB database for the runCommand function. * @param coll MongoDB collection to do insert into. * @param view View of document where stored params are. * @param doc_value Document value gets passed in so we don't have to make a new one. 
*/ - void MongoDBEmbedFunctionContext::runQuery(mongocxx::database &db, mongocxx::collection &coll, bsoncxx::document::view &view, bsoncxx::document::value &doc_value) + void MongoDBEmbedFunctionContext::runQuery(mongocxx::database &db, mongocxx::collection &coll, bsoncxx::document::view &view, bsoncxx::document::value &doc_value) { std::string com = query->cmd(); // Handle multiple MongoDB Operations - if (com == "insert") + if (com == "insert") { mdbinsert(coll, view, doc_value); - } + } // Returns the first document found that matches the filter - else if (com == "find_one" || com == "find") + else if (com == "find_one" || com == "find") { mdbfind(coll, view, doc_value, com); - } - else if (com == "update_one" || com == "update_many") + } + else if (com == "update_one" || com == "update_many") { mdbupdate(coll, view, doc_value, com); } - // Takes a MongoDB document and can run various commands at a database level. - else if (com == "runCommand") + // Takes a MongoDB document and can run various commands at a database level. 
+ else if (com == "runCommand") { mdbrunCommand(db, view, doc_value); } - // Takes a MongoDB pipeline with various stages for chaining commands together - else if (com == "aggregate") + // Takes a MongoDB pipeline with various stages for chaining commands together + else if (com == "aggregate") { mdbaggregate(coll, view, doc_value); - } - else if (com == "delete_one" || com == "delete_many") + } + else if (com == "delete_one" || com == "delete_many") { mdbdelete(coll, view, doc_value, com); - } - else if (com == "create_index") + } + else if (com == "create_index") { mdbcreateIndex(coll, view, doc_value); - } - else + } + else { StringBuffer err; err.appendf("Unsupported operation: %s", com.c_str()); UNSUPPORTED(err.str()); } } - + /** * @brief Calls the execute function - * + * */ void MongoDBEmbedFunctionContext::callFunction() { @@ -1820,16 +1820,16 @@ namespace mongodbembed } /** - * @brief If a dataset or row was passed in it called executeAll otherwise it gets + * @brief If a dataset or row was passed in it called executeAll otherwise it gets * a connection and runs the query using the embedded script. - * + * */ void MongoDBEmbedFunctionContext::execute() { m_oMDBConnection->createInstance().create_connection(query->queryConnectionString(), query->queryQueryString()); if (m_oInputStream) m_oInputStream->executeAll(m_oMDBConnection); - else + else { // Get a MongoDB instance from the connection object auto conn = m_oMDBConnection->createInstance().get_connection(query->queryConnectionString(), query->queryQueryString()); @@ -1845,7 +1845,7 @@ namespace mongodbembed /** * @brief Checks the next param to see if it was passed in. - * + * * @param name Parameter name * @return unsigned Index of next parameter to check. */ @@ -1858,15 +1858,15 @@ namespace mongodbembed /** * @brief Gets a Boolean result for an ECL Row - * + * * @param field Holds the value of the field. - * @return bool Returns the boolean value from the result row. 
+ * @return bool Returns the boolean value from the result row. */ bool MongoDBRowBuilder::getBooleanResult(const RtlFieldInfo *field) { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); return p.boolResult; @@ -1879,7 +1879,7 @@ namespace mongodbembed /** * @brief Gets a data result from the result row and passes it back to engine through result. - * + * * @param field Holds the value of the field. * @param len Length of the Data value. * @param result Used for returning the result to the caller. @@ -1888,7 +1888,7 @@ namespace mongodbembed { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); rtlStrToDataX(len, result, p.resultChars, p.stringResult); @@ -1899,7 +1899,7 @@ namespace mongodbembed /** * @brief Gets a real result from the result row. - * + * * @param field Holds the value of the field. * @return double Double value to return. */ @@ -1907,7 +1907,7 @@ namespace mongodbembed { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); return p.doubleResult; @@ -1920,14 +1920,14 @@ namespace mongodbembed /** * @brief Gets the Signed Integer result from the result row. - * + * * @param field Holds the value of the field. * @return __int64 Value to return. */ __int64 MongoDBRowBuilder::getSignedResult(const RtlFieldInfo *field) { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); return p.uintResult; @@ -1940,14 +1940,14 @@ namespace mongodbembed /** * @brief Gets the Unsigned Integer result from the result row. - * + * * @param field Holds the value of the field. * @return unsigned Value to return. 
*/ unsigned __int64 MongoDBRowBuilder::getUnsignedResult(const RtlFieldInfo *field) { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); @@ -1961,7 +1961,7 @@ namespace mongodbembed /** * @brief Gets a String from the result row. - * + * * @param field Holds the value of the field. * @param chars Number of chars in the String. * @param result Variable used for returning string back to the caller. @@ -1970,7 +1970,7 @@ namespace mongodbembed { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); rtlUtf8ToStrX(chars, result, p.resultChars, p.stringResult); @@ -1984,7 +1984,7 @@ namespace mongodbembed /** * @brief Gets a UTF8 from the result row. - * + * * @param field Holds the value of the field. * @param chars Number of chars in the UTF8. * @param result Variable used for returning UTF8 back to the caller. @@ -1993,7 +1993,7 @@ namespace mongodbembed { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); rtlUtf8ToUtf8X(chars, result, p.resultChars, p.stringResult); @@ -2007,7 +2007,7 @@ namespace mongodbembed /** * @brief Gets a Unicode from the result row. - * + * * @param field Holds the value of the field. * @param chars Number of chars in the Unicode. * @param result Variable used for returning Unicode back to the caller. @@ -2016,7 +2016,7 @@ namespace mongodbembed { const char * value = nextField(field); - if (!value || !*value) + if (!value || !*value) { NullFieldProcessor p(field); rtlUnicodeToUnicodeX(chars, result, p.resultChars, p.unicodeResult); @@ -2030,14 +2030,14 @@ namespace mongodbembed /** * @brief Gets a decimal from the result row. - * + * * @param field Holds the value of the field. * @param value Variable used for returning decimal to caller. 
*/ void MongoDBRowBuilder::getDecimalResult(const RtlFieldInfo *field, Decimal &value) { const char * dvalue = nextField(field); - if (!dvalue || !*dvalue) + if (!dvalue || !*dvalue) { NullFieldProcessor p(field); value.set(p.decimalResult); @@ -2053,7 +2053,7 @@ namespace mongodbembed /** * @brief Starts a new Set. - * + * * @param field Field with information about the context of the set. * @param isAll Not Supported. */ @@ -2064,7 +2064,7 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (!xpath.isEmpty()) + if (!xpath.isEmpty()) { PathTracker newPathNode(xpath, CPNTSet); StringBuffer newXPath; @@ -2073,8 +2073,8 @@ namespace mongodbembed newPathNode.childCount = m_oResultRow->getCount(newXPath); m_pathStack.push_back(newPathNode); - } - else + } + else { failx("processBeginSet: Field name or xpath missing"); } @@ -2082,7 +2082,7 @@ namespace mongodbembed /** * @brief Checks if we should process another set. - * + * * @param field Context information about the set. * @return true If the children that we have process is less than the total child count. * @return false If all the children sets have been processed. @@ -2094,7 +2094,7 @@ namespace mongodbembed /** * @brief Starts a new Dataset. - * + * * @param field Information about the context of the dataset. */ void MongoDBRowBuilder::processBeginDataset(const RtlFieldInfo * field) @@ -2102,7 +2102,7 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (!xpath.isEmpty()) + if (!xpath.isEmpty()) { PathTracker newPathNode(xpath, CPNTDataset); StringBuffer newXPath; @@ -2111,8 +2111,8 @@ namespace mongodbembed newPathNode.childCount = m_oResultRow->getCount(newXPath); m_pathStack.push_back(newPathNode); - } - else + } + else { failx("processBeginDataset: Field name or xpath missing"); } @@ -2120,7 +2120,7 @@ namespace mongodbembed /** * @brief Starts a new Row. - * + * * @param field Information about the context of the row. 
*/ void MongoDBRowBuilder::processBeginRow(const RtlFieldInfo * field) @@ -2128,26 +2128,26 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (!xpath.isEmpty()) + if (!xpath.isEmpty()) { - if (strncmp(xpath.str(), "", 12) == 0) + if (strncmp(xpath.str(), "", 12) == 0) { // Row within child dataset - if (m_pathStack.back().nodeType == CPNTDataset) + if (m_pathStack.back().nodeType == CPNTDataset) { m_pathStack.back().currentChildIndex++; - } - else + } + else { failx(" received with no outer dataset designated"); } - } - else + } + else { m_pathStack.push_back(PathTracker(xpath, CPNTScalar)); } - } - else + } + else { failx("processBeginRow: Field name or xpath missing"); } @@ -2155,7 +2155,7 @@ namespace mongodbembed /** * @brief Checks whether we should process the next row. - * + * * @param field Information about the context of the row. * @return true If the number of child rows process is less than the total count of children. * @return false If all of the child rows have been processed. @@ -2167,7 +2167,7 @@ namespace mongodbembed /** * @brief Ends a set. - * + * * @param field Information about the context of the set. */ void MongoDBRowBuilder::processEndSet(const RtlFieldInfo * field) @@ -2175,7 +2175,7 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (!xpath.isEmpty() && !m_pathStack.empty() && strcmp(xpath.str(), m_pathStack.back().nodeName.str()) == 0) + if (!xpath.isEmpty() && !m_pathStack.empty() && strcmp(xpath.str(), m_pathStack.back().nodeName.str()) == 0) { m_pathStack.pop_back(); } @@ -2183,7 +2183,7 @@ namespace mongodbembed /** * @brief Ends a dataset. - * + * * @param field Information about the context of the dataset. 
*/ void MongoDBRowBuilder::processEndDataset(const RtlFieldInfo * field) @@ -2191,14 +2191,14 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (!xpath.isEmpty()) + if (!xpath.isEmpty()) { - if (!m_pathStack.empty() && strcmp(xpath.str(), m_pathStack.back().nodeName.str()) == 0) + if (!m_pathStack.empty() && strcmp(xpath.str(), m_pathStack.back().nodeName.str()) == 0) { m_pathStack.pop_back(); } - } - else + } + else { failx("processEndDataset: Field name or xpath missing"); } @@ -2206,7 +2206,7 @@ namespace mongodbembed /** * @brief Ends a row. - * + * * @param field Information about the context of the row. */ void MongoDBRowBuilder::processEndRow(const RtlFieldInfo * field) @@ -2214,21 +2214,21 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (!xpath.isEmpty()) + if (!xpath.isEmpty()) { - if (!m_pathStack.empty()) + if (!m_pathStack.empty()) { - if (m_pathStack.back().nodeType == CPNTDataset) + if (m_pathStack.back().nodeType == CPNTDataset) { m_pathStack.back().childrenProcessed++; - } - else if (strcmp(xpath.str(), m_pathStack.back().nodeName.str()) == 0) + } + else if (strcmp(xpath.str(), m_pathStack.back().nodeName.str()) == 0) { m_pathStack.pop_back(); } } - } - else + } + else { failx("processEndRow: Field name or xpath missing"); } @@ -2236,7 +2236,7 @@ namespace mongodbembed /** * @brief Gets the next field and processes it. - * + * * @param field Information about the context of the next field. * @return const char* Result of building field. 
*/ @@ -2245,19 +2245,19 @@ namespace mongodbembed StringBuffer xpath; xpathOrName(xpath, field); - if (xpath.isEmpty()) + if (xpath.isEmpty()) { failx("nextField: Field name or xpath missing"); } StringBuffer fullXPath; - if (!m_pathStack.empty() && m_pathStack.back().nodeType == CPNTSet && strncmp(xpath.str(), "", 13) == 0) + if (!m_pathStack.empty() && m_pathStack.back().nodeType == CPNTSet && strncmp(xpath.str(), "", 13) == 0) { m_pathStack.back().currentChildIndex++; constructNewXPath(fullXPath, NULL); m_pathStack.back().childrenProcessed++; - } - else + } + else { constructNewXPath(fullXPath, xpath.str()); } @@ -2269,27 +2269,27 @@ namespace mongodbembed { outXPath.clear(); - if (field->xpath) + if (field->xpath) { - if (field->xpath[0] == xpathCompoundSeparatorChar) + if (field->xpath[0] == xpathCompoundSeparatorChar) { outXPath.append(field->xpath + 1); - } - else + } + else { const char * sep = strchr(field->xpath, xpathCompoundSeparatorChar); - if (!sep) + if (!sep) { outXPath.append(field->xpath); - } - else + } + else { outXPath.append(field->xpath, 0, static_cast(sep - field->xpath)); } } - } - else + } + else { outXPath.append(field->name); } @@ -2301,19 +2301,19 @@ namespace mongodbembed outXPath.clear(); - if (!nextNodeIsFromRoot) + if (!nextNodeIsFromRoot) { // Build up full parent xpath using our previous components - for (std::vector::const_iterator iter = m_pathStack.begin(); iter != m_pathStack.end(); iter++) + for (std::vector::const_iterator iter = m_pathStack.begin(); iter != m_pathStack.end(); iter++) { - if (strncmp(iter->nodeName, "", 5) != 0) + if (strncmp(iter->nodeName, "", 5) != 0) { - if (!outXPath.isEmpty()) + if (!outXPath.isEmpty()) { outXPath.append("/"); } outXPath.append(iter->nodeName); - if (iter->nodeType == CPNTDataset || iter->nodeType == CPNTSet) + if (iter->nodeType == CPNTDataset || iter->nodeType == CPNTSet) { outXPath.appendf("[%d]", iter->currentChildIndex); } @@ -2321,9 +2321,9 @@ namespace mongodbembed } } - if 
(nextNode && *nextNode) + if (nextNode && *nextNode) { - if (!outXPath.isEmpty()) + if (!outXPath.isEmpty()) { outXPath.append("/"); } @@ -2332,9 +2332,9 @@ namespace mongodbembed } /** - * @brief Serves as the entry point for the HPCC Engine into the plugin and is how it obtains a + * @brief Serves as the entry point for the HPCC Engine into the plugin and is how it obtains a * MongoDBEmbedFunctionContext object for creating the query and executing it. - * + * */ class MongoDBEmbedContext : public CInterfaceOf { @@ -2346,12 +2346,12 @@ namespace mongodbembed virtual IEmbedFunctionContext * createFunctionContextEx(ICodeContext * ctx, const IThorActivityContext *activityCtx, unsigned flags, const char *options) override { - if (flags & EFimport) + if (flags & EFimport) { UNSUPPORTED("IMPORT"); return nullptr; - } - else + } + else return new MongoDBEmbedFunctionContext(ctx ? ctx->queryContextLogger() : queryDummyContextLogger(), options, flags); } diff --git a/plugins/mongodb/mongodbembed.hpp b/plugins/mongodb/mongodbembed.hpp index 6d2326ac24a..f78eac053ef 100755 --- a/plugins/mongodb/mongodbembed.hpp +++ b/plugins/mongodb/mongodbembed.hpp @@ -187,20 +187,20 @@ namespace mongodbembed /** * @brief Holds information about where to send the query, how to query, and the results of the query. - * + * */ class MongoDBQuery { public: /** * @brief Stores the databaseName and CollectionName for executing the operations. - * + * * @param database MongoDB database to connect to. * @param collection MongoDB collection to connect to. * @param _connectionString Connection string for creating the mongocxx::uri. * @param _batchSize The number of documents MongoDB should return per batch. 
*/ - MongoDBQuery(const char *database, const char *collection, const char *_connectionString, std::int32_t _batchSize, std::int32_t _limit) + MongoDBQuery(const char *database, const char *collection, const char *_connectionString, std::int32_t _batchSize, std::int32_t _limit) { databaseName = database; collectionName = collection; @@ -211,10 +211,10 @@ namespace mongodbembed /** * @brief Set the Embed object and remove leading characters. - * + * * @param eScript pointer to beginning of the script. */ - void setEmbed(const char *eScript) + void setEmbed(const char *eScript) { const char *script = eScript; const char *end; @@ -237,7 +237,7 @@ namespace mongodbembed /** * @brief Gets pointer to database name. - * + * * @return const char* Name of the database. */ const char* database() @@ -247,57 +247,57 @@ namespace mongodbembed /** * @brief Gets pointer to collection name. - * + * * @return const char* Name of the collection. */ - const char* collection() + const char* collection() { return collectionName.c_str(); } /** * @brief Gets pointer to script. - * + * * @return const char* Beginning of the script. */ - const char*& script() + const char*& script() { return cursor; } /** * @brief Gets pointer to command. - * + * * @return const char* Beginning of the command. */ - const char* cmd() + const char* cmd() { return queryCMD.c_str(); } - + /** * @brief Returns a pointer to the basic builder used for binding ECL datasets and rows. - * + * * @return bsoncxx::builder::basic::document* Pointer to builder. */ - bsoncxx::builder::basic::document* build() + bsoncxx::builder::basic::document* build() { return &builder; - } + } /** * @brief Returns a pointer to the result rows from the MongoDB operation. - * + * * @return StringArray* Result rows in standard json. 
*/ - StringArray* result() + StringArray* result() { return &result_rows; } /** * @brief Returns a copy of the batch size to use in mongocxx::options - * + * * @return std::int32_t Batch Size */ std::int32_t size() @@ -308,7 +308,7 @@ namespace mongodbembed /** * @brief Returns a const char pointer to the connection string for * creating the uri. - * + * * @return The connection string as a const char pointer. */ const char * queryConnectionString() @@ -318,7 +318,7 @@ namespace mongodbembed /** * @brief Returns the query string for building the key to the hash. - * + * * @return const char* Query string for hashing. */ const char * queryQueryString() @@ -328,7 +328,7 @@ namespace mongodbembed /** * @brief Returns the maximum number of documents return by the cursor - * + * * @return std::int32_t argument for the mongocxx::cursor::limit() function. */ std::int32_t queryLimit() @@ -352,7 +352,7 @@ namespace mongodbembed /** * @brief Builds ECL Records from MongoDB result rows. - * + * */ class MongoDBRowStream : public RtlCInterface, implements IRowStream { @@ -374,35 +374,35 @@ namespace mongodbembed * @brief Class for keeping a heap allocated MongoDB instance. The reason this is important is because * MongoDB only allows one instance to be created at any one time, so we create an instance on the heap * for every thread to share. - * + * */ - class MongoDBConnection + class MongoDBConnection { private: typedef std::map> ObjMap; - + public: /** * @brief Creates a static reference to a MongoDB instance that is alive * for the entire time MongoDBEmbedFunctionContext is used. - * + * * @return MongoDBConnection& A reference to a MongoDBConnection * instance used for connecting to a database. */ - static MongoDBConnection& createInstance() + static MongoDBConnection& createInstance() { static MongoDBConnection _instance; return _instance; } /** - * @brief Configures the MongoDB instance for the client objects to use for connections. 
It + * @brief Configures the MongoDB instance for the client objects to use for connections. It * should only be called once because only one instance object is allowed per program. - * + * * @param instance The instance object that is to be kept alive for multiple * threads to have access to. */ - void configure(std::unique_ptr && _instance) + void configure(std::unique_ptr && _instance) { instance = std::move(_instance); } @@ -415,18 +415,18 @@ namespace mongodbembed * Replica Set: mongodb://:@mongodb0.example.com:27017,mongodb1.example.com:27017/?authSource=admin&replicaSet=myRepl * Sharded: mongodb://:@mongos0.example.com:27017,mongos1.example.com:27017/?authSource=admin * Additional arguments can be passed after the server name/s are added via ?=&= - * + * * @param connectionString The connection string for constructing the client object. - * + * * @param queryString A const char * holding the query string for hashing. */ - void create_connection(const char *connectionString, const char * queryString) + void create_connection(const char *connectionString, const char * queryString) { // Use a hash of the connection string as the key to finding // any connection objects hash64_t key = rtlHash64VStr(queryString, rtlHash64VStr(connectionString, 0)); - std::shared_ptr client_ptr = std::make_shared(mongocxx::uri{connectionString}); + std::shared_ptr client_ptr = std::make_shared(mongocxx::uri{connectionString}); { CriticalBlock block(connLock); @@ -437,16 +437,16 @@ namespace mongodbembed /** * @brief Acquires a mongocxx client from the connections map. - * + * * @param connectionString A const char * holding the connection parameters. - * + * * @param queryString A const char * holding the query string for hashing. - * + * * @return A shared pointer to the mongocxx:client object for connecting to the database. 
*/ std::shared_ptr get_connection(const char *connectionString, const char * queryString) { - // Get key for client object + // Get key for client object hash64_t key = rtlHash64VStr(queryString, rtlHash64VStr(connectionString, 0)); { @@ -466,7 +466,7 @@ namespace mongodbembed /** * @brief Builds ECL records for MongoDBRowStream. - * + * */ class MongoDBRowBuilder : public CInterfaceOf { @@ -509,13 +509,13 @@ namespace mongodbembed /** * @brief Binds ECL records to bson Documents - * + * */ class MongoDBRecordBinder : public CInterfaceOf { public: MongoDBRecordBinder(const IContextLogger &_logctx, const RtlTypeInfo *_typeInfo, std::shared_ptr _query, int _firstParam) - : logctx(_logctx), typeInfo(_typeInfo), firstParam(_firstParam), dummyField("", NULL, typeInfo), thisParam(_firstParam) + : logctx(_logctx), typeInfo(_typeInfo), firstParam(_firstParam), dummyField("", NULL, typeInfo), thisParam(_firstParam) { query = _query; } @@ -576,14 +576,14 @@ namespace mongodbembed /** * @brief Binds an ECL dataset to a vector of bson documents. - * + * */ class MongoDBDatasetBinder : public MongoDBRecordBinder { public: /** * @brief Construct a new MongoDBDataset Binder object - * + * * @param _logctx logger for building the dataset. * @param _input Stream of input of dataset. * @param _typeInfo Field type info. @@ -597,7 +597,7 @@ namespace mongodbembed /** * @brief Gets the next ECL row and binds it to a MongoDB document. - * + * * @return true If there is a row to process. * @return false If there are no rows left. */ @@ -612,7 +612,7 @@ namespace mongodbembed /** * @brief Binds all the rows of the dataset to bson documents and adds them to an array for calling insert many. - * + * * @param m_oMDBConnection Connection object for getting acces to the mongocxx::instance object. 
*/ void executeAll(MongoDBConnection * m_oMDBConnection) @@ -635,11 +635,11 @@ namespace mongodbembed } if(cmd != "insert") failx("Only insert operations are supported for Dataset arguments."); - try + try { result = coll.insert_many(documents); - } - catch (const mongocxx::exception &e) + } + catch (const mongocxx::exception &e) { failx("insert_many execution error: %s", e.what()); } @@ -655,10 +655,10 @@ namespace mongodbembed protected: Owned input; }; - + /** - * @brief Main interface for the engine to interact with the plugin. The get functions return results to the engine and the Rowstream and - * + * @brief Main interface for the engine to interact with the plugin. The get functions return results to the engine and the Rowstream and + * */ class MongoDBEmbedFunctionContext : public CInterfaceOf { @@ -753,7 +753,7 @@ namespace mongodbembed Owned m_oInputStream; //! Input Stream used for building a dataset. MongoDBConnection * m_oMDBConnection; //! Pointer to a heap allocated mongocxx::instance. std::shared_ptr query; //! Holds the script for performing query ,and the database and collection to operate on. - + TokenDeserializer m_tokenDeserializer; TokenSerializer m_tokenSerializer; unsigned m_nextParam; //! Index of the next parameter to process. 
From bab20ed5af5ed89f154559f4e0299739c4ce44d9 Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Mon, 1 Apr 2024 16:30:01 -0400 Subject: [PATCH 10/18] HPCC-30771 ECL Watch v9 WUs list filter allow multiple clusters Changed our AsyncDropdown component to accept a "multiSelect" parameter (which is already built-in to the extended FluentUI Dropdown) Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/src-react/components/DataPatterns.tsx | 4 +- esp/src/src-react/components/Workunits.tsx | 2 +- .../src-react/components/forms/AddBinding.tsx | 6 +- .../components/forms/AddPermission.tsx | 4 +- .../src-react/components/forms/CopyFile.tsx | 4 +- .../components/forms/DesprayFile.tsx | 6 +- esp/src/src-react/components/forms/Fields.tsx | 135 +++++++++++++----- .../components/forms/GroupAddUser.tsx | 4 +- .../src-react/components/forms/Optimize.tsx | 4 +- .../src-react/components/forms/RemoteCopy.tsx | 4 +- .../components/forms/ReplicateFile.tsx | 4 +- .../components/forms/UserAddGroup.tsx | 4 +- .../forms/landing-zone/BlobImportForm.tsx | 6 +- .../landing-zone/DelimitedImportForm.tsx | 6 +- .../forms/landing-zone/FileListForm.tsx | 6 +- .../forms/landing-zone/FixedImportForm.tsx | 6 +- .../forms/landing-zone/JsonImportForm.tsx | 6 +- .../forms/landing-zone/VariableImportForm.tsx | 6 +- .../forms/landing-zone/XmlImportForm.tsx | 6 +- 19 files changed, 144 insertions(+), 79 deletions(-) diff --git a/esp/src/src-react/components/DataPatterns.tsx b/esp/src/src-react/components/DataPatterns.tsx index 9abc1e3e5cd..703ed29891a 100644 --- a/esp/src/src-react/components/DataPatterns.tsx +++ b/esp/src/src-react/components/DataPatterns.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { CommandBar, ContextualMenuItemType, ICommandBarItemProps, mergeStyleSets, ScrollablePane, ScrollbarVisibility, Sticky, StickyPositionType } from "@fluentui/react"; +import { CommandBar, ContextualMenuItemType, 
ICommandBarItemProps, IDropdownOption, mergeStyleSets, ScrollablePane, ScrollbarVisibility, Sticky, StickyPositionType } from "@fluentui/react"; import nlsHPCC from "src/nlsHPCC"; import { DPWorkunit } from "src/DataPatterns/DPWorkunit"; import { Report } from "src/DataPatterns/Report"; @@ -87,7 +87,7 @@ export const DataPatterns: React.FunctionComponent = ({ className={dpStyles.inlineDropdown} required={true} selectedKey={targetCluster} - onChange={(ev, row) => { + onChange={(ev, row: IDropdownOption) => { setTargetCluster(row.key as string); }} /> diff --git a/esp/src/src-react/components/Workunits.tsx b/esp/src/src-react/components/Workunits.tsx index 5dc413d3664..88b3236c38a 100644 --- a/esp/src/src-react/components/Workunits.tsx +++ b/esp/src/src-react/components/Workunits.tsx @@ -25,7 +25,7 @@ const FilterFields: Fields = { "Wuid": { type: "string", label: nlsHPCC.WUID, placeholder: "W20200824-060035" }, "Owner": { type: "string", label: nlsHPCC.Owner, placeholder: nlsHPCC.jsmi }, "Jobname": { type: "string", label: nlsHPCC.JobName, placeholder: nlsHPCC.log_analysis_1 }, - "Cluster": { type: "target-cluster", label: nlsHPCC.Cluster, placeholder: "" }, + "Cluster": { type: "target-cluster", label: nlsHPCC.Cluster, placeholder: "", multiSelect: true }, "State": { type: "workunit-state", label: nlsHPCC.State, placeholder: "" }, "ECL": { type: "string", label: nlsHPCC.ECL, placeholder: nlsHPCC.dataset }, "LogicalFile": { type: "string", label: nlsHPCC.LogicalFile, placeholder: nlsHPCC.somefile }, diff --git a/esp/src/src-react/components/forms/AddBinding.tsx b/esp/src/src-react/components/forms/AddBinding.tsx index 0ebf60ad358..33df4fa0127 100644 --- a/esp/src/src-react/components/forms/AddBinding.tsx +++ b/esp/src/src-react/components/forms/AddBinding.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { DefaultButton, PrimaryButton, TextField, } from "@fluentui/react"; +import { DefaultButton, IDropdownOption, PrimaryButton, TextField, } from 
"@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import { EsdlDefinitionsTextField, EsdlEspProcessesTextField } from "./Fields"; @@ -76,7 +76,7 @@ export const AddBindingForm: React.FunctionComponent = ({ fieldState: { error } }) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} required={true} @@ -108,7 +108,7 @@ export const AddBindingForm: React.FunctionComponent = ({ fieldState: { error } }) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} required={true} diff --git a/esp/src/src-react/components/forms/AddPermission.tsx b/esp/src/src-react/components/forms/AddPermission.tsx index b527db7d6ca..39f7df0f866 100644 --- a/esp/src/src-react/components/forms/AddPermission.tsx +++ b/esp/src/src-react/components/forms/AddPermission.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { DefaultButton, MessageBar, MessageBarType, PrimaryButton, TextField, } from "@fluentui/react"; +import { DefaultButton, IDropdownOption, MessageBar, MessageBarType, PrimaryButton, TextField, } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import nlsHPCC from "src/nlsHPCC"; @@ -82,7 +82,7 @@ export const AddPermissionForm: React.FunctionComponent required={true} label={nlsHPCC.Type} selectedKey={value} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/CopyFile.tsx b/esp/src/src-react/components/forms/CopyFile.tsx index c3662170c72..289f310d6c9 100644 --- a/esp/src/src-react/components/forms/CopyFile.tsx +++ b/esp/src/src-react/components/forms/CopyFile.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, mergeStyleSets, PrimaryButton, Stack, TextField, } from "@fluentui/react"; +import { Checkbox, 
DefaultButton, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField, } from "@fluentui/react"; import { useForm, Controller } from "react-hook-form"; import nlsHPCC from "src/nlsHPCC"; import * as FileSpray from "src/FileSpray"; @@ -130,7 +130,7 @@ export const CopyFile: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error.message} diff --git a/esp/src/src-react/components/forms/DesprayFile.tsx b/esp/src/src-react/components/forms/DesprayFile.tsx index 15e297270a8..b57a171d0d1 100644 --- a/esp/src/src-react/components/forms/DesprayFile.tsx +++ b/esp/src/src-react/components/forms/DesprayFile.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, mergeStyleSets, PrimaryButton, Stack, TextField, } from "@fluentui/react"; +import { Checkbox, DefaultButton, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField, } from "@fluentui/react"; import { useForm, Controller } from "react-hook-form"; import { FileSpray, FileSprayService } from "@hpcc-js/comms"; import { scopedLogger } from "@hpcc-js/util"; @@ -151,7 +151,7 @@ export const DesprayFile: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { setDropzone(option.key as string); setDirectory(option["path"] as string); if (option["path"].indexOf("\\") > -1) { @@ -179,7 +179,7 @@ export const DesprayFile: React.FunctionComponent = ({ label={nlsHPCC.IPAddress} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { setMachine(option.key as string); setOs(option["OS"] as number); onChange(option.key); diff --git a/esp/src/src-react/components/forms/Fields.tsx 
b/esp/src/src-react/components/forms/Fields.tsx index d9221b96775..5a72453c409 100644 --- a/esp/src/src-react/components/forms/Fields.tsx +++ b/esp/src/src-react/components/forms/Fields.tsx @@ -81,8 +81,9 @@ interface AsyncDropdownProps { selectedKey?: string; required?: boolean; disabled?: boolean; + multiSelect?: boolean; errorMessage?: string; - onChange?: (event: React.FormEvent, option?: IDropdownOption, index?: number) => void; + onChange?: (event: React.FormEvent, option?: IDropdownOption | IDropdownOption[], index?: number) => void; placeholder?: string; className?: string; } @@ -93,6 +94,7 @@ const AsyncDropdown: React.FunctionComponent = ({ selectedKey, required = false, disabled, + multiSelect = false, errorMessage, onChange, placeholder, @@ -101,43 +103,96 @@ const AsyncDropdown: React.FunctionComponent = ({ const selOptions = React.useMemo(() => { if (options !== undefined) { - return !required ? [{ key: "", text: "" }, ...options] : options; + return !required && !multiSelect ? [{ key: "", text: "" }, ...options] : options; } return []; - }, [options, required]); + }, [multiSelect, options, required]); const [selectedItem, setSelectedItem] = React.useState(); const [selectedIdx, setSelectedIdx] = React.useState(); - React.useEffect(() => { - let item; - if (selectedItem?.key) { - item = selOptions?.find(row => row.key === selectedItem?.key) ?? selOptions[0]; + const [selectedItems, setSelectedItems] = React.useState([]); + + const changeSelectedItems = React.useCallback(() => { + let items = [...selectedItems]; + if (selectedKey === "") return; + const keys = selectedKey.split("|"); + items = keys.map(key => { return { key: key, text: key }; }); + if (!items.length) return; + if (items.map(item => item.key).join("|") === selectedKey) { + // do nothing, unless + if (!selectedItems.length) { + setSelectedItems(items); + } } else { - item = selOptions?.find(row => row.key === selectedKey) ?? 
selOptions[0]; + setSelectedItems(items); } - if (!item) return; - if (item.key === selectedKey) { - // do nothing, unless - if (!selectedItem) { + }, [selectedKey, selectedItems]); + + React.useEffect(() => { + // only on mount, pre-populate selectedItems from url + if (multiSelect) { + changeSelectedItems(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + React.useEffect(() => { + if (multiSelect) { + if (!selectedItems.length) return; + changeSelectedItems(); + } else { + let item; + if (selectedItem?.key) { + item = selOptions?.find(row => row.key === selectedItem?.key) ?? selOptions[0]; + } else { + item = selOptions?.find(row => row.key === selectedKey) ?? selOptions[0]; + } + if (!item) return; + if (item.key === selectedKey) { + // do nothing, unless + if (!selectedItem) { + setSelectedItem(item); + setSelectedIdx(selOptions.indexOf(item)); + } + } else { setSelectedItem(item); setSelectedIdx(selOptions.indexOf(item)); } - } else { - setSelectedItem(item); - setSelectedIdx(selOptions.indexOf(item)); } - }, [selectedKey, selOptions, selectedItem]); + }, [changeSelectedItems, multiSelect, selectedKey, selOptions, selectedItem, selectedItems]); React.useEffect(() => { - if (!selectedItem || selectedItem?.key === selectedKey) return; - if (selectedItem !== undefined) { - onChange(undefined, selectedItem, selectedIdx); + if (multiSelect) { + if (!selectedItems.length && selectedKey === "") return; + if (selectedItems.map(item => item.key).join("|") === selectedKey) return; + onChange(undefined, selectedItems, null); + } else { + if (!selectedItem || selectedItem?.key === selectedKey) return; + if (selectedItem !== undefined) { + onChange(undefined, selectedItem, selectedIdx); + } } - }, [onChange, selectedItem, selectedIdx, selectedKey]); - - return options === undefined ? 
- : - setSelectedItem(item)} placeholder={placeholder} disabled={disabled} required={required} errorMessage={errorMessage} className={className} />; + }, [onChange, multiSelect, selectedItem, selectedIdx, selectedKey, selectedItems]); + + if (multiSelect) { + return options === undefined ? + : + item.key as string)} onChange={ + (_, item: IDropdownOption) => { + if (item) { + let selected = selectedItems.filter(i => i.key !== item.key); + if (item.selected) { + selected = [...selectedItems, item]; + } + setSelectedItems(selected); + } + } + } placeholder={placeholder} disabled={disabled} required={required} errorMessage={errorMessage} className={className} />; + } else { + return options === undefined ? + : + setSelectedItem(item)} placeholder={placeholder} disabled={disabled} required={required} errorMessage={errorMessage} className={className} />; + } }; interface DropdownMultiProps { @@ -286,6 +341,7 @@ interface QueriesActiveStateField extends BaseField { interface TargetClusterField extends BaseField { type: "target-cluster"; + multiSelect?: boolean; value?: string; } @@ -965,7 +1021,9 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a text: state }; })} - onChange={(ev, row) => onChange(fieldID, row.key)} + onChange={(ev, row) => { + onChange(fieldID, row.key); + }} placeholder={field.placeholder} /> }); @@ -1074,8 +1132,15 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a label: field.label, field: onChange(fieldID, row.key)} + onChange={(ev, row) => { + if (field.multiSelect) { + onChange(fieldID, (row as IDropdownOption[]).map(i => i.key).join("|")); + } else { + onChange(fieldID, (row as IDropdownOption).key); + } + }} placeholder={field.placeholder} /> }); @@ -1088,7 +1153,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a field: { + onChange={(ev, row: IDropdownOption) => { onChange(fieldID, row.key); setDropzone(row.key as string); }} @@ -1104,7 
+1169,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a field: onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} setSetDropzone={_ => setDropzone = _} /> @@ -1119,7 +1184,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a key={fieldID} required={field.required} selectedKey={field.value} - onChange={(ev, row) => onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); @@ -1134,7 +1199,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a username={field.username} required={field.required} selectedKey={field.value} - onChange={(ev, row) => onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); @@ -1149,7 +1214,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a groupname={field.groupname} required={field.required} selectedKey={field.value} - onChange={(ev, row) => onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); @@ -1163,7 +1228,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a key={fieldID} required={field.required} selectedKey={field.value} - onChange={(ev, row) => onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); @@ -1176,7 +1241,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a field: onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); @@ -1189,7 +1254,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a field: onChange(fieldID, 
row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); @@ -1202,7 +1267,7 @@ export function createInputs(fields: Fields, onChange?: (id: string, newValue: a field: onChange(fieldID, row.key)} + onChange={(ev, row: IDropdownOption) => onChange(fieldID, row.key)} placeholder={field.placeholder} /> }); diff --git a/esp/src/src-react/components/forms/GroupAddUser.tsx b/esp/src/src-react/components/forms/GroupAddUser.tsx index 08230ac5007..c8f1cef78b9 100644 --- a/esp/src/src-react/components/forms/GroupAddUser.tsx +++ b/esp/src/src-react/components/forms/GroupAddUser.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { DefaultButton, MessageBar, MessageBarType, PrimaryButton, } from "@fluentui/react"; +import { DefaultButton, IDropdownOption, MessageBar, MessageBarType, PrimaryButton, } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import nlsHPCC from "src/nlsHPCC"; @@ -83,7 +83,7 @@ export const GroupAddUserForm: React.FunctionComponent = ({ required={true} label={nlsHPCC.Username} selectedKey={value} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/Optimize.tsx b/esp/src/src-react/components/forms/Optimize.tsx index 722014eb03a..b3e482ace92 100644 --- a/esp/src/src-react/components/forms/Optimize.tsx +++ b/esp/src/src-react/components/forms/Optimize.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, PrimaryButton, TextField, } from "@fluentui/react"; +import { Checkbox, DefaultButton, IDropdownOption, PrimaryButton, TextField, } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import { DPWorkunit } from "src/DataPatterns/DPWorkunit"; @@ -70,7 +70,7 @@ export const 
Optimize: React.FunctionComponent = ({ placeholder={nlsHPCC.Target} selectedKey={value} required={true} - onChange={(ev, row) => { + onChange={(ev, row: IDropdownOption) => { return onChange(row.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/RemoteCopy.tsx b/esp/src/src-react/components/forms/RemoteCopy.tsx index 160c29d3885..baf5fcce2c2 100644 --- a/esp/src/src-react/components/forms/RemoteCopy.tsx +++ b/esp/src/src-react/components/forms/RemoteCopy.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, mergeStyleSets, MessageBar, MessageBarType, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, IDropdownOption, mergeStyleSets, MessageBar, MessageBarType, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { Controller, useForm } from "react-hook-form"; import { scopedLogger } from "@hpcc-js/util"; import nlsHPCC from "src/nlsHPCC"; @@ -187,7 +187,7 @@ export const RemoteCopy: React.FunctionComponent = ({ key={fieldName} label={nlsHPCC.Group} required={true} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { setSelectedDestGroup(option.key.toString()); onChange(option.key); }} diff --git a/esp/src/src-react/components/forms/ReplicateFile.tsx b/esp/src/src-react/components/forms/ReplicateFile.tsx index 6bc5e625ba7..4a8b61034f9 100644 --- a/esp/src/src-react/components/forms/ReplicateFile.tsx +++ b/esp/src/src-react/components/forms/ReplicateFile.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { DefaultButton, PrimaryButton, Stack, TextField, } from "@fluentui/react"; +import { DefaultButton, IDropdownOption, PrimaryButton, Stack, TextField, } from "@fluentui/react"; import { useForm, Controller } from "react-hook-form"; import nlsHPCC from "src/nlsHPCC"; import * as FileSpray from "src/FileSpray"; @@ -115,7 +115,7 @@ export const ReplicateFile: React.FunctionComponent = ({ required={true} 
selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error.message} diff --git a/esp/src/src-react/components/forms/UserAddGroup.tsx b/esp/src/src-react/components/forms/UserAddGroup.tsx index 48fd13be177..b09393c30f1 100644 --- a/esp/src/src-react/components/forms/UserAddGroup.tsx +++ b/esp/src/src-react/components/forms/UserAddGroup.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { DefaultButton, MessageBar, MessageBarType, PrimaryButton, } from "@fluentui/react"; +import { DefaultButton, IDropdownOption, MessageBar, MessageBarType, PrimaryButton, } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import nlsHPCC from "src/nlsHPCC"; @@ -83,7 +83,7 @@ export const UserAddGroupForm: React.FunctionComponent = ({ required={true} label={nlsHPCC.GroupName} selectedKey={value} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/landing-zone/BlobImportForm.tsx b/esp/src/src-react/components/forms/landing-zone/BlobImportForm.tsx index 2ce53e8f583..8c313e65725 100644 --- a/esp/src/src-react/components/forms/landing-zone/BlobImportForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/BlobImportForm.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import * as FileSpray from "src/FileSpray"; @@ -168,7 +168,7 @@ export const BlobImportForm: React.FunctionComponent = ({ required={true} 
selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} @@ -188,7 +188,7 @@ export const BlobImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/landing-zone/DelimitedImportForm.tsx b/esp/src/src-react/components/forms/landing-zone/DelimitedImportForm.tsx index 6e4d8accf49..93510cf8191 100644 --- a/esp/src/src-react/components/forms/landing-zone/DelimitedImportForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/DelimitedImportForm.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, Dropdown, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, Dropdown, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import * as FileSpray from "src/FileSpray"; @@ -190,7 +190,7 @@ export const DelimitedImportForm: React.FunctionComponent { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} @@ -210,7 +210,7 @@ export const DelimitedImportForm: React.FunctionComponent { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/landing-zone/FileListForm.tsx b/esp/src/src-react/components/forms/landing-zone/FileListForm.tsx index bda1c8bc8e3..366196f4d2d 100644 --- a/esp/src/src-react/components/forms/landing-zone/FileListForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/FileListForm.tsx 
@@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, keyframes, mergeStyleSets, PrimaryButton, Stack } from "@fluentui/react"; +import { Checkbox, DefaultButton, IDropdownOption, keyframes, mergeStyleSets, PrimaryButton, Stack } from "@fluentui/react"; import { ProgressRingDotsIcon } from "@fluentui/react-icons-mdl2"; import { FileSprayService } from "@hpcc-js/comms"; import { scopedLogger } from "@hpcc-js/util"; @@ -189,7 +189,7 @@ export const FileListForm: React.FunctionComponent = ({ label={nlsHPCC.LandingZone} required={true} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { setDirectory(option["path"] as string); if (option["path"].indexOf("\\") > -1) { setPathSep("\\"); @@ -214,7 +214,7 @@ export const FileListForm: React.FunctionComponent = ({ dropzone={dropzone} required={true} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { if (option) { setMachine(option.key as string); setOs(option["OS"] as number); diff --git a/esp/src/src-react/components/forms/landing-zone/FixedImportForm.tsx b/esp/src/src-react/components/forms/landing-zone/FixedImportForm.tsx index 756ea9b8ad0..6b8a5cbaa02 100644 --- a/esp/src/src-react/components/forms/landing-zone/FixedImportForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/FixedImportForm.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import * as FileSpray from "src/FileSpray"; @@ -175,7 +175,7 @@ export const FixedImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - 
onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} @@ -195,7 +195,7 @@ export const FixedImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/landing-zone/JsonImportForm.tsx b/esp/src/src-react/components/forms/landing-zone/JsonImportForm.tsx index f97a0a64b05..7132df1e0fa 100644 --- a/esp/src/src-react/components/forms/landing-zone/JsonImportForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/JsonImportForm.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, Dropdown, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, Dropdown, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import * as FileSpray from "src/FileSpray"; @@ -180,7 +180,7 @@ export const JsonImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} @@ -200,7 +200,7 @@ export const JsonImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/landing-zone/VariableImportForm.tsx b/esp/src/src-react/components/forms/landing-zone/VariableImportForm.tsx index d9042425a31..64fadc72493 100644 --- 
a/esp/src/src-react/components/forms/landing-zone/VariableImportForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/VariableImportForm.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, Dropdown, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, Dropdown, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import * as FileSpray from "src/FileSpray"; @@ -170,7 +170,7 @@ export const VariableImportForm: React.FunctionComponent { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} @@ -190,7 +190,7 @@ export const VariableImportForm: React.FunctionComponent { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} diff --git a/esp/src/src-react/components/forms/landing-zone/XmlImportForm.tsx b/esp/src/src-react/components/forms/landing-zone/XmlImportForm.tsx index 93ce5f27107..b2dc3197ff3 100644 --- a/esp/src/src-react/components/forms/landing-zone/XmlImportForm.tsx +++ b/esp/src/src-react/components/forms/landing-zone/XmlImportForm.tsx @@ -1,5 +1,5 @@ import * as React from "react"; -import { Checkbox, DefaultButton, Dropdown, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; +import { Checkbox, DefaultButton, Dropdown, IDropdownOption, mergeStyleSets, PrimaryButton, Stack, TextField } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { useForm, Controller } from "react-hook-form"; import * as FileSpray from "src/FileSpray"; @@ -178,7 +178,7 @@ export const XmlImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { 
onChange(option.key); }} errorMessage={error && error?.message} @@ -198,7 +198,7 @@ export const XmlImportForm: React.FunctionComponent = ({ required={true} selectedKey={value} placeholder={nlsHPCC.SelectValue} - onChange={(evt, option) => { + onChange={(evt, option: IDropdownOption) => { onChange(option.key); }} errorMessage={error && error?.message} From 5e5246f36c3bddfbb403643cb4bbbc09ff21b19f Mon Sep 17 00:00:00 2001 From: Rodrigo Pastrana Date: Tue, 16 Apr 2024 17:02:20 -0400 Subject: [PATCH 11/18] HPCC-31628 Handle and report Otel exporter errors - Catches potential exceptions thrown during exporter creation - Reports caught errors - Avoids uncaught exceptions Signed-off-by: Rodrigo Pastrana --- system/jlib/jtrace.cpp | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/system/jlib/jtrace.cpp b/system/jlib/jtrace.cpp index 8c18061fa4b..a4a7a1310a5 100644 --- a/system/jlib/jtrace.cpp +++ b/system/jlib/jtrace.cpp @@ -1268,7 +1268,20 @@ std::unique_ptr CTraceManager::createEx std::unique_ptr CTraceManager::createProcessor(const IPropertyTree * exportConfig) { - auto exporter = createExporter(exportConfig); + std::unique_ptr exporter; + try + { + exporter = createExporter(exportConfig); + } + catch(const std::exception& e) //polymorphic type std::exception + { + LOG(MCoperatorError, "JTRACE: Error creating Tracing exporter: %s", e.what()); + } + catch (...) + { + LOG(MCoperatorError, "JTRACE: Unknown error creating Tracing exporter"); + } + if (!exporter) return nullptr; From 9e45b28a1eee21e9abdd725a328dcb342879c635 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Wed, 17 Apr 2024 11:45:40 +0100 Subject: [PATCH 12/18] HPCC-31620 Fix avoidRename (ensure directory needed) When avoidRename is on, ensureDirectoryForFile was not being called, causing 'No such file or directory' errors if the parent scope directory did not already exist. 
Signed-off-by: Jake Smith --- thorlcr/activities/thactivityutil.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/thorlcr/activities/thactivityutil.cpp b/thorlcr/activities/thactivityutil.cpp index cd7c323eb66..88e1605f758 100644 --- a/thorlcr/activities/thactivityutil.cpp +++ b/thorlcr/activities/thactivityutil.cpp @@ -760,8 +760,8 @@ IFileIO *createMultipleWrite(CActivityBase *activity, IPartDescriptor &partDesc, GetTempFilePath(outLocationName, "partial"); assertex(outLocationName.length()); - ensureDirectoryForFile(outLocationName.str()); } + ensureDirectoryForFile(outLocationName.str()); OwnedIFile file = createIFile(outLocationName.str()); Owned fileio; if (compress) From ad1f7bc43b9abade12b50d4fbc81e407d9197f2e Mon Sep 17 00:00:00 2001 From: Shamser Ahmed Date: Wed, 17 Apr 2024 12:46:54 +0100 Subject: [PATCH 13/18] HPCC-31630 StatsScopeId::setScopeText not working for SSToperation scope type This change updates StatsScopeId::setScopeText so that it correctly sets the id for scope types that have a string value for a scope id (i.e. set 'name' member var in StatsScopeId) and correctly updates the '_next' argument. 
Signed-off-by: Shamser Ahmed --- system/jlib/jstats.cpp | 55 +++++++++++++++++++----------------------- 1 file changed, 25 insertions(+), 30 deletions(-) diff --git a/system/jlib/jstats.cpp b/system/jlib/jstats.cpp index 4dcab0b9ef1..e118ceebb38 100644 --- a/system/jlib/jstats.cpp +++ b/system/jlib/jstats.cpp @@ -1633,6 +1633,26 @@ void StatsScopeId::setId(StatisticScopeType _scopeType, unsigned _id, unsigned _ bool StatsScopeId::setScopeText(const char * text, const char * * _next) { + auto setScope = [&](StatisticScopeType _scopeType, const char *text, const char * *next) + { + if (!*text) + return false; + scopeType = _scopeType; + const char * endScopeNamePtr = strchr(text, ':'); + if (endScopeNamePtr) + { + name.set(text, endScopeNamePtr-text); + if (next) + * next = endScopeNamePtr; + } + else + { + name.set(text); + if (next) + *next = text + strlen(text); + } + return true; + }; char * * next = (char * *)_next; switch (*text) { @@ -1683,21 +1703,11 @@ bool StatsScopeId::setScopeText(const char * text, const char * * _next) break; case FunctionScopePrefix[0]: if (MATCHES_CONST_PREFIX(text, FunctionScopePrefix)) - { - setFunctionId(text+ strlen(FunctionScopePrefix)); - if (_next) - *_next = text + strlen(text); - return true; - } + return setScope(SSTfunction, text+strlen(FunctionScopePrefix), _next); break; case FileScopePrefix[0]: if (MATCHES_CONST_PREFIX(text, FileScopePrefix)) - { - setFileId(text+strlen(FileScopePrefix)); - if (_next) - *_next = text + strlen(text); - return true; - } + return setScope(SSTfile, text+strlen(FileScopePrefix), _next); break; case WorkflowScopePrefix[0]: if (MATCHES_CONST_PREFIX(text, WorkflowScopePrefix) && isdigit(text[strlen(WorkflowScopePrefix)])) @@ -1727,30 +1737,15 @@ bool StatsScopeId::setScopeText(const char * text, const char * * _next) break; case DFUWorkunitScopePrefix[0]: if (MATCHES_CONST_PREFIX(text, DFUWorkunitScopePrefix)) - { - setDfuWorkunitId(text+ strlen(DFUWorkunitScopePrefix)); - if (_next) - *_next = 
text + strlen(text); - return true; - } + return setScope(SSTdfuworkunit, text+strlen(DFUWorkunitScopePrefix), _next); break; case SectionScopePrefix[0]: if (MATCHES_CONST_PREFIX(text, SectionScopePrefix)) - { - setSectionId(text+strlen(SectionScopePrefix)); - if (_next) - *_next = text + strlen(text); - return true; - } + return setScope(SSTsection, text+strlen(SectionScopePrefix), _next); break; case OperationScopePrefix[0]: if (MATCHES_CONST_PREFIX(text, OperationScopePrefix)) - { - setOperationId(text+strlen(OperationScopePrefix)); - if (_next) - *_next = text + strlen(text); - return true; - } + return setScope(SSToperation, text+strlen(OperationScopePrefix), _next); break; case '\0': setId(SSTglobal, 0); From baa6415310cd6aed312ec3434255029e2d42cf70 Mon Sep 17 00:00:00 2001 From: "Dan S. Camper" Date: Tue, 16 Apr 2024 14:08:16 -0500 Subject: [PATCH 14/18] HPCC-31590 Fix incorrect TZ abbreviation for Chammoro --- ecllibrary/std/Date.ecl | 1 + 1 file changed, 1 insertion(+) diff --git a/ecllibrary/std/Date.ecl b/ecllibrary/std/Date.ecl index 5ee88d9c817..e0a0ef946bb 100644 --- a/ecllibrary/std/Date.ecl +++ b/ecllibrary/std/Date.ecl @@ -1647,6 +1647,7 @@ EXPORT TZ_DATA := DATASET {'CHAST', 45900, ['PACIFIC']}, // Chatham Island Standard Time {'CHOST', 32400, ['ASIA']}, // Choibalsan Summer Time {'CHOT', 28800, ['ASIA']}, // Choibalsan Time + {'CHST', 36000, ['PACIFIC']}, // Chamorro Standard Time {'ChST', 36000, ['PACIFIC']}, // Chamorro Standard Time {'CHUT', 36000, ['PACIFIC']}, // Chuuk Time {'CIDST', -14400, ['CARIBBEAN']}, // Cayman Islands Daylight Saving Time From 1248a2024f77eb52c58f1def0386e8f722fb60b2 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Wed, 17 Apr 2024 18:48:04 +0100 Subject: [PATCH 15/18] HPCC-31510 Use WsDfs for fileservices calls for: 1) FileExists 2) GetExpiryDays 3) SuperFileExists 4) GetFileDescription 5) GetLogicalFileAttribute Signed-off-by: Jake Smith --- esp/clients/ws_dfsclient/ws_dfsclient.cpp | 29 ++++++++++ 
esp/clients/ws_dfsclient/ws_dfsclient.hpp | 2 + plugins/fileservices/CMakeLists.txt | 67 ++++++++++++----------- plugins/fileservices/fileservices.cpp | 14 ++--- 4 files changed, 72 insertions(+), 40 deletions(-) diff --git a/esp/clients/ws_dfsclient/ws_dfsclient.cpp b/esp/clients/ws_dfsclient/ws_dfsclient.cpp index 3aa39d35bbd..e97f31ea0ee 100644 --- a/esp/clients/ws_dfsclient/ws_dfsclient.cpp +++ b/esp/clients/ws_dfsclient/ws_dfsclient.cpp @@ -862,6 +862,35 @@ IDistributedFile *lookup(const char *logicalFilename, IUserDescriptor *user, Acc return lookup(lfn, user, accessMode, hold, lockSuperOwner, transaction, priviledged, timeout); } +bool exists(CDfsLogicalFileName &lfn, IUserDescriptor *user, bool notSuper, bool superOnly, unsigned timeout) +{ + if (!lfn.isRemote()) + return queryDistributedFileDirectory().exists(lfn.get(), user, notSuper, superOnly); + + Owned file = lookup(lfn, user, AccessMode::read, false, false, nullptr, false, timeout); + if (!file) + return false; + bool isSuper = nullptr != file->querySuperFile(); + if (superOnly) + { + if (!isSuper) + return false; + } + else if (notSuper) + { + if (isSuper) + return false; + } + return true; +} + +bool exists(const char *logicalFilename, IUserDescriptor *user, bool notSuper, bool superOnly, unsigned timeout) +{ + CDfsLogicalFileName lfn; + lfn.set(logicalFilename); + return exists(lfn, user, notSuper, superOnly, timeout); +} + } // namespace wsdfs diff --git a/esp/clients/ws_dfsclient/ws_dfsclient.hpp b/esp/clients/ws_dfsclient/ws_dfsclient.hpp index 0769d0e0634..777787c0d65 100644 --- a/esp/clients/ws_dfsclient/ws_dfsclient.hpp +++ b/esp/clients/ws_dfsclient/ws_dfsclient.hpp @@ -53,6 +53,8 @@ WS_DFSCLIENT_API IDistributedFile *lookupLegacyDFSFile(const char *logicalName, WS_DFSCLIENT_API IDistributedFile *lookup(CDfsLogicalFileName &lfn, IUserDescriptor *user, AccessMode accessMode, bool hold, bool lockSuperOwner, IDistributedFileTransaction *transaction, bool priviledged, unsigned timeout); 
WS_DFSCLIENT_API IDistributedFile *lookup(const char *logicalFilename, IUserDescriptor *user, AccessMode accessMode, bool hold, bool lockSuperOwner, IDistributedFileTransaction *transaction, bool priviledged, unsigned timeout); +WS_DFSCLIENT_API bool exists(CDfsLogicalFileName &lfn, IUserDescriptor *user, bool notSuper, bool superOnly, unsigned timeout); +WS_DFSCLIENT_API bool exists(const char *logicalFilename, IUserDescriptor *user, bool notSuper, bool superOnly, unsigned timeout); } // end of namespace wsdfs diff --git a/plugins/fileservices/CMakeLists.txt b/plugins/fileservices/CMakeLists.txt index 48e459fdb6e..39c1190f305 100644 --- a/plugins/fileservices/CMakeLists.txt +++ b/plugins/fileservices/CMakeLists.txt @@ -30,35 +30,35 @@ set ( SRCS ${ESPSCM_GENERATED_DIR}/ws_dfu_esp.cpp ${ESPSCM_GENERATED_DIR}/ws_fs_esp.cpp fileservices.cpp - ../../esp/bindings/bindutil.cpp - ../../dali/dfuplus/dfuplus.cpp + ${HPCC_SOURCE_DIR}/esp/bindings/bindutil.cpp + ${HPCC_SOURCE_DIR}/dali/dfuplus/dfuplus.cpp ) include_directories ( . 
- ./../../common/environment - ./../../common/remote - ./../../system/jhtree - ./../../system/mp - ./../../common/workunit - ./../../esp/clients - ./../../dali/ft - ./../../system/security/shared - ./../../esp/bindings/SOAP/xpp - ./../../common/deftype - ./../../system/include - ./../../esp/bindings - ./../../dali/base - ./../../ecl/eclagent - ./../../rtl/include - ./../../system/xmllib - ./../../esp/platform - ./../../system/jlib - ./../../common/thorhelper - ./../../rtl/eclrtl - ./../../dali/dfu - ./../../dali/dfuplus - ./../../system/jlib + ${HPCC_SOURCE_DIR}/common/deftype + ${HPCC_SOURCE_DIR}/common/environment + ${HPCC_SOURCE_DIR}/common/remote + ${HPCC_SOURCE_DIR}/common/thorhelper + ${HPCC_SOURCE_DIR}/common/workunit + ${HPCC_SOURCE_DIR}/dali/ft + ${HPCC_SOURCE_DIR}/dali/base + ${HPCC_SOURCE_DIR}/dali/dfu + ${HPCC_SOURCE_DIR}/dali/dfuplus + ${HPCC_SOURCE_DIR}/ecl/eclagent + ${HPCC_SOURCE_DIR}/esp/bindings + ${HPCC_SOURCE_DIR}/esp/bindings/SOAP/xpp + ${HPCC_SOURCE_DIR}/esp/clients + ${HPCC_SOURCE_DIR}/esp/clients/ws_dfsclient + ${HPCC_SOURCE_DIR}/esp/platform + ${HPCC_SOURCE_DIR}/rtl/include + ${HPCC_SOURCE_DIR}/rtl/eclrtl + ${HPCC_SOURCE_DIR}/system/include + ${HPCC_SOURCE_DIR}/system/jhtree + ${HPCC_SOURCE_DIR}/system/jlib + ${HPCC_SOURCE_DIR}/system/mp + ${HPCC_SOURCE_DIR}/system/security/shared + ${HPCC_SOURCE_DIR}/system/xmllib ) ADD_DEFINITIONS( -D_USRDLL -DFILESERVICES_EXPORTS -DWSDFU_API_LOCAL -DFileSpray_API_LOCAL ) @@ -67,15 +67,16 @@ HPCC_ADD_LIBRARY( fileservices SHARED ${SRCS} ) add_dependencies ( fileservices espscm ) install ( TARGETS fileservices DESTINATION plugins ) target_link_libraries ( fileservices - jlib - remote - dalibase + deftype + dalibase + dllserver esphttp - dllserver - nbcd - eclrtl - deftype - workunit + eclrtl + jlib + nbcd + remote + workunit + ws_dfsclient ) if (NOT CONTAINERIZED) diff --git a/plugins/fileservices/fileservices.cpp b/plugins/fileservices/fileservices.cpp index 51cf3e26f5f..8531f74eebd 100644 --- 
a/plugins/fileservices/fileservices.cpp +++ b/plugins/fileservices/fileservices.cpp @@ -35,6 +35,7 @@ #include "dasds.hpp" #include "enginecontext.hpp" #include "environment.hpp" +#include "ws_dfsclient.hpp" #define USE_DALIDFS #define SDS_LOCK_TIMEOUT 10000 @@ -395,8 +396,7 @@ FILESERVICES_API bool FILESERVICES_CALL fsFileExists(ICodeContext *ctx, const ch constructLogicalName(ctx, name, lfn); if (physical) return queryDistributedFileDirectory().existsPhysical(lfn.str(),ctx->queryUserDescriptor()); - - return queryDistributedFileDirectory().exists(lfn.str(),ctx->queryUserDescriptor(),false,false); + return wsdfs::exists(lfn.str(), ctx->queryUserDescriptor(), false, false, INFINITE); } FILESERVICES_API bool FILESERVICES_CALL fsFileValidate(ICodeContext *ctx, const char *name) @@ -405,7 +405,7 @@ FILESERVICES_API bool FILESERVICES_CALL fsFileValidate(ICodeContext *ctx, const constructLogicalName(ctx, name, lfn); Linked udesc = ctx->queryUserDescriptor(); - Owned df = queryDistributedFileDirectory().lookup(lfn.str(),udesc, AccessMode::tbdRead, false, false, nullptr, defaultPrivilegedUser); + Owned df = wsdfs::lookup(lfn.str(), udesc, AccessMode::tbdRead, false, false, nullptr, defaultPrivilegedUser, INFINITE); if (df) { Owned partIter = df->getIterator(); @@ -1591,7 +1591,7 @@ FILESERVICES_API bool FILESERVICES_CALL fsSuperFileExists(ICodeContext *ctx, con { StringBuffer lsfn; constructLogicalName(ctx, lsuperfn, lsfn); - return queryDistributedFileDirectory().exists(lsfn,ctx->queryUserDescriptor(),false,true); + return wsdfs::exists(lsfn, ctx->queryUserDescriptor(), false, true, INFINITE); } FILESERVICES_API void FILESERVICES_CALL fsDeleteSuperFile(ICodeContext *ctx, const char *lsuperfn,bool deletesub) @@ -2081,7 +2081,7 @@ FILESERVICES_API char * FILESERVICES_CALL fsGetFileDescription(ICodeContext *ct constructLogicalName(ctx, logicalfilename, lfn); Linked udesc = ctx->queryUserDescriptor(); - Owned df = queryDistributedFileDirectory().lookup(lfn.str(),udesc, 
AccessMode::tbdRead, false, false, nullptr, defaultPrivilegedUser); + Owned df = wsdfs::lookup(lfn.str(), udesc, AccessMode::tbdRead, false, false, nullptr, defaultPrivilegedUser, INFINITE); if (!df) throw MakeStringException(0, "GetFileDescription: Could not locate file %s", lfn.str()); const char * ret = df->queryAttributes().queryProp("@description"); @@ -2820,7 +2820,7 @@ FILESERVICES_API char * FILESERVICES_CALL fsfGetLogicalFileAttribute(ICodeContex StringBuffer lfn; constructLogicalName(ctx, _lfn, lfn); Linked udesc = ctx->queryUserDescriptor(); - Owned df = queryDistributedFileDirectory().lookup(lfn.str(),udesc, AccessMode::tbdRead,false, false, nullptr, defaultPrivilegedUser); + Owned df = wsdfs::lookup(lfn.str(), udesc, AccessMode::tbdRead, false, false, nullptr, defaultPrivilegedUser, INFINITE); StringBuffer ret; if (df) { if (strcmp(attrname,"ECL")==0) @@ -3161,7 +3161,7 @@ FILESERVICES_API int FILESERVICES_CALL fsGetExpireDays(ICodeContext * ctx, const StringBuffer lfn; constructLogicalName(ctx, _lfn, lfn); Linked udesc = ctx->queryUserDescriptor(); - Owned df = queryDistributedFileDirectory().lookup(lfn.str(),udesc, AccessMode::tbdRead,false, false, nullptr, defaultPrivilegedUser); + Owned df = wsdfs::lookup(lfn.str(), udesc, AccessMode::tbdRead, false, false, nullptr, defaultPrivilegedUser, INFINITE); if (df) return df->getExpire(nullptr); else From 44b1f9e528ee51322a163703aae8fa662e42352d Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 18 Apr 2024 12:23:16 +0100 Subject: [PATCH 16/18] HPCC-31639 Dali missing generated annotations Signed-off-by: Jake Smith --- helm/hpcc/templates/dali.yaml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index e9e104e2d2c..fdb3a54081f 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -91,13 +91,11 @@ spec: {{- if hasKey $dali "labels" }} {{ toYaml $dali.labels | indent 8 }} {{- end }} -{{- if 
or (hasKey $dali "annotations") (hasKey $.Values.global "metrics") }} annotations: - {{- if hasKey $.Values.global "metrics" }} - {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} - {{- end }} - {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} -{{- end }} + {{- include "hpcc.generateAnnotations" $commonCtx | indent 8 }} + {{- if hasKey $.Values.global "metrics" }} + {{- include "hpcc.addPrometheusScrapeAnnotations" $.Values.global.metrics | nindent 8 }} + {{- end }} spec: {{- include "hpcc.placementsByPodTargetType" (dict "root" $ "pod" $dali.name "type" "dali") | indent 6 }} serviceAccountName: "hpcc-dali" From a0d57f7ceb55b0ed14fc10faebc26637ad2b93a2 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 18 Apr 2024 08:41:45 +0100 Subject: [PATCH 17/18] HPCC-31629 Bump WsWorkunits to latest version Signed-off-by: Gordon Smith --- esp/src/package-lock.json | 254 +++++++++--------- esp/src/package.json | 28 +- esp/src/src-react/components/ECLArchive.tsx | 10 +- esp/src/src-react/components/Metrics.tsx | 6 +- .../components/WorkunitsDashboard.tsx | 6 +- esp/src/src-react/hooks/metrics.ts | 36 +-- esp/src/src-react/hooks/workunit.ts | 22 +- esp/src/src/ECLArchiveWidget.ts | 10 +- esp/src/src/ESPWorkunit.ts | 12 +- esp/src/src/Timings.ts | 12 +- esp/src/src/WUScopeController.ts | 12 +- 11 files changed, 206 insertions(+), 202 deletions(-) diff --git a/esp/src/package-lock.json b/esp/src/package-lock.json index b0b5b53a176..650354ec8f3 100644 --- a/esp/src/package-lock.json +++ b/esp/src/package-lock.json @@ -15,21 +15,21 @@ "@fluentui/react-hooks": "8.7.0", "@fluentui/react-icons-mdl2": "1.3.59", "@fluentui/react-migration-v8-v9": "9.6.3", - "@hpcc-js/chart": "2.83.1", - "@hpcc-js/codemirror": "2.61.2", - "@hpcc-js/common": "2.71.15", - "@hpcc-js/comms": "2.91.3", + "@hpcc-js/chart": "2.83.2", + "@hpcc-js/codemirror": "2.61.3", + "@hpcc-js/common": "2.71.16", + "@hpcc-js/comms": "2.92.0", "@hpcc-js/dataflow": 
"8.1.6", - "@hpcc-js/eclwatch": "2.73.44", - "@hpcc-js/graph": "2.85.13", - "@hpcc-js/html": "2.42.18", - "@hpcc-js/layout": "2.49.20", - "@hpcc-js/map": "2.77.19", - "@hpcc-js/other": "2.15.20", - "@hpcc-js/phosphor": "2.18.6", - "@hpcc-js/react": "2.53.14", - "@hpcc-js/tree": "2.40.15", - "@hpcc-js/util": "2.50.6", + "@hpcc-js/eclwatch": "2.74.0", + "@hpcc-js/graph": "2.85.14", + "@hpcc-js/html": "2.42.19", + "@hpcc-js/layout": "2.49.21", + "@hpcc-js/map": "2.77.20", + "@hpcc-js/other": "2.15.21", + "@hpcc-js/phosphor": "2.18.7", + "@hpcc-js/react": "2.53.15", + "@hpcc-js/tree": "2.40.16", + "@hpcc-js/util": "2.51.0", "@kubernetes/client-node": "0.20.0", "clipboard": "2.0.11", "d3-dsv": "3.0.1", @@ -1806,37 +1806,37 @@ } }, "node_modules/@hpcc-js/api": { - "version": "2.12.15", - "resolved": "https://registry.npmjs.org/@hpcc-js/api/-/api-2.12.15.tgz", - "integrity": "sha512-JW1UDMiBOuI5MhH4BEttwn5zZuPEyUGD1AcA8Bsjynz0is2nV7CNlEljIBIF3tj6v9bYelj2E/Yrzg83sT3f7w==", + "version": "2.12.16", + "resolved": "https://registry.npmjs.org/@hpcc-js/api/-/api-2.12.16.tgz", + "integrity": "sha512-lmRvwoAHWcrTSKEbe/SR0Y61p4j/+VRnu7CViBe5wQ2nCrK9yFTLZzDlmHhvMIDyVJj4khI4/4ZZCUI13zfuzA==", "dependencies": { - "@hpcc-js/common": "^2.71.15" + "@hpcc-js/common": "^2.71.16" } }, "node_modules/@hpcc-js/chart": { - "version": "2.83.1", - "resolved": "https://registry.npmjs.org/@hpcc-js/chart/-/chart-2.83.1.tgz", - "integrity": "sha512-Ns8IHPIbtg5diEDiUEBqeNfIfs23k7RppJbAt8ydLaNLh8pN0/sjZBNf7IApgZHwZlI0SpNYRFtLE+cHJ+lNSg==", + "version": "2.83.2", + "resolved": "https://registry.npmjs.org/@hpcc-js/chart/-/chart-2.83.2.tgz", + "integrity": "sha512-PePaV/68if4dp+iBDpmBwRbTfOsKht8DkZ7B8NafKfGxL1+khCXf4p4Q/Dlgqig1OfVt35z1eR4lbEUrARhVJw==", "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/codemirror": { - 
"version": "2.61.2", - "resolved": "https://registry.npmjs.org/@hpcc-js/codemirror/-/codemirror-2.61.2.tgz", - "integrity": "sha512-gwbskqoo0iZpdv4E9onqxMqLQW6nj5b3llL/4yPaT1XJVilC57eh394iQF+qt86H39VRSh/OWmTEzJIHNoLnYw==", + "version": "2.61.3", + "resolved": "https://registry.npmjs.org/@hpcc-js/codemirror/-/codemirror-2.61.3.tgz", + "integrity": "sha512-GKLuro8GiMUKu9sCoIsyIXMRbTogG3xGb/yvAuoAKcJQRyJsVamLB7ohyjZkcgY7/JWa3+UBUcABOxiWqG8M/Q==", "dependencies": { - "@hpcc-js/common": "^2.71.15" + "@hpcc-js/common": "^2.71.16" } }, "node_modules/@hpcc-js/common": { - "version": "2.71.15", - "resolved": "https://registry.npmjs.org/@hpcc-js/common/-/common-2.71.15.tgz", - "integrity": "sha512-uosRQo3DVGAKsCyYVqCk88n44E0TXbkKGViOtsinBYHDuwqUMihWAiXcaVnRTOeu3H57pA43kC8NdYq9JeEjaA==", + "version": "2.71.16", + "resolved": "https://registry.npmjs.org/@hpcc-js/common/-/common-2.71.16.tgz", + "integrity": "sha512-hz5i9zUXBJrXW5tl30XwgFXwJ2nipzLD9pXQrg1Rw8zfXkQ1Xax22RvGZdASAGPsmHxefyWTK7fpcJd+ipOGOg==", "dependencies": { - "@hpcc-js/util": "^2.50.6", + "@hpcc-js/util": "^2.51.0", "@types/d3-array": "1.2.12", "@types/d3-brush": "1.1.8", "@types/d3-collection": "1.0.13", @@ -1855,18 +1855,18 @@ } }, "node_modules/@hpcc-js/comms": { - "version": "2.91.3", - "resolved": "https://registry.npmjs.org/@hpcc-js/comms/-/comms-2.91.3.tgz", - "integrity": "sha512-1LN1C7HihX+rPUMOcpBObd2BkQk8OpJZO+8YlYEOrbbs3diaNT1HJN1Q6rV7bnXDu4jZN3lZrAxwsDVJqK8w8A==", + "version": "2.92.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/comms/-/comms-2.92.0.tgz", + "integrity": "sha512-hGWFUIywb/DHR/yk43C911JY9mwNrion/wt71adfjbckjQJ267GjPkw4tyz8K1YGt5NgCCW+njnR6lAkpjYGfw==", "dependencies": { "@hpcc-js/ddl-shim": "^2.20.6", - "@hpcc-js/util": "^2.50.6", + "@hpcc-js/util": "^2.51.0", "@xmldom/xmldom": "0.8.10", "abort-controller": "3.0.0", "node-fetch": "2.7.0", "safe-buffer": "5.2.1", "tmp": "0.2.3", - "undici": "5.28.3" + "undici": "5.28.4" } }, 
"node_modules/@hpcc-js/comms/node_modules/safe-buffer": { @@ -1906,14 +1906,14 @@ } }, "node_modules/@hpcc-js/dgrid": { - "version": "2.32.16", - "resolved": "https://registry.npmjs.org/@hpcc-js/dgrid/-/dgrid-2.32.16.tgz", - "integrity": "sha512-uwbRDzC9ZZTC83Y22SnkRumVde3gFgu2xFZXRwhS4dJ8jngVcQ/6buwWy65BlCy3wSBFfKOx4TvvbAbGXIdBWw==", + "version": "2.32.17", + "resolved": "https://registry.npmjs.org/@hpcc-js/dgrid/-/dgrid-2.32.17.tgz", + "integrity": "sha512-M0QP4vvylMlAMl5iAWKe94zx6xK7SjeQt+iAsN7izwJrZ4PlAPym/bn05VLGfI7iQLT72d/6TRrku/Lh2PyDSg==", "dependencies": { - "@hpcc-js/common": "^2.71.15", + "@hpcc-js/common": "^2.71.16", "@hpcc-js/ddl-shim": "^2.20.6", "@hpcc-js/dgrid-shim": "^2.24.8", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/dgrid-shim": { @@ -1922,63 +1922,63 @@ "integrity": "sha512-04+r+7Qa2LSc/aWx+d/QzdRoerPCIpiCXcrXPBf7tBHxOzU8gAIW0WU7wiilUmL2ZdHyLXQrzcT0gKVHkKlJaQ==" }, "node_modules/@hpcc-js/dgrid2": { - "version": "2.3.17", - "resolved": "https://registry.npmjs.org/@hpcc-js/dgrid2/-/dgrid2-2.3.17.tgz", - "integrity": "sha512-k1PiXtx/BeCyW3wG6zf9VUv5v2IWa1uTbvdIHxXjlpsSQ+vtixPcRaaut/JvzwdXsKrlYxe0b9zW/7gEXBdwCw==", + "version": "2.3.18", + "resolved": "https://registry.npmjs.org/@hpcc-js/dgrid2/-/dgrid2-2.3.18.tgz", + "integrity": "sha512-7OtEREk9xJYfjDGGP9abSvQWQCDTgwYIx+OfXqFArb031FhTC1rWrkc5svySRb/0VVVAvsQFkHK435GaNy6PHQ==", "dependencies": { - "@hpcc-js/common": "^2.71.15", + "@hpcc-js/common": "^2.71.16", "@hpcc-js/preact-shim": "^2.16.10", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/eclwatch": { - "version": "2.73.44", - "resolved": "https://registry.npmjs.org/@hpcc-js/eclwatch/-/eclwatch-2.73.44.tgz", - "integrity": "sha512-pWIZyKfI2h9aRnPCrxlYno2orM3yF6j/T6tyFxN8oHKsu273i3yAw3GH2VBNKE/rwCwxWmAaSY+1oYM6ugZLDQ==", - "dependencies": { - "@hpcc-js/codemirror": "^2.61.2", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/comms": "^2.91.3", - "@hpcc-js/dgrid": 
"^2.32.16", - "@hpcc-js/graph": "^2.85.13", - "@hpcc-js/layout": "^2.49.20", - "@hpcc-js/phosphor": "^2.18.6", - "@hpcc-js/timeline": "^2.51.23", - "@hpcc-js/tree": "^2.40.15", - "@hpcc-js/util": "^2.50.6" + "version": "2.74.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/eclwatch/-/eclwatch-2.74.0.tgz", + "integrity": "sha512-l33wC724CKZ/XCeErt6fGNbXrUHFJAY8TInl7KhpYRbimYW/rdLEQ8DuqzPFqHGm1ev2ym8HGn8tIk84M/3g8g==", + "dependencies": { + "@hpcc-js/codemirror": "^2.61.3", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/comms": "^2.92.0", + "@hpcc-js/dgrid": "^2.32.17", + "@hpcc-js/graph": "^2.85.14", + "@hpcc-js/layout": "^2.49.21", + "@hpcc-js/phosphor": "^2.18.7", + "@hpcc-js/timeline": "^2.51.24", + "@hpcc-js/tree": "^2.40.16", + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/graph": { - "version": "2.85.13", - "resolved": "https://registry.npmjs.org/@hpcc-js/graph/-/graph-2.85.13.tgz", - "integrity": "sha512-B1KVWkCPq5J0DzU4WAezbJ/ZkbjlzHB7LMG7o9nvdG71Li6N8uETP97D5QVMgDWLDMLycAeOw3VrbghDAunQmA==", + "version": "2.85.14", + "resolved": "https://registry.npmjs.org/@hpcc-js/graph/-/graph-2.85.14.tgz", + "integrity": "sha512-grofTqK944A8b/LgigDJHBuM9R9+JIDYfqA5wBssbvty3MtLAuN2seoGFn+I7UEojrCggQllKSxNyi/OXoJrCQ==", "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/html": "^2.42.18", - "@hpcc-js/react": "^2.53.14", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/html": "^2.42.19", + "@hpcc-js/react": "^2.53.15", + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/html": { - "version": "2.42.18", - "resolved": "https://registry.npmjs.org/@hpcc-js/html/-/html-2.42.18.tgz", - "integrity": "sha512-zhVVtjGykWRDVyw3NRQ31SaD6i3Ml69SXNvqhDWRjZ/CKL0CXFGYj0ki8xS9MzBofRznAGK0ijNJY8JMWX7T7Q==", + "version": "2.42.19", + "resolved": "https://registry.npmjs.org/@hpcc-js/html/-/html-2.42.19.tgz", + "integrity": 
"sha512-qocVJXQvwUVaHVXQvn8gIZCXfNHiQOVuMai5wyegYH9KgWJUX3MjUNGHCEYpMsBkk6LBX+D+3myC+VFGlLSgjg==", "dependencies": { - "@hpcc-js/common": "^2.71.15", + "@hpcc-js/common": "^2.71.16", "@hpcc-js/preact-shim": "^2.16.10", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/layout": { - "version": "2.49.20", - "resolved": "https://registry.npmjs.org/@hpcc-js/layout/-/layout-2.49.20.tgz", - "integrity": "sha512-Dad5R/fJVtMQsWsDOakN8qehkSLgTv1teQpKs18m6+pIIx1p20JeLTLW1Y3Epz92tqrZEHYigs09GpUZw87r7g==", + "version": "2.49.21", + "resolved": "https://registry.npmjs.org/@hpcc-js/layout/-/layout-2.49.21.tgz", + "integrity": "sha512-gZSqCBrLDriWW9mhw1bqUL/dzNCsf372CnPjsMLEuMdln7TOOTQm5L0BCjMPVf5kMgyZeT2xRZGr7YURPJIw+g==", "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/chart": "^2.83.1", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/dgrid2": "^2.3.17" + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/chart": "^2.83.2", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/dgrid2": "^2.3.18" } }, "node_modules/@hpcc-js/leaflet-shim": { @@ -1991,38 +1991,38 @@ } }, "node_modules/@hpcc-js/map": { - "version": "2.77.19", - "resolved": "https://registry.npmjs.org/@hpcc-js/map/-/map-2.77.19.tgz", - "integrity": "sha512-cWNtIZMaYK3tFh1PXlS+hKwUB2n5OlXbVtlaSiISrogTUl+224VFc7XCXxoKvblDoYxaeIhkjnOHX4PHyFKcOA==", - "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/graph": "^2.85.13", - "@hpcc-js/layout": "^2.49.20", + "version": "2.77.20", + "resolved": "https://registry.npmjs.org/@hpcc-js/map/-/map-2.77.20.tgz", + "integrity": "sha512-smA6i2viO/DsEaNGIbRKLxHTLWEO8qd7nBgtEFOYugyiiIeyZaBtHVEeTEpOMJAv672L+SxbxAteSbfbdTdd/w==", + "dependencies": { + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/graph": "^2.85.14", + "@hpcc-js/layout": "^2.49.21", "@hpcc-js/leaflet-shim": "^2.3.5", - "@hpcc-js/other": "^2.15.20", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/other": "^2.15.21", + "@hpcc-js/util": 
"^2.51.0" } }, "node_modules/@hpcc-js/other": { - "version": "2.15.20", - "resolved": "https://registry.npmjs.org/@hpcc-js/other/-/other-2.15.20.tgz", - "integrity": "sha512-UVjZrCbNertvTtdj4icGlTPmA+OowoMPFxT4vXVfKgiGsjYnESe4jrJv9bPpzuPrECiGl7xhzQiqTLTERmzLFw==", + "version": "2.15.21", + "resolved": "https://registry.npmjs.org/@hpcc-js/other/-/other-2.15.21.tgz", + "integrity": "sha512-QUIlQv7nP9+fKNdE8458pqy/cYrEZnVYXBne8uSu2h5q64VSSreCZmO7XY/Rjxf822RigYlZ+fC8K2fZiDOULw==", "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/layout": "^2.49.20" + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/layout": "^2.49.21" } }, "node_modules/@hpcc-js/phosphor": { - "version": "2.18.6", - "resolved": "https://registry.npmjs.org/@hpcc-js/phosphor/-/phosphor-2.18.6.tgz", - "integrity": "sha512-YzcR3TiwnXapPKq3PHe+tUKr5DcF/MN2F7au/TdqXlyJ7/gptPx5qZvh9VHqnivVSqDuTdlaxA6Iw7Wh1zikCw==", + "version": "2.18.7", + "resolved": "https://registry.npmjs.org/@hpcc-js/phosphor/-/phosphor-2.18.7.tgz", + "integrity": "sha512-iSQX6vIpawQPbDVhc/CbH8Z4ysSzb+uFjeasd1zIfE77Km5ImH9IiI9OZMOoFYi1zCTCfce/Y7NLC8ADOgg2XQ==", "dependencies": { - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/other": "^2.15.20", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/other": "^2.15.21", "@hpcc-js/phosphor-shim": "^2.14.6", - "@hpcc-js/util": "^2.50.6" + "@hpcc-js/util": "^2.51.0" } }, "node_modules/@hpcc-js/phosphor-shim": { @@ -2045,40 +2045,40 @@ } }, "node_modules/@hpcc-js/react": { - "version": "2.53.14", - "resolved": "https://registry.npmjs.org/@hpcc-js/react/-/react-2.53.14.tgz", - "integrity": "sha512-IUER5VbUaqc12jK5PMDNYGSNK3y7SY/98CVO6KRxudsMQb/816E0YPxTXnYoWbSxtKgO9PgCAhqwyC96Q8RqCg==", + "version": "2.53.15", + "resolved": "https://registry.npmjs.org/@hpcc-js/react/-/react-2.53.15.tgz", + "integrity": "sha512-X8e1lIk4oRXFNTFxrcZ1YbJDi6t7IU431Lzq0nTIFJqWIDRZgjJ3gKxSGcjtjNYzhUqq4A9KfcdvKNE+wHrdjw==", "dependencies": { - "@hpcc-js/common": 
"^2.71.15", + "@hpcc-js/common": "^2.71.16", "@hpcc-js/preact-shim": "^2.16.10" } }, "node_modules/@hpcc-js/timeline": { - "version": "2.51.23", - "resolved": "https://registry.npmjs.org/@hpcc-js/timeline/-/timeline-2.51.23.tgz", - "integrity": "sha512-+puiTarYe0he9XUcVP9NA08X8SAvqTMcoRkHNYrKfTIa0MI1i67nbo/D9MdoBGbVN7S5dyz40CcX13a1wMk1DA==", + "version": "2.51.24", + "resolved": "https://registry.npmjs.org/@hpcc-js/timeline/-/timeline-2.51.24.tgz", + "integrity": "sha512-QNgXhJ6/hQHfP2Lge2zL1X5ERI813KKpFN+DNFqufhWoZIT/7x3kr1If8r1mC74hYt4xqkFAdoveEepFT+lYhQ==", "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/chart": "^2.83.1", - "@hpcc-js/common": "^2.71.15", - "@hpcc-js/html": "^2.42.18", - "@hpcc-js/layout": "^2.49.20", - "@hpcc-js/react": "^2.53.14" + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/chart": "^2.83.2", + "@hpcc-js/common": "^2.71.16", + "@hpcc-js/html": "^2.42.19", + "@hpcc-js/layout": "^2.49.21", + "@hpcc-js/react": "^2.53.15" } }, "node_modules/@hpcc-js/tree": { - "version": "2.40.15", - "resolved": "https://registry.npmjs.org/@hpcc-js/tree/-/tree-2.40.15.tgz", - "integrity": "sha512-tOl0dfKDmZlpstwndVbnXF+OYI/K+rBbCzXWNQkRoh2UdGbYpRYQfx/d0hgnLTIgbeoqWVlIG8QL1FVTIF6dPA==", + "version": "2.40.16", + "resolved": "https://registry.npmjs.org/@hpcc-js/tree/-/tree-2.40.16.tgz", + "integrity": "sha512-UCFA3ky9aB0XqrN4PyNmwkY3zl3VSc4araEfHpjtOcT7r7pUVJNEG+KjYPkCTUvvKYoPIuE2FBGtr6ec0bM5Aw==", "dependencies": { - "@hpcc-js/api": "^2.12.15", - "@hpcc-js/common": "^2.71.15" + "@hpcc-js/api": "^2.12.16", + "@hpcc-js/common": "^2.71.16" } }, "node_modules/@hpcc-js/util": { - "version": "2.50.6", - "resolved": "https://registry.npmjs.org/@hpcc-js/util/-/util-2.50.6.tgz", - "integrity": "sha512-k4yDXdYX5h2RcccVy9sW1djcOP1w6G/GYAAs7duEPbx0TyvpMuK/Bo/gKW6Wq89Ce46moOtEq+7WyOqvLXsQgg==", + "version": "2.51.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/util/-/util-2.51.0.tgz", + "integrity": 
"sha512-xTcU3JhXA0OZQgu2LBLt7rh2O+107IILlEzUIBSS/rR/hVaVOKZK/Jtm5aMIevoAqD/XvKLf8RzurwCF7j/ccw==", "dependencies": { "tslib": "2.6.2" } @@ -11087,9 +11087,9 @@ } }, "node_modules/undici": { - "version": "5.28.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", - "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", "dependencies": { "@fastify/busboy": "^2.0.0" }, diff --git a/esp/src/package.json b/esp/src/package.json index 3a6501298f3..745acde3fe6 100644 --- a/esp/src/package.json +++ b/esp/src/package.json @@ -41,21 +41,21 @@ "@fluentui/react-hooks": "8.7.0", "@fluentui/react-icons-mdl2": "1.3.59", "@fluentui/react-migration-v8-v9": "9.6.3", - "@hpcc-js/chart": "2.83.1", - "@hpcc-js/codemirror": "2.61.2", - "@hpcc-js/common": "2.71.15", - "@hpcc-js/comms": "2.91.3", + "@hpcc-js/chart": "2.83.2", + "@hpcc-js/codemirror": "2.61.3", + "@hpcc-js/common": "2.71.16", + "@hpcc-js/comms": "2.92.0", "@hpcc-js/dataflow": "8.1.6", - "@hpcc-js/eclwatch": "2.73.44", - "@hpcc-js/graph": "2.85.13", - "@hpcc-js/html": "2.42.18", - "@hpcc-js/layout": "2.49.20", - "@hpcc-js/map": "2.77.19", - "@hpcc-js/other": "2.15.20", - "@hpcc-js/phosphor": "2.18.6", - "@hpcc-js/react": "2.53.14", - "@hpcc-js/tree": "2.40.15", - "@hpcc-js/util": "2.50.6", + "@hpcc-js/eclwatch": "2.74.0", + "@hpcc-js/graph": "2.85.14", + "@hpcc-js/html": "2.42.19", + "@hpcc-js/layout": "2.49.21", + "@hpcc-js/map": "2.77.20", + "@hpcc-js/other": "2.15.21", + "@hpcc-js/phosphor": "2.18.7", + "@hpcc-js/react": "2.53.15", + "@hpcc-js/tree": "2.40.16", + "@hpcc-js/util": "2.51.0", "@kubernetes/client-node": "0.20.0", "clipboard": "2.0.11", "d3-dsv": "3.0.1", diff --git a/esp/src/src-react/components/ECLArchive.tsx 
b/esp/src/src-react/components/ECLArchive.tsx index ca4ee1de6ae..e48267c3114 100644 --- a/esp/src/src-react/components/ECLArchive.tsx +++ b/esp/src/src-react/components/ECLArchive.tsx @@ -1,6 +1,6 @@ import * as React from "react"; import { CommandBar, ContextualMenuItemType, ICommandBarItemProps } from "@fluentui/react"; -import { Workunit, WUDetails, IScope } from "@hpcc-js/comms"; +import { Workunit, WsWorkunits, IScope } from "@hpcc-js/comms"; import { scopedLogger } from "@hpcc-js/util"; import nlsHPCC from "src/nlsHPCC"; import { useWorkunitArchive } from "../hooks/workunit"; @@ -15,14 +15,14 @@ import { MetricsPropertiesTables } from "./MetricsPropertiesTables"; const logger = scopedLogger("src-react/components/ECLArchive.tsx"); -const scopeFilterDefault: WUDetails.RequestNS.ScopeFilter = { +const scopeFilterDefault: Partial = { MaxDepth: 999999, - ScopeTypes: ["graph"] + ScopeTypes: { ScopeType: ["graph"] } }; -const nestedFilterDefault: WUDetails.RequestNS.NestedFilter = { +const nestedFilterDefault: WsWorkunits.NestedFilter = { Depth: 999999, - ScopeTypes: ["activity"] + ScopeTypes: { ScopeType: ["activity"] } }; interface ECLArchiveProps { diff --git a/esp/src/src-react/components/Metrics.tsx b/esp/src/src-react/components/Metrics.tsx index cfb5286a2a0..15c43167af1 100644 --- a/esp/src/src-react/components/Metrics.tsx +++ b/esp/src/src-react/components/Metrics.tsx @@ -198,17 +198,17 @@ export const Metrics: React.FunctionComponent = ({ .request({ ScopeFilter: { MaxDepth: 3, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }, NestedFilter: { Depth: 0, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }, PropertiesToReturn: { AllProperties: false, AllStatistics: true, AllHints: false, - Properties: ["WhenStarted", "TimeElapsed", "TimeLocalExecute"] + Properties: { Property: ["WhenStarted", "TimeElapsed", "TimeLocalExecute"] } }, ScopeOptions: { IncludeId: true, diff --git a/esp/src/src-react/components/WorkunitsDashboard.tsx 
b/esp/src/src-react/components/WorkunitsDashboard.tsx index e7821553827..b3cced2aafb 100644 --- a/esp/src/src-react/components/WorkunitsDashboard.tsx +++ b/esp/src/src-react/components/WorkunitsDashboard.tsx @@ -2,7 +2,7 @@ import * as React from "react"; import { Dropdown, IStackItemStyles, IStackStyles, IStackTokens, Overlay, Spinner, SpinnerSize, Stack, Text } from "@fluentui/react"; import { useConst } from "@fluentui/react-hooks"; import { Card, CardHeader, CardPreview } from "@fluentui/react-components"; -import { WorkunitsService, WUQuery } from "@hpcc-js/comms"; +import { WorkunitsService, WsWorkunits } from "@hpcc-js/comms"; import { Area, Column, Pie, Bar } from "@hpcc-js/chart"; import { chain, filter, group, map, sort } from "@hpcc-js/dataflow"; import * as Observable from "dojo/store/Observable"; @@ -33,7 +33,7 @@ const innerStackTokens: IStackTokens = { const service = new WorkunitsService({ baseUrl: "" }); -interface WorkunitEx extends WUQuery.ECLWorkunit { +interface WorkunitEx extends WsWorkunits.ECLWorkunit { Day: string; } @@ -72,7 +72,7 @@ export const WorkunitsDashboard: React.FunctionComponent { - setWorkunits([...map(response.Workunits.ECLWorkunit, (row: WUQuery.ECLWorkunit) => ({ ...row, Day: wuidToDate(row.Wuid) }))]); + setWorkunits([...map(response.Workunits.ECLWorkunit, (row: WsWorkunits.ECLWorkunit) => ({ ...row, Day: wuidToDate(row.Wuid) }))]); setLoading(false); }); }, [filterProps.lastNDays]); diff --git a/esp/src/src-react/hooks/metrics.ts b/esp/src/src-react/hooks/metrics.ts index 10f6d8b44b4..efdfa3da2ea 100644 --- a/esp/src/src-react/hooks/metrics.ts +++ b/esp/src/src-react/hooks/metrics.ts @@ -1,6 +1,6 @@ import * as React from "react"; import { useConst, useForceUpdate } from "@fluentui/react-hooks"; -import { WUDetails, WUDetailsMeta, WorkunitsService, IScope } from "@hpcc-js/comms"; +import { WsWorkunits, WorkunitsService, IScope } from "@hpcc-js/comms"; import { scopedLogger } from "@hpcc-js/util"; import { userKeyValStore } 
from "src/KeyValStore"; import { useWorkunit } from "./workunit"; @@ -102,27 +102,27 @@ export enum FetchStatus { COMPLETE } -const scopeFilterDefault: WUDetails.RequestNS.ScopeFilter = { +const scopeFilterDefault: Partial = { MaxDepth: 999999, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }; -const nestedFilterDefault: WUDetails.RequestNS.NestedFilter = { +const nestedFilterDefault: WsWorkunits.NestedFilter = { Depth: 0, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }; export function useWorkunitMetrics( wuid: string, - scopeFilter: WUDetails.RequestNS.ScopeFilter = scopeFilterDefault, - nestedFilter: WUDetails.RequestNS.NestedFilter = nestedFilterDefault -): [IScope[], { [id: string]: any }, WUDetailsMeta.Activity[], WUDetailsMeta.Property[], string[], string[], FetchStatus, () => void] { + scopeFilter: Partial = scopeFilterDefault, + nestedFilter: WsWorkunits.NestedFilter = nestedFilterDefault +): [IScope[], { [id: string]: any }, WsWorkunits.Activity2[], WsWorkunits.Property2[], string[], string[], FetchStatus, () => void] { const [workunit, state] = useWorkunit(wuid); const [data, setData] = React.useState([]); const [columns, setColumns] = React.useState<{ [id: string]: any }>([]); - const [activities, setActivities] = React.useState([]); - const [properties, setProperties] = React.useState([]); + const [activities, setActivities] = React.useState([]); + const [properties, setProperties] = React.useState([]); const [measures, setMeasures] = React.useState([]); const [scopeTypes, setScopeTypes] = React.useState([]); const [status, setStatus] = React.useState(FetchStatus.COMPLETE); @@ -175,15 +175,15 @@ export function useWorkunitMetrics( export function useQueryMetrics( querySet: string, queryId: string, - scopeFilter: WUDetails.RequestNS.ScopeFilter = scopeFilterDefault, - nestedFilter: WUDetails.RequestNS.NestedFilter = nestedFilterDefault -): [IScope[], { [id: string]: any }, WUDetailsMeta.Activity[], WUDetailsMeta.Property[], string[], string[], 
FetchStatus, () => void] { + scopeFilter: Partial = scopeFilterDefault, + nestedFilter: WsWorkunits.NestedFilter = nestedFilterDefault +): [IScope[], { [id: string]: any }, WsWorkunits.Activity2[], WsWorkunits.Property2[], string[], string[], FetchStatus, () => void] { const [query, state, _refresh] = useQuery(querySet, queryId); const [data, setData] = React.useState([]); const [columns, setColumns] = React.useState<{ [id: string]: any }>([]); - const [activities, setActivities] = React.useState([]); - const [properties, setProperties] = React.useState([]); + const [activities, setActivities] = React.useState([]); + const [properties, setProperties] = React.useState([]); const [measures, setMeasures] = React.useState([]); const [scopeTypes, setScopeTypes] = React.useState([]); const [status, setStatus] = React.useState(FetchStatus.COMPLETE); @@ -237,9 +237,9 @@ export function useWUQueryMetrics( wuid: string, querySet: string, queryId: string, - scopeFilter: WUDetails.RequestNS.ScopeFilter = scopeFilterDefault, - nestedFilter: WUDetails.RequestNS.NestedFilter = nestedFilterDefault -): [IScope[], { [id: string]: any }, WUDetailsMeta.Activity[], WUDetailsMeta.Property[], string[], string[], FetchStatus, () => void] { + scopeFilter: Partial = scopeFilterDefault, + nestedFilter: WsWorkunits.NestedFilter = nestedFilterDefault +): [IScope[], { [id: string]: any }, WsWorkunits.Activity2[], WsWorkunits.Property2[], string[], string[], FetchStatus, () => void] { const wuMetrics = useWorkunitMetrics(wuid, scopeFilter, nestedFilter); const queryMetrics = useQueryMetrics(querySet, queryId, scopeFilter, nestedFilter); return querySet && queryId ? 
[...queryMetrics] : [...wuMetrics]; diff --git a/esp/src/src-react/hooks/workunit.ts b/esp/src/src-react/hooks/workunit.ts index 9faff42ea3f..662b44dccdb 100644 --- a/esp/src/src-react/hooks/workunit.ts +++ b/esp/src/src-react/hooks/workunit.ts @@ -1,6 +1,6 @@ import * as React from "react"; import { useConst } from "@fluentui/react-hooks"; -import { Workunit, DFUWorkunit, Result, WUDetails, WUStateID, WUInfo, WorkunitsService } from "@hpcc-js/comms"; +import { Workunit, DFUWorkunit, Result, WsWorkunits, WUStateID, WorkunitsService } from "@hpcc-js/comms"; import { scopedLogger } from "@hpcc-js/util"; import nlsHPCC from "src/nlsHPCC"; import * as Utility from "src/Utility"; @@ -122,7 +122,7 @@ export function useWorkunitVariables(wuid: string): [Variable[], Workunit, WUSta return [variables, workunit, state, inc]; } -export interface SourceFile extends WUInfo.ECLSourceFile { +export interface SourceFile extends WsWorkunits.ECLSourceFile { __hpcc_parentName: string; } @@ -159,10 +159,10 @@ export function useWorkunitSourceFiles(wuid: string): [SourceFile[], Workunit, W return [sourceFiles, workunit, state, inc]; } -export function useWorkunitWorkflows(wuid: string): [WUInfo.ECLWorkflow[], Workunit, () => void] { +export function useWorkunitWorkflows(wuid: string): [WsWorkunits.ECLWorkflow[], Workunit, () => void] { const [workunit, state] = useWorkunit(wuid); - const [workflows, setWorkflows] = React.useState([]); + const [workflows, setWorkflows] = React.useState([]); const [count, increment] = useCounter(); React.useEffect(() => { @@ -186,7 +186,7 @@ export function useWorkunitXML(wuid: string): [string] { const [xml, setXML] = React.useState(""); React.useEffect(() => { - service.WUFile({ + service.WUFileEx({ Wuid: wuid, Type: "XML" }).then(response => { @@ -197,10 +197,10 @@ export function useWorkunitXML(wuid: string): [string] { return [xml]; } -export function useWorkunitExceptions(wuid: string): [WUInfo.ECLException[], Workunit, () => void] { +export 
function useWorkunitExceptions(wuid: string): [WsWorkunits.ECLException[], Workunit, () => void] { const [workunit, state] = useWorkunit(wuid); - const [exceptions, setExceptions] = React.useState([]); + const [exceptions, setExceptions] = React.useState([]); const [count, increment] = useCounter(); React.useEffect(() => { @@ -285,7 +285,7 @@ export interface HelperRow { workunit: Workunit; } -function mapHelpers(workunit: Workunit, helpers: WUInfo.ECLHelpFile[] = []): HelperRow[] { +function mapHelpers(workunit: Workunit, helpers: WsWorkunits.ECLHelpFile[] = []): HelperRow[] { return helpers.map((helper, i): HelperRow => { return { id: "H:" + i, @@ -298,7 +298,7 @@ function mapHelpers(workunit: Workunit, helpers: WUInfo.ECLHelpFile[] = []): Hel }); } -function mapThorLogInfo(workunit: Workunit, thorLogInfo: WUInfo.ThorLogInfo[] = []): HelperRow[] { +function mapThorLogInfo(workunit: Workunit, thorLogInfo: WsWorkunits.ThorLogInfo[] = []): HelperRow[] { const retVal: HelperRow[] = []; for (let i = 0; i < thorLogInfo.length; ++i) { for (let j = 0; j < thorLogInfo[i].NumberSlaves; ++j) { @@ -352,9 +352,9 @@ export function useWorkunitHelpers(wuid: string): [HelperRow[], () => void] { return [helpers, incCounter]; } -export function useGlobalWorkunitNotes(): [WUDetails.Note[]] { +export function useGlobalWorkunitNotes(): [WsWorkunits.Note[]] { - const [notes, setNotes] = React.useState([]); + const [notes, setNotes] = React.useState([]); React.useEffect(() => { const workunit = Workunit.attach({ baseUrl: "" }, ""); diff --git a/esp/src/src/ECLArchiveWidget.ts b/esp/src/src/ECLArchiveWidget.ts index e1ac7eb193b..2f5efaa752a 100644 --- a/esp/src/src/ECLArchiveWidget.ts +++ b/esp/src/src/ECLArchiveWidget.ts @@ -1,11 +1,11 @@ /* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */ import { ECLEditor } from "@hpcc-js/codemirror"; import { extent, Palette } from "@hpcc-js/common"; -import { Workunit } from "@hpcc-js/comms"; +import { Workunit, WsWorkunits } 
from "@hpcc-js/comms"; import { Table } from "@hpcc-js/dgrid"; import { SplitPanel } from "@hpcc-js/phosphor"; import { DirectoryTree } from "@hpcc-js/tree"; -import { xml2json } from "@hpcc-js/util"; +import { RecursivePartial, xml2json } from "@hpcc-js/util"; import "dijit/form/Button"; import "dijit/layout/BorderContainer"; import "dijit/layout/ContentPane"; @@ -168,10 +168,10 @@ export class ECLArchiveWidget { .relativeSizes([0.2, 0.8]) .lazyRender() ; - const scopesOptions = { + const scopesOptions: RecursivePartial = { ScopeFilter: { MaxDepth: 999999, - ScopeTypes: ["graph"] + ScopeTypes: { ScopeType: ["graph"] } }, ScopeOptions: { IncludeMatchedScopesInResults: true, @@ -189,7 +189,7 @@ export class ECLArchiveWidget { }, NestedFilter: { Depth: 999999, - ScopeTypes: ["activity"] + ScopeTypes: { ScopeType: ["activity"] } }, PropertiesToReturn: { AllStatistics: true, diff --git a/esp/src/src/ESPWorkunit.ts b/esp/src/src/ESPWorkunit.ts index 124dabbf137..472105a6a16 100644 --- a/esp/src/src/ESPWorkunit.ts +++ b/esp/src/src/ESPWorkunit.ts @@ -6,7 +6,7 @@ import * as all from "dojo/promise/all"; import * as Observable from "dojo/store/Observable"; import * as topic from "dojo/topic"; -import { Workunit as HPCCWorkunit, WorkunitsService, WUQuery, WUUpdate } from "@hpcc-js/comms"; +import { Workunit as HPCCWorkunit, WorkunitsService, WsWorkunits as WsWorkunitsNS, WUUpdate } from "@hpcc-js/comms"; import { IEvent } from "@hpcc-js/util"; import * as ESPRequest from "./ESPRequest"; @@ -888,7 +888,7 @@ const Workunit = declare([ESPUtil.Singleton], { // jshint ignore:line return (this._hpccWU as HPCCWorkunit).fetchDetails({ ScopeFilter: { MaxDepth: 999999, - ScopeTypes: ["graph"] + ScopeTypes: { ScopeType: ["graph"] } }, ScopeOptions: { IncludeMatchedScopesInResults: true, @@ -906,7 +906,7 @@ const Workunit = declare([ESPUtil.Singleton], { // jshint ignore:line }, NestedFilter: { Depth: 999999, - ScopeTypes: ["activity"] + ScopeTypes: { ScopeType: ["activity"] } }, 
PropertiesToReturn: { AllStatistics: false, @@ -1043,10 +1043,10 @@ export function CreateWUQueryStoreLegacy(options) { const service = new WorkunitsService({ baseUrl: "" }); -export type WUQueryStore = BaseStore; +export type WUQueryStore = BaseStore; -export function CreateWUQueryStore(): BaseStore { - const store = new Paged({ +export function CreateWUQueryStore(): BaseStore { + const store = new Paged({ start: "PageStartFrom", count: "PageSize", sortBy: "Sortby", diff --git a/esp/src/src/Timings.ts b/esp/src/src/Timings.ts index a420927bf77..fc26677955b 100644 --- a/esp/src/src/Timings.ts +++ b/esp/src/src/Timings.ts @@ -71,17 +71,17 @@ export class Timings { .request({ ScopeFilter: { MaxDepth: 3, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }, NestedFilter: { Depth: 0, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }, PropertiesToReturn: { AllProperties: false, AllStatistics: true, AllHints: false, - Properties: ["WhenStarted", "TimeElapsed"] + Properties: { Property: ["WhenStarted", "TimeElapsed"] } }, ScopeOptions: { IncludeId: true, @@ -212,17 +212,17 @@ export class Timings { this.fetchDetailsNormalizedPromise = Promise.all([this.wu.fetchDetailsMeta(), this.wu.fetchDetailsRaw({ ScopeFilter: { MaxDepth: 999999, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }, NestedFilter: { Depth: 0, - ScopeTypes: [] + ScopeTypes: { ScopeType: [] } }, PropertiesToReturn: { AllProperties: false, AllStatistics: true, AllHints: false, - Properties: [] + Properties: { Property: [] } }, ScopeOptions: { IncludeId: true, diff --git a/esp/src/src/WUScopeController.ts b/esp/src/src/WUScopeController.ts index b32efdf5a03..d72a6689e3a 100644 --- a/esp/src/src/WUScopeController.ts +++ b/esp/src/src/WUScopeController.ts @@ -225,7 +225,8 @@ export abstract class WUScopeControllerBase Date: Thu, 18 Apr 2024 08:41:01 +0100 Subject: [PATCH 18/18] HPCC-31633 Issue fetching WsECL port from WsTopology When built for containerised environment, the port number needs to be fetched from 
WsResources rather than WsTopology. Signed-off-by: Gordon Smith --- esp/src/src/WsTopology.ts | 97 ++++++++++++++++++++++----------------- 1 file changed, 54 insertions(+), 43 deletions(-) diff --git a/esp/src/src/WsTopology.ts b/esp/src/src/WsTopology.ts index d23e4bc97f3..7a467c662a5 100644 --- a/esp/src/src/WsTopology.ts +++ b/esp/src/src/WsTopology.ts @@ -1,4 +1,7 @@ -import { Connection } from "@hpcc-js/comms"; +import { Connection, ResourcesService, Topology } from "@hpcc-js/comms"; +import { scopedLogger } from "@hpcc-js/util"; +import { containerized } from "src/BuildInfo"; +import { Memory } from "src/store/Memory"; import * as arrayUtil from "dojo/_base/array"; import * as Deferred from "dojo/_base/Deferred"; import * as lang from "dojo/_base/lang"; @@ -9,7 +12,8 @@ import * as aspect from "dojo/aspect"; import * as ESPRequest from "./ESPRequest"; import * as Utility from "./Utility"; -import { Memory } from "src/store/Memory"; + +const logger = scopedLogger("src/WsTopology.ts"); declare const dojoConfig; @@ -119,61 +123,68 @@ export function TpClusterQuery(params) { return ESPRequest.send("WsTopology", "TpClusterQuery", params); } -export function GetESPServiceBaseURL(type) { - const deferred = new Deferred(); - this.TpServiceQuery({}).then(function (response) { +const eclqueriesPromise: { [id: string]: Promise } = {}; +export function GetESPServiceBaseURL(type: string): Promise { + if (!eclqueriesPromise[type]) { let retVal = ESPRequest.getURL({ port: window.location.protocol === "https:" ?
18002 : 8002, pathname: "" }); - if (lang.exists("TpServiceQueryResponse.ServiceList.TpEspServers.TpEspServer", response)) { - arrayUtil.forEach(response.TpServiceQueryResponse.ServiceList.TpEspServers.TpEspServer, function (item, idx) { - if (lang.exists("TpBindings.TpBinding", item)) { - arrayUtil.forEach(item.TpBindings.TpBinding, function (binding, idx) { - if (binding.Service === type && binding.Protocol + ":" === location.protocol) { - retVal = ESPRequest.getURL({ - port: binding.Port, - pathname: "" - }); - return true; - } + if (containerized) { + const resources = new ResourcesService({ baseUrl: "" }); + eclqueriesPromise[type] = resources.ServiceQuery({ Type: type }).then(response => { + const service = response?.Services?.Service?.find(s => s.Type === type); + if (service) { + retVal = ESPRequest.getURL({ + protocol: service.TLSSecure ? "https:" : "http:", + port: service.Port, + pathname: "" }); } - if (retVal !== "") - return true; + return retVal; + }).catch(e => { + logger.error(e); + return retVal; + }); + } else { + const topology = Topology.attach({ baseUrl: "" }); + eclqueriesPromise[type] = topology.fetchServices({ Type: type }).then(response => { + const service = response?.TpEspServers?.TpEspServer?.find(s => s.Type === type); + if (service) { + const binding = service.TpBindings?.TpBinding?.find(b => b.Service === type && b.Protocol + ":" === location.protocol); + if (binding) { + retVal = ESPRequest.getURL({ + port: binding.Port, + pathname: "" + }); + } + } + return retVal; + }).catch(e => { + logger.error(e); + return retVal; }); } - deferred.resolve(retVal); - }); - return deferred.promise; + } + return eclqueriesPromise[type]; } -export const WsEclURL = ""; -export function GetWsEclURL(type) { - const deferred = new Deferred(); - if (this.WsEclURL === "") { - const context = this; - this.GetESPServiceBaseURL("ws_ecl").then(function (response) { - context.WsEclURL = response + "/WsEcl/"; - deferred.resolve(context.WsEclURL + type + 
"/query/"); +let WsEclURL: Promise; +export function GetWsEclURL(type): Promise { + if (!WsEclURL) { + WsEclURL = GetESPServiceBaseURL(containerized ? "eclqueries" : "ws_ecl").then(response => { + return response + "/WsEcl/"; }); - } else { - deferred.resolve(this.WsEclURL + type + "/query/"); } - return deferred.promise; + return WsEclURL.then(response => response + type + "/query/"); } -export const WsEclIFrameURL = ""; -export function GetWsEclIFrameURL(type) { - const deferred = new Deferred(); - if (this.WsEclIFrameURL === "") { - const context = this; - this.GetESPServiceBaseURL("ws_ecl").then(function (response) { - context.WsEclIFrameURL = response + dojoConfig.urlInfo.basePath + "/stub.htm?Widget=IFrameWidget&src=" + encodeURIComponent("/WsEcl/"); - deferred.resolve(context.WsEclIFrameURL + encodeURIComponent(type + "/query/")); +let WsEclIFrameURL: Promise; +export function GetWsEclIFrameURL(type): Promise { + if (!WsEclIFrameURL) { + WsEclIFrameURL = GetESPServiceBaseURL(containerized ? "eclqueries" : "ws_ecl").then(response => { + return response + dojoConfig.urlInfo.basePath + "/stub.htm?Widget=IFrameWidget&src=" + encodeURIComponent("/WsEcl/"); }); - } else { - deferred.resolve(this.WsEclIFrameURL + encodeURIComponent(type + "/query/")); } - return deferred.promise; + return WsEclIFrameURL.then(url => url + encodeURIComponent(type + "/query/")); } export function TpTargetClusterQuery(params) { return ESPRequest.send("WsTopology", "TpTargetClusterQuery", params);