From 65a2856b32448fde939ff180f71a966077e55dbf Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Thu, 15 Jun 2023 17:47:17 +0100 Subject: [PATCH 01/46] Split off 9.2.0 Signed-off-by: Gavin Halliday --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 8e511cb60..7e8d60cd0 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.1.0-0-SNAPSHOT + 9.3.0-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 4d03d1a38..d1668d7aa 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.1.0-0-SNAPSHOT + 9.3.0-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index f344e3e36..d6a969f14 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.1.0-0-SNAPSHOT + 9.3.0-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index b331c05eb..c2291c799 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.1.0-0-SNAPSHOT + 9.3.0-0-SNAPSHOT From 48d067cd9ed9122351b97ab3255cc28c0da937dd Mon Sep 17 00:00:00 2001 From: Rodrigo Date: Wed, 21 Jun 2023 09:14:47 -0400 Subject: [PATCH 02/46] HPCC4J-514 Ensure WUQueryTest creates multiple WUs (#618) * HPCC4J-334 Automate WsClient Interface (#616) - Adds python to generate new wsdls, stub, and wrappers - Adds vscode launch profile - Updates filespray wsdl reference in pom - Targets source code wrapper utility rather than jared version Signed-off-by: Rodrigo Pastrana * HPCC4J-514 Ensure WUQueryTest creates multiple WUs - Ensures WUQueryTest creates multiple WUs Signed-off-by: Rodrigo Pastrana --------- Signed-off-by: Rodrigo Pastrana --- .../hpccsystems/ws/client/WUQueryTest.java | 38 +++++++++++++++---- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/wsclient/src/test/java/org/hpccsystems/ws/client/WUQueryTest.java b/wsclient/src/test/java/org/hpccsystems/ws/client/WUQueryTest.java index e8bb7b373..e6275c320 100644 --- a/wsclient/src/test/java/org/hpccsystems/ws/client/WUQueryTest.java +++ b/wsclient/src/test/java/org/hpccsystems/ws/client/WUQueryTest.java @@ -26,19 +26,35 @@ public static void setup() throws Exception wswuclient = wsclient.getWsWorkunitsClient(); } + public WorkunitWrapper createWU(String ecl, String cluster, String jobName, String owner, ApplicationValueWrapper av) + { + WorkunitWrapper wu=new WorkunitWrapper(); + wu.setECL(ecl); + wu.setCluster(thorclustername); + wu.setJobname(jobName); + wu.setOwner(owner); + wu.getApplicationValues().add(av); + try + { + wu=wswuclient.compileWUFromECL(wu); + wu=wswuclient.runWorkunit(wu.getWuid(),null,null,null,false,null); + } + catch (Exception e) + { + System.err.println("WUQueryTest: Failed to create new WU for test: '" + ecl + "'" ); + System.err.println(e.getLocalizedMessage()); + return null; + } + return wu; + } @Test public void testGetWorkunitByAppName() throws Exception, ArrayOfEspExceptionWrapper, ArrayOfECLExceptionWrapper { - WorkunitWrapper wu=new WorkunitWrapper(); - wu.setECL("OUTPUT('1');"); - wu.setCluster(thorclustername); - wu.setJobname("testGetWorkunitByAppName"); - wu.setOwner("user"); ApplicationValueWrapper av=new ApplicationValueWrapper("HIPIE","testkey","testvalue"); - wu.getApplicationValues().add(av); - wu=wswuclient.compileWUFromECL(wu); - 
wu=wswuclient.runWorkunit(wu.getWuid(),null,null,null,false,null); + WorkunitWrapper wu = createWU("OUTPUT('1');", thorclustername, "1testGetWorkunitByAppName", "1user", av); + + assumeTrue("testGetWorkunitByAppName failed to create WU!", wu != null); WUQueryWrapper info = new WUQueryWrapper().setSortBy(SortBy.WUID).setDescending(true); info.getApplicationValues().add(av); @@ -58,8 +74,14 @@ public void testGetWorkunitByAppName() throws Exception, ArrayOfEspExceptionWrap @Test public void testGetWorkunitSort() throws Exception { + + assumeTrue("Testing WU sortBy failed to create First WU!", null != createWU("OUTPUT('a');", thorclustername, "aTestWorkunitSortBA", "aTestUser", new ApplicationValueWrapper("AppA","testkeyA","testvalueA"))); + assumeTrue("Testing WU sortBy failed to create Second WU!", null != createWU("OUTPUT('b');", thorclustername, "bTestWorkunitSortBy", "bTestUser", new ApplicationValueWrapper("AppB","testkeyB","testvalueB"))); + //wuid descending List result=wswuclient.getWorkunits(new WUQueryWrapper().setSortBy(SortBy.WUID).setDescending(true)); + assumeTrue("Testing WU sortBy failed to find enough WUs!", result.size() > 1); + if (result.get(0).getWuid().compareToIgnoreCase(result.get(1).getWuid())<0) { Assert.fail("descending workunits in wrong order:" + result.get(0).getWuid() + " then " + result.get(1).getWuid()); From d378484e9213ac07e92a3ad7117a28b908cf1632 Mon Sep 17 00:00:00 2001 From: Rodrigo Date: Thu, 22 Jun 2023 13:09:32 -0400 Subject: [PATCH 03/46] Update publish-snapshots-on-merge.yml (#617) extend action to on branch creations as well --- .github/workflows/publish-snapshots-on-merge.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish-snapshots-on-merge.yml b/.github/workflows/publish-snapshots-on-merge.yml index 4b57324cd..3b305d462 100644 --- a/.github/workflows/publish-snapshots-on-merge.yml +++ b/.github/workflows/publish-snapshots-on-merge.yml @@ -4,6 +4,9 @@ on: branches: - 'master' - 'candidate-*' + create: + branches: + - 'candidate-*' jobs: publish: runs-on: ubuntu-latest @@ -21,4 +24,4 @@ jobs: run: mvn --batch-mode deploy env: MAVEN_USERNAME: ${{ secrets.OSSRH_USER_NAME }} - MAVEN_PASSWORD: ${{ secrets.OSSRH_PASS }} \ No newline at end of file + MAVEN_PASSWORD: ${{ secrets.OSSRH_PASS }} From 8532695d957170ad5fb1517b200410e34d1734e4 Mon Sep 17 00:00:00 2001 From: Rodrigo Date: Tue, 11 Jul 2023 12:13:03 -0400 Subject: [PATCH 04/46] HPCC4J-522 GithutAction targets java8 (#620) Signed-off-by: Rodrigo Pastrana Co-authored-by: Pastrana --- .github/workflows/publish-snapshots-on-merge.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-snapshots-on-merge.yml b/.github/workflows/publish-snapshots-on-merge.yml index 3b305d462..baecdbfaf 100644 --- a/.github/workflows/publish-snapshots-on-merge.yml +++ b/.github/workflows/publish-snapshots-on-merge.yml @@ -15,7 +15,7 @@ jobs: - name: Set up Maven Central Repository uses: actions/setup-java@v3 with: - java-version: '11' + java-version: '8' distribution: 'adopt' server-id: ossrh server-username: MAVEN_USERNAME From 678afb5a4e7956d21e5517f609408da52e4e94f0 Mon Sep 17 00:00:00 2001 From: Rodrigo Date: Mon, 24 Jul 2023 12:07:25 -0400 Subject: [PATCH 05/46] Create Jirabot.yml Signed-off-by: Rodrigo --- .github/workflows/Jirabot.yml | 101 ++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 .github/workflows/Jirabot.yml diff --git a/.github/workflows/Jirabot.yml 
b/.github/workflows/Jirabot.yml new file mode 100644 index 000000000..948bccbac --- /dev/null +++ b/.github/workflows/Jirabot.yml @@ -0,0 +1,101 @@ +# JiraBot github action +# ===================== +# +name: jirabot + +on: + pull_request_target: + types: [opened, reopened] + branches: + - "master" + - "candidate-*" + +jobs: + jirabot: + runs-on: ubuntu-20.04 + steps: + - uses: "actions/setup-python@v2" + with: + python-version: "3.8" + - name: "Install dependencies" + run: | + set -xe + python -VV + python -m site + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade jira + - name: "Run" + env: + JIRABOT_USERNAME : ${{ secrets.JIRABOT_USERNAME }} + JIRABOT_PASSWORD : ${{ secrets.JIRABOT_PASSWORD }} + JIRA_URL : ${{ secrets.JIRA_URL }} + PULL_REQUEST_NUMBER : ${{ github.event.pull_request.number }} + PULL_REQUEST_TITLE : ${{ github.event.pull_request.title }} + PULL_REQUEST_AUTHOR_NAME : ${{ github.event.pull_request.user.login }} + PULL_URL: ${{ github.event.pull_request.html_url }} + COMMENTS_URL: ${{ github.event.pull_request.comments_url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + run: | + import os + import re + from jira.client import JIRA + + jirabot_user = os.environ['JIRABOT_USERNAME'] + jirabot_pass = os.environ['JIRABOT_PASSWORD'] + jira_url = os.environ['JIRA_URL'] + pr = os.environ['PULL_REQUEST_NUMBER'] + title = os.environ['PULL_REQUEST_TITLE'] + user = os.environ['PULL_REQUEST_AUTHOR_NAME'] + comments_url = os.environ['COMMENTS_URL'] + pull_url = os.environ['PULL_URL'] + github_token = os.environ['GITHUB_TOKEN'] + + print("%s %s %s" % (title, user, comments_url)) + status = '' + issuem = re.search("(HPCC4J|JAPI)-[0-9]+", title) + if issuem: + issue_name = issuem.group() + if user == 'kunalaswani': + user = 'kunal.aswani' + if user == 'timothyklemm': + user = 'klemti01' + if user == 'jpmcmu': + user = 'mcmuja01' + if user == 'asselitx': + user = 'terrenceasselin' + if user == 'jeclrsg': + user = 'clemje01' + if user == 'jackdelv': + user = 'delvecja' + options = { + 'server': jira_url + } + jira = JIRA(options=options, basic_auth=(jirabot_user, jirabot_pass)) + issue = jira.issue(issue_name) + status = jira_url + '/browse/' + issue_name + '\\n' + if False and issue.fields.status.name != 'Active' and issue.fields.status.name != 'Open' and issue.fields.status.name != 'New' and issue.fields.status.name != 'Discussing' and issue.fields.status.name != 'Awaiting Information': + status += 'Jira not updated (state was not active or new)' + elif issue.fields.customfield_10010 != None: + if issue.fields.customfield_10010 != pull_url: + status += 'Jira not updated (pull request "%s" already registered)' % issue.fields.customfield_10010 + else: + status += 'This pull request is already registered' + elif issue.fields.assignee is not None and issue.fields.assignee.name.lower() != user.lower(): + status += 'Jira not updated (user does not match)' + else: + if issue.fields.assignee is None: + jira.assign_issue(issue, user) + issue.update(fields={'customfield_10010': pull_url}) + issue = jira.issue(issue_name) + try: + transitions = jira.transitions(issue) + jira.transition_issue(issue, '291') # Attach Pull Request + except: + status += 'Failed to set to merge pending: transitions=%s' % transitions + status += 'Jira updated' + print('curl -X POST %s -H "Content-Type: application/json" -H "Authorization: token %s" --data \'{ "body": "%s" }\'' % ( comments_url, github_token, status )) + os.system('curl -X POST %s -H "Content-Type: application/json" -H 
"Authorization: token %s" --data \'{ "body": "%s" }\'' % ( comments_url, github_token, status )) + + print(status) + shell: python From a5611b82778b32dc8b91f2332b6eb8ec40d806d5 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Fri, 18 Aug 2023 10:29:36 -0400 Subject: [PATCH 06/46] HPCC4J-532 Jirabot should translate hpcc4j prefix (#633) - Added logic to translate hpcc4j prefix Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/Jirabot.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Jirabot.yml b/.github/workflows/Jirabot.yml index 948bccbac..0f28a2876 100644 --- a/.github/workflows/Jirabot.yml +++ b/.github/workflows/Jirabot.yml @@ -25,7 +25,7 @@ jobs: python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade jira - name: "Run" - env: + env: JIRABOT_USERNAME : ${{ secrets.JIRABOT_USERNAME }} JIRABOT_PASSWORD : ${{ secrets.JIRABOT_PASSWORD }} JIRA_URL : ${{ secrets.JIRA_URL }} @@ -35,12 +35,12 @@ jobs: PULL_URL: ${{ github.event.pull_request.html_url }} COMMENTS_URL: ${{ github.event.pull_request.comments_url }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - + run: | import os import re from jira.client import JIRA - + jirabot_user = os.environ['JIRABOT_USERNAME'] jirabot_pass = os.environ['JIRABOT_PASSWORD'] jira_url = os.environ['JIRA_URL'] @@ -50,12 +50,13 @@ jobs: comments_url = os.environ['COMMENTS_URL'] pull_url = os.environ['PULL_URL'] github_token = os.environ['GITHUB_TOKEN'] - + print("%s %s %s" % (title, user, comments_url)) status = '' issuem = re.search("(HPCC4J|JAPI)-[0-9]+", title) if issuem: - issue_name = issuem.group() + nameCorrectionPattern = re.compile("hpcc4j", re.IGNORECASE) + issue_name = nameCorrectionPattern.sub("JAPI",issuem.group()) if user == 'kunalaswani': user = 'kunal.aswani' if user == 'timothyklemm': @@ -96,6 +97,6 @@ jobs: status += 'Jira updated' print('curl -X POST %s -H "Content-Type: application/json" -H "Authorization: token %s" --data \'{ "body": "%s" }\'' % ( comments_url, github_token, status )) os.system('curl -X POST %s -H "Content-Type: application/json" -H "Authorization: token %s" --data \'{ "body": "%s" }\'' % ( comments_url, github_token, status )) - + print(status) shell: python From 50acafd3c1fae13ceb2f00a5a7e04482047a8eef Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 7 Sep 2023 17:38:14 +0100 Subject: [PATCH 07/46] Split off 9.4.0 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 7e8d60cd0..55812de0b 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.3.0-0-SNAPSHOT + 9.4.1-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index d1668d7aa..494684cc9 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.3.0-0-SNAPSHOT + 9.4.1-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index d6a969f14..f8c0cce54 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.3.0-0-SNAPSHOT + 9.4.1-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index c2291c799..342f0f699 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.3.0-0-SNAPSHOT + 9.4.1-0-SNAPSHOT From 
acc0ecba312b6298d9fac92023a93eeb95e9ca78 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 5 Oct 2023 17:26:34 +0100 Subject: [PATCH 08/46] Split off 9.4.2 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 55812de0b..2d2a9f4de 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.1-0-SNAPSHOT + 9.4.3-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 494684cc9..7a50bc27f 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.1-0-SNAPSHOT + 9.4.3-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index f8c0cce54..1cc2dc8a8 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.1-0-SNAPSHOT + 9.4.3-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 342f0f699..67a202a11 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.1-0-SNAPSHOT + 9.4.3-0-SNAPSHOT From 905b095a6bf8df30498eee693af5e5875f8942ae Mon Sep 17 00:00:00 2001 From: James McMullan Date: Fri, 13 Oct 2023 09:49:33 -0400 Subject: [PATCH 09/46] HPCC4J-542 DFSClient: Create JUnit for read retry (#648) * HPCC4J-542 DFSClient: Create JUnit for read retry - Added file part failure retry test - Fixed retry issue on initial connection Signed-off-by: James McMullan James.McMullan@lexisnexis.com * Code review changes --------- Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../hpccsystems/dfs/client/DataPartition.java | 55 +++- .../dfs/client/RowServiceInputStream.java | 241 +++++++++--------- .../dfs/client/DFSReadWriteTest.java | 47 ++++ 3 files changed, 227 insertions(+), 116 deletions(-) diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java index bc648b49c..589e62f4c 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java @@ -16,6 +16,10 @@ package org.hpccsystems.dfs.client; import java.io.Serializable; +import java.security.InvalidParameterException; +import java.util.List; +import java.util.ArrayList; +import java.util.Arrays; import org.hpccsystems.commons.ecl.FileFilter; import org.hpccsystems.commons.errors.HpccFileException; @@ -261,11 +265,60 @@ public String[] getCopyLocations() public String getCopyIP(int copyindex) { int copiescount = copyLocations.length; - if (copyindex < 0 || copyindex >= copiescount) return null; + if (copyindex < 0 || copyindex >= copiescount) + { + return null; + } return copyLocations[copyindex]; } + /** + * Set the copy IP + * + * @param copyIndex + * the copyindex + * @param copyIP The IP of the file part copy + */ + public void setCopyIP(int copyIndex, String copyIP) + { + if (copyIndex < 0 || copyIndex >= copyLocations.length) + { + return; + } + + copyLocations[copyIndex] = copyIP; + } + + /** + * Add file part copy + * + * @param index The index at which to insert the file part copy + * @param copyIP The IP of the new file part copy + * @param copyPath The path of the new file part copy + */ + public void add(int index, String copyIP, String copyPath) throws Exception + { + if (index < 0 || index > copyLocations.length) + { + throw new 
InvalidParameterException("Insertion index: " + index + " is invalid." + + "Expected index in range of: [0," + copyLocations.length + "]"); + } + + if (copyIP == null || copyPath == null) + { + throw new InvalidParameterException("Copy IP or Path are invalid, must be non-null."); + } + + List copyLocationsList = new ArrayList<>(Arrays.asList(copyLocations)); + copyLocationsList.add(index, copyIP); + copyLocations = copyLocationsList.toArray(new String[0]); + + List copyPathList = new ArrayList<>(Arrays.asList(copyPaths)); + copyPathList.add(index, copyPath); + copyPaths = copyPathList.toArray(new String[0]); + } + /** * Count of copies available for this file part. * @return copy locations size diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/RowServiceInputStream.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/RowServiceInputStream.java index 795fd8478..0ea652111 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/RowServiceInputStream.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/RowServiceInputStream.java @@ -520,6 +520,11 @@ public String getIP() return this.dataPart.getCopyIP(prioritizedCopyIndexes.get(getFilePartCopy())); } + private String getCopyPath() + { + return this.dataPart.getCopyPath(prioritizedCopyIndexes.get(getFilePartCopy())); + } + private int getFilePartCopy() { return filePartCopyIndexPointer; @@ -1528,150 +1533,156 @@ private void makeActive() throws HpccFileException this.active.set(false); this.handle = 0; - try + boolean needsRetry = false; + do { - log.debug("Attempting to connect to file part : '" + dataPart.getThisPart() + "' Copy: '" + (getFilePartCopy() + 1) + "' on IP: '" - + getIP() + "'"); - + needsRetry = false; try { - if (getUseSSL()) + log.debug("Attempting to connect to file part : '" + dataPart.getThisPart() + "' Copy: '" + + (getFilePartCopy() + 1) + "' on IP: '" + getIP() + "'" + " for Path: '" + getCopyPath() + "'"); + try + { + if (getUseSSL()) + { + SSLSocketFactory ssf = (SSLSocketFactory) SSLSocketFactory.getDefault(); + sock = (SSLSocket) ssf.createSocket(); + + // Optimize for bandwidth over latency and connection time. + // We are opening up a long standing connection and potentially reading a significant amount of + // data + // So we don't care as much about individual packet latency or connection time overhead + sock.setPerformancePreferences(0, 1, 2); + sock.connect(new InetSocketAddress(this.getIP(), this.dataPart.getPort()), this.connectTimeout); + + log.debug("Attempting SSL handshake..."); + ((SSLSocket) sock).startHandshake(); + log.debug("SSL handshake successful..."); + log.debug(" Remote address = " + sock.getInetAddress().toString() + " Remote port = " + sock.getPort()); + } + else + { + SocketFactory sf = SocketFactory.getDefault(); + sock = sf.createSocket(); + + // Optimize for bandwidth over latency and connection time. 
+ // We are opening up a long standing connection and potentially reading a significant amount of + // data + // So we don't care as much about individual packet latency or connection time overhead + sock.setPerformancePreferences(0, 1, 2); + sock.connect(new InetSocketAddress(this.getIP(), this.dataPart.getPort()), this.connectTimeout); + } + + this.sock.setSoTimeout(socketOpTimeoutMs); + + log.debug("Connected: Remote address = " + sock.getInetAddress().toString() + " Remote port = " + sock.getPort()); + } + catch (java.net.UnknownHostException e) { - SSLSocketFactory ssf = (SSLSocketFactory) SSLSocketFactory.getDefault(); - sock = (SSLSocket) ssf.createSocket(); - - // Optimize for bandwidth over latency and connection time. - // We are opening up a long standing connection and potentially reading a significant amount of - // data - // So we don't care as much about individual packet latency or connection time overhead - sock.setPerformancePreferences(0, 1, 2); - sock.connect(new InetSocketAddress(this.getIP(), this.dataPart.getPort()), this.connectTimeout); - - log.debug("Attempting SSL handshake..."); - ((SSLSocket) sock).startHandshake(); - log.debug("SSL handshake successful..."); - log.debug(" Remote address = " + sock.getInetAddress().toString() + " Remote port = " + sock.getPort()); + throw new HpccFileException("Bad file part IP address or host name: " + this.getIP(), e); } - else + catch (java.io.IOException e) { - SocketFactory sf = SocketFactory.getDefault(); - sock = sf.createSocket(); - - // Optimize for bandwidth over latency and connection time. - // We are opening up a long standing connection and potentially reading a significant amount of - // data - // So we don't care as much about individual packet latency or connection time overhead - sock.setPerformancePreferences(0, 1, 2); - sock.connect(new InetSocketAddress(this.getIP(), this.dataPart.getPort()), this.connectTimeout); + throw new HpccFileException(e); } - this.sock.setSoTimeout(socketOpTimeoutMs); + try + { + this.dos = new java.io.DataOutputStream(sock.getOutputStream()); + this.dis = new java.io.DataInputStream(sock.getInputStream()); + } + catch (java.io.IOException e) + { + throw new HpccFileException("Failed to create streams", e); + } - log.debug("Connected: Remote address = " + sock.getInetAddress().toString() + " Remote port = " + sock.getPort()); - } - catch (java.net.UnknownHostException e) - { - throw new HpccFileException("Bad file part addr " + this.getIP(), e); - } - catch (java.io.IOException e) - { - throw new HpccFileException(e); - } + //------------------------------------------------------------------------------ + // Check protocol version + //------------------------------------------------------------------------------ - try - { - this.dos = new java.io.DataOutputStream(sock.getOutputStream()); - this.dis = new java.io.DataInputStream(sock.getInputStream()); - } - catch (java.io.IOException e) - { - throw new HpccFileException("Failed to create streams", e); - } + try + { + String msg = makeGetVersionRequest(); + int msgLen = msg.length(); - //------------------------------------------------------------------------------ - // Check protocol version - //------------------------------------------------------------------------------ + this.dos.writeInt(msgLen); + this.dos.write(msg.getBytes(HPCCCharSet), 0, msgLen); + this.dos.flush(); + } + catch (IOException e) + { + throw new HpccFileException("Failed on initial remote read trans", e); + } - try - { - String msg = makeGetVersionRequest(); 
- int msgLen = msg.length(); + RowServiceResponse response = readResponse(); + if (response.len == 0) + { + useOldProtocol = true; + } + else + { + useOldProtocol = false; - this.dos.writeInt(msgLen); - this.dos.write(msg.getBytes(HPCCCharSet), 0, msgLen); - this.dos.flush(); - } - catch (IOException e) - { - throw new HpccFileException("Failed on initial remote read trans", e); - } + byte[] versionBytes = new byte[response.len]; + try + { + this.dis.readFully(versionBytes); + } + catch (IOException e) + { + throw new HpccFileException("Error while attempting to read version response.", e); + } - RowServiceResponse response = readResponse(); - if (response.len == 0) - { - useOldProtocol = true; - } - else - { - useOldProtocol = false; + rowServiceVersion = new String(versionBytes, HPCCCharSet); + } + + //------------------------------------------------------------------------------ + // Send initial read request + //------------------------------------------------------------------------------ - byte[] versionBytes = new byte[response.len]; try { - this.dis.readFully(versionBytes); + String readTrans = null; + if (this.tokenBin == null) + { + this.tokenBin = new byte[0]; + readTrans = makeInitialRequest(); + } + else + { + readTrans = makeTokenRequest(); + } + + int transLen = readTrans.length(); + this.dos.writeInt(transLen); + this.dos.write(readTrans.getBytes(HPCCCharSet), 0, transLen); + this.dos.flush(); } catch (IOException e) { - throw new HpccFileException("Error while attempting to read version response.", e); + throw new HpccFileException("Failed on initial remote read read trans", e); } - rowServiceVersion = new String(versionBytes, HPCCCharSet); - } - - //------------------------------------------------------------------------------ - // Send initial read request - //------------------------------------------------------------------------------ - - try - { - String readTrans = null; - if (this.tokenBin == null) - { - this.tokenBin = new byte[0]; - readTrans = makeInitialRequest(); - } - else + if (CompileTimeConstants.PROFILE_CODE) { - readTrans = makeTokenRequest(); + firstByteTimeNS = System.nanoTime(); } - int transLen = readTrans.length(); - this.dos.writeInt(transLen); - this.dos.write(readTrans.getBytes(HPCCCharSet), 0, transLen); - this.dos.flush(); + this.active.set(true); } - catch (IOException e) + catch (Exception e) { - throw new HpccFileException("Failed on initial remote read read trans", e); - } + log.error("Could not reach file part: '" + dataPart.getThisPart() + "' copy: '" + (getFilePartCopy() + 1) + "' on IP: '" + getIP() + + "'"); + log.error(e.getMessage()); - if (CompileTimeConstants.PROFILE_CODE) - { - firstByteTimeNS = System.nanoTime(); + needsRetry = true; + if (!setNextFilePartCopy()) + { + throw new HpccFileException("Unsuccessfuly attempted to connect to all file part copies", e); + } } - - this.active.set(true); - } - catch (Exception e) - { - log.error("Could not reach file part: '" + dataPart.getThisPart() + "' copy: '" + (getFilePartCopy() + 1) + "' on IP: '" + getIP() - + "'"); - log.error(e.getMessage()); - - if (!setNextFilePartCopy()) - // This should be a multi exception - throw new HpccFileException("Unsuccessfuly attempted to connect to all file part copies", e); - } + } while (needsRetry); } /* Notes on protocol: diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java index 2fc59c86d..2c6f0cd80 100644 --- 
a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java @@ -1039,6 +1039,53 @@ public void emptyCompressedFileTest() } } + @Test + public void filePartReadRetryTest() + { + { + HPCCFile readFile = null; + try + { + readFile = new HPCCFile(datasets[0], connString, hpccUser, hpccPass); + DataPartition[] fileParts = readFile.getFileParts(); + for (int i = 0; i < fileParts.length; i++) + { + String firstCopyIP = fileParts[i].getCopyIP(0); + String firstCopyPath = fileParts[i].getCopyPath(0); + fileParts[i].setCopyIP(0, "1.1.1.1"); + fileParts[i].add(1, firstCopyIP, firstCopyPath); + } + + List records = readFile(readFile, null, false); + System.out.println("Record count: " + records.size()); + } + catch (Exception e) + { + Assert.fail(e.getMessage()); + } + } + + { + HPCCFile readFile = null; + try + { + readFile = new HPCCFile(datasets[0], connString, hpccUser, hpccPass); + DataPartition[] fileParts = readFile.getFileParts(); + for (int i = 0; i < fileParts.length; i++) + { + fileParts[i].add(0,"1.1.1.1", ""); + } + + List records = readFile(readFile, null, false); + System.out.println("Record count: " + records.size()); + } + catch (Exception e) + { + Assert.fail(e.getMessage()); + } + } + } + @Test public void invalidSignatureTest() { From 80d27a8dabe5096697f3650660434307e630944e Mon Sep 17 00:00:00 2001 From: James McMullan Date: Fri, 13 Oct 2023 09:50:42 -0400 Subject: [PATCH 10/46] HPCC4J-545 Resume read test causes OOM error (#649) - Updated resume read test to periodically run gc Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../java/org/hpccsystems/dfs/client/DFSReadWriteTest.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java index 2c6f0cd80..b963ea4b1 100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java @@ -178,6 +178,7 @@ public void readResumeTest() throws Exception System.out.println("Messages from file part (" + i + ") read operation:\n" + fileReader.getRemoteReadMessages()); } + Runtime runtime = Runtime.getRuntime(); ArrayList resumedRecords = new ArrayList(); for (int i = 0; i < resumeInfo.size(); i++) { @@ -194,6 +195,13 @@ public void readResumeTest() throws Exception resumedRecords.add(record); } + + // Periodically run garbage collector to prevent buffers in remote file readers from exhausting free memory + // This is only needed due to rapidly creating / destroying thousands of HpccRemoteFileReaders + if ((i % 10) == 0) + { + runtime.gc(); + } } assertEquals("Number of records did not match during read resume.", records.size(), resumedRecords.size()); From 28183cfde04f0f67be018d4543a1e9041a83f88a Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 13 Oct 2023 18:29:24 +0100 Subject: [PATCH 11/46] Split off 9.4.4 Signed-off-by: Gavin Halliday --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 2d2a9f4de..addbacb14 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.3-0-SNAPSHOT + 9.4.5-0-SNAPSHOT diff --git 
a/dfsclient/pom.xml b/dfsclient/pom.xml index 7a50bc27f..c161375f0 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.3-0-SNAPSHOT + 9.4.5-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 1cc2dc8a8..0b16cccb4 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.3-0-SNAPSHOT + 9.4.5-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 67a202a11..a802a5d78 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.3-0-SNAPSHOT + 9.4.5-0-SNAPSHOT From b5e89ddde86999c4046bff499f828db668a3e7f0 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 26 Oct 2023 17:17:38 +0100 Subject: [PATCH 12/46] Split off 9.4.6 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index addbacb14..4369dcbec 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.5-0-SNAPSHOT + 9.4.7-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index c161375f0..84a52d717 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.5-0-SNAPSHOT + 9.4.7-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 0b16cccb4..b859c4abc 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.5-0-SNAPSHOT + 9.4.7-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index a802a5d78..82f5999b3 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.5-0-SNAPSHOT + 9.4.7-0-SNAPSHOT From 2dc82796f4752d06b3098c6655aadd207df5f0b8 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Wed, 1 Nov 2023 09:19:59 -0400 Subject: [PATCH 13/46] HPCC4J-551 HPCCFile Make TLK Use Optional (#653) - Added an option to HPCCFile to make TLK reading optional - Fixed Javadoc issue Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../hpccsystems/dfs/client/DataPartition.java | 1 + .../org/hpccsystems/dfs/client/HPCCFile.java | 58 ++++++++++++++++--- .../hpccsystems/dfs/client/DFSIndexTest.java | 23 ++++++++ 3 files changed, 73 insertions(+), 9 deletions(-) diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java index 589e62f4c..a846969e0 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/DataPartition.java @@ -296,6 +296,7 @@ public void setCopyIP(int copyIndex, String copyIP) * @param index The index at which to insert the file part copy * @param copyIP The IP of the new file part copy * @param copyPath The path of the new file part copy + * @throws Exception The exception */ public void add(int index, String copyIP, String copyPath) throws Exception { diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java index afec67413..0f745450e 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java @@ -50,6 +50,7 @@ public class HPCCFile implements 
Serializable private DataPartition[] dataParts; private DataPartition tlkPartition = null; + private boolean useTLK = true; private PartitionProcessor partitionProcessor = null; private long dataPartsCreationTimeMS = -1; @@ -130,12 +131,44 @@ public HPCCFile(String fileName, String connectionString, String user, String pa */ public HPCCFile(String fileName, Connection espconninfo, String targetColumnList, String filter, RemapInfo remap_info, int maxParts, String targetfilecluster) throws HpccFileException + { + this(fileName, espconninfo, targetColumnList, filter, remap_info, maxParts, targetfilecluster, true); + } + + /** + * Constructor for the HpccFile. Captures HPCC logical file information from the DALI Server for the clusters behind + * the ESP named by the IP address and re-maps the address information for the THOR nodes to visible addresses when + * the THOR clusters are virtual. + * + * @param fileName + * The HPCC file name + * @param espconninfo + * the espconninfo + * @param targetColumnList + * a comma separated list of column names in dotted notation for columns within compound columns. + * @param filter + * a file filter to select records of interest (SQL where syntax) + * @param remap_info + * address and port re-mapping info for THOR cluster + * @param maxParts + * optional the maximum number of partitions or zero for no max + * @param targetfilecluster + * optional - the hpcc cluster the target file resides in + * @param useTLK + * optional - whether or not the top level key should be used to help filter index files + * @throws HpccFileException + * the hpcc file exception + */ + public HPCCFile(String fileName, Connection espconninfo, String targetColumnList, String filter, RemapInfo remap_info, int maxParts, + String targetfilecluster, boolean useTLK) throws HpccFileException { this.fileName = fileName; this.recordDefinition = null; this.projectedRecordDefinition = null; this.columnPruner = new ColumnPruner(targetColumnList); this.espConnInfo = espconninfo; + this.useTLK = useTLK; + try { if (filter != null && !filter.isEmpty()) @@ -163,12 +196,12 @@ public static int getFilePartFromFPos(long fpos) } /** - * Extracts the offset in the file part from a fileposition value. + * Extracts the offset in the file part from a fileposition value. 
* * @param fpos file position * @return the project list */ - public static long getOffsetFromFPos(long fpos) + public static long getOffsetFromFPos(long fpos) { // First 48 bits store the offset return fpos & 0xffffffffffffL; @@ -424,13 +457,20 @@ private void createDataParts() throws HpccFileException this.recordDefinition = RecordDefinitionTranslator.parseJsonRecordDefinition(new JSONObject(originalRecDefInJSON)); - try + if (this.useTLK) { - this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, this.tlkPartition); + try + { + this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, this.tlkPartition); + } + catch (Exception e) + { + log.error("Error while constructing partition processor, reading will continue without partition filtering: " + e.getMessage()); + this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, null); + } } - catch (Exception e) + else { - log.error("Error while constructing partition processor, reading will continue without partition filtering: " + e.getMessage()); this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, null); } @@ -622,13 +662,13 @@ private static String acquireFileAccess(String fileName, HPCCWsDFUClient hpcc, i String uniqueID = "HPCC-FILE: " + UUID.randomUUID().toString(); return hpcc.getFileAccessBlob(fileName, clusterName, expirySeconds, uniqueID); } - + /** * @return the file metadata information for this HPCCFile (if it exists) */ - public DFUFileDetailWrapper getOriginalFileMetadata() + public DFUFileDetailWrapper getOriginalFileMetadata() { - if (originalFileMetadata==null) + if (originalFileMetadata==null) { HPCCWsDFUClient dfuClient = HPCCWsDFUClient.get(espConnInfo); if (dfuClient.hasInitError()) diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java index d4abc553a..74b92e4cb 100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java @@ -30,6 +30,7 @@ import org.hpccsystems.ws.client.HPCCWsDFUClient; import org.hpccsystems.ws.client.HPCCWsWorkUnitsClient; import org.hpccsystems.ws.client.platform.test.BaseRemoteTest; +import org.hpccsystems.ws.client.utils.Connection; import org.hpccsystems.ws.client.wrappers.wsworkunits.WorkunitWrapper; import org.hpccsystems.ws.client.wrappers.wsdfu.DFUCreateFileWrapper; import org.hpccsystems.ws.client.wrappers.wsdfu.DFUFileDetailWrapper; @@ -222,6 +223,28 @@ public void tlkFilterExample() throws Exception fileReader.close(); } + @Test + public void tlkBypassTest() throws Exception + { + //------------------------------------------------------------------------------ + // Read index ignoring TLK and check that all partitions are returned + //------------------------------------------------------------------------------ + + Connection espConn = new Connection(connString); + espConn.setUserName(hpccUser); + espConn.setPassword(hpccPass); + + HPCCFile file = new HPCCFile("~test::index::integer::key", espConn, "", "", new RemapInfo(), 0, "", false); + DataPartition[] dataParts = file.getFileParts(); + + Long searchValue = 3L; + FileFilter filter = new FileFilter("key = " + searchValue); + List filteredPartitions = file.findMatchingPartitions(filter); + + // Without the TLK being read the above filter should return all file parts + assertTrue("Unexpected number of partitions", 
filteredPartitions.size() == dataParts.length); + } + @Test public void biasedIntTest() throws Exception { From f0ec51e413e0388bd37f8fe1e6223ded5fff6fda Mon Sep 17 00:00:00 2001 From: James McMullan Date: Wed, 1 Nov 2023 14:53:49 -0400 Subject: [PATCH 14/46] HPCC4J-551 HPCCFile Make TLK Use Optional - Added getter & setter for the useTLK option Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../org/hpccsystems/dfs/client/HPCCFile.java | 28 +++++++++++++++++++ .../hpccsystems/dfs/client/DFSIndexTest.java | 7 ++--- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java index 0f745450e..8df2ba73e 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java @@ -318,6 +318,34 @@ public HPCCFile setClusterRemapInfo(RemapInfo remapinfo) return this; } + /** + * Get the value of useTLK option + * + * @return a boolean value indicating use of the TLK to filter index file reads + */ + public boolean getUseTLK() + { + return this.useTLK; + } + + /** + * Sets the useTLK option. + * Note: the value must be set before querying any data from the file, including record definition information. + * + * @param useTLK should the TLK be used to filter index file reads + * + * @return this HPCCFile + */ + public HPCCFile setUseTLK(boolean useTLK) + { + this.useTLK = useTLK; + + // Force the data parts to be re-created + this.dataParts = null; + + return this; + } + /** * Gets the filter. * diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java index 74b92e4cb..1b95c5f11 100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java @@ -230,11 +230,8 @@ public void tlkBypassTest() throws Exception // Read index ignoring TLK and check that all partitions are returned //------------------------------------------------------------------------------ - Connection espConn = new Connection(connString); - espConn.setUserName(hpccUser); - espConn.setPassword(hpccPass); - - HPCCFile file = new HPCCFile("~test::index::integer::key", espConn, "", "", new RemapInfo(), 0, "", false); + HPCCFile file = new HPCCFile("~test::index::integer::key", connString , hpccUser, hpccPass); + file.setUseTLK(false); DataPartition[] dataParts = file.getFileParts(); Long searchValue = 3L; From 5f8d0b571b16f6c2d505d29049aa3b3136bf1d97 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Thu, 2 Nov 2023 17:43:29 +0000 Subject: [PATCH 15/46] Split off 9.4.8 Signed-off-by: Gavin Halliday --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 4369dcbec..8a5bcf7a3 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.7-0-SNAPSHOT + 9.4.9-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 84a52d717..30cfd20a1 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.7-0-SNAPSHOT + 9.4.9-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index b859c4abc..58e0e9b43 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.7-0-SNAPSHOT + 
9.4.9-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 82f5999b3..411766195 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.7-0-SNAPSHOT + 9.4.9-0-SNAPSHOT From 15e295ffd3d168dfc15092ecb90ec385aaeab4aa Mon Sep 17 00:00:00 2001 From: James McMullan Date: Tue, 7 Nov 2023 10:03:54 -0500 Subject: [PATCH 16/46] HPCC4J-532 Jirabot debugging improvements (#656) - Added exception to error output - Fixed transition array output - Added python, pip, jira version output Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/Jirabot.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Jirabot.yml b/.github/workflows/Jirabot.yml index 029ce605b..b03530464 100644 --- a/.github/workflows/Jirabot.yml +++ b/.github/workflows/Jirabot.yml @@ -24,6 +24,8 @@ jobs: python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade jira + python -m pip --version + python -m pip freeze | grep jira - name: "Run" env: JIRABOT_USERNAME : ${{ secrets.JIRABOT_USERNAME }} @@ -60,9 +62,9 @@ jobs: try: jira.transition_issue(issue, transition) result += 'Workflow Transition: ' + transition + '\n' - except: + except Exception as error: transitions = jira.transitions(issue) - result += 'Error: Transition: "' + transition + '" failed. Valid transitions=[' + (', '.join(transitions)) + ']\n' + result += 'Error: Transition: "' + transition + '" failed with: "' + str(error) + '" Valid transitions=' + str(transitions) + '\n' if issue.fields.customfield_10010 is None: issue.update(fields={'customfield_10010': pull_url}) From 16e3bf6aac4c7dd0593a73737d705be03d20b97e Mon Sep 17 00:00:00 2001 From: Rodrigo Date: Tue, 7 Nov 2023 13:37:56 -0500 Subject: [PATCH 17/46] Bump org.json:json from 20230227 to 20231013 (#650) (#657) Bumps [org.json:json](https://github.com/douglascrockford/JSON-java) from 20230227 to 20231013. - [Release notes](https://github.com/douglascrockford/JSON-java/releases) - [Changelog](https://github.com/stleary/JSON-java/blob/master/docs/RELEASES.md) - [Commits](https://github.com/douglascrockford/JSON-java/commits) --- updated-dependencies: - dependency-name: org.json:json dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 58e0e9b43..4c8a131f0 100644 --- a/pom.xml +++ b/pom.xml @@ -52,7 +52,7 @@ 0.1.54 2.7 4.13.1 - 20230227 + 20231013 2.17.1 1.8.1 2.10.1 From 5ea3beb51baadc481a26cd9196f88180c4f2d961 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Tue, 7 Nov 2023 13:38:16 -0500 Subject: [PATCH 18/46] HPCC4J-511 Test failures in containerized environment (#654) - Re-enabled tests that are now passing Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../java/org/hpccsystems/dfs/client/DFSIndexTest.java | 1 + .../org/hpccsystems/ws/client/WSFileIOClientTest.java | 8 +------- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java index 1b95c5f11..fc82df93c 100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java @@ -375,6 +375,7 @@ String createIndexOnDataset(String datasetName, FieldDef recordDef) throws Excep return indexName; } + @Test public void testBatchRandomAccess() throws Exception { try diff --git a/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java b/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java index 4b81cea1c..d2494eca0 100644 --- a/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java +++ b/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java @@ -70,6 +70,7 @@ public void copyFile() throws Exception { Assume.assumeFalse("Test not valid on containerized HPCC environment", client.isTargetHPCCContainerized()); assumeTrue("Ignoring test 'copyFile' because HPCC-30117 is not fixed", HPCC_30117.equalsIgnoreCase("fixed")); + String lzfile=System.currentTimeMillis() + "_csvtest.csv"; String hpccfilename="temp::" + lzfile; client.createHPCCFile(lzfile, targetLZ, true); @@ -145,8 +146,6 @@ public void copyFile() throws Exception @Test public void AcreateHPCCFile() throws Exception, ArrayOfEspExceptionWrapper { - Assume.assumeFalse("Test not valid on containerized HPCC environment", client.isTargetHPCCContainerized()); - assumeTrue("Ignoring test 'copyFile' because HPCC-30117 is not fixed", HPCC_30117.equalsIgnoreCase("fixed")); System.out.println("Creating file: '" + testfilename + "' on LandingZone: '" + targetLZ + "' on HPCC: '" + super.connString +"'"); Assert.assertTrue(client.createHPCCFile(testfilename, targetLZ, true)); } @@ -154,8 +153,6 @@ public void AcreateHPCCFile() throws Exception, ArrayOfEspExceptionWrapper @Test public void BwriteHPCCFile() throws Exception, ArrayOfEspExceptionWrapper { - assumeTrue("Ignoring test 'copyFile' because HPCC-30117 is not fixed", HPCC_30117.equalsIgnoreCase("fixed")); - System.out.println("Writing data to file: '" + testfilename + "' on LandingZone: '" + targetLZ + "' on HPCC: '" + super.connString +"'"); byte[] data = "HELLO MY DARLING, HELLO MY DEAR!1234567890ABCDEFGHIJKLMNOPQRSTUVXYZ".getBytes(); Assert.assertTrue(client.writeHPCCFileData(data, testfilename, targetLZ, true, 0, 20)); } @@ -163,9 +160,6 @@ public void BwriteHPCCFile() throws Exception, ArrayOfEspExceptionWrapper @Test public void CreadHPCCFile() throws Exception, 
ArrayOfEspExceptionWrapper { - Assume.assumeFalse("Test not valid on containerized HPCC environment", client.isTargetHPCCContainerized()); - assumeTrue("Ignoring test 'copyFile' because HPCC-30117 is not fixed", HPCC_30117.equalsIgnoreCase("fixed")); - System.out.println("reading data from file: '" + testfilename + "' on LandingZone: '" + targetLZ + "' on HPCC: '" + super.connString +"'"); byte[] data = "HELLO MY DARLING, HELLO MY DEAR!1234567890ABCDEFGHIJKLMNOPQRSTUVXYZ".getBytes(); String response = client.readFileData(targetLZ, testfilename, data.length, 0); From feb19f520eb7caf0c2e10654b0e08b6455aaa73c Mon Sep 17 00:00:00 2001 From: James McMullan Date: Wed, 8 Nov 2023 08:48:07 -0500 Subject: [PATCH 19/46] HPCC4J-552 File Utility Read Invididual File Parts (#658) - Added support for reading a file without writing it to disk - Added support for test reading individual file parts Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../hpccsystems/dfs/client/FileUtility.java | 220 ++++++++++++++++++ .../dfs/client/FileUtilityTest.java | 12 + 2 files changed, 232 insertions(+) diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/FileUtility.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/FileUtility.java index 3da02c805..2cf0e3b25 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/FileUtility.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/FileUtility.java @@ -417,6 +417,24 @@ private static Options getReadOptions() return options; } + private static Options getReadTestOptions() + { + Options options = new Options(); + options.addRequiredOption("read_test", "Read test", true, "Specifies the file that should be read."); + options.addRequiredOption("url", "Source Cluster URL", true, "Specifies the URL of the ESP to connect to."); + options.addOption("user", true, "Specifies the username used to connect. Defaults to null."); + options.addOption("pass", true, "Specifies the password used to connect. Defaults to null."); + options.addOption("num_threads", true, "Specifies the number of parallel to use to perform operations."); + + options.addOption(Option.builder("file_parts") + .argName("_file_parts") + .hasArgs() + .valueSeparator(',') + .desc("Specifies the file parts that should be read. 
Defaults to all file parts.") + .build()); + return options; + } + private static Options getCopyOptions() { Options options = new Options(); @@ -463,6 +481,7 @@ private static Options getTopLevelOptions() { Options options = new Options(); options.addOption("read", "Reads the specified file(s) and writes a copy of the files to the local directory."); + options.addOption("read_test", "Reads the specified file and/or particular file parts without writing it locally."); options.addOption("copy", "Copies the specified remote source file to the specified remote destination cluster / file."); options.addOption("write", "Writes the specified local source file to the specified remote destination cluster / file."); @@ -660,6 +679,44 @@ public void run() } } + private static Runnable[] createReadTestTasks(DataPartition[] fileParts, FieldDef recordDef, TaskContext context) throws Exception + { + Runnable[] tasks = new Runnable[fileParts.length]; + for (int i = 0; i < tasks.length; i++) + { + final int taskIndex = i; + final DataPartition filePart = fileParts[taskIndex]; + final HpccRemoteFileReader filePartReader = new HpccRemoteFileReader(filePart, recordDef, new HPCCRecordBuilder(recordDef)); + + tasks[taskIndex] = new Runnable() + { + HpccRemoteFileReader fileReader = filePartReader; + + public void run() + { + try + { + while (fileReader.hasNext()) + { + HPCCRecord record = fileReader.next(); + context.recordsRead.incrementAndGet(); + } + + fileReader.close(); + context.bytesRead.addAndGet(fileReader.getStreamPosition()); + } + catch (Exception e) + { + context.addError("Error while reading file part index: '" + filePart.getThisPart() + " Error message: " + e.getMessage()); + return; + } + } + }; + } + + return tasks; + } + private static Runnable[] createReadToThorTasks(DataPartition[] fileParts, SplitTable[] splitTables, String[] outFilePaths, FieldDef recordDef, TaskContext context) throws Exception { Runnable[] tasks = new Runnable[fileParts.length]; @@ -1159,6 +1216,165 @@ private static void performRead(String[] args, TaskContext context) } } + private static void performReadTest(String[] args, TaskContext context) + { + Options options = getReadTestOptions(); + CommandLineParser parser = new DefaultParser(); + + CommandLine cmd = null; + try + { + cmd = parser.parse(options, args); + } + catch (ParseException e) + { + System.out.println("Error parsing commandline options:\n" + e.getMessage()); + return; + } + + String connString = cmd.getOptionValue("url"); + String user = cmd.getOptionValue("user"); + String pass = cmd.getOptionValue("pass"); + + String outputPath = cmd.getOptionValue("out","."); + + int numThreads = NUM_DEFAULT_THREADS; + String numThreadsStr = cmd.getOptionValue("num_threads", "" + numThreads); + try + { + numThreads = Integer.parseInt(numThreadsStr); + } + catch(Exception e) + { + System.out.println("Invalid option value for num_threads: " + + numThreadsStr + ", must be an integer. 
Defaulting to: " + NUM_DEFAULT_THREADS + " threads."); + } + + String formatStr = cmd.getOptionValue("format"); + if (formatStr == null) + { + formatStr = "THOR"; + } + + FileFormat format = FileFormat.THOR; + switch (formatStr.toUpperCase()) + { + case "THOR": + format = FileFormat.THOR; + break; + case "PARQUET": + format = FileFormat.PARQUET; + break; + default: + System.out.println("Error unsupported format specified: " + format); + return; + } + + String datasetName = cmd.getOptionValue("read_test"); + context.startOperation("Read Test " + datasetName); + + HPCCFile file = null; + try + { + file = new HPCCFile(datasetName, connString, user, pass); + } + catch (Exception e) + { + System.out.println("Error while attempting to open file: '" + datasetName + "': " + e.getMessage()); + return; + } + + DataPartition[] fileParts = null; + FieldDef recordDef = null; + try + { + fileParts = file.getFileParts(); + recordDef = file.getRecordDefinition(); + } + catch (Exception e) + { + System.out.println("Error while retrieving file parts for: '" + datasetName + "': " + e.getMessage()); + return; + } + + String[] filePartsStrs = cmd.getOptionValues("file_parts"); + if (filePartsStrs != null && filePartsStrs.length > 0) + { + ArrayList filePartList = new ArrayList(); + for (int i = 0; i < filePartsStrs.length; i++) + { + try + { + int filePartIndex = Integer.parseInt(filePartsStrs[i]) - 1; + if (filePartIndex < 0 || filePartIndex >= fileParts.length) + { + System.out.println("Skipping invalid file part index: " + filePartsStrs[i] + + " outside of range: [0," + fileParts.length + "]"); + continue; + } + + filePartList.add(fileParts[filePartIndex]); + } + catch (NumberFormatException e) + { + System.out.println("Skipping invalid file part index: " + filePartsStrs[i]); + } + } + } + + Runnable[] tasks = null; + try + { + switch (format) + { + case THOR: + tasks = createReadTestTasks(fileParts, recordDef, context); + break; + case PARQUET: + default: + throw new Exception("Error unsupported format specified: " + format); + }; + } + catch (Exception e) + { + context.addError("Error while attempting to create read tasks for file: '" + datasetName + "': " + e.getMessage()); + return; + } + + try + { + executeTasks(tasks, numThreads); + } + catch (Exception e) + { + context.addError("Error while attempting to execute read tasks for file: '" + datasetName + "': " + e.getMessage()); + return; + } + + if (context.hasError()) + { + return; + } + + try + { + String fileName = file.getFileName().replace(":","_"); + String filePath = outputPath + File.separator + fileName + ".meta"; + FileOutputStream metaFile = new FileOutputStream(filePath); + + String metaStr = RecordDefinitionTranslator.toJsonRecord(file.getRecordDefinition()).toString(); + metaFile.write(metaStr.getBytes()); + metaFile.close(); + } + catch (Exception e) + { + context.addError("Error while attempting to write meta-data for file: '" + datasetName + "': " + e.getMessage()); + return; + } + + context.endOperation(); + } + private static void performCopy(String[] args, TaskContext context) { Options options = getCopyOptions(); @@ -1576,6 +1792,10 @@ public static JSONArray run(String[] args) { performRead(args, context); } + else if (cmd.hasOption("read_test")) + { + performReadTest(args, context); + } else if (cmd.hasOption("copy")) { performCopy(args, context); diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/FileUtilityTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/FileUtilityTest.java index 84ea5b022..e89b87483 
100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/FileUtilityTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/FileUtilityTest.java @@ -56,6 +56,18 @@ public void thorFileTests() Assert.assertTrue("FileUtility operation didn't complete successfully", success); } + { + String readArgs[] = {"-read_test", "benchmark::integer::20kb", "-url", this.connString, + "-user", this.hpccUser, "-pass", this.hpccPass, "-file_parts", "1" }; + + JSONArray results = FileUtility.run(readArgs); + JSONObject result = results.optJSONObject(0); + Assert.assertNotNull("FileUtility result should not be null.", result); + + boolean success = result.optBoolean("successful",false); + Assert.assertTrue("FileUtility operation didn't complete successfully", success); + } + { String copyArgs[] = {"-copy", "benchmark::integer::20kb benchmark::integer::20kb-copy", "-url", this.connString, "-dest_url", this.connString, From 30c2e6abfe3e4ffd269be27d86ec37ef4ed07f7f Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Thu, 9 Nov 2023 18:13:36 +0000 Subject: [PATCH 20/46] Split off 9.4.10 Signed-off-by: Gavin Halliday --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 8a5bcf7a3..23b5fa9ee 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.9-0-SNAPSHOT + 9.4.11-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 30cfd20a1..f08382eaa 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.9-0-SNAPSHOT + 9.4.11-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 4c8a131f0..0e679104c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.9-0-SNAPSHOT + 9.4.11-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 411766195..433ca3a97 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.9-0-SNAPSHOT + 9.4.11-0-SNAPSHOT From 4a667da5d4abcdd9494a8e1b3f0486370eb64b70 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Thu, 16 Nov 2023 15:30:19 -0500 Subject: [PATCH 21/46] HPCC4J-556: File Reader Early Close Behavior (#659) - Modified HPCCRemoteFileReader to enforce consistent early close behavior - Added unit test to ensure consistent behavior on early close - Minor changes to exception handling Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../dfs/client/HpccRemoteFileReader.java | 52 +++++++++++--- .../dfs/client/DFSReadWriteTest.java | 71 +++++++++++++++++++ 2 files changed, 115 insertions(+), 8 deletions(-) diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/HpccRemoteFileReader.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/HpccRemoteFileReader.java index 78e3e6500..ade51a57e 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/HpccRemoteFileReader.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/HpccRemoteFileReader.java @@ -35,6 +35,8 @@ public class HpccRemoteFileReader implements Iterator private BinaryRecordReader binaryRecordReader; private IRecordBuilder recordBuilder = null; private boolean handlePrefetch = true; + private boolean isClosed = false; + private boolean canReadNext = true; private long openTimeMs = 0; private long recordsRead = 0; @@ -234,7 +236,6 @@ 
public HpccRemoteFileReader(DataPartition dp, FieldDef originalRD, IRecordBuilde this.binaryRecordReader.initialize(this.recordBuilder); } - log.info("HPCCRemoteFileReader: Opening file part: " + dataPartition.getThisPart() + (resumeInfo != null ? " resume position: " + resumeInfo.inputStreamPos : "" )); log.trace("Original record definition:\n" @@ -315,12 +316,18 @@ public String getRemoteReadMessages() */ public void prefetch() { - if (this.handlePrefetch) + if (handlePrefetch) { log.warn("Prefetch called on an HpccRemoteFileReader that has an internal prefetch thread."); return; } + if (isClosed) + { + log.warn("Prefetch called on an HpccRemoteFileReader that has been closed."); + return; + } + this.inputStream.prefetchData(); } @@ -332,10 +339,19 @@ public void prefetch() @Override public boolean hasNext() { - boolean rslt = false; + if (isClosed) + { + log.warn("hasNext() called on an HpccRemoteFileReader that has been closed."); + return false; + } + + // Keep track of whether we have said there is another record. + // This allows us to handle edge cases around close() being called between hasNext() and next() + canReadNext = false; + try { - rslt = this.binaryRecordReader.hasNext(); + canReadNext = this.binaryRecordReader.hasNext(); // Has next may not catch the prefetch exception if it occurs at the beginning of a read // This is due to InputStream.hasNext() being allowed to throw an IOException when closed. @@ -346,12 +362,14 @@ public boolean hasNext() } catch (HpccFileException e) { - rslt = false; + canReadNext = false; log.error("Read failure for " + this.dataPartition.toString()); - throw new java.util.NoSuchElementException("Fatal read error: " + e.getMessage()); + java.util.NoSuchElementException exception = new java.util.NoSuchElementException("Fatal read error: " + e.getMessage()); + exception.initCause(e); + throw exception; } - return rslt; + return canReadNext; } /** @@ -362,6 +380,11 @@ public boolean hasNext() @Override public T next() { + if (isClosed && !canReadNext) + { + throw new java.util.NoSuchElementException("Fatal read error: Attempting to read next() from a closed file reader."); + } + Object rslt = null; try { @@ -370,10 +393,16 @@ public T next() catch (HpccFileException e) { log.error("Read failure for " + this.dataPartition.toString() + " " + e.getMessage()); - throw new java.util.NoSuchElementException("Fatal read error: " + e.getMessage()); + java.util.NoSuchElementException exception = new java.util.NoSuchElementException("Fatal read error: " + e.getMessage()); + exception.initCause(e); + throw exception; } recordsRead++; + + // Reset this after each read so we can handle edge cases where close() was called between hasNext() / next() + canReadNext = false; + return (T) rslt; } @@ -385,8 +414,15 @@ public T next() */ public void close() throws Exception { + if (isClosed) + { + log.warn("Calling close on an already closed file reader for file part: " + this.dataPartition.toString()); + return; + } + report(); this.inputStream.close(); + isClosed = true; long closeTimeMs = System.currentTimeMillis(); double readTimeS = (closeTimeMs - openTimeMs) / 1000.0; diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java index 6eab980e1..44252cbdd 100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java @@ -1179,6 +1179,77 @@ public void 
invalidSignatureTest() } } + @Test + public void earlyCloseTest() throws Exception + { + HPCCFile file = new HPCCFile(datasets[0], connString , hpccUser, hpccPass); + + DataPartition[] fileParts = file.getFileParts(); + if (fileParts == null || fileParts.length == 0) + { + Assert.fail("No file parts found"); + } + + FieldDef originalRD = file.getRecordDefinition(); + if (originalRD == null || originalRD.getNumDefs() == 0) + { + Assert.fail("Invalid or null record definition"); + } + + { + HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition()); + HpccRemoteFileReader fileReader = new HpccRemoteFileReader(fileParts[0], originalRD, recordBuilder); + + int expectedRecordCounts = 10; + int numRecords = 0; + while (fileReader.hasNext()) + { + try + { + fileReader.next(); + numRecords++; + } + catch (Exception e) + { + System.out.println("Error: " + e.getMessage()); + } + + if (numRecords == expectedRecordCounts) + { + fileReader.close(); + } + } + assertTrue("Expected record count: " + expectedRecordCounts + " Actual count: " + numRecords, numRecords == expectedRecordCounts); + } + + // Check that calling close() inbetween hasNext() & next() allows the current record to be read + { + HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition()); + HpccRemoteFileReader fileReader = new HpccRemoteFileReader(fileParts[0], originalRD, recordBuilder); + + int expectedRecordCounts = 11; + int numRecords = 0; + while (fileReader.hasNext()) + { + if (numRecords == expectedRecordCounts-1) + { + fileReader.close(); + } + + try + { + fileReader.next(); + numRecords++; + } + catch (Exception e) + { + System.out.println("Error: " + e.getMessage()); + } + } + assertTrue("Expected record count: " + expectedRecordCounts + " Actual count: " + numRecords, numRecords == expectedRecordCounts); + } + } + public List readFile(HPCCFile file, Integer connectTimeoutMillis, boolean shouldForceTimeout) throws Exception { return readFile(file, connectTimeoutMillis, shouldForceTimeout, false, BinaryRecordReader.NO_STRING_PROCESSING); From a3971986f5e5630c9eb5855cc1133573aa4fc160 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Fri, 17 Nov 2023 11:42:28 +0000 Subject: [PATCH 22/46] Split off 9.4.12 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 23b5fa9ee..c05584b7f 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.11-0-SNAPSHOT + 9.4.13-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index f08382eaa..e5298737a 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.11-0-SNAPSHOT + 9.4.13-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 0e679104c..c4f1944cf 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.11-0-SNAPSHOT + 9.4.13-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 433ca3a97..98d1294b8 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.11-0-SNAPSHOT + 9.4.13-0-SNAPSHOT From 29a631c1880d3035d7fff7a160b1fe6e3abe2e8b Mon Sep 17 00:00:00 2001 From: Pastrana Date: Mon, 20 Nov 2023 13:11:05 -0500 Subject: [PATCH 23/46] HPCC4J-557 Bump hadoop dep to 3.3.6 Signed-off-by: Rodrigo Pastrana --- pom.xml | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 0e679104c..39f733f83 100644 --- a/pom.xml +++ b/pom.xml @@ -55,7 +55,7 @@ 20231013 2.17.1 1.8.1 - 2.10.1 + 3.3.6 1.11.1 1.10.0 1.5.0 From babf4821d6e0979051eb1da3f9921bccbfa17f06 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 23 Nov 2023 17:52:06 +0000 Subject: [PATCH 24/46] Split off 9.4.14 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index c05584b7f..ad9ff22b1 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.13-0-SNAPSHOT + 9.4.15-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index e5298737a..77bf6e1ea 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.13-0-SNAPSHOT + 9.4.15-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 3ab93aa05..8a4eee1a6 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.13-0-SNAPSHOT + 9.4.15-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 98d1294b8..523fe0e9f 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.13-0-SNAPSHOT + 9.4.15-0-SNAPSHOT From 1246b39dfa8c705d56b9c7d19284676307f9bbf2 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Mon, 27 Nov 2023 09:48:47 -0500 Subject: [PATCH 25/46] HPCC4J-558 Github Actions: Outdated HPCC Version (#662) - Changed Helm command to use Helm HPCC version instead of latest Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/httpsUnitTests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/httpsUnitTests.yml b/.github/workflows/httpsUnitTests.yml index 7382205db..215ff1bd3 100644 --- a/.github/workflows/httpsUnitTests.yml +++ b/.github/workflows/httpsUnitTests.yml @@ -69,7 +69,7 @@ jobs: echo -e "certificates:\n enabled: true\ndafilesrv:\n - name: rowservice\n disabled: false\n application: stream\n service:\n servicePort: 7600\n visibility: global" > values.yaml helm repo add hpcc https://hpcc-systems.github.io/helm-chart helm repo update - helm install myhpcc hpcc/hpcc --set global.image.version=latest -f values.yaml + helm install myhpcc hpcc/hpcc -f values.yaml - uses: actions/checkout@v3 with: From c9ecd67ba29c8dc110af3472db3b969205348b11 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 30 Nov 2023 17:05:12 +0000 Subject: [PATCH 26/46] Split off 9.4.16 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index ad9ff22b1..331173c12 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.15-0-SNAPSHOT + 9.4.17-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 77bf6e1ea..e14392c06 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.15-0-SNAPSHOT + 9.4.17-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 8a4eee1a6..ef5792c37 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.15-0-SNAPSHOT + 9.4.17-0-SNAPSHOT pom HPCC Systems Java Projects 
https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 523fe0e9f..722fb1977 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.15-0-SNAPSHOT + 9.4.17-0-SNAPSHOT From 0e9321b12418413a0155c66b5c27114134caae4a Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 7 Dec 2023 18:03:24 +0000 Subject: [PATCH 27/46] Split off 9.4.18 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 331173c12..1edbdd336 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.17-0-SNAPSHOT + 9.4.19-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index e14392c06..ea15b7a62 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.17-0-SNAPSHOT + 9.4.19-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index ef5792c37..70facfd3e 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.17-0-SNAPSHOT + 9.4.19-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 722fb1977..1d239e14e 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.17-0-SNAPSHOT + 9.4.19-0-SNAPSHOT From deee1389c3aa3dcdc681044c3ba63f8c3a65a4ca Mon Sep 17 00:00:00 2001 From: James McMullan Date: Fri, 8 Dec 2023 08:57:39 -0500 Subject: [PATCH 28/46] HPCC4J-555 POM Fixes (#664) - Added missing pom changes - Reverted formatting style changes Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- dfsclient/pom.xml | 17 +- .../hpccsystems/dfs/client/DFSIndexTest.java | 315 ++++++++++-------- pom.xml | 7 + wsclient/pom.xml | 10 +- 4 files changed, 204 insertions(+), 145 deletions(-) diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index ea15b7a62..ee01d1bdf 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -27,12 +27,20 @@ maven-surefire-plugin ${maven.surefire.version} + + + + listener + org.hpccsystems.ws.client.TestResultNotifier + + + org.codehaus.mojo templating-maven-plugin ${codehaus.template.version} - + @@ -83,6 +91,13 @@ + + org.hpccsystems + wsclient + test-jar + test + ${project.version} + commons-cli commons-cli diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java index 9318dd297..46f02f39f 100644 --- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java +++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java @@ -55,8 +55,9 @@ import static org.junit.Assert.assertTrue; @Category(org.hpccsystems.commons.annotations.RemoteTests.class) -public class DFSIndexTest extends BaseRemoteTest { - String[] datasetNames = { "~test::index::integer", "~test::index::string" }; +public class DFSIndexTest extends BaseRemoteTest +{ + String[] datasetNames = {"~test::index::integer","~test::index::string"}; FieldDef[] datasetRecordDefinitions = new FieldDef[2]; ArrayList partitionRangeStart = new ArrayList(); @@ -65,86 +66,93 @@ public class DFSIndexTest extends BaseRemoteTest { static boolean isSetup = false; @Before - public void setup() throws Exception { - if (isSetup) { + public void setup() throws Exception + { + if (isSetup) + { return; } isSetup = true; // 
Integer key FieldDef[] fieldDefs = new FieldDef[2]; - fieldDefs[0] = new FieldDef("key", FieldType.INTEGER, "INTEGER4", 4, true, false, HpccSrcType.LITTLE_ENDIAN, - new FieldDef[0]); - fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false, - HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]); + fieldDefs[0] = new FieldDef("key", FieldType.INTEGER, "INTEGER4", 4, true, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]); + fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]); - datasetRecordDefinitions[0] = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, - HpccSrcType.LITTLE_ENDIAN, fieldDefs); + datasetRecordDefinitions[0]= new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, HpccSrcType.LITTLE_ENDIAN, fieldDefs); // String key fieldDefs = new FieldDef[2]; - fieldDefs[0] = new FieldDef("key", FieldType.STRING, "STRING4", 4, true, false, HpccSrcType.SINGLE_BYTE_CHAR, - new FieldDef[0]); - fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false, - HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]); + fieldDefs[0] = new FieldDef("key", FieldType.STRING, "STRING4", 4, true, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]); + fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]); - datasetRecordDefinitions[1] = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, - HpccSrcType.LITTLE_ENDIAN, fieldDefs); + datasetRecordDefinitions[1]= new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, HpccSrcType.LITTLE_ENDIAN, fieldDefs); - for (int i = 0; i < datasetNames.length; i++) { - // ------------------------------------------------------------------------------ + for (int i = 0; i < datasetNames.length; i++) + { + //------------------------------------------------------------------------------ // Create indexable dataset - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ String datasetName = datasetNames[i]; FieldDef recordDef = datasetRecordDefinitions[i]; createIndexableFile(datasetName, recordDef, partitionRangeStart, partitionRangeEnd); - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ // Create index - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ String indexName = null; - try { - indexName = createIndexOnDataset(datasetName, recordDef); - } catch (Exception e) { + try + { + indexName = createIndexOnDataset(datasetName,recordDef); + } + catch (Exception e) + { Assume.assumeNoException("Failed to create index with error: ", e); } } } @Test - public void hpccTLKFilterTest() throws Exception { - for (int i = 0; i < datasetNames.length; i++) { + public void hpccTLKFilterTest() throws Exception + { + for (int i = 0; i < datasetNames.length; i++) + { String datasetName = datasetNames[i]; FieldDef recordDef = datasetRecordDefinitions[i]; String indexName = datasetName + "::key"; - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ // Read index and check TLK against 
known partition ranges - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ - HPCCFile file = new HPCCFile(indexName, connString, hpccUser, hpccPass); + HPCCFile file = new HPCCFile(indexName, connString , hpccUser, hpccPass); assertTrue(file.isTlkIndex()); DataPartition[] fileParts = file.getFileParts(); FieldDef originalRD = file.getRecordDefinition(); - for (int j = 0; j < fileParts.length - 1; j++) { + for (int j = 0; j < fileParts.length-1; j++) + { HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition()); - HpccRemoteFileReader fileReader = new HpccRemoteFileReader(fileParts[j], - originalRD, recordBuilder); - while (fileReader.hasNext()) { + HpccRemoteFileReader fileReader = new HpccRemoteFileReader(fileParts[j], originalRD, recordBuilder); + while (fileReader.hasNext()) + { HPCCRecord record = fileReader.next(); - if (record == null) { + if (record == null) + { Assert.fail("PartitionProcessor: " + j + " failed to read record."); } // Check starting range String filterStr = null; - if (record.getField(0) instanceof String) { + if (record.getField(0) instanceof String) + { filterStr = "key = \'" + record.getField(0).toString() + "\'"; - } else { + } + else + { filterStr = "key = " + record.getField(0).toString(); } @@ -153,17 +161,19 @@ public void hpccTLKFilterTest() throws Exception { // Due to how TLK works we can get more that one partition boolean hadExpectedPartition = false; - for (int k = 0; k < matchedPartitions.size(); k++) { - if (matchedPartitions.get(k).index() == j) { + for (int k = 0; k < matchedPartitions.size(); k++) + { + if (matchedPartitions.get(k).index() == j) + { hadExpectedPartition = true; break; } } - if (hadExpectedPartition == false) { + if (hadExpectedPartition == false) + { System.out.println("Partition: " + j + " Filter: " + filterStr); - System.out.println( - "Partition range: " + file.getPartitionProcessor().getPartitionRangeAsString(j)); + System.out.println("Partition range: " + file.getPartitionProcessor().getPartitionRangeAsString(j)); Assert.fail("PartitionProcessor: " + j + " filtering result did not contain partition" + partitionListToString(matchedPartitions)); @@ -174,13 +184,14 @@ public void hpccTLKFilterTest() throws Exception { } @Test - public void tlkFilterExample() throws Exception { + public void tlkFilterExample() throws Exception + { System.out.println("Starting tlk filter test."); - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ // Read index and check TLK against known partition ranges - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ - HPCCFile file = new HPCCFile("~test::index::integer::key", connString, hpccUser, hpccPass); + HPCCFile file = new HPCCFile("~test::index::integer::key", connString , hpccUser, hpccPass); // Find partitions that match the provided filter Long searchValue = 3L; @@ -191,18 +202,20 @@ public void tlkFilterExample() throws Exception { DataPartition matchedPart = filteredPartitions.get(0); HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition()); - HpccRemoteFileReader fileReader = new HpccRemoteFileReader(matchedPart, - file.getRecordDefinition(), recordBuilder); + HpccRemoteFileReader 
fileReader = new HpccRemoteFileReader(matchedPart, file.getRecordDefinition(), recordBuilder); boolean foundRecord = false; - while (fileReader.hasNext()) { + while (fileReader.hasNext()) + { HPCCRecord record = fileReader.next(); - if (record == null) { + if (record == null) + { Assert.fail("Received null record during read"); } Long keyValue = (Long) record.getField(0); - if (keyValue.equals(searchValue)) { + if (keyValue.equals(searchValue)) + { foundRecord = true; } System.out.println("Key: " + keyValue + " Search value: " + searchValue + " found: " + foundRecord); @@ -211,36 +224,20 @@ public void tlkFilterExample() throws Exception { } @Test - public void tlkBypassTest() throws Exception { - // ------------------------------------------------------------------------------ - // Read index ignoring TLK and check that all partitions are returned - // ------------------------------------------------------------------------------ - - HPCCFile file = new HPCCFile("~test::index::integer::key", connString, hpccUser, hpccPass); - file.setUseTLK(false); - DataPartition[] dataParts = file.getFileParts(); - - Long searchValue = 3L; - FileFilter filter = new FileFilter("key = " + searchValue); - List filteredPartitions = file.findMatchingPartitions(filter); - - // Without the TLK being read the above filter should return all file parts - assertTrue("Unexpected number of partitions", filteredPartitions.size() == dataParts.length); - } - - @Test - public void biasedIntTest() throws Exception { - HPCCFile file = new HPCCFile("~test::index::integer::key", connString, hpccUser, hpccPass); + public void biasedIntTest() throws Exception + { + HPCCFile file = new HPCCFile("~test::index::integer::key", connString , hpccUser, hpccPass); DataPartition[] fileParts = file.getFileParts(); List records = new ArrayList(); FieldDef originalRD = file.getRecordDefinition(); - for (int j = 0; j < fileParts.length; j++) { + for (int j = 0; j < fileParts.length; j++) + { HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition()); - HpccRemoteFileReader fileReader = new HpccRemoteFileReader(fileParts[j], originalRD, - recordBuilder); - while (fileReader.hasNext()) { + HpccRemoteFileReader fileReader = new HpccRemoteFileReader(fileParts[j], originalRD, recordBuilder); + while (fileReader.hasNext()) + { records.add(fileReader.next()); } fileReader.close(); @@ -248,8 +245,7 @@ public void biasedIntTest() throws Exception { assertTrue(records.size() >= 2); - // Read the data from the first partition and make sure that biased integers - // have been corrected + // Read the data from the first partition and make sure that biased integers have been corrected int partitionIndex = 0; { HPCCRecord startRecord = records.get(0); @@ -264,9 +260,11 @@ public void biasedIntTest() throws Exception { } } - private String partitionListToString(List partitions) { + private String partitionListToString(List partitions) + { String matchedPartitionStr = "[ "; - for (DataPartition part : partitions) { + for (DataPartition part : partitions) + { matchedPartitionStr += part.index() + " "; } matchedPartitionStr += "]"; @@ -274,41 +272,41 @@ private String partitionListToString(List partitions) { return matchedPartitionStr; } - private void createIndexableFile(String fileName, FieldDef recordDef, List partitionRangeStart, - List partitionRangeEnd) { - try { - // ------------------------------------------------------------------------------ - // Request a temp file be created in HPCC to write to - // 
------------------------------------------------------------------------------ + private void createIndexableFile(String fileName, FieldDef recordDef, List partitionRangeStart, List partitionRangeEnd) + { + try + { + //------------------------------------------------------------------------------ + // Request a temp file be created in HPCC to write to + //------------------------------------------------------------------------------ String eclRecordDefn = RecordDefinitionTranslator.toECLRecord(recordDef); HPCCWsDFUClient dfuClient = wsclient.getWsDFUClient(); String filegroupname = this.thorClusterFileGroup; - DFUCreateFileWrapper createResult = dfuClient.createFile(fileName, filegroupname, eclRecordDefn, 300, false, - DFUFileTypeWrapper.Flat, ""); + DFUCreateFileWrapper createResult = dfuClient.createFile(fileName, filegroupname, eclRecordDefn, 300, false, DFUFileTypeWrapper.Flat, ""); DFUFilePartWrapper[] dfuFileParts = createResult.getFileParts(); DataPartition[] hpccPartitions = DataPartition.createPartitions(dfuFileParts, - new NullRemapper(new RemapInfo(), createResult.getFileAccessInfo()), dfuFileParts.length, - createResult.getFileAccessInfoBlob()); + new NullRemapper(new RemapInfo(), createResult.getFileAccessInfo()), dfuFileParts.length, createResult.getFileAccessInfoBlob()); - // ------------------------------------------------------------------------------ - // Write partitions to file parts and keep track of record ranges - // ------------------------------------------------------------------------------ + //------------------------------------------------------------------------------ + // Write partitions to file parts and keep track of record ranges + //------------------------------------------------------------------------------ partitionRangeStart.clear(); partitionRangeEnd.clear(); long bytesWritten = 0; int numRecords = 0; - for (int partitionIndex = 0; partitionIndex < hpccPartitions.length; partitionIndex++) { + for (int partitionIndex = 0; partitionIndex < hpccPartitions.length; partitionIndex++) + { HPCCRecordAccessor recordAccessor = new HPCCRecordAccessor(recordDef); - HPCCRemoteFileWriter fileWriter = new HPCCRemoteFileWriter( - hpccPartitions[partitionIndex], recordDef, recordAccessor, CompressionAlgorithm.NONE); + HPCCRemoteFileWriter fileWriter = new HPCCRemoteFileWriter(hpccPartitions[partitionIndex], recordDef, recordAccessor, CompressionAlgorithm.NONE); - try { + try + { List recordRange = createRecordRange(partitionIndex, hpccPartitions.length, recordDef); for (HPCCRecord record : recordRange) { fileWriter.writeRecord(record); @@ -320,18 +318,22 @@ private void createIndexableFile(String fileName, FieldDef recordDef, List records = new ArrayList(); - for (long i = 0; i < fileParts.length; i++) { + for (long i = 0; i < fileParts.length; i++) + { long start = System.currentTimeMillis(); HpccRandomAccessFileReader fileReader = null; - try { - DataPartition fp = fileParts[(int) i]; + try + { + DataPartition fp = fileParts[(int)i]; HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition()); - fileReader = new HpccRandomAccessFileReader(fp, originalRD, recordBuilder, -1); - } catch (Exception e) { + fileReader = new HpccRandomAccessFileReader(fp, originalRD, recordBuilder,-1); + } + catch (Exception e) + { Assert.fail("Error: " + e.getMessage()); } long end = System.currentTimeMillis(); System.out.println("Time to create batch record reader: " + (end - start) + "ms"); start = System.currentTimeMillis(); - Long[] recOffsets = { 
20L }; + Long[] recOffsets = {20L}; fileReader.addRecordRequests(Arrays.asList(recOffsets)); - while (fileReader.hasNext()) { + while (fileReader.hasNext()) + { HPCCRecord record = fileReader.next(); - if (record == null) { + if (record == null) + { Assert.fail("Error: failed to read record."); } long expectedKeyValue = 3 + 4 * i; Long keyValue = (Long) record.getField(0); - if (keyValue != expectedKeyValue) { + if (keyValue != expectedKeyValue) + { Assert.fail("Error: key values did not match."); } } @@ -398,64 +408,80 @@ public void testBatchRandomAccess() throws Exception { fileReader.close(); } - } catch (Exception e) { + } + catch (Exception e) + { System.out.println("Error: " + e.getMessage()); } } - List createRecordRange(int partitionIndex, int numPartitions, FieldDef recordDef) { + List createRecordRange(int partitionIndex, int numPartitions, FieldDef recordDef) + { Object[] rangeStartFields = new Object[recordDef.getNumDefs()]; Object[] rangeEndFields = new Object[recordDef.getNumDefs()]; - for (int i = 0; i < recordDef.getNumDefs(); i++) { + for (int i = 0; i < recordDef.getNumDefs(); i++) + { boolean isKeyField = (i == 0); boolean isStart = true; - rangeStartFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField, - isStart); + rangeStartFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField, isStart); isStart = false; - rangeEndFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField, - isStart); + rangeEndFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i),isKeyField, isStart); } ArrayList recordRange = new ArrayList(); - recordRange.add(new HPCCRecord(rangeStartFields, recordDef)); - recordRange.add(new HPCCRecord(rangeEndFields, recordDef)); + recordRange.add(new HPCCRecord(rangeStartFields,recordDef)); + recordRange.add(new HPCCRecord(rangeEndFields,recordDef)); return recordRange; } - Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, boolean isKeyField, boolean isStart) { - if (isKeyField) { - if (fd.isFixed() == false) { + Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, boolean isKeyField, boolean isStart) + { + if (isKeyField) + { + if (fd.isFixed() == false) + { Assert.fail("Invalid key field type"); } - switch (fd.getFieldType()) { - case BOOLEAN: { + switch (fd.getFieldType()) + { + case BOOLEAN: + { return Boolean.valueOf(isStart == false); } - case INTEGER: { - if (isStart) { - return new Integer(partitionIndex * 4); - } else { - return new Integer(partitionIndex * 4 + 3); + case INTEGER: + { + if (isStart) + { + return new Integer(partitionIndex*4); + } + else + { + return new Integer(partitionIndex*4+3); } } - case STRING: { + case STRING: + { // Convert partitionIndex * 4 + 0/3 into base 26 string int rangeNum = 0; - if (isStart) { + if (isStart) + { rangeNum = partitionIndex * 4; - } else { + } + else + { rangeNum = partitionIndex * 4 + 3; } StringBuilder builder = new StringBuilder(" "); int charIndex = (int) Math.ceil(Math.log(numPartitions) / Math.log(26)); - while (rangeNum > 0) { + while (rangeNum > 0) + { char currentLetter = (char) ('A' + (rangeNum % 26)); - builder.setCharAt(charIndex, currentLetter); + builder.setCharAt(charIndex,currentLetter); rangeNum /= 26; charIndex--; @@ -467,8 +493,11 @@ Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, bool Assert.fail("Invalid key field type"); return null; } - } else { - if (fd.getFieldType() != 
FieldType.STRING) { + } + else + { + if (fd.getFieldType() != FieldType.STRING) + { Assert.fail("Invalid payload field type."); return null; } @@ -477,12 +506,12 @@ Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, bool } } - public static void main(String[] args) { + public static void main(String[] args) + { DFSIndexTest test = new DFSIndexTest(); try { test.hpccTLKFilterTest(); test.tlkFilterExample(); - } catch (Exception e) { - } + } catch(Exception e) {} } -} +} \ No newline at end of file diff --git a/pom.xml b/pom.xml index 70facfd3e..1aeae2d0a 100644 --- a/pom.xml +++ b/pom.xml @@ -227,6 +227,13 @@ **/WSDLs + + + + test-jar + + + maven-install-plugin diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 1d239e14e..641263cd0 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -78,9 +78,17 @@ - + maven-surefire-plugin ${maven.surefire.version} + + + + listener + org.hpccsystems.ws.client.TestResultNotifier + + + From b3b76470cb4a7c17895a8173f299faa32e6e91a4 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Wed, 13 Dec 2023 15:34:25 -0500 Subject: [PATCH 29/46] HPCC4J-562 Odd unsigned decimals incorrect scale (#666) - Corrected BinaryRecordReader unsigned decimal parse code - Updated ECL test datasets to include odd precision decimal values Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../org/hpccsystems/dfs/client/BinaryRecordReader.java | 10 +++++++++- dfsclient/src/test/resources/generate-datasets.ecl | 7 ++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/dfsclient/src/main/java/org/hpccsystems/dfs/client/BinaryRecordReader.java b/dfsclient/src/main/java/org/hpccsystems/dfs/client/BinaryRecordReader.java index 565a59454..58470fdd8 100644 --- a/dfsclient/src/main/java/org/hpccsystems/dfs/client/BinaryRecordReader.java +++ b/dfsclient/src/main/java/org/hpccsystems/dfs/client/BinaryRecordReader.java @@ -830,7 +830,15 @@ private BigDecimal getUnsignedDecimal(int numDigits, int precision, int dataLen) BigDecimal ret = new BigDecimal(0); int idx = 0; - int curDigit = numDigits - 1; + int curDigit = numDigits; + + // If the # of digits is odd the top most nibble is unused and we don't want to include it + // in the scale calculations below. 
Due to how the scale calculation works below this means + // we decrement the starting value of curDigit in the case of even length decimals + if ((numDigits % 2) == 0) + { + curDigit--; + } while (idx < dataLen) { diff --git a/dfsclient/src/test/resources/generate-datasets.ecl b/dfsclient/src/test/resources/generate-datasets.ecl index 0c1fdafa7..fed129a15 100644 --- a/dfsclient/src/test/resources/generate-datasets.ecl +++ b/dfsclient/src/test/resources/generate-datasets.ecl @@ -10,7 +10,10 @@ childRec := {STRING8 childField1, INTEGER8 childField2, REAL8 childField3}; rec := {INTEGER8 int8, UNSIGNED8 uint8, INTEGER4 int4, UNSIGNED4 uint4, INTEGER2 int2, UNSIGNED2 uint2, REAL8 r8, REAL4 r4, - DECIMAL16_8 dec16, UDECIMAL16_8 udec16, + DECIMAL16_8 dec16, + DECIMAL15_8 dec15, + UDECIMAL16_8 udec16, + UDECIMAL15_8 udec15, QSTRING qStr, STRING8 fixStr8, STRING str, @@ -33,7 +36,9 @@ ds := DATASET(totalrecs1, transform(rec, self.r8 := (REAL)(random() % unique_values); self.r4 := (REAL)(random() % unique_values); self.dec16 := (REAL)(random() % unique_values); + self.dec15 := (REAL)(random() % unique_values); self.udec16 := (REAL)(random() % unique_values); + self.udec15 := (REAL)(random() % unique_values); self.qStr := (STRING)(random() % unique_values); self.fixStr8 := (STRING)(random() % unique_values); self.str := (STRING)(random() % unique_values); From 770f5713bab05669cf90fc64581cf9890759eda5 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 14 Dec 2023 17:15:25 +0000 Subject: [PATCH 30/46] Split off 9.4.20 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 1edbdd336..ead5087da 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.19-0-SNAPSHOT + 9.4.21-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index ee01d1bdf..da93ff35b 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.19-0-SNAPSHOT + 9.4.21-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 1aeae2d0a..1f9807552 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.19-0-SNAPSHOT + 9.4.21-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 641263cd0..e837c0772 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.19-0-SNAPSHOT + 9.4.21-0-SNAPSHOT From aeddaf70d47d0939e138d21687039abdb13d27c6 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Mon, 18 Dec 2023 08:58:21 -0500 Subject: [PATCH 31/46] HPCC4J-561 copyfile test wait for spray completion (#668) - Modified WSFileIOClientTest.copyfile test to correctly monitor spray progress - Updated remote unit tests values.yaml to keep file spray service Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/httpsUnitTests.yml | 23 ++++++++- .../ws/client/WSFileIOClientTest.java | 47 +++++++++++-------- 2 files changed, 49 insertions(+), 21 deletions(-) diff --git a/.github/workflows/httpsUnitTests.yml b/.github/workflows/httpsUnitTests.yml index 215ff1bd3..5a1b43655 100644 --- a/.github/workflows/httpsUnitTests.yml +++ b/.github/workflows/httpsUnitTests.yml @@ -66,7 +66,28 @@ jobs: - name: Install HPCC Cluster run: | - echo -e "certificates:\n enabled: 
true\ndafilesrv:\n - name: rowservice\n disabled: false\n application: stream\n service:\n servicePort: 7600\n visibility: global" > values.yaml + cat < values.yaml + certificates: + enabled: true + dafilesrv: + - name: rowservice + disabled: false + application: stream + service: + servicePort: 7600 + visibility: global + - name: direct-access + disabled: true + application: directio + service: + servicePort: 7200 + visibility: local + - name: spray-service + application: spray + service: + servicePort: 7300 + visibility: cluster + EOF helm repo add hpcc https://hpcc-systems.github.io/helm-chart helm repo update helm install myhpcc hpcc/hpcc -f values.yaml diff --git a/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java b/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java index 158538608..e27714e24 100644 --- a/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java +++ b/wsclient/src/test/java/org/hpccsystems/ws/client/WSFileIOClientTest.java @@ -21,7 +21,9 @@ HPCC SYSTEMS software Copyright (C) 2019 HPCC Systems®. import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; +import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Set; import org.apache.axis2.AxisFault; @@ -67,9 +69,6 @@ public class WSFileIOClientTest extends BaseRemoteTest @Test public void copyFile() throws Exception { - Assume.assumeFalse("Test not valid on containerized HPCC environment", client.isTargetHPCCContainerized()); - assumeTrue("Ignoring test 'copyFile' because HPCC-30117 is not fixed", HPCC_30117.equalsIgnoreCase("fixed")); - String lzfile=System.currentTimeMillis() + "_csvtest.csv"; String hpccfilename="temp::" + lzfile; client.createHPCCFile(lzfile, targetLZ, true); @@ -77,35 +76,43 @@ public void copyFile() throws Exception client.writeHPCCFileData(data, lzfile, targetLZ, true, 0, 20); try { - ProgressResponseWrapper dfuspray=wsclient.getFileSprayClient().sprayVariable( + System.out.println("Starting file spray."); + ProgressResponseWrapper dfuspray = wsclient.getFileSprayClient().sprayVariable( new DelimitedDataOptions(), wsclient.getFileSprayClient().fetchLocalDropZones().get(0), lzfile,"~" + hpccfilename,"",thorClusterFileGroup,true, HPCCFileSprayClient.SprayVariableFormat.DFUff_csv, null, null, null, null, null, null, null); - Thread.sleep(1000); - int wait=60; if (dfuspray.getExceptions() != null - && dfuspray.getExceptions().getException() != null - && dfuspray.getExceptions().getException().size()>0) + && dfuspray.getExceptions().getException() != null + && dfuspray.getExceptions().getException().size()>0) { fail(dfuspray.getExceptions().getException().get(0).getMessage()); } - if (dfuspray.getSecsLeft()>0) + + List whiteListedStates = Arrays.asList( "queued", "started", "unknown", "finished", "monitoring"); + int waitCount = 0; + int MAX_WAIT_COUNT = 60; + + ProgressResponseWrapper dfuProgress = null; + do { - System.out.println("Still spraying, waiting 1 sec..."); - for (int i=wait;i>0;i--) + dfuProgress = wsclient.getFileSprayClient().getDfuProgress(dfuspray.getWuid()); + boolean stateIsWhiteListed = whiteListedStates.contains(dfuProgress.getState()); + if (!stateIsWhiteListed) { - if (dfuspray.getSecsLeft()==0) - { - i=0; - } - else - { - Thread.sleep(1000); - } + fail("File spray failed: Summary: " + dfuProgress.getSummaryMessage() + " Exceptions: " + dfuProgress.getExceptions()); } - } + + if (dfuProgress.getPercentDone() < 100) + { + Thread.sleep(1000); + 
System.out.println("File spray percent complete: " + dfuProgress.getPercentDone() + "% Sleeping for 1sec to wait for spray."); + waitCount++; + } + } while (waitCount < 60 && dfuProgress.getPercentDone() < 100); + + assumeTrue("File spray did not complete within: " + MAX_WAIT_COUNT + "s. Failing test.", waitCount < MAX_WAIT_COUNT); System.out.println("Test file successfully sprayed to " + "~" + hpccfilename + ", attempting copy to " + hpccfilename + "_2"); wsclient.getFileSprayClient().copyFile(hpccfilename,hpccfilename + "_2",true); From 5554f9f3c805df5269d26229d6e49d2dc50e24da Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 21 Dec 2023 11:29:13 +0000 Subject: [PATCH 32/46] Split off 9.4.22 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index ead5087da..a599d0274 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.21-0-SNAPSHOT + 9.4.23-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index da93ff35b..c9fe436ef 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.21-0-SNAPSHOT + 9.4.23-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 1f9807552..773c4cc89 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.21-0-SNAPSHOT + 9.4.23-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index e837c0772..1a77c8d70 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.21-0-SNAPSHOT + 9.4.23-0-SNAPSHOT From 2335fd03e6a568e7c5adb86f7464c4a15d46a6bb Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 4 Jan 2024 16:59:02 +0000 Subject: [PATCH 33/46] Split off 9.4.24 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index a599d0274..f522c84f5 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.23-0-SNAPSHOT + 9.4.25-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index c9fe436ef..566b52f52 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.23-0-SNAPSHOT + 9.4.25-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 773c4cc89..4fd5f7aee 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.23-0-SNAPSHOT + 9.4.25-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 1a77c8d70..d14277c8e 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.23-0-SNAPSHOT + 9.4.25-0-SNAPSHOT From 14f2a1563eee64e5d8655a152b205c64fd6bb11c Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 11 Jan 2024 17:53:10 +0000 Subject: [PATCH 34/46] Split off 9.4.26 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index f522c84f5..d348d3ad3 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.25-0-SNAPSHOT + 9.4.27-0-SNAPSHOT diff --git 
a/dfsclient/pom.xml b/dfsclient/pom.xml index 566b52f52..73f3ba60a 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.25-0-SNAPSHOT + 9.4.27-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 4fd5f7aee..11d6665ed 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.25-0-SNAPSHOT + 9.4.27-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index d14277c8e..5b6a8ac48 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.25-0-SNAPSHOT + 9.4.27-0-SNAPSHOT From deca046742e7d392d3359cabde841261ca6b66f9 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Fri, 19 Jan 2024 09:58:31 -0500 Subject: [PATCH 35/46] HPCC4J-553 Github Actions: Add baremetal test workflow (#669) * HPCC4J-553 Github Actions: Add baremetal test workflow - Renamed existing k8s workflow - Added baremetal workflow Signed-off-by: James McMullan James.McMullan@lexisnexis.com * Code review changes --------- Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .../workflows/baremetal-regression-suite.yml | 173 ++++++++++++++++++ ...UnitTests.yml => k8s-regression-suite.yml} | 2 +- 2 files changed, 174 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/baremetal-regression-suite.yml rename .github/workflows/{httpsUnitTests.yml => k8s-regression-suite.yml} (99%) diff --git a/.github/workflows/baremetal-regression-suite.yml b/.github/workflows/baremetal-regression-suite.yml new file mode 100644 index 000000000..57c5c74bd --- /dev/null +++ b/.github/workflows/baremetal-regression-suite.yml @@ -0,0 +1,173 @@ +name: Baremetal Regression Suite + +on: + pull_request: + branches: + - "master" + - "candidate-*" + + workflow_dispatch: + +jobs: + test-against-platform: + runs-on: ubuntu-latest + + steps: + - name: Setup JDK 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + + - uses: "actions/setup-python@v2" + with: + python-version: "3.8" + + - name: "Install Python dependencies" + run: | + set -xe + python -VV + python -m site + python -m pip install --upgrade pip setuptools wheel + + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Extract Latest Tagged Version + id: extract_version + env: + PULL_REQUEST_NUMBER : ${{ github.event.pull_request.number }} + PULL_REQUEST_TITLE : ${{ github.event.pull_request.title }} + PULL_REQUEST_AUTHOR_NAME : ${{ github.event.pull_request.user.login }} + PULL_URL: ${{ github.event.pull_request.html_url }} + COMMENTS_URL: ${{ github.event.pull_request.comments_url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BRANCH_NAME: ${{ github.base_ref }} + shell: python + run: | + import os + import re + import subprocess + import time + import sys + + def extractVersion(versionStr): + parts = versionStr.split('.') + if len(parts) != 3: + print('Invalid version: ' + version) + sys.exit(1) + if parts[2].lower() == 'x': + parts[2] = '0' + + major, minor, point = map(int, parts) + return [major, minor, point] + + def getTagVersionForCmd(cmd): + versionPattern = re.compile(r'.*([0-9]+\.[0-9]+\.[0-9]+).*') + + # Get latest release version + gitTagProcess = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) + (output, err) = gitTagProcess.communicate() + gitTagProcessStatus = gitTagProcess.wait() + + if gitTagProcessStatus != 0: + print('Unable to retrieve latest git tag. 
With error: ' + str(err)) + sys.exit(1) + + latestGitTag = str(output) + + versionMatch = versionPattern.match(latestGitTag) + if versionMatch: + return extractVersion(versionMatch.group(1)) + else: + print('Unable to extract version from git tag: ' + latestGitTag) + sys.exit(2) + + def buildVersionString(version): + major, minor, point = map(int, version) + return f'{major}.{minor}.{point}' + + def getLatestBranchVersion(branchName): + + latestVersion = getTagVersionForCmd("git tag --list 'hpcc4j_*-release' --sort=-v:refname | head -n 1") + + # If we are merging into master we assume it is going into the next minor release + if branchName == 'master': + return buildVersionString([latestVersion[0], latestVersion[1] + 2, 0]) + else: + # Extract candidate branch major / minor version + candidateBranchPattern = re.compile(r'candidate-([0-9]+\.[0-9]+\.([0-9]+|x)).*') + branchVersionMatch = candidateBranchPattern.match(branchName) + if branchVersionMatch is None: + print('Unable to extract version from branch name: ' + branchName) + sys.exit(3) + + branchVersion = extractVersion(branchVersionMatch.group(1)) + + # Get latest release in branch + findLatestBranchVer = "git tag --list 'hpcc4j_" + str(branchVersion[0]) + "." + str(branchVersion[1]) + "*-release' --sort=-v:refname | head -n 1" + return getTagVersionForCmd(findLatestBranchVer) + + branch_name = os.environ['BRANCH_NAME'] + + latestVersion = getLatestBranchVersion(branch_name) + + if latestVersion[2] == 0: + print('Latest version is a new minor. Setting previous version to latest version') + previousVersion = latestVersion + else: + previousVersion = [latestVersion[0], latestVersion[1], latestVersion[2] - 2] + + previousVersionStr = ".".join(map(str, previousVersion)) + previousVersionURL = 'https://cdn.hpccsystems.com/releases/CE-Candidate-' + previousVersionStr \ + + '/bin/platform/hpccsystems-platform-community_' + previousVersionStr + '-1jammy_amd64_withsymbols.deb' + + latestVersionStr = ".".join(map(str, latestVersion)) + latestVersionURL = 'https://cdn.hpccsystems.com/releases/CE-Candidate-' + latestVersionStr \ + + '/bin/platform/hpccsystems-platform-community_' + latestVersionStr + '-1jammy_amd64_withsymbols.deb' + + print(f"::set-output name=previousVersion::{previousVersionStr}") + print(f"::set-output name=previousVersionURL::{previousVersionURL}") + print(f"::set-output name=latestVersion::{latestVersionStr}") + print(f"::set-output name=latestVersionURL::{latestVersionURL}") + + - name: Install latest version + run: | + if wget -q --spider ${{ steps.extract_version.outputs.latestVersionURL }}; then + wget -q ${{ steps.extract_version.outputs.latestVersionURL }} + elif wget -q --spider ${{ steps.extract_version.outputs.previousVersionURL }}; then + wget -q ${{ steps.extract_version.outputs.previousVersionURL }} + else + echo "Unable to find HPCC version to install" + exit 1 + fi + + sudo apt-get update + sudo apt-get install -y expect + sudo dpkg -i hpccsystems-platform-community_*.deb + sudo apt-get -f install -y + + - name: Start HPCC-Platform + shell: "bash" + run: | + export LANG="en_US.UTF-8" + sudo update-locale + sudo /etc/init.d/hpcc-init start + + - name: Add Host File Entries + run: | + sudo -- sh -c -e "echo '127.0.0.1 eclwatch.default' >> /etc/hosts"; + sudo -- sh -c -e "echo '127.0.0.1 rowservice.default' >> /etc/hosts"; + sudo -- sh -c -e "echo '127.0.0.1 sql2ecl.default' >> /etc/hosts"; + + # speed things up with caching from https://docs.github.com/en/actions/guides/building-and-testing-java-with-maven + - 
name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + + - name: Build with Maven + run: mvn -B --activate-profiles jenkins-on-demand -Dmaven.gpg.skip=true -Dmaven.javadoc.skip=true -Dmaven.test.failure.ignore=false -Dhpccconn=http://eclwatch.default:8010 -Dwssqlconn=http://sql2ecl.default:8510 -DHPCC30117=open install diff --git a/.github/workflows/httpsUnitTests.yml b/.github/workflows/k8s-regression-suite.yml similarity index 99% rename from .github/workflows/httpsUnitTests.yml rename to .github/workflows/k8s-regression-suite.yml index 5a1b43655..b37ddecba 100644 --- a/.github/workflows/httpsUnitTests.yml +++ b/.github/workflows/k8s-regression-suite.yml @@ -1,4 +1,4 @@ -name: https unit tests +name: K8s Regression Suite on: pull_request: From 9f73bd141216281ebcf06a3a357b477658f034da Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 19 Jan 2024 17:54:20 +0000 Subject: [PATCH 36/46] Split off 9.4.28 Signed-off-by: Gavin Halliday --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index d348d3ad3..3853fcd80 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.27-0-SNAPSHOT + 9.4.29-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 73f3ba60a..26d8748e5 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.27-0-SNAPSHOT + 9.4.29-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 11d6665ed..9f92862d8 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.27-0-SNAPSHOT + 9.4.29-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 5b6a8ac48..6dd1dc024 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.27-0-SNAPSHOT + 9.4.29-0-SNAPSHOT From 87c36bf657e4a396f4928900dc234d71b0a87373 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 25 Jan 2024 17:25:04 +0000 Subject: [PATCH 37/46] Split off 9.4.30 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 3853fcd80..3a25f7ccb 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.29-0-SNAPSHOT + 9.4.31-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 26d8748e5..19db31c99 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.29-0-SNAPSHOT + 9.4.31-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 9f92862d8..943ac5ee0 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.29-0-SNAPSHOT + 9.4.31-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 6dd1dc024..877273ddb 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.29-0-SNAPSHOT + 9.4.31-0-SNAPSHOT From f743445216e1a41e69c485ae38d2730662a43537 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Mon, 29 Jan 2024 15:47:43 -0500 Subject: [PATCH 38/46] HCCP4J-571 Github Action: Jirabot Merge fails to find tag (#673) - Corrected checkout step 
Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/JirabotMerge.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/JirabotMerge.yml b/.github/workflows/JirabotMerge.yml index 3fc3dadf0..112abf776 100644 --- a/.github/workflows/JirabotMerge.yml +++ b/.github/workflows/JirabotMerge.yml @@ -24,6 +24,12 @@ jobs: python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade jira + - name: "Checkout" + uses: actions/checkout@v2 + with: + ref: ${{ github.event.pull_request.base.ref }} + fetch-depth: 0 + fetch-tags: true - name: "Run" env: JIRABOT_USERNAME : ${{ secrets.JIRABOT_USERNAME }} From 72533136eadc7c39590bde85d4d8b6769a1e0374 Mon Sep 17 00:00:00 2001 From: James McMullan Date: Wed, 31 Jan 2024 10:18:13 -0500 Subject: [PATCH 39/46] HPCC4J-571 Github Action: Jirabot Merge fails to find correct tag (#677) - Added debugging information for action vars - Add a default Jira URL as a fallback Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/JirabotMerge.yml | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/JirabotMerge.yml b/.github/workflows/JirabotMerge.yml index 112abf776..634c408f4 100644 --- a/.github/workflows/JirabotMerge.yml +++ b/.github/workflows/JirabotMerge.yml @@ -1,19 +1,26 @@ name: Jirabot - Merge on: - pull_request: + pull_request_target: types: [closed] branches: - "master" - "candidate-*" -permissions: write-all - jobs: jirabot: runs-on: ubuntu-latest if: github.event.pull_request.merged == true steps: + - name: "Debug Vars" + run: | + echo "JIRA_URL: ${{ vars.JIRA_URL }}" + echo "Pull Request Number: ${{ github.event.pull_request.number }}" + echo "Pull Request Title: ${{ github.event.pull_request.title }}" + echo "Pull Request Author Name: ${{ github.event.pull_request.user.login }}" + echo "Pull Request URL: ${{ github.event.pull_request.html_url }}" + echo "Comments URL: ${{ github.event.pull_request.comments_url }}" + echo "Branch Name: ${{ github.ref_name }}" - uses: "actions/setup-python@v2" with: python-version: "3.8" @@ -182,6 +189,11 @@ jobs: jirabot_user = os.environ['JIRABOT_USERNAME'] jirabot_pass = os.environ['JIRABOT_PASSWORD'] jira_url = os.environ['JIRA_URL'] + + if not jira_url: + jira_url = 'https://track.hpccsystems.com' + print('Jira URL us empty defaulting to: ' + jira_url) + pr = os.environ['PULL_REQUEST_NUMBER'] title = os.environ['PULL_REQUEST_TITLE'] user = os.environ['PULL_REQUEST_AUTHOR_NAME'] From 9b112b390dbb063a24c003103f61e78d5bb5c7ce Mon Sep 17 00:00:00 2001 From: James McMullan Date: Thu, 1 Feb 2024 10:13:31 -0500 Subject: [PATCH 40/46] HPCC4J-553 Github Actions: Add baremetal test workflow (#674) - Fixed issue with version being returned as a string for master - Fixed logic when determining version to test against for master Signed-off-by: James McMullan James.McMullan@lexisnexis.com Signed-off-by: James McMullan James.McMullan@lexisnexis.com --- .github/workflows/baremetal-regression-suite.yml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/baremetal-regression-suite.yml b/.github/workflows/baremetal-regression-suite.yml index 57c5c74bd..56fcc4428 100644 --- a/.github/workflows/baremetal-regression-suite.yml +++ b/.github/workflows/baremetal-regression-suite.yml @@ -55,7 +55,7 @@ jobs: def 
extractVersion(versionStr): parts = versionStr.split('.') if len(parts) != 3: - print('Invalid version: ' + version) + print('Invalid version: ' + versionStr) sys.exit(1) if parts[2].lower() == 'x': parts[2] = '0' @@ -84,17 +84,12 @@ jobs: print('Unable to extract version from git tag: ' + latestGitTag) sys.exit(2) - def buildVersionString(version): - major, minor, point = map(int, version) - return f'{major}.{minor}.{point}' - def getLatestBranchVersion(branchName): latestVersion = getTagVersionForCmd("git tag --list 'hpcc4j_*-release' --sort=-v:refname | head -n 1") - # If we are merging into master we assume it is going into the next minor release if branchName == 'master': - return buildVersionString([latestVersion[0], latestVersion[1] + 2, 0]) + return [latestVersion[0], latestVersion[1], latestVersion[2]] else: # Extract candidate branch major / minor version candidateBranchPattern = re.compile(r'candidate-([0-9]+\.[0-9]+\.([0-9]+|x)).*') From 365369b38a51b5b9e3981c53833b47c848989abf Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 2 Feb 2024 14:40:56 +0000 Subject: [PATCH 41/46] Split off 9.4.32 Signed-off-by: Gavin Halliday --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 3a25f7ccb..5dbd048fd 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.31-0-SNAPSHOT + 9.4.33-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 19db31c99..9f6137dd5 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.31-0-SNAPSHOT + 9.4.33-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 943ac5ee0..e98009d51 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.31-0-SNAPSHOT + 9.4.33-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index 877273ddb..dff611816 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.31-0-SNAPSHOT + 9.4.33-0-SNAPSHOT From 494517cdda1d9175cad3128f757a1b7d9ac98a52 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Mon, 12 Feb 2024 15:59:27 +0000 Subject: [PATCH 42/46] Split off 9.4.34 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 5dbd048fd..4cc234e00 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.33-0-SNAPSHOT + 9.4.35-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 9f6137dd5..20999314e 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.33-0-SNAPSHOT + 9.4.35-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index e98009d51..a39e1e5e3 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.33-0-SNAPSHOT + 9.4.35-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index dff611816..a13a038f4 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.33-0-SNAPSHOT + 9.4.35-0-SNAPSHOT From 961b1e15c2016abad256c4cab241900046c1f6cf Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 15 Feb 2024 16:54:42 +0000 Subject: [PATCH 43/46] Split 
off 9.4.36 Signed-off-by: Gordon Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 4cc234e00..63347b656 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.35-0-SNAPSHOT + 9.4.37-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index 20999314e..c104f9747 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.35-0-SNAPSHOT + 9.4.37-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index a39e1e5e3..d04438d22 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.35-0-SNAPSHOT + 9.4.37-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index a13a038f4..cf19befb4 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.35-0-SNAPSHOT + 9.4.37-0-SNAPSHOT From 563431925a00d6e3f951d5f74678d8e1cbf6d0df Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 22 Feb 2024 17:02:27 +0000 Subject: [PATCH 44/46] Split off 9.4.38 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index 63347b656..c30a36db7 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.37-0-SNAPSHOT + 9.4.39-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index c104f9747..d0072c28a 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.37-0-SNAPSHOT + 9.4.39-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index d04438d22..cfddd5cd1 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.37-0-SNAPSHOT + 9.4.39-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index cf19befb4..d4628b15e 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.37-0-SNAPSHOT + 9.4.39-0-SNAPSHOT From 4c55669e792fdaa77961ffccd2be15ac2e326dd1 Mon Sep 17 00:00:00 2001 From: Jake Smith Date: Thu, 29 Feb 2024 16:47:39 +0000 Subject: [PATCH 45/46] Split off 9.4.40 Signed-off-by: Jake Smith --- commons-hpcc/pom.xml | 2 +- dfsclient/pom.xml | 2 +- pom.xml | 2 +- wsclient/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons-hpcc/pom.xml b/commons-hpcc/pom.xml index c30a36db7..8c21c53fd 100644 --- a/commons-hpcc/pom.xml +++ b/commons-hpcc/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.39-0-SNAPSHOT + 9.4.41-0-SNAPSHOT diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml index d0072c28a..02a9a5dca 100644 --- a/dfsclient/pom.xml +++ b/dfsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.39-0-SNAPSHOT + 9.4.41-0-SNAPSHOT diff --git a/pom.xml b/pom.xml index cfddd5cd1..f3c9818b4 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 org.hpccsystems hpcc4j - 9.4.39-0-SNAPSHOT + 9.4.41-0-SNAPSHOT pom HPCC Systems Java Projects https://hpccsystems.com diff --git a/wsclient/pom.xml b/wsclient/pom.xml index d4628b15e..dce4342d0 100644 --- a/wsclient/pom.xml +++ b/wsclient/pom.xml @@ -9,7 +9,7 @@ org.hpccsystems hpcc4j - 9.4.39-0-SNAPSHOT + 9.4.41-0-SNAPSHOT From be37a870f06ba4f838deb39993176f8d0a93f9aa Mon Sep 17 00:00:00 
2001 From: Rodrigo Pastrana Date: Fri, 1 Mar 2024 15:23:20 -0500 Subject: [PATCH 46/46] HPCC4J-581 WsFS Client should add path delim only if needed - Introduces functionality to strip trailing white space - Adds trimright utility functionality - Adds new test cases - Adjusts pre-existing test cases to expected | actual format - Do not trim original string parameter - Do not duplicate Path URL param Signed-off-by: Rodrigo Pastrana --- .../ws/client/HPCCFileSprayClient.java | 13 +-- .../hpccsystems/ws/client/utils/Utils.java | 43 ++++++++ .../ws/client/utils/UtilsTest.java | 98 +++++++++++-------- 3 files changed, 104 insertions(+), 50 deletions(-) diff --git a/wsclient/src/main/java/org/hpccsystems/ws/client/HPCCFileSprayClient.java b/wsclient/src/main/java/org/hpccsystems/ws/client/HPCCFileSprayClient.java index 87d0c5d61..a7283a6cd 100644 --- a/wsclient/src/main/java/org/hpccsystems/ws/client/HPCCFileSprayClient.java +++ b/wsclient/src/main/java/org/hpccsystems/ws/client/HPCCFileSprayClient.java @@ -1004,9 +1004,8 @@ public ProgressResponseWrapper sprayVariable(DelimitedDataOptions options, DropZ if (targetDropZone == null) throw new Exception("TargetDropZone object not available!"); SprayVariable request = new SprayVariable(); - request.setSourceIP(targetDropZone.getNetAddress()); - request.setSourcePath(targetDropZone.getPath() + "/" + sourceFileName); + request.setSourcePath(Utils.ensureTrailingPathSlash(targetDropZone.getPath()) + sourceFileName); request.setDestGroup(destGroup); request.setDestLogicalName(targetFileName); request.setOverwrite(overwrite); @@ -1162,7 +1161,7 @@ public ProgressResponseWrapper sprayXML(DropZoneWrapper targetDropZone, String s request.setDestGroup(destGroup); request.setSourceIP(targetDropZone.getNetAddress()); - request.setSourcePath(targetDropZone.getPath() + "/" + sourceFileName); + request.setSourcePath(Utils.ensureTrailingPathSlash(targetDropZone.getPath()) + sourceFileName); request.setDestLogicalName(targetFileName); request.setOverwrite(overwrite); request.setSourceFormat(format.getValue()); @@ -1318,7 +1317,7 @@ public ProgressResponseWrapper sprayFixed(DropZoneWrapper targetDropZone, String request.setDestGroup(destGroup); request.setSourceRecordSize(recordSize); request.setSourceIP(targetDropZone.getNetAddress()); - request.setSourcePath(targetDropZone.getPath() + "/" + sourceFileName); + request.setSourcePath(Utils.ensureTrailingPathSlash(targetDropZone.getPath()) + sourceFileName); request.setDestLogicalName(targetFileLabel); request.setOverwrite(overwrite); request.setPrefix(prefix); @@ -1488,15 +1487,11 @@ public boolean uploadLargeFile(File uploadFile, DropZoneWrapper dropZone) return false; } - uploadurlbuilder += "&NetAddress=" + dropZone.getNetAddress() + "&Path=" + dropZone.getPath(); + uploadurlbuilder += "&NetAddress=" + dropZone.getNetAddress() + "&Path=" + Utils.ensureTrailingPathSlash(dropZone.getPath()); if (!dropZone.getName().isEmpty()) uploadurlbuilder += "&DropZoneName=" + dropZone.getName(); - String path = dropZone.getPath().trim(); - if (!path.endsWith("/")) - path += "/"; - uploadurlbuilder += "&Path=" + path; uploadurlbuilder += "&OS=" + (dropZone.getLinux().equalsIgnoreCase("true") ? 
"2" : "1"); uploadurlbuilder += "&rawxml_=1"; WritableByteChannel outchannel = null; diff --git a/wsclient/src/main/java/org/hpccsystems/ws/client/utils/Utils.java b/wsclient/src/main/java/org/hpccsystems/ws/client/utils/Utils.java index f33d83cf1..df7a56162 100644 --- a/wsclient/src/main/java/org/hpccsystems/ws/client/utils/Utils.java +++ b/wsclient/src/main/java/org/hpccsystems/ws/client/utils/Utils.java @@ -1061,21 +1061,64 @@ public static DocumentBuilder newSafeXMLDocBuilder() throws ParserConfigurationE return safeXMLDocBuilder; } + /** + * Ensures the given path contains a trailing path delimiter. + * Does not introduce duplicate trailing path delimiter if one already exists. + * Defaults to Linux style separator if the given path either contains a Linux style separator, or the path is empty. + * Strips all trailing white space character + * @param path The path to be postfixed + * @return original path with proper trailing path delimiter + */ public static String ensureTrailingPathSlash(String path) { return ensureTrailingPathSlash(path, (path.contains(Character.toString(LINUX_SEP)) || path.length() == 0) ? LINUX_SEP : WIN_SEP); } + /** + * Ensures the given path contains a trailing path delimiter. + * Does not introduce duplicate trailing path delimiter if one already exists. + * Uses Linux style path separator 'useLinuxSep' == "true", otherwise uses windows style path separator + * Strips all trailing white space character + * @param path path The path to be postfixed + * @param useLinuxSep String, if "true" linux styled path delimiter will be used + * @return original path with proper trailing path delimiter + */ public static String ensureTrailingPathSlash(String path, String useLinuxSep) { return ensureTrailingPathSlash(path, useLinuxSep.equalsIgnoreCase("true") ? LINUX_SEP : WIN_SEP); } + /** + * Ensures the given path contains a trailing path delimiter. + * Does not introduce duplicate trailing path delimiter if one already exists. + * Uses provided 'slash' as trailing path delimiter + * Strips all trailing white space character + * @param path The path to be postfixed + * @param slash The character to append + * @return original path with proper trailing path delimiter + */ public static String ensureTrailingPathSlash(String path, char slash) { + path = trimTrailing(path); + if (path.length() == 0 || path.charAt(path.length()-1) != slash) path = path + slash; return path; } + + /** + * Removes trailing whitespace characters from a string. 
+ * + * @param originalStr the original string from which trailing whitespace should be removed + * @return a new string with the same characters as the original string, minus any trailing whitespace + */ + public static String trimTrailing(String originalStr) + { + int strIndex = originalStr.length()-1; + while(strIndex >= 0 && Character.isWhitespace(originalStr.charAt(strIndex))) + strIndex--; + + return originalStr.substring(0,strIndex+1); + } } diff --git a/wsclient/src/test/java/org/hpccsystems/ws/client/utils/UtilsTest.java b/wsclient/src/test/java/org/hpccsystems/ws/client/utils/UtilsTest.java index 1ff309ebf..eef268e47 100644 --- a/wsclient/src/test/java/org/hpccsystems/ws/client/utils/UtilsTest.java +++ b/wsclient/src/test/java/org/hpccsystems/ws/client/utils/UtilsTest.java @@ -8,59 +8,75 @@ public class UtilsTest { + @Test + public void testEnsureTrailingSlashTrailingWhiteSpace() + { + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash("")); + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP)+ " ")); + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP) + " ")); + assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)+"\t")); + assertEquals("C:\\some\\Path\\", Utils.ensureTrailingPathSlash("C:\\some\\Path ")); + assertEquals("C:\\some\\Path\\", Utils.ensureTrailingPathSlash("C:\\some\\Path\\ ")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path ")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/\t\t")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/\n")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/" + '\u005Cn')); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/ " + '\u005Ct')); + } + @Test public void testEnsureTrailingSlashNoSlashSpecified() { - assertEquals(Utils.ensureTrailingPathSlash(""), Character.toString(Utils.LINUX_SEP)); //no sep in path, default to lin sep - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP)), Character.toString(Utils.LINUX_SEP));//no change expected - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)), Character.toString(Utils.WIN_SEP)); //no change expected - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP));//no change expected - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path"), "C:\\some\\Path\\"); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path\\"), "C:\\some\\Path\\"); - assertEquals(Utils.ensureTrailingPathSlash("/another/path"), "/another/path/"); - assertEquals(Utils.ensureTrailingPathSlash("/another/path/"), "/another/path/"); + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash("")); //no sep in path, default to lin sep + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP)));//no change expected + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP))); //no change expected + 
assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)));//no change expected + assertEquals("C:\\some\\Path\\", Utils.ensureTrailingPathSlash("C:\\some\\Path")); + assertEquals("C:\\some\\Path\\", Utils.ensureTrailingPathSlash("C:\\some\\Path\\")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/")); } @Test public void testEnsureTrailingSlashSlashSpecified() { - assertEquals(Utils.ensureTrailingPathSlash("", Utils.LINUX_SEP), Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash("", Utils.WIN_SEP), Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), Utils.WIN_SEP), Character.toString(Utils.LINUX_SEP)+Utils.WIN_SEP); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), Utils.LINUX_SEP), Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), Utils.WIN_SEP), Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), Utils.LINUX_SEP), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.LINUX_SEP), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.WIN_SEP), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)+Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path", Utils.LINUX_SEP), "C:\\some\\Path\\"+Utils.LINUX_SEP); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path", Utils.WIN_SEP), "C:\\some\\Path\\"+Utils.WIN_SEP); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path\\", Utils.LINUX_SEP), "C:\\some\\Path\\" + Utils.LINUX_SEP); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path\\", Utils.WIN_SEP), "C:\\some\\Path\\"); - assertEquals(Utils.ensureTrailingPathSlash("/another/path", Utils.LINUX_SEP), "/another/path" + Utils.LINUX_SEP); - assertEquals(Utils.ensureTrailingPathSlash("/another/path", Utils.WIN_SEP), "/another/path/"+ Utils.WIN_SEP); - assertEquals(Utils.ensureTrailingPathSlash("/another/path/", Utils.LINUX_SEP), "/another/path/"); - assertEquals(Utils.ensureTrailingPathSlash("/another/path/", Utils.WIN_SEP), "/another/path/"+Utils.WIN_SEP); + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash("", Utils.LINUX_SEP)); + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash("", Utils.WIN_SEP)); + assertEquals(Character.toString(Utils.LINUX_SEP)+Utils.WIN_SEP, Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), Utils.WIN_SEP)); + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), Utils.LINUX_SEP)); + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), Utils.WIN_SEP)); + assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), Utils.LINUX_SEP)); + 
assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.LINUX_SEP)); + assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)+Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.WIN_SEP)); + assertEquals("C:\\some\\Path\\"+Utils.LINUX_SEP, Utils.ensureTrailingPathSlash("C:\\some\\Path", Utils.LINUX_SEP)); + assertEquals("C:\\some\\Path\\"+Utils.WIN_SEP, Utils.ensureTrailingPathSlash("C:\\some\\Path", Utils.WIN_SEP)); + assertEquals("C:\\some\\Path\\" + Utils.LINUX_SEP, Utils.ensureTrailingPathSlash("C:\\some\\Path\\", Utils.LINUX_SEP)); + assertEquals("C:\\some\\Path\\", Utils.ensureTrailingPathSlash("C:\\some\\Path\\", Utils.WIN_SEP)); + assertEquals("/another/path" + Utils.LINUX_SEP, Utils.ensureTrailingPathSlash("/another/path", Utils.LINUX_SEP)); + assertEquals("/another/path/"+ Utils.WIN_SEP, Utils.ensureTrailingPathSlash("/another/path", Utils.WIN_SEP)); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/", Utils.LINUX_SEP)); + assertEquals("/another/path/"+Utils.WIN_SEP, Utils.ensureTrailingPathSlash("/another/path/", Utils.WIN_SEP)); } @Test public void testEnsureTrailingSlashUseLinuxBoolTest() { - assertEquals(Utils.ensureTrailingPathSlash("", "true"), Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash("", "false"), Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash("", "xyz"), Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), "false"), Character.toString(Utils.LINUX_SEP)+Utils.WIN_SEP); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), "true"), Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), "false"), Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), "true"), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), "true"), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)); - assertEquals(Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), "false"), Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)+Character.toString(Utils.WIN_SEP)); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path", "true"), "C:\\some\\Path\\"+Utils.LINUX_SEP); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path", "false"), "C:\\some\\Path\\"+Utils.WIN_SEP); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path\\", "true"), "C:\\some\\Path\\" + Utils.LINUX_SEP); - assertEquals(Utils.ensureTrailingPathSlash("C:\\some\\Path\\", "false"), "C:\\some\\Path\\"); - assertEquals(Utils.ensureTrailingPathSlash("/another/path", "TRUE"), "/another/path" + Utils.LINUX_SEP); - assertEquals(Utils.ensureTrailingPathSlash("/another/path", "FALSE"), "/another/path/"+ Utils.WIN_SEP); - assertEquals(Utils.ensureTrailingPathSlash("/another/path/", "TrUe"), "/another/path/"); - assertEquals(Utils.ensureTrailingPathSlash("/another/path/", "FalSe"), "/another/path/"+Utils.WIN_SEP); + 
assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash("", "true")); + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash("", "false")); + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash("", "xyz")); + assertEquals(Character.toString(Utils.LINUX_SEP)+Utils.WIN_SEP, Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), "false")); + assertEquals(Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.LINUX_SEP), "true")); + assertEquals(Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), "false")); + assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP), "true")); + assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), "true")); + assertEquals(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP)+Character.toString(Utils.WIN_SEP), Utils.ensureTrailingPathSlash(Character.toString(Utils.WIN_SEP)+Character.toString(Utils.LINUX_SEP), "false")); + assertEquals("C:\\some\\Path\\"+Utils.LINUX_SEP, Utils.ensureTrailingPathSlash("C:\\some\\Path", "true")); + assertEquals("C:\\some\\Path\\"+Utils.WIN_SEP, Utils.ensureTrailingPathSlash("C:\\some\\Path", "false")); + assertEquals("C:\\some\\Path\\" + Utils.LINUX_SEP, Utils.ensureTrailingPathSlash("C:\\some\\Path\\", "true")); + assertEquals("C:\\some\\Path\\", Utils.ensureTrailingPathSlash("C:\\some\\Path\\", "false")); + assertEquals("/another/path" + Utils.LINUX_SEP, Utils.ensureTrailingPathSlash("/another/path", "TRUE")); + assertEquals("/another/path/"+ Utils.WIN_SEP, Utils.ensureTrailingPathSlash("/another/path", "FALSE")); + assertEquals("/another/path/", Utils.ensureTrailingPathSlash("/another/path/", "TrUe")); + assertEquals("/another/path/"+Utils.WIN_SEP, Utils.ensureTrailingPathSlash("/another/path/", "FalSe")); } }
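For reference, a minimal usage sketch of the trailing-path-delimiter helpers introduced by HPCC4J-581 above. The Utils methods and the expected results mirror the ensureTrailingPathSlash/trimTrailing changes and the UtilsTest cases in this patch; the TrailingSlashDemo class and its sample inputs are illustrative assumptions, not part of the patch itself.

// Illustrative sketch only; behavior is taken from the Utils changes in HPCC4J-581 above.
import org.hpccsystems.ws.client.utils.Utils;

public class TrailingSlashDemo
{
    public static void main(String[] args)
    {
        // Trailing whitespace is trimmed before the delimiter check, so no stray space precedes the slash
        System.out.println(Utils.ensureTrailingPathSlash("/another/path "));   // prints "/another/path/"

        // An existing trailing delimiter is left alone rather than duplicated
        System.out.println(Utils.ensureTrailingPathSlash("C:\\some\\Path\\")); // prints "C:\some\Path\"

        // trimTrailing strips only trailing whitespace; leading and interior characters are untouched
        System.out.println(Utils.trimTrailing("dropzone/path \t"));            // prints "dropzone/path"
    }
}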