Merge remote-tracking branch 'origin/candidate-9.4.x' into candidate-9.6.x

Signed-off-by: Gordon Smith <[email protected]>

# Conflicts:
#	commons-hpcc/pom.xml
#	dfsclient/pom.xml
#	pom.xml
#	wsclient/pom.xml
GordonSmith committed Mar 14, 2024
2 parents 1dfabad + 04b2fb0 commit 770309a
Showing 7 changed files with 219 additions and 94 deletions.
69 changes: 42 additions & 27 deletions .github/workflows/Jirabot.yml
@@ -39,28 +39,22 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GHUB_JIRA_USER_MAP: ${{ vars.GHUB_JIRA_USER_MAP }}
JIRA_ISSUE_PROPERTY_MAP: ${{ vars.JIRA_ISSUE_PROPERTY_MAP }}
JIRA_ISSUE_TRANSITION_MAP: ${{ vars.JIRA_ISSUE_TRANSITION_MAP }}
run: |
import os
import re
import json
from jira.client import JIRA
def updateIssue(jira, issue, prAuthor: str, propertyMap: dict, pull_url: str) -> str:
def updateIssue(jira, issue, prAuthorEmail: str, transitionMap: dict, propertyMap: dict, pull_url: str) -> str:
result = ''
statusName = str(issue.fields.status)
if statusName == 'Open':
transition = 'Start Progress'
elif statusName == 'In Progress':
transition = ''
elif statusName == 'Resolved':
transition = 'Reopen Issue'
elif statusName == 'Closed':
transition = 'Reopen Issue'
else:
transition = ''
transition = transitionMap.get(statusName, None)
if transition != '':
if transition is None:
print('Error: Unable to find transition for status: ' + statusName)
elif transition != '':
try:
jira.transition_issue(issue, transition)
result += 'Workflow Transition: ' + transition + '\n'
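
The hard-coded status checks above are replaced by a lookup in JIRA_ISSUE_TRANSITION_MAP, a repository variable holding a JSON object that maps a Jira status name to the workflow transition to apply, with an empty string meaning "no transition". A plausible value, inferred from the removed if/elif chain rather than taken from the actual repository settings:

    {
        "Open": "Start Progress",
        "In Progress": "",
        "Resolved": "Reopen Issue",
        "Closed": "Reopen Issue"
    }

A status missing from the map now logs an error instead of silently doing nothing.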
@@ -81,13 +75,17 @@ jobs:
elif currentPR is not None and currentPR != pull_url:
result += 'Additional PR: ' + pull_url + '\n'
if prAuthor:
if prAuthorEmail:
if issue.fields.assignee is None:
jira.assign_issue(issue, prAuthor)
result += 'Assigning user: ' + prAuthor + '\n'
elif issue.fields.assignee is not None and issue.fields.assignee.name.lower() != prAuthor.lower():
result += 'Changing assignee from: ' + issue.fields.assignee.name + ' to: ' + prAuthor + '\n'
jira.assign_issue(issue, prAuthor)
jira.assign_issue(issue, prAuthorEmail)
result += 'Assigning user: ' + prAuthorEmail + '\n'
else:
assigneeEmail = None
if issue.fields.assignee:
assigneeEmail = issue.fields.assignee.emailAddress
if assigneeEmail is None or assigneeEmail.lower() != prAuthorEmail.lower():
result += 'Changing assignee from: ' + assigneeEmail + ' to: ' + prAuthorEmail + '\n'
jira.assign_issue(issue, prAuthorEmail)
return result
@@ -122,24 +120,41 @@ jobs:
jira = JIRA(options=options, basic_auth=(jirabot_user, jirabot_pass))
# Check if prAuthor exists in Jira
try:
# Need to change how we find users for Jira Cloud; unfortunately the API doesn't directly report whether an instance is Cloud.
# At the moment, checking whether the URL contains atlassian.net appears to be the easiest way to determine this.
isJiraCloud = False
if jira_url.find('atlassian.net') > 0:
isJiraCloud = True
if isJiraCloud:
res = jira.search_users(query=prAuthor)
if res and len(res) > 0:
jiraUser = res[0]
else:
jiraUser = jira.user(prAuthor)
if jiraUser is None:
print('Error: Unable to find Jira user: ' + prAuthor + ' continuing without assigning')
prAuthor = None
except Exception as error:
print('Error: Unable to find Jira user: ' + prAuthor + ' with error: ' + str(error) + ' continuing without assigning')
prAuthor = None
jiraUserEmail = None
if jiraUser is None:
print('Error: Unable to find Jira user: ' + prAuthor + ' continuing without assigning')
else:
jiraUserEmail = jiraUser.emailAddress
print("Jira User Email:" + str(jiraUserEmail))
issue = jira.issue(issue_name)
result = 'Jirabot Action Result:\n'
transitionMap = json.loads(os.environ['JIRA_ISSUE_TRANSITION_MAP'])
if not isinstance(transitionMap, dict):
print('Error: JIRA_ISSUE_TRANSITION_MAP is not a valid JSON object, ignoring.')
transitionMap = {}
jiraIssuePropertyMap = json.loads(os.environ['JIRA_ISSUE_PROPERTY_MAP'])
if not isinstance(jiraIssuePropertyMap, dict):
print('Error: JIRA_ISSUE_PROPERTY_MAP is not a valid JSON object, ignoring.')
jiraIssuePropertyMap = {}
result += updateIssue(jira, issue, prAuthor, jiraIssuePropertyMap, pull_url)
result += updateIssue(jira, issue, jiraUserEmail, transitionMap, jiraIssuePropertyMap, pull_url)
jira.add_comment(issue, result)
else:
print('Unable to find Jira issue name in title')
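
The user lookup above has to differ between Jira Cloud and Jira Server because Jira Cloud no longer resolves users by username. Distilled into a standalone helper, the logic reads roughly as follows; this is a sketch based on the code in this diff, not a function from the repository:

    def find_jira_user_email(jira, jira_url, pr_author):
        # Jira Cloud instances are hosted under atlassian.net and do not
        # support direct username lookup, so use a query search there.
        if 'atlassian.net' in jira_url:
            users = jira.search_users(query=pr_author)
            user = users[0] if users else None
        else:
            user = jira.user(pr_author)
        return user.emailAddress if user else None

If no user can be resolved, the script logs the failure and continues without assigning the issue.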
@@ -22,6 +22,7 @@
import java.util.Iterator;

/**
* Remote file reader that reads the data represented by a @see org.hpccsystems.dfs.client.DataPartition
* and constructs records via the provided @see org.hpccsystems.dfs.client#IRecordBuilder.
*/
@@ -78,7 +79,7 @@ public HpccRemoteFileReader(DataPartition dp, FieldDef originalRD, IRecordBuilde
* @param recBuilder
* the IRecordBuilder used to construct records
* @param connectTimeout
* the connection timeout in milliseconds, -1 for default
* the connection timeout in seconds, -1 for default
* @throws Exception
* the exception
*/
@@ -118,7 +119,7 @@ public HpccRemoteFileReader(DataPartition dp, FieldDef originalRD, IRecordBuilde
* @param recBuilder
* the IRecordBuilder used to construct records
* @param connectTimeout
* the connection timeout in milliseconds, -1 for default
* the connection timeout in seconds, -1 for default
* @param limit
* the maximum number of records to read from the provided data partition, -1 specifies no limit
* @param createPrefetchThread
*/
public HpccRemoteFileReader(DataPartition dp, FieldDef originalRD, IRecordBuilder recBuilder, int connectTimeout, int limit, boolean createPrefetchThread, int readSizeKB) throws Exception
{
this(dp, originalRD, recBuilder, connectTimeout, limit, true, DEFAULT_READ_SIZE_OPTION, null);
this(dp, originalRD, recBuilder, connectTimeout, limit, createPrefetchThread, readSizeKB, null);
}

/**
@@ -143,7 +144,7 @@ public HpccRemoteFileReader(DataPartition dp, FieldDef originalRD, IRecordBuilde
* @param recBuilder
* the IRecordBuilder used to construct records
* @param connectTimeout
* the connection timeout in milliseconds, -1 for default
* the connection timeout in seconds, -1 for default
* @param limit
* the maximum number of records to read from the provided data partition, -1 specifies no limit
* @param createPrefetchThread
*/
public HpccRemoteFileReader(DataPartition dp, FieldDef originalRD, IRecordBuilder recBuilder, int connectTimeout, int limit, boolean createPrefetchThread, int readSizeKB, FileReadResumeInfo resumeInfo) throws Exception
{
this(dp, originalRD, recBuilder, connectTimeout, limit, true, readSizeKB, resumeInfo, RowServiceInputStream.DEFAULT_SOCKET_OP_TIMEOUT_MS);
this(dp, originalRD, recBuilder, connectTimeout, limit, createPrefetchThread, readSizeKB, resumeInfo, RowServiceInputStream.DEFAULT_SOCKET_OP_TIMEOUT_MS);
}
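
Both delegating constructors above previously passed hard-coded values (true and DEFAULT_READ_SIZE_OPTION) down the chain, silently discarding the caller's createPrefetchThread and readSizeKB arguments. With the fix, a call such as the following now honours both parameters; the variable names are illustrative and the generic record type is assumed, not taken from this diff:

    // Hypothetical usage; dp, recordDef and builder come from earlier setup.
    // 120 second connect timeout (seconds, per the corrected javadoc),
    // no record limit, prefetching disabled, 512 KB reads.
    HpccRemoteFileReader<HPCCRecord> reader =
            new HpccRemoteFileReader<HPCCRecord>(dp, recordDef, builder, 120, -1, false, 512);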

/**
@@ -63,8 +63,8 @@
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class DFSReadWriteTest extends BaseRemoteTest
{
private static final String[] datasets = { "~benchmark::integer::20kb", "~benchmark::all_types::200kb"};
private static final int[] expectedCounts = { 1250, 5600 };
private static final String[] datasets = { "~benchmark::integer::20kb", "~unit_test::all_types::thor", "~unit_test::all_types::xml", "~unit_test::all_types::json", "~unit_test::all_types::csv" };
private static final int[] expectedCounts = { 1250, 10000, 10000, 10000, 10000 };
private static final Version newProtocolVersion = new Version(8,12,10);


@@ -183,14 +183,16 @@ public void integrationReadWriteBackTest() throws Exception
HPCCFile file = new HPCCFile(datasets[i], connString, hpccUser, hpccPass);
file.setProjectList("");

System.out.println("Reading dataset: " + datasets[i]);
List<HPCCRecord> records = readFile(file, connTO, false);
if (records.size() != expectedCounts[i])
{
Assert.fail("Record count mismatch for dataset: " + datasets[i] + " got: " + records.size() + " expected: " + expectedCounts[i]);
}

// Write the dataset back
String copyFileName = datasets[i] + "-copy13";
String copyFileName = datasets[i] + "-copy";
System.out.println("Writing dataset: " + copyFileName);
writeFile(records, copyFileName, file.getProjectedRecordDefinition(),connTO);

// Read and compare to original dataset
}

//read out a projected layout, confirm that this works
List<String> projectedfields=new ArrayList<String>();
List<String> projectedfields = new ArrayList<String>();
for (int j=0; j < file.getRecordDefinition().getNumDefs()-1;j++)
{
projectedfields.add(file.getRecordDefinition().getDef(j).getFieldName());
}

file = new HPCCFile(copyFileName, connString, hpccUser, hpccPass);

FieldDef recdef = file.getRecordDefinition();
file.setProjectList(String.join(",", projectedfields));
List<HPCCRecord> recs = readFile(file, connTO, false);
if (recs.get(0).getNumFields() != file.getRecordDefinition().getNumDefs()-1)
68 changes: 61 additions & 7 deletions dfsclient/src/test/resources/generate-datasets.ecl
@@ -2,13 +2,12 @@ IMPORT Std;

unique_keys := 100000; // Should be less than number of records
unique_values := 10212; // Should be less than number of records
dataset_name := '~benchmark::all_types::200KB';
totalrecs1 := 5600;
totalrecs1 := 10000;

childRec := {STRING8 childField1, INTEGER8 childField2, REAL8 childField3};

rec := {INTEGER8 int8, UNSIGNED8 uint8, INTEGER4 int4, UNSIGNED4 uint4,
INTEGER2 int2, UNSIGNED2 uint2,
rec := { INTEGER8 int8, UNSIGNED8 uint8, INTEGER4 int4, UNSIGNED4 uint4,
INTEGER2 int2, UNSIGNED2 uint2,
REAL8 r8, REAL4 r4,
DECIMAL16_8 dec16,
DECIMAL15_8 dec15,
Expand All @@ -19,7 +18,7 @@ rec := {INTEGER8 int8, UNSIGNED8 uint8, INTEGER4 int4, UNSIGNED4 uint4,
STRING str,
VARSTRING varStr,
VARSTRING varStr8,
UTF8 utfStr,
UTF8 utfStr,
UNICODE8 uni8,
UNICODE uni,
VARUNICODE varUni,
@@ -52,10 +51,65 @@ ds := DATASET(totalrecs1, transform(rec,
self.int1Set := [1,2,3];
), DISTRIBUTED);

dataset_name := '~unit_test::all_types::thor';
IF(~Std.File.FileExists(dataset_name), OUTPUT(ds,,dataset_name,overwrite));

key_name := '~benchmark::all_types::200KB::key';
Ptbl := DATASET(dataset_name, {rec,UNSIGNED8 RecPtr {virtual(fileposition)}}, FLAT);
// For the text file formats there appears to be an issue with reading SETs from the datasets,
// so for those formats create datasets with all types except SETs.
recWithoutSet := { INTEGER8 int8, UNSIGNED8 uint8, INTEGER4 int4, UNSIGNED4 uint4,
INTEGER2 int2, UNSIGNED2 uint2,
REAL8 r8, REAL4 r4,
DECIMAL16_8 dec16,
DECIMAL15_8 dec15,
UDECIMAL16_8 udec16,
UDECIMAL15_8 udec15,
QSTRING qStr,
STRING8 fixStr8,
STRING str,
VARSTRING varStr,
VARSTRING varStr8,
UTF8 utfStr,
UNICODE8 uni8,
UNICODE uni,
VARUNICODE varUni,
DATASET(childRec) childDataset,
};
dsWithoutSet := DATASET(totalrecs1, transform(recWithoutSet,
self.int8 := (INTEGER)(random() % unique_keys);
self.uint8 := (INTEGER)(random() % unique_values);
self.int4 := (INTEGER)(random() % unique_values);
self.uint4 := (INTEGER)(random() % unique_values);
self.int2 := (INTEGER)(random() % unique_values);
self.uint2 := (INTEGER)(random() % unique_values);
self.r8 := (REAL)(random() % unique_values);
self.r4 := (REAL)(random() % unique_values);
self.dec16 := (REAL)(random() % unique_values);
self.dec15 := (REAL)(random() % unique_values);
self.udec16 := (REAL)(random() % unique_values);
self.udec15 := (REAL)(random() % unique_values);
self.qStr := (STRING)(random() % unique_values);
self.fixStr8 := (STRING)(random() % unique_values);
self.str := (STRING)(random() % unique_values);
self.varStr := (STRING)(random() % unique_values);
self.varStr8 := (STRING)(random() % unique_values);
self.utfStr := (STRING)(random() % unique_values);
self.uni8 := (STRING)(random() % unique_values);
self.uni := (STRING)(random() % unique_values);
self.varUni := (STRING)(random() % unique_values);
self.childDataset := DATASET([{'field1',2,3},{'field1',2,3}],childRec);
), DISTRIBUTED);

xml_dataset_name := '~unit_test::all_types::xml';
IF(~Std.File.FileExists(xml_dataset_name), OUTPUT(dsWithoutSet,,xml_dataset_name,XML,overwrite));

json_dataset_name := '~unit_test::all_types::json';
IF(~Std.File.FileExists(json_dataset_name), OUTPUT(dsWithoutSet,,json_dataset_name,JSON,overwrite));

csv_dataset_name := '~unit_test::all_types::csv';
IF(~Std.File.FileExists(csv_dataset_name), OUTPUT(dsWithoutSet,,csv_dataset_name,CSV,overwrite));

key_name := '~unit_test::all_types::key';
Ptbl := DATASET('~unit_test::all_types::thor', {rec,UNSIGNED8 RecPtr {virtual(fileposition)}}, FLAT);
indexds := INDEX(Ptbl, {int8, uint8, int4, uint4, int2, uint2, udec16, fixStr8, RecPtr},key_name);
IF(~Std.File.FileExists(key_name), BUILDINDEX(indexds, overwrite));

@@ -1004,9 +1004,8 @@ public ProgressResponseWrapper sprayVariable(DelimitedDataOptions options, DropZ
if (targetDropZone == null) throw new Exception("TargetDropZone object not available!");

SprayVariable request = new SprayVariable();

request.setSourceIP(targetDropZone.getNetAddress());
request.setSourcePath(targetDropZone.getPath() + "/" + sourceFileName);
request.setSourcePath(Utils.ensureTrailingPathSlash(targetDropZone.getPath()) + sourceFileName);
request.setDestGroup(destGroup);
request.setDestLogicalName(targetFileName);
request.setOverwrite(overwrite);
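
Utils.ensureTrailingPathSlash replaces the manual "/" concatenation in the three spray methods and in uploadLargeFile below. Its implementation is not shown in this diff; a minimal sketch consistent with the inline logic it replaces (see the removed lines in uploadLargeFile) would be:

    // Sketch only; the real helper lives in the wsclient Utils class and may
    // normalise more than this (e.g. platform-specific separators).
    public static String ensureTrailingPathSlash(String path)
    {
        path = path.trim();
        if (!path.endsWith("/"))
            path += "/";
        return path;
    }

Appending the separator only when it is missing avoids doubled slashes when a drop zone path is already slash-terminated.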
@@ -1162,7 +1161,7 @@ public ProgressResponseWrapper sprayXML(DropZoneWrapper targetDropZone, String s

request.setDestGroup(destGroup);
request.setSourceIP(targetDropZone.getNetAddress());
request.setSourcePath(targetDropZone.getPath() + "/" + sourceFileName);
request.setSourcePath(Utils.ensureTrailingPathSlash(targetDropZone.getPath()) + sourceFileName);
request.setDestLogicalName(targetFileName);
request.setOverwrite(overwrite);
request.setSourceFormat(format.getValue());
@@ -1318,7 +1317,7 @@ public ProgressResponseWrapper sprayFixed(DropZoneWrapper targetDropZone, String
request.setDestGroup(destGroup);
request.setSourceRecordSize(recordSize);
request.setSourceIP(targetDropZone.getNetAddress());
request.setSourcePath(targetDropZone.getPath() + "/" + sourceFileName);
request.setSourcePath(Utils.ensureTrailingPathSlash(targetDropZone.getPath()) + sourceFileName);
request.setDestLogicalName(targetFileLabel);
request.setOverwrite(overwrite);
request.setPrefix(prefix);
@@ -1488,15 +1487,11 @@ public boolean uploadLargeFile(File uploadFile, DropZoneWrapper dropZone)
return false;
}

uploadurlbuilder += "&NetAddress=" + dropZone.getNetAddress() + "&Path=" + dropZone.getPath();
uploadurlbuilder += "&NetAddress=" + dropZone.getNetAddress() + "&Path=" + Utils.ensureTrailingPathSlash(dropZone.getPath());

if (!dropZone.getName().isEmpty())
uploadurlbuilder += "&DropZoneName=" + dropZone.getName();

String path = dropZone.getPath().trim();
if (!path.endsWith("/"))
path += "/";
uploadurlbuilder += "&Path=" + path;
uploadurlbuilder += "&OS=" + (dropZone.getLinux().equalsIgnoreCase("true") ? "2" : "1");
uploadurlbuilder += "&rawxml_=1";
WritableByteChannel outchannel = null;
