diff --git a/README.md b/README.md index 4b9f092..dc9b6a9 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ -# Not Under Active Development -# Wave DatasetUtils -Wave DatasetUtils is a reference implementation of the Analytics cloud External data API. This tool is free to use, but it is not officially supported by Salesforce. + +# DatasetUtils + +DatasetUtils is a reference implementation of the Einstein Analytics External Data API. This tool is free to use, but it is not officially supported by Salesforce. This is a community project that has not been officially tested or documented. Please do not contact Salesforce for support when using this application. ## Downloading DatasetUtils @@ -12,11 +13,12 @@ Download the latest version from [releases](https://github.com/forcedotcom/Analy ## Prerequisite -Download and install Java JDK (not JRE) from Oracle +Download and install Java JDK (not JRE) from Zulu Open JDK + +* [Zulu Open JDK](https://www.azul.com/downloads/zulu-community/?&architecture=x86-64-bit&package=jdk) -* [Oracle JDK](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) +After installation is complete. Different versions of DatasetUtils require different versions of JDK; the latest release, API 47, requires JDK 11. Open a console and check that the java version is correct for your DatasetUtils version by running the following command: -After installation is complete. 
open a console and check that the java version is 1.8 or higher by running the following command: ``java -version`` @@ -122,5 +124,5 @@ Input Parameter java -jar datasetutils-32.0.0.jar --action downloadErrorFile --u pgupta@force.com --p @#@#@# --dataset puntest ## Building DatasetUtils - git clone git@github.com:forcedotcom/Analytics-Cloud-Dataset-Utils.git + git clone git@github.com:timbezold/datasetutils.git mvn clean install diff --git a/pom.xml b/pom.xml index b66cebb..835b431 100644 --- a/pom.xml +++ b/pom.xml @@ -6,16 +6,16 @@ Analytics Cloud Datatset Utils datasetutils jar - 37.0.4-SNAPSHOT + 47.0.0-SNAPSHOT https://github.com/forcedotcom/Analytics-Cloud-Dataset-Utils salesforce.com http://salesforce.com - 37.0.3 - 37.0.3 - 1.8 + 47.0.0 + 47.0.0 + 11 UTF-8 @@ -54,22 +54,22 @@ org.apache.commons commons-compress - 1.9 + 1.19 org.apache.httpcomponents httpclient - 4.5 + 4.5.10 org.apache.httpcomponents httpmime - 4.5 + 4.5.10 commons-io commons-io - 2.4 + 2.6 commons-lang @@ -79,27 +79,27 @@ commons-beanutils commons-beanutils - 1.9.2 + 1.9.4 com.ibm.icu icu4j - 55.1 + 65.1 com.fasterxml.jackson.core jackson-annotations - 2.6.1 + 2.10.1 com.fasterxml.jackson.core jackson-core - 2.6.1 + 2.10.1 com.fasterxml.jackson.core jackson-databind - 2.6.1 + 2.10.1 junit @@ -110,27 +110,27 @@ com.google.code.externalsortinginjava externalsortinginjava - 0.1.9 + 0.4.0 javax.servlet javax.servlet-api - 3.1.0 + 4.0.1 commons-fileupload commons-fileupload - 1.3 + 1.4 org.eclipse.jetty jetty-server - 9.2.13.v20150730 + 9.4.24.v20191120 org.eclipse.jetty jetty-webapp - 9.2.13.v20150730 + 9.4.24.v20191120 org.quartz-scheduler diff --git a/run.command b/run.command index fa80d27..d64d1fd 100644 --- a/run.command +++ b/run.command @@ -4,7 +4,7 @@ BASEDIR=$(dirname "$0") echo "Script location: ${BASEDIR}" cd "${BASEDIR}" echo "Current dir ${PWD}" -export JAVA_HOME=$(/usr/libexec/java_home -v 1.8) +export JAVA_HOME=$(/usr/libexec/java_home -v 11) export PATH=$PATH:${JAVA_HOME}/bin 
LATEST_JAR=$(ls -t datasetutils-*.jar | head -1) echo ${LATEST_JAR} diff --git a/src/main/java/com/sforce/dataset/DatasetUtilConstants.java b/src/main/java/com/sforce/dataset/DatasetUtilConstants.java index f309787..1debdb1 100644 --- a/src/main/java/com/sforce/dataset/DatasetUtilConstants.java +++ b/src/main/java/com/sforce/dataset/DatasetUtilConstants.java @@ -62,9 +62,9 @@ public class DatasetUtilConstants { // public static boolean createNewDateParts = false; public static CodingErrorAction codingErrorAction = CodingErrorAction.REPORT; - public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/37.0"; - public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/37.0"; - public static final String defaultSoapEndPointPath = "/services/Soap/u/37.0"; + public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/47.0"; + public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/47.0"; + public static final String defaultSoapEndPointPath = "/services/Soap/u/47.0"; public static boolean debug = false; public static boolean ext = false; diff --git a/src/main/java/com/sforce/dataset/DatasetUtilMain.java b/src/main/java/com/sforce/dataset/DatasetUtilMain.java index 0b47c09..d53e900 100644 --- a/src/main/java/com/sforce/dataset/DatasetUtilMain.java +++ b/src/main/java/com/sforce/dataset/DatasetUtilMain.java @@ -1199,13 +1199,13 @@ public static void printClasspath() System.out.println("\n*******************************************************************************"); System.out.println("java.version:"+System.getProperty("java.version")); System.out.println("java.class.path:"+System.getProperty("java.class.path")); - System.out.print("SystemClassLoader:"); + /*System.out.print("SystemClassLoader:"); ClassLoader sysClassLoader = ClassLoader.getSystemClassLoader(); URL[] urls = ((URLClassLoader)sysClassLoader).getURLs(); for(int i=0; i< 
urls.length; i++) { System.out.println(urls[i].getFile()); - } + } */ System.out.println("*******************************************************************************\n"); } diff --git a/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java b/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java index dd5f2df..fb43da8 100644 --- a/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java +++ b/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java @@ -52,6 +52,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPatch; import org.apache.http.client.methods.HttpPut; +import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; @@ -72,6 +73,8 @@ public class DataFlowUtil { //private static final SimpleDateFormat defaultDateFormat = new SimpleDateFormat("EEEE MMMM d HH:mm:ss z yyyy"); //Mon Jun 15 00:12:03 GMT 2015 private static final SimpleDateFormat defaultDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX"); //Mon Jun 15 00:12:03 GMT 2015 private static final String dataflowURL = "/insights/internal_api/v1.0/esObject/workflow/%s/json"; + //New public endpoint . 
Only required in start as other methods deprecated + private static final String dataflowRunURL = "/services/data/v47.0/wave/dataflowjobs"; @SuppressWarnings("rawtypes") public static void uploadAndStartDataFlow(PartnerConnection partnerConnection, Map wfdef, String workflowName) throws ConnectionException, IllegalStateException, IOException, URISyntaxException @@ -132,7 +135,7 @@ public static void uploadDataFlow(PartnerConnection partnerConnection, String da emis.close(); httpClient.close(); - if (statusCode != HttpStatus.SC_OK) + if (statusCode != HttpStatus.SC_CREATED) { String errorCode = statusCode+""; try @@ -453,12 +456,18 @@ public static boolean startDataFlow(PartnerConnection partnerConnection, String URI u = new URI(serviceEndPoint); - URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), String.format(dataflowURL, dataflowId).replace("json", "start"), null,null); + //URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), String.format(dataflowURL, dataflowId).replace("json", "start"), null,null); - HttpPut httput = new HttpPut(patchURI); - httput.setConfig(requestConfig); - httput.addHeader("Authorization","OAuth "+sessionID); - CloseableHttpResponse emresponse = httpClient.execute(httput); + URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), dataflowRunURL, null,null); + + HttpPost httpPost = new HttpPost(patchURI); + StringEntity params =new StringEntity("{\"command\":\"start\",\"dataflowId\":\""+dataflowId+"\"}"); + System.out.println("DataflowId " +dataflowId); + httpPost.setConfig(requestConfig); + httpPost.addHeader("content-type", "application/json"); + httpPost.addHeader("Authorization","OAuth "+sessionID); + httpPost.setEntity(params); + CloseableHttpResponse emresponse = httpClient.execute(httpPost); String reasonPhrase = emresponse.getStatusLine().getReasonPhrase(); int statusCode = emresponse.getStatusLine().getStatusCode(); HttpEntity emresponseEntity = 
emresponse.getEntity(); @@ -467,7 +476,7 @@ public static boolean startDataFlow(PartnerConnection partnerConnection, String emis.close(); httpClient.close(); - if (statusCode != HttpStatus.SC_OK) + if (statusCode != HttpStatus.SC_CREATED ) { String errorCode = statusCode+""; try @@ -495,16 +504,14 @@ public static boolean startDataFlow(PartnerConnection partnerConnection, String ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); @SuppressWarnings("rawtypes") - Map res = mapper.readValue(emList, Map.class); + LinkedHashMap res = mapper.readValue(emList, LinkedHashMap.class); @SuppressWarnings({ "rawtypes", "unchecked" }) - List resList = (List) res.get("result"); - if(resList!=null && !resList.isEmpty()) + String stat = (String) res.get("status"); + if(stat!=null) { - if((boolean) resList.get(0).get("success")) - { - System.out.println(new Date()+" : Dataflow {"+dataflowAlias+"} succesfully started"); - return true; - } + + System.out.println(new Date()+" : Dataflow {"+dataflowAlias+"} succesfully started"); + return true; } throw new IOException(String.format("Dataflow %s start failed: %s", dataflowAlias,emList)); } diff --git a/src/main/java/com/sforce/dataset/loader/DatasetLoader.java b/src/main/java/com/sforce/dataset/loader/DatasetLoader.java index ae55c46..56d67a4 100644 --- a/src/main/java/com/sforce/dataset/loader/DatasetLoader.java +++ b/src/main/java/com/sforce/dataset/loader/DatasetLoader.java @@ -1819,7 +1819,7 @@ private static BulkConnection getBulkConnection(ConnectorConfig partnerConfig) // The endpoint for the Bulk API service is the same as for the normal // SOAP uri until the /Soap/ part. 
From here it's '/async/versionNumber' String soapEndpoint = partnerConfig.getServiceEndpoint(); - String apiVersion = "31.0"; + String apiVersion = "47.0"; String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/")) + "async/" + apiVersion; config.setRestEndpoint(restEndpoint); diff --git a/src/main/java/com/sforce/dataset/scheduler/DataflowJob.java b/src/main/java/com/sforce/dataset/scheduler/DataflowJob.java index 5e59002..4b7b62f 100644 --- a/src/main/java/com/sforce/dataset/scheduler/DataflowJob.java +++ b/src/main/java/com/sforce/dataset/scheduler/DataflowJob.java @@ -152,14 +152,14 @@ public void execute(JobExecutionContext context) throws JobExecutionException public void runDataflow(DataFlow task, PartnerConnection partnerConnection) throws IllegalStateException, ConnectionException, IOException, URISyntaxException { System.out.println(new Date()+ " : Executing job: " + task.getName()); - if(!isRunning(partnerConnection, defaultDataflowId, task.getName(), null)) + if(!isRunning(partnerConnection, task.get_uid(), task.getName(), null)) { // Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); // long utcStartTime = cal.getTimeInMillis(); // System.out.println("Difference between current and utc:" + (utcStartTime-startTime)); if(task.getWorkflowType().equalsIgnoreCase("local")) { - DataFlowUtil.uploadDataFlow(partnerConnection, task.getName(), defaultDataflowId, task.getWorkflowDefinition()); + DataFlowUtil.uploadDataFlow(partnerConnection, task.getName(), task.get_uid(), task.getWorkflowDefinition()); } long startTime = 0; GetServerTimestampResult serverTimestampResult = partnerConnection.getServerTimestamp(); @@ -168,11 +168,11 @@ public void runDataflow(DataFlow task, PartnerConnection partnerConnection) thro long startTimeSeconds = startTime/1000L; startTime = startTimeSeconds*1000L; } - DataFlowUtil.startDataFlow(partnerConnection, task.getName(), defaultDataflowId); - JobEntry job = getJob(partnerConnection, 
defaultDataflowId, startTime); + DataFlowUtil.startDataFlow(partnerConnection, task.getName(), task.get_uid()); + JobEntry job = getJob(partnerConnection, task.get_uid(), startTime); while(true) { - if(isRunning(partnerConnection, defaultDataflowId, task.getName(), job)) + if(isRunning(partnerConnection, task.get_uid(), task.getName(), job)) { try { Thread.sleep(60000); @@ -186,7 +186,7 @@ public void runDataflow(DataFlow task, PartnerConnection partnerConnection) thro } if(task.getWorkflowType().equalsIgnoreCase("local")) { - DataFlowUtil.uploadDataFlow(partnerConnection, "Empty Dataflow", defaultDataflowId, new HashMap()); + DataFlowUtil.uploadDataFlow(partnerConnection, "Empty Dataflow", task.get_uid(), new HashMap()); } }else { @@ -258,6 +258,10 @@ public boolean isRunning(PartnerConnection partnerConnection, String dataFlowId, }else if(job.getStatus()==2) { return true; + }else if(job.getStatus()==5) + { + //completed but with warning + return false; }else { if(jobEntry!=null) diff --git a/src/main/java/com/sforce/dataset/server/JsonServlet.java b/src/main/java/com/sforce/dataset/server/JsonServlet.java index abd5e8a..01b2fea 100644 --- a/src/main/java/com/sforce/dataset/server/JsonServlet.java +++ b/src/main/java/com/sforce/dataset/server/JsonServlet.java @@ -105,7 +105,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) mapper.writerWithDefaultPrettyPrinter().writeValue(outfile , xmdObject); if(!XmdUploader.uploadXmd(outfile.getAbsolutePath(), datasetAlias, datasetId, datasetVersion, conn)) { - throw new IllegalArgumentException("Failed to uplaod XMD"); + throw new IllegalArgumentException("Failed to upload XMD"); } }else if(type.equalsIgnoreCase("dataflow")) { diff --git a/src/main/webapp/js/finder.js b/src/main/webapp/js/finder.js index e60276b..edc04a4 100644 --- a/src/main/webapp/js/finder.js +++ b/src/main/webapp/js/finder.js @@ -4,7 +4,7 @@ $(document).ready(function() { var gettingHistory = false; current = 
decodeURIComponent(urlParam('current')); - + if (current == undefined || isEmpty(current) ) { current = false; @@ -15,18 +15,18 @@ $(document).ready(function() { else current = false; } - + listDatasets(null); - $('button[name=searchbtn]').click(searchDatasets); - + $('button[name=searchbtn]').click(searchDatasets); + $('#searchinput').keypress(function (e) { if (e.which == 13) { $('button[name=searchbtn]').click(); - return false; - } + return false; + } }); - + }); function searchDatasets(event){ @@ -45,7 +45,7 @@ function listDatasets(query){ if (typeof data !== 'undefined' && data.length > 0) { if(data.length == 500) { - $("#header-count").text('Dataset Count: '+data.length + ' out of 500+'); + $("#header-count").text('Dataset Count: '+data.length + ' out of 500+'); }else { $("#header-count").text('Dataset Count: '+data.length); @@ -56,11 +56,11 @@ function listDatasets(query){ var tmp = $('').append('').html("No Datasets found"); tmp.attr("id","ErrorRow"); tmp.addClass("alert-danger"); - $("#result-body").append(tmp) + $("#result-body").append(tmp) } }) - .fail(function(jqXHR, textStatus, errorThrown) { - if (isEmpty(jqXHR.responseText) || jqXHR.responseText.indexOf("") > -1) + .fail(function(jqXHR, textStatus, errorThrown) { + if (isEmpty(jqXHR.responseText) || jqXHR.responseText.indexOf("") > -1) { self.location.href = 'login.html'; }else @@ -75,8 +75,8 @@ function deleteDataset(datasetAlias,datasetId){ $.getJSON(url,{},function(data){ $( "#"+datasetAlias).remove(); }) - .fail(function(jqXHR, textStatus, errorThrown) { - if (isEmpty(jqXHR.responseText) || jqXHR.responseText.indexOf("") > -1) + .fail(function(jqXHR, textStatus, errorThrown) { + if (isEmpty(jqXHR.responseText) || jqXHR.responseText.indexOf("") > -1) { self.location.href = 'login.html'; }else @@ -100,14 +100,14 @@ function deleteDataset(datasetAlias,datasetId){ { buttonClass = buttonClass + " disabled"; } - + var lastAccessed = "n/a"; if(data[i]._lastAccessed>0) - { + { lastAccessed = new 
Date(data[i]._lastAccessed).toLocaleString(); } - - + + var tablerow = " \ "+$('').text(data[i].name).html()+" \ \ @@ -129,7 +129,7 @@ function deleteDataset(datasetAlias,datasetId){ Download Metadata Json \ \ \ - Edit Xmd \ + View Xmd \ \ \ Delete \ @@ -137,7 +137,7 @@ function deleteDataset(datasetAlias,datasetId){ \ \ " - + var tmp = $('').append('').html(tablerow); tmp.attr("id",data[i]._alias); $("#result-body").append(tmp); @@ -146,7 +146,7 @@ function deleteDataset(datasetAlias,datasetId){ $("#result-body").append($('').attr('class', 'reset-this').append('').html(" ")); $("#result-body").append($('').attr('class', 'reset-this').append('').html(" ")); } - + $(document).ajaxSend(function(event, request, settings) { $("#title2").empty(); @@ -174,7 +174,7 @@ function loadDiv(selobj,url,nameattr,displayattr) .text(obj[displayattr])); }) }) - .fail(function(jqXHR, textStatus, errorThrown) { + .fail(function(jqXHR, textStatus, errorThrown) { if (isEmpty(jqXHR.responseText) || jqXHR.responseText.indexOf("") > -1) { self.location.href = 'login.html'; }else @@ -192,4 +192,4 @@ function urlParam(name){ else{ return results[1] || 0; } -} \ No newline at end of file +} diff --git a/src/main/webapp/xmdeditor.html b/src/main/webapp/xmdeditor.html index f29fcac..2bb1eea 100644 --- a/src/main/webapp/xmdeditor.html +++ b/src/main/webapp/xmdeditor.html @@ -6,11 +6,11 @@ - + XMD Editor -