Skip to content

Commit

Permalink
Upgrade to API 47, latest libs and Zulu Open SDK
Browse files Browse the repository at this point in the history
Upgrade to API 47, latest libs and Zulu Open SDK
  • Loading branch information
TerenceJWilson committed Jan 16, 2020
1 parent de75355 commit 9dfe3ed
Show file tree
Hide file tree
Showing 11 changed files with 97 additions and 85 deletions.
16 changes: 9 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# Not Under Active Development
# Wave DatasetUtils

Wave DatasetUtils is a reference implementation of the Analytics cloud External data API. This tool is free to use, but it is not officially supported by Salesforce.

# DatasetUtils

DatasetUtils is a reference implementation of the Einstein Analytics External Data API. This tool is free to use, but it is not officially supported by Salesforce.
This is a community project that has not been officially tested or documented. Please do not contact Salesforce for support when using this application.

## Downloading DatasetUtils
Expand All @@ -12,11 +13,12 @@ Download the latest version from [releases](https://github.com/forcedotcom/Analy

## Prerequisite

Download and install Java JDK (not JRE) from Oracle
Download and install Java JDK (not JRE) from Zulu Open JDK

* [Zulu Open JDK](https://www.azul.com/downloads/zulu-community/?&architecture=x86-64-bit&package=jdk)

* [Oracle JDK](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)
Different versions of DatasetUtils require different versions of the JDK; the latest release (API 47) requires JDK 11. After installation is complete, open a console and check that the Java version is correct for your DatasetUtils version by running the following command:

After installation is complete. open a console and check that the java version is 1.8 or higher by running the following command:

``java -version``

Expand Down Expand Up @@ -122,5 +124,5 @@ Input Parameter
java -jar datasetutils-32.0.0.jar --action downloadErrorFile --u [email protected] --p @#@#@# --dataset puntest

## Building DatasetUtils
git clone [email protected]:forcedotcom/Analytics-Cloud-Dataset-Utils.git
git clone [email protected]:timbezold/datasetutils.git
mvn clean install
36 changes: 18 additions & 18 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,16 @@
<description>Analytics Cloud Dataset Utils</description>
<artifactId>datasetutils</artifactId>
<packaging>jar</packaging>
<version>37.0.4-SNAPSHOT</version>
<version>47.0.0-SNAPSHOT</version>
<url>https://github.com/forcedotcom/Analytics-Cloud-Dataset-Utils</url>
<organization>
<name>salesforce.com</name>
<url>http://salesforce.com</url>
</organization>
<properties>
<force.version>37.0.3</force.version>
<force.partner.version>37.0.3</force.partner.version>
<java.compile.version>1.8</java.compile.version>
<force.version>47.0.0</force.version>
<force.partner.version>47.0.0</force.partner.version>
<java.compile.version>11</java.compile.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<distributionManagement>
Expand Down Expand Up @@ -54,22 +54,22 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.9</version>
<version>1.19</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5</version>
<version>4.5.10</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>4.5</version>
<version>4.5.10</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
<version>2.6</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
Expand All @@ -79,27 +79,27 @@
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.9.2</version>
<version>1.9.4</version>
</dependency>
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
<version>55.1</version>
<version>65.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.6.1</version>
<version>2.10.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.6.1</version>
<version>2.10.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.6.1</version>
<version>2.10.1</version>
</dependency>
<dependency>
<groupId>junit</groupId>
Expand All @@ -110,27 +110,27 @@
<dependency>
<groupId>com.google.code.externalsortinginjava</groupId>
<artifactId>externalsortinginjava</artifactId>
<version>0.1.9</version>
<version>0.4.0</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.1.0</version>
<version>4.0.1</version>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.3</version>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
<version>9.2.13.v20150730</version>
<version>9.4.24.v20191120</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
<version>9.2.13.v20150730</version>
<version>9.4.24.v20191120</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
Expand Down
2 changes: 1 addition & 1 deletion run.command
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ BASEDIR=$(dirname "$0")
echo "Script location: ${BASEDIR}"
cd "${BASEDIR}"
echo "Current dir ${PWD}"
export JAVA_HOME=$(/usr/libexec/java_home -v 1.8)
export JAVA_HOME=$(/usr/libexec/java_home -v 11)
export PATH=$PATH:${JAVA_HOME}/bin
LATEST_JAR=$(ls -t datasetutils-*.jar | head -1)
echo ${LATEST_JAR}
Expand Down
6 changes: 3 additions & 3 deletions src/main/java/com/sforce/dataset/DatasetUtilConstants.java
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,9 @@ public class DatasetUtilConstants {
// public static boolean createNewDateParts = false;
public static CodingErrorAction codingErrorAction = CodingErrorAction.REPORT;

public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/37.0";
public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/37.0";
public static final String defaultSoapEndPointPath = "/services/Soap/u/37.0";
public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/47.0";
public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/47.0";
public static final String defaultSoapEndPointPath = "/services/Soap/u/47.0";

public static boolean debug = false;
public static boolean ext = false;
Expand Down
4 changes: 2 additions & 2 deletions src/main/java/com/sforce/dataset/DatasetUtilMain.java
Original file line number Diff line number Diff line change
Expand Up @@ -1199,13 +1199,13 @@ public static void printClasspath()
System.out.println("\n*******************************************************************************");
System.out.println("java.version:"+System.getProperty("java.version"));
System.out.println("java.class.path:"+System.getProperty("java.class.path"));
System.out.print("SystemClassLoader:");
/*System.out.print("SystemClassLoader:");
ClassLoader sysClassLoader = ClassLoader.getSystemClassLoader();
URL[] urls = ((URLClassLoader)sysClassLoader).getURLs();
for(int i=0; i< urls.length; i++)
{
System.out.println(urls[i].getFile());
}
} */
System.out.println("*******************************************************************************\n");
}

Expand Down
37 changes: 22 additions & 15 deletions src/main/java/com/sforce/dataset/flow/DataFlowUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;

Expand All @@ -72,6 +73,8 @@ public class DataFlowUtil {
//private static final SimpleDateFormat defaultDateFormat = new SimpleDateFormat("EEEE MMMM d HH:mm:ss z yyyy"); //Mon Jun 15 00:12:03 GMT 2015
private static final SimpleDateFormat defaultDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX"); // e.g. 2015-06-15T00:12:03.000Z
private static final String dataflowURL = "/insights/internal_api/v1.0/esObject/workflow/%s/json";
// New public dataflow-jobs endpoint. Only used by startDataFlow; the other methods still use the deprecated internal API.
private static final String dataflowRunURL = "/services/data/v47.0/wave/dataflowjobs";

@SuppressWarnings("rawtypes")
public static void uploadAndStartDataFlow(PartnerConnection partnerConnection, Map wfdef, String workflowName) throws ConnectionException, IllegalStateException, IOException, URISyntaxException
Expand Down Expand Up @@ -132,7 +135,7 @@ public static void uploadDataFlow(PartnerConnection partnerConnection, String da
emis.close();
httpClient.close();

if (statusCode != HttpStatus.SC_OK)
if (statusCode != HttpStatus.SC_CREATED)
{
String errorCode = statusCode+"";
try
Expand Down Expand Up @@ -453,12 +456,18 @@ public static boolean startDataFlow(PartnerConnection partnerConnection, String

URI u = new URI(serviceEndPoint);

URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), String.format(dataflowURL, dataflowId).replace("json", "start"), null,null);
//URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), String.format(dataflowURL, dataflowId).replace("json", "start"), null,null);

HttpPut httput = new HttpPut(patchURI);
httput.setConfig(requestConfig);
httput.addHeader("Authorization","OAuth "+sessionID);
CloseableHttpResponse emresponse = httpClient.execute(httput);
URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), dataflowRunURL, null,null);

HttpPost httpPost = new HttpPost(patchURI);
StringEntity params =new StringEntity("{\"command\":\"start\",\"dataflowId\":\""+dataflowId+"\"}");
System.out.println("DataflowId " +dataflowId);
httpPost.setConfig(requestConfig);
httpPost.addHeader("content-type", "application/json");
httpPost.addHeader("Authorization","OAuth "+sessionID);
httpPost.setEntity(params);
CloseableHttpResponse emresponse = httpClient.execute(httpPost);
String reasonPhrase = emresponse.getStatusLine().getReasonPhrase();
int statusCode = emresponse.getStatusLine().getStatusCode();
HttpEntity emresponseEntity = emresponse.getEntity();
Expand All @@ -467,7 +476,7 @@ public static boolean startDataFlow(PartnerConnection partnerConnection, String
emis.close();
httpClient.close();

if (statusCode != HttpStatus.SC_OK)
if (statusCode != HttpStatus.SC_CREATED )
{
String errorCode = statusCode+"";
try
Expand Down Expand Up @@ -495,16 +504,14 @@ public static boolean startDataFlow(PartnerConnection partnerConnection, String
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
@SuppressWarnings("rawtypes")
Map res = mapper.readValue(emList, Map.class);
LinkedHashMap res = mapper.readValue(emList, LinkedHashMap.class);
@SuppressWarnings({ "rawtypes", "unchecked" })
List<Map> resList = (List<Map>) res.get("result");
if(resList!=null && !resList.isEmpty())
String stat = (String) res.get("status");
if(stat!=null)
{
if((boolean) resList.get(0).get("success"))
{
System.out.println(new Date()+" : Dataflow {"+dataflowAlias+"} succesfully started");
return true;
}

System.out.println(new Date()+" : Dataflow {"+dataflowAlias+"} succesfully started");
return true;
}
throw new IOException(String.format("Dataflow %s start failed: %s", dataflowAlias,emList));
}
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/com/sforce/dataset/loader/DatasetLoader.java
Original file line number Diff line number Diff line change
Expand Up @@ -1819,7 +1819,7 @@ private static BulkConnection getBulkConnection(ConnectorConfig partnerConfig)
// The endpoint for the Bulk API service is the same as for the normal
// SOAP uri until the /Soap/ part. From here it's '/async/versionNumber'
String soapEndpoint = partnerConfig.getServiceEndpoint();
String apiVersion = "31.0";
String apiVersion = "47.0";
String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/"))
+ "async/" + apiVersion;
config.setRestEndpoint(restEndpoint);
Expand Down
16 changes: 10 additions & 6 deletions src/main/java/com/sforce/dataset/scheduler/DataflowJob.java
Original file line number Diff line number Diff line change
Expand Up @@ -152,14 +152,14 @@ public void execute(JobExecutionContext context) throws JobExecutionException
public void runDataflow(DataFlow task, PartnerConnection partnerConnection) throws IllegalStateException, ConnectionException, IOException, URISyntaxException
{
System.out.println(new Date()+ " : Executing job: " + task.getName());
if(!isRunning(partnerConnection, defaultDataflowId, task.getName(), null))
if(!isRunning(partnerConnection, task.get_uid(), task.getName(), null))
{
// Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
// long utcStartTime = cal.getTimeInMillis();
// System.out.println("Difference between current and utc:" + (utcStartTime-startTime));
if(task.getWorkflowType().equalsIgnoreCase("local"))
{
DataFlowUtil.uploadDataFlow(partnerConnection, task.getName(), defaultDataflowId, task.getWorkflowDefinition());
DataFlowUtil.uploadDataFlow(partnerConnection, task.getName(), task.get_uid(), task.getWorkflowDefinition());
}
long startTime = 0;
GetServerTimestampResult serverTimestampResult = partnerConnection.getServerTimestamp();
Expand All @@ -168,11 +168,11 @@ public void runDataflow(DataFlow task, PartnerConnection partnerConnection) thro
long startTimeSeconds = startTime/1000L;
startTime = startTimeSeconds*1000L;
}
DataFlowUtil.startDataFlow(partnerConnection, task.getName(), defaultDataflowId);
JobEntry job = getJob(partnerConnection, defaultDataflowId, startTime);
DataFlowUtil.startDataFlow(partnerConnection, task.getName(), task.get_uid());
JobEntry job = getJob(partnerConnection, task.get_uid(), startTime);
while(true)
{
if(isRunning(partnerConnection, defaultDataflowId, task.getName(), job))
if(isRunning(partnerConnection, task.get_uid(), task.getName(), job))
{
try {
Thread.sleep(60000);
Expand All @@ -186,7 +186,7 @@ public void runDataflow(DataFlow task, PartnerConnection partnerConnection) thro
}
if(task.getWorkflowType().equalsIgnoreCase("local"))
{
DataFlowUtil.uploadDataFlow(partnerConnection, "Empty Dataflow", defaultDataflowId, new HashMap());
DataFlowUtil.uploadDataFlow(partnerConnection, "Empty Dataflow", task.get_uid(), new HashMap());
}
}else
{
Expand Down Expand Up @@ -258,6 +258,10 @@ public boolean isRunning(PartnerConnection partnerConnection, String dataFlowId,
}else if(job.getStatus()==2)
{
return true;
}else if(job.getStatus()==5)
{
//completed but with warning
return false;
}else
{
if(jobEntry!=null)
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/com/sforce/dataset/server/JsonServlet.java
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
mapper.writerWithDefaultPrettyPrinter().writeValue(outfile , xmdObject);
if(!XmdUploader.uploadXmd(outfile.getAbsolutePath(), datasetAlias, datasetId, datasetVersion, conn))
{
throw new IllegalArgumentException("Failed to uplaod XMD");
throw new IllegalArgumentException("Failed to upload XMD");
}
}else if(type.equalsIgnoreCase("dataflow"))
{
Expand Down
Loading

0 comments on commit 9dfe3ed

Please sign in to comment.