Skip to content

Commit

Permalink
Merge pull request #71 from kevinfcrmsf/vf-enable-incremental
Browse files Browse the repository at this point in the history
added the support for incremental upload
  • Loading branch information
kevinfcrmsf authored May 1, 2023
2 parents f8605bc + 5850770 commit 5c1b6f7
Show file tree
Hide file tree
Showing 11 changed files with 173 additions and 71 deletions.
20 changes: 11 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ Download and install Java JDK (not JRE) from Zulu Open JDK

* [Zulu Open JDK](https://www.azul.com/downloads/zulu-community/?&architecture=x86-64-bit&package=jdk)

After installation is complete. Different versions of DatasetUtils require different versions of JDK, the latest release API 48 requires JDK 11. Open a console and check that the java version is correct for your DatasetUtils version by running the following command:
After installation is complete. Different versions of DatasetUtils require different versions of JDK, the latest release API 48.1.1 requires JDK 11. Open a console and check that the java version is correct for your DatasetUtils version by running the following command:


``java -version``
Expand Down Expand Up @@ -100,33 +100,35 @@ Input Parameter

--uploadFormat : (Optional) whether to upload as binary or csv. Default: binary

--mode : (Optional) incremental upload mode. It can be "Incremental" or "None" (default)

**OR**

--server : set this to true if you want to run this in server mode and use the UI. **If you give this param all other params will be ignored**

## Usage Example 1: Start the server for using the UI
java -jar datasetutils-48.1.0jar --server true
java -jar datasetutils-48.1.1jar --server true

## Usage Example 2: Upload a local csv to a dataset in production
java -jar datasetutils-48.1.0.jar --action load --u [email protected] --p @#@#@# --inputFile Opportunity.csv --dataset puntest
java -jar datasetutils-48.1.1.jar --action load --u [email protected] --p @#@#@# --inputFile Opportunity.csv --dataset puntest

## Usage Example 3: Append a local csv to a dataset
java -jar datasetutils-48.1.0.jar --action load --operation append --u [email protected] --p @#@#@# --inputFile Opportunity.csv --dataset puntest
java -jar datasetutils-48.1.1.jar --action load --operation append --u [email protected] --p @#@#@# --inputFile Opportunity.csv --dataset puntest

## Usage Example 4: Upload a local csv to a dataset in sandbox
java -jar datasetutils-48.1.0.jar --action load --u [email protected] --p @#@#@# --inputFile Opportunity.csv --dataset puntest --endpoint https://test.salesforce.com/services/Soap/u/48.0
java -jar datasetutils-48.1.1.jar --action load --u [email protected] --p @#@#@# --inputFile Opportunity.csv --dataset puntest --endpoint https://test.salesforce.com/services/Soap/u/56.0

## Usage Example 5: Download dataset main xmd json file
java -jar datasetutils-48.1.0.jar --action downloadxmd --u [email protected] --p @#@#@# --dataset puntest
java -jar datasetutils-48.1.1.jar --action downloadxmd --u [email protected] --p @#@#@# --dataset puntest

## Usage Example 6: Upload user.xmd.json
java -jar datasetutils-48.1.0.jar --action uploadxmd --u [email protected] --p @#@#@# --inputFile user.xmd.json --dataset puntest
java -jar datasetutils-48.1.1.jar --action uploadxmd --u [email protected] --p @#@#@# --inputFile user.xmd.json --dataset puntest

## Usage Example 7: Detect inputFile encoding
java -jar datasetutils-48.1.0.jar --action detectEncoding --inputFile Opportunity.csv
java -jar datasetutils-48.1.1.jar --action detectEncoding --inputFile Opportunity.csv

## Usage Example 8: download error logs file for csv uploads
java -jar datasetutils-48.1.0.jar --action downloadErrorFile --u [email protected] --p @#@#@# --dataset puntest
java -jar datasetutils-48.1.1.jar --action downloadErrorFile --u [email protected] --p @#@#@# --dataset puntest

## Building DatasetUtils
git clone https://github.com/forcedotcom/Analytics-Cloud-Dataset-Utils.git
Expand Down
6 changes: 3 additions & 3 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@
<description>Analytics Cloud Dataset Utils</description>
<artifactId>datasetutils</artifactId>
<packaging>jar</packaging>
<version>48.1.0</version>
<version>56.0.0-SNAPSHOT</version>
<url>https://github.com/forcedotcom/Analytics-Cloud-Dataset-Utils</url>
<organization>
<name>salesforce.com</name>
<url>http://salesforce.com</url>
</organization>
<properties>
<force.version>48.1.0</force.version>
<force.partner.version>48.1.0</force.partner.version>
<force.version>56.0.0</force.version>
<force.partner.version>56.0.0</force.partner.version>
<java.compile.version>11</java.compile.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
Expand Down
11 changes: 7 additions & 4 deletions src/main/java/com/sforce/dataset/DatasetUtilConstants.java
Original file line number Diff line number Diff line change
Expand Up @@ -62,10 +62,10 @@ public class DatasetUtilConstants {
// public static boolean createNewDateParts = false;
public static CodingErrorAction codingErrorAction = CodingErrorAction.REPORT;

public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/48.0";
public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/48.0";
public static final String defaultSoapEndPointPath = "/services/Soap/u/48.0";
public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/56.0";
public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/56.0";
public static final String defaultSoapEndPointPath = "/services/Soap/u/56.0";

public static boolean debug = false;
public static boolean ext = false;

Expand Down Expand Up @@ -96,6 +96,9 @@ public class DatasetUtilConstants {

public static final int max_error_threshhold = 10000;

public static final String INCREMENTAL_MODE_INCREMENTAL = "Incremental";
public static final String INCREMENTAL_MODE_NONE = "None";

public static boolean server = true;

static com.sforce.dataset.Config systemConfig = null;
Expand Down
48 changes: 42 additions & 6 deletions src/main/java/com/sforce/dataset/DatasetUtilMain.java
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,10 @@
import java.math.BigDecimal;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.nio.charset.CodingErrorAction;
import java.text.DecimalFormat;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Pattern;

import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.io.FileUtils;
Expand All @@ -58,11 +55,13 @@
import com.sforce.dataset.util.CharsetChecker;
import com.sforce.dataset.util.DatasetDownloader;
import com.sforce.dataset.util.DatasetUtils;
import com.sforce.dataset.util.SfdcUtils;
import com.sforce.dataset.util.XmdUploader;
import com.sforce.soap.partner.PartnerConnection;
import com.sforce.ws.ConnectionException;

import static com.sforce.dataset.DatasetUtilConstants.INCREMENTAL_MODE_INCREMENTAL;
import static com.sforce.dataset.DatasetUtilConstants.INCREMENTAL_MODE_NONE;

@SuppressWarnings("deprecation")
public class DatasetUtilMain {

Expand Down Expand Up @@ -304,7 +303,23 @@ else if(args[i-1].equalsIgnoreCase("--codingErrorAction"))
}
DatasetUtilConstants.codingErrorAction = params.codingErrorAction;
}
}else
}
else if(args[i-1].equalsIgnoreCase("--mode"))
{
String arg = args[i];
if(arg!=null)
{
if (arg.equalsIgnoreCase(INCREMENTAL_MODE_INCREMENTAL)) {
params.mode = INCREMENTAL_MODE_INCREMENTAL;
} else if (arg.equalsIgnoreCase(INCREMENTAL_MODE_NONE)) {
params.mode = INCREMENTAL_MODE_NONE;
}else {
System.out.println("Invalid mode {"+arg+"} Must be '" + INCREMENTAL_MODE_INCREMENTAL + "' or '" + INCREMENTAL_MODE_NONE + "'");
System.exit(-1);
}
}
}
else
{
printUsage();
System.out.println("\nERROR: Invalid argument: "+args[i-1]);
Expand Down Expand Up @@ -484,6 +499,7 @@ public static void printUsage()
System.out.println("--sessionId : (Optional) the salesforce sessionId. if specified,specify endpoint");
System.out.println("--fileEncoding : (Optional) the encoding of the inputFile default UTF-8");
System.out.println("--uploadFormat : (Optional) the whether to upload as binary or csv. default binary");
System.out.println("--mode : (Optional) Incremental or None, default is None");

System.out.println("*******************************************************************************\n");
System.out.println("Usage Example 1: Upload a csv to a dataset");
Expand Down Expand Up @@ -717,7 +733,8 @@ public static boolean doAction(String action, PartnerConnection partnerConnectio
try
{
boolean status = DatasetLoader.uploadDataset(params.inputFile, params.schemaFile, params.uploadFormat, params.codingErrorAction,fileCharset, params.dataset,
params.app, params.datasetLabel, params.Operation, params.useBulkAPI,params.chunkSizeMulti, partnerConnection, params.notificationLevel, params.notificationEmail, System.out);
params.app, params.datasetLabel, params.Operation, params.useBulkAPI,params.chunkSizeMulti, partnerConnection, params.notificationLevel, params.notificationEmail,
params.mode, System.out);
if(status)
session.end();
else
Expand Down Expand Up @@ -928,6 +945,25 @@ public static void getRequiredParams(String action,PartnerConnection partnerConn
}
// System.out.println();
}
while (params.mode ==null || params.mode.isEmpty())
{
params.mode = getInputFromUser("Enter mode: ", false, false);
if (params.mode == null || params.mode.isEmpty()) {
params.mode = INCREMENTAL_MODE_NONE;
}
else if(params.mode.equalsIgnoreCase(INCREMENTAL_MODE_INCREMENTAL))
{
params.mode = INCREMENTAL_MODE_INCREMENTAL;
}
else if (params.mode.equalsIgnoreCase(INCREMENTAL_MODE_NONE)){
params.mode = INCREMENTAL_MODE_NONE;
}
else
{
System.out.println("Invalid mode {"+params.mode +"} Must be '" + INCREMENTAL_MODE_INCREMENTAL + "' or '" + INCREMENTAL_MODE_NONE + "'");
params.mode = null;
}
}

}else if(action.equalsIgnoreCase("downloadErrorFile"))
{
Expand Down
1 change: 1 addition & 0 deletions src/main/java/com/sforce/dataset/DatasetUtilParams.java
Original file line number Diff line number Diff line change
Expand Up @@ -51,4 +51,5 @@ public class DatasetUtilParams {
boolean debug = false;
boolean server = true;
CodingErrorAction codingErrorAction = CodingErrorAction.REPORT;
String mode = null;
}
115 changes: 69 additions & 46 deletions src/main/java/com/sforce/dataset/loader/DatasetLoader.java
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,9 @@
import com.sforce.ws.ConnectionException;
import com.sforce.ws.ConnectorConfig;
import com.sforce.ws.util.Base64;

import static com.sforce.dataset.DatasetUtilConstants.INCREMENTAL_MODE_INCREMENTAL;
import static com.sforce.dataset.DatasetUtilConstants.INCREMENTAL_MODE_NONE;
//import org.supercsv.io.CsvListReader;
//import org.supercsv.prefs.CsvPreference;

Expand Down Expand Up @@ -176,6 +179,7 @@ public static void uploadDataset(File inputCsv,
* @param logger the logger
* @param notificationLevel notificationLevel
* @param notificationEmail notificationEmail
* @param mode mode
* @return true, if successful
* @throws DatasetLoaderException the dataset loader exception
*/
Expand All @@ -184,7 +188,7 @@ public static boolean uploadDataset(String inputFileString,String schemaFileStri
String uploadFormat, CodingErrorAction codingErrorAction,
Charset inputFileCharset, String datasetAlias,
String datasetFolder,String datasetLabel, String Operation, boolean useBulkAPI,int chunkSizeMulti,
PartnerConnection partnerConnection,String notificationLevel, String notificationEmail, PrintStream logger) throws DatasetLoaderException
PartnerConnection partnerConnection,String notificationLevel, String notificationEmail, String mode, PrintStream logger) throws DatasetLoaderException
{
File archiveDir = null;
File datasetArchiveDir = null;
Expand Down Expand Up @@ -240,6 +244,16 @@ public static boolean uploadDataset(String inputFileString,String schemaFileStri
{
Operation = "Overwrite";
}
if(mode != null) {
if (mode.equalsIgnoreCase(INCREMENTAL_MODE_INCREMENTAL))
{
mode = INCREMENTAL_MODE_INCREMENTAL;
}
else
{
mode = INCREMENTAL_MODE_NONE;
}
}

if(datasetLabel==null || datasetLabel.trim().isEmpty())
{
Expand All @@ -261,6 +275,7 @@ public static boolean uploadDataset(String inputFileString,String schemaFileStri
logger.println("uploadFormat: "+uploadFormat);
logger.println("notificationLevel: "+notificationLevel);
logger.println("notificationEmail: "+notificationEmail);
logger.println("mode: "+mode);
logger.println("JVM Max memory: "+nf.format(rt.maxMemory()/mb));
logger.println("JVM Total memory: "+nf.format(rt.totalMemory()/mb));
logger.println("JVM Free memory: "+nf.format(rt.freeMemory()/mb));
Expand Down Expand Up @@ -493,7 +508,7 @@ public static boolean uploadDataset(String inputFileString,String schemaFileStri

if(hdrId==null || hdrId.isEmpty())
{
hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, datasetLabel, altSchema.toBytes(), uploadFormat, Operation, notificationLevel, notificationEmail, logger);
hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, datasetLabel, altSchema.toBytes(), uploadFormat, Operation, notificationLevel, notificationEmail, mode, logger);
}

if(hdrId ==null || hdrId.isEmpty())
Expand Down Expand Up @@ -844,7 +859,7 @@ public static boolean uploadDataset(String inputFileString,String schemaFileStri

long startTime = System.currentTimeMillis();
status = uploadEM(gzbinFile, uploadFormat, altSchema.toBytes(), datasetAlias,datasetFolder, datasetLabel,useBulkAPI, partnerConnection, hdrId, datasetArchiveDir,
"Overwrite", updateHdrJson, notificationLevel, notificationEmail, logger, chunkSizeMulti);
"Overwrite", updateHdrJson, notificationLevel, notificationEmail, mode, logger, chunkSizeMulti);
long endTime = System.currentTimeMillis();
uploadTime = endTime-startTime;

Expand Down Expand Up @@ -930,7 +945,7 @@ public static boolean uploadDataset(String inputFileString,String schemaFileStri
*/
private static boolean uploadEM(File dataFile, String dataFormat, byte[] metadataJsonBytes, String datasetAlias,String datasetFolder,
String datasetLabel, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation,
boolean updateHdrJson,String notificationLevel, String notificationEmail, PrintStream logger,int chunkSizeMulti) throws DatasetLoaderException, InterruptedException, IOException, ConnectionException, AsyncApiException
boolean updateHdrJson,String notificationLevel, String notificationEmail, String incrementalMode,PrintStream logger,int chunkSizeMulti) throws DatasetLoaderException, InterruptedException, IOException, ConnectionException, AsyncApiException
{
BlockingQueue<Map<Integer, File>> q = new LinkedBlockingQueue<Map<Integer, File>>();
LinkedList<Integer> existingFileParts = new LinkedList<Integer>();
Expand Down Expand Up @@ -975,7 +990,7 @@ private static boolean uploadEM(File dataFile, String dataFormat, byte[] metadat

if(hdrId==null || hdrId.trim().isEmpty())
{
hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, datasetLabel, metadataJsonBytes, dataFormat, Operation, notificationLevel, notificationEmail, logger);
hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, datasetLabel, metadataJsonBytes, dataFormat, Operation, notificationLevel, notificationEmail, incrementalMode, logger);
}else
{
existingFileParts = getUploadedFileParts(partnerConnection, hdrId);
Expand Down Expand Up @@ -1129,51 +1144,59 @@ private static boolean uploadEM(File dataFile, String dataFormat, byte[] metadat
* @return the string
* @throws DatasetLoaderException the dataset loader exception
*/
private static String insertFileHdr(PartnerConnection partnerConnection, String datasetAlias, String datasetContainer, String datasetLabel, byte[] metadataJson, String dataFormat, String Operation, String notificationLevel, String notificationEmail,PrintStream logger) throws DatasetLoaderException
private static String insertFileHdr(PartnerConnection partnerConnection, String datasetAlias, String datasetContainer, String datasetLabel, byte[] metadataJson, String dataFormat, String Operation,
String notificationLevel, String notificationEmail,String mode, PrintStream logger) throws DatasetLoaderException
{
String rowId = null;
long startTime = System.currentTimeMillis();
try {

com.sforce.dataset.Preferences userPref = DatasetUtilConstants.getPreferences(partnerConnection.getUserInfo().getOrganizationId());

SObject sobj = new SObject();
sobj.setType("InsightsExternalData");

if(dataFormat == null || dataFormat.equalsIgnoreCase("CSV"))
sobj.setField("Format","CSV");
else
sobj.setField("Format","Binary");

sobj.setField("EdgemartAlias", datasetAlias);

//EdgemartLabel
sobj.setField("EdgemartLabel", datasetLabel);

if(datasetContainer!=null && !datasetContainer.trim().isEmpty() && !datasetContainer.equals(DatasetUtilConstants.defaultAppName))
{
sobj.setField("EdgemartContainer", datasetContainer); //Optional dataset folder name
}


//sobj.setField("IsIndependentParts",Boolean.FALSE); //Optional Defaults to false

//sobj.setField("IsDependentOnLastUpload",Boolean.FALSE); //Optional Defaults to false

if(metadataJson != null && metadataJson.length != 0)
{
sobj.setField("MetadataJson",metadataJson);
if(DatasetUtilConstants.debug)
{
logger.println("MetadataJson {"+ new String(metadataJson) + "}");
}
}

if(Operation!=null)
sobj.setField("operation",Operation);
else
sobj.setField("operation","Overwrite");

com.sforce.dataset.Preferences userPref = DatasetUtilConstants.getPreferences(partnerConnection.getUserInfo().getOrganizationId());

SObject sobj = new SObject();
sobj.setType("InsightsExternalData");

if (dataFormat == null || dataFormat.equalsIgnoreCase("CSV"))
sobj.setField("Format", "CSV");
else
sobj.setField("Format", "Binary");

sobj.setField("EdgemartAlias", datasetAlias);

//EdgemartLabel
sobj.setField("EdgemartLabel", datasetLabel);

if (datasetContainer != null && !datasetContainer.trim().isEmpty() && !datasetContainer.equals(DatasetUtilConstants.defaultAppName)) {
sobj.setField("EdgemartContainer", datasetContainer); //Optional dataset folder name
}


//sobj.setField("IsIndependentParts",Boolean.FALSE); //Optional Defaults to false

//sobj.setField("IsDependentOnLastUpload",Boolean.FALSE); //Optional Defaults to false

if (metadataJson != null && metadataJson.length != 0) {
sobj.setField("MetadataJson", metadataJson);
if (DatasetUtilConstants.debug) {
logger.println("MetadataJson {" + new String(metadataJson) + "}");
}
}

if (Operation != null)
sobj.setField("operation", Operation);
else
sobj.setField("operation", "Overwrite");


if (mode != null) {
if (mode.equalsIgnoreCase(INCREMENTAL_MODE_INCREMENTAL)) {
sobj.setField("Mode", INCREMENTAL_MODE_INCREMENTAL);
}
else if (mode.equalsIgnoreCase(INCREMENTAL_MODE_NONE)) {
sobj.setField("Mode", INCREMENTAL_MODE_NONE);
}
}

sobj.setField("Action","None");

//"Always, Failures, Warnings, Never"
Expand All @@ -1200,7 +1223,7 @@ private static String insertFileHdr(PartnerConnection partnerConnection, String
sobj.setField("NotificationSent",notificationLevel);

sobj.setField("NotificationEmail",notificationEmail);


//sobj.setField("FileName",fileName);
//sobj.setField("Description",description);
Expand Down
Loading

0 comments on commit 5c1b6f7

Please sign in to comment.