diff --git a/pom.xml b/pom.xml
index 6cce249..850f999 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,5 +1,6 @@
-
+
 	4.0.0
 	com.force.ac
 	Dataset Utils
@@ -17,14 +18,14 @@
 	1.6
 	UTF-8
-	<repositories>
-		<repository>
-			<id>local-proj-repo</id>
-			<name>Project Local Repository</name>
-			<layout>default</layout>
-			<url>file://${basedir}/local-proj-repo/</url>
-		</repository>
-	</repositories>
+	<repositories>
+		<repository>
+			<id>local-proj-repo</id>
+			<name>Project Local Repository</name>
+			<layout>default</layout>
+			<url>file://${basedir}/local-proj-repo/</url>
+		</repository>
+	</repositories>
 	BSD License (FreeBSD)
@@ -37,8 +38,8 @@
 	scm:git:git@github.com:forcedotcom/Analytics-Cloud-Dataset-Utils.git
 	scm:git:git@github.com:forcedotcom/Analytics-Cloud-Dataset-Utils.git
 	scm:git:git@github.com:forcedotcom/Analytics-Cloud-Dataset-Utils.git
-	<tag>HEAD</tag>
-</scm>
+	<tag>HEAD</tag>
+</scm>
 	com.force.api
@@ -102,10 +103,10 @@
 	test
-		<groupId>com.google.code.externalsortinginjava</groupId>
-		<artifactId>externalsortinginjava</artifactId>
-		<version>0.1.9</version>
-	</dependency>
+		<groupId>com.google.code.externalsortinginjava</groupId>
+		<artifactId>externalsortinginjava</artifactId>
+		<version>0.1.9</version>
+	</dependency>
 	clean install
@@ -143,7 +144,8 @@
 	${project.build.finalName}
 	false
-
+
+	<mainClass>com.sforce.dataset.DatasetUtilMain</mainClass>
diff --git a/src/main/java/com/sforce/dataset/DatasetUtilMain.java b/src/main/java/com/sforce/dataset/DatasetUtilMain.java
index 803de88..a30ee27 100644
--- a/src/main/java/com/sforce/dataset/DatasetUtilMain.java
+++ b/src/main/java/com/sforce/dataset/DatasetUtilMain.java
@@ -31,10 +31,13 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.math.BigDecimal;
-import java.net.URI;
-import java.net.URISyntaxException;
+import java.net.MalformedURLException;
+import java.net.URL;
 import java.nio.charset.Charset;
 import java.nio.charset.CodingErrorAction;
+import java.text.DecimalFormat;
+import java.util.Map;
+import java.util.regex.Pattern;
 
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import org.apache.commons.io.FileUtils;
@@ -43,14 +46,19 @@
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.sforce.dataset.flow.monitor.DataFlowMonitorUtil;
 import com.sforce.dataset.loader.DatasetLoader;
 import com.sforce.dataset.loader.EbinFormatWriter;
+import com.sforce.dataset.loader.file.listener.FileListener;
+import com.sforce.dataset.loader.file.listener.FileListenerThread;
+import com.sforce.dataset.loader.file.listener.FileListenerUtil;
 import com.sforce.dataset.loader.file.schema.ExternalFileSchema;
 import com.sforce.dataset.util.CharsetChecker;
 import com.sforce.dataset.util.DatasetAugmenter;
 import com.sforce.dataset.util.DatasetDownloader;
 import com.sforce.dataset.util.DatasetUtils;
 import com.sforce.dataset.util.SfdcExtracter;
+import com.sforce.dataset.util.SfdcUtils;
 import com.sforce.dataset.util.XmdUploader;
 import com.sforce.soap.partner.PartnerConnection;
 import com.sforce.ws.ConnectionException;
@@ -58,58 +66,48 @@
 @SuppressWarnings("deprecation")
 public class DatasetUtilMain {
 
-	public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/31.0";
+	public static final String defaultEndpoint = "https://login.salesforce.com/services/Soap/u/32.0";
+	public static final String defaultTestEndpoint = "https://test.salesforce.com/services/Soap/u/32.0";
+
+	public static final String[] validActions = {"load", "extract", "augment", "downloadXMD", "uploadXMD", "detectEncoding", "downloadErrorFile"};
 
 	public static void main(String[] args) {
 
 		if(!printlneula())
 		{
-			System.err.println("You do not have permission to use this jar. Please delete it from this computer");
+			System.out.println("You do not have permission to use this jar. Please delete it from this computer");
 			System.exit(-1);
 		}
 
-		String dataset = null;
-		String datasetLabel = null;
-		String app = null;
-		String username = null;
-		String password = null;
-		String token = null;
-		String sessionId = null;
-		String endpoint = null;
+		DatasetUtilParams params = new DatasetUtilParams();
 		String action = null;
-		String inputFile = null;
-//		String jsonConfig = null;
-		String rootObject = null;
-		String fileEncoding = "UTF-8";
-		Charset fileCharset = null;
-		String uploadFormat = "binary";
-		String Operation = "Overwrite";
-		int rowLimit = 1000;
-		boolean useBulkAPI = false;
-		CodingErrorAction codingErrorAction = CodingErrorAction.REPORT;
 
 		if (args.length > 2)
 		{
 			for (int i=1; i< args.length; i=i+2)
 			{
-				if(args[i-1].equalsIgnoreCase("--u"))
+				if(args[i-1].equalsIgnoreCase("--help") || args[i-1].equalsIgnoreCase("-help") || args[i-1].equalsIgnoreCase("help"))
+				{
+					printUsage();
+				}
+				else if(args[i-1].equalsIgnoreCase("--u"))
 				{
-					username = args[i];
+					params.username = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--p"))
 				{
-					password = args[i];
+					params.password = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--sessionId"))
 				{
-					sessionId = args[i];
+					params.sessionId = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--token"))
 				{
-					token = args[i];
+					params.token = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--endpoint"))
 				{
-					endpoint = args[i];
+					params.endpoint = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--action"))
 				{
@@ -117,30 +115,44 @@ else if(args[i-1].equalsIgnoreCase("--action"))
 				}
 				else if(args[i-1].equalsIgnoreCase("--inputFile"))
 				{
-					inputFile = args[i];
+					String tmp = args[i];
+					if(tmp!=null)
+					{
+						File tempFile = new File(tmp);
+						if(tempFile.exists())
+						{
+							params.inputFile = tempFile.toString();
+						}else
+						{
+							System.out.println("File {"+args[i]+"} does not exist");
+							System.exit(-1);
+						}
+					}
 				}
 				else if(args[i-1].equalsIgnoreCase("--dataset"))
 				{
-					dataset = args[i];
+					params.dataset = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--app"))
 				{
-					app = args[i];
+					params.app = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--useBulkAPI"))
 				{
 					if(args[i]!=null && args[i].trim().equalsIgnoreCase("true"))
-						useBulkAPI = true;
+						params.useBulkAPI = true;
 				}
 				else if(args[i-1].equalsIgnoreCase("--uploadFormat"))
 				{
 					if(args[i]!=null && args[i].trim().equalsIgnoreCase("csv"))
-						uploadFormat = "csv";
+						params.uploadFormat = "csv";
+					else if(args[i]!=null && args[i].trim().equalsIgnoreCase("binary"))
+						params.uploadFormat = "binary";
 				}
 				else if(args[i-1].equalsIgnoreCase("--rowLimit"))
 				{
 					if(args[i]!=null && !args[i].trim().isEmpty())
-						rowLimit = (new BigDecimal(args[i].trim())).intValue();
+						params.rowLimit = (new BigDecimal(args[i].trim())).intValue();
 				}
//				else if(args[i-1].equalsIgnoreCase("--json"))
//				{
@@ -148,11 +160,11 @@ else if(args[i-1].equalsIgnoreCase("--rowLimit"))
//				}
 				else if(args[i-1].equalsIgnoreCase("--rootObject"))
 				{
-					rootObject = args[i];
+					params.rootObject = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--fileEncoding"))
 				{
-					fileEncoding = args[i];
+					params.fileEncoding = args[i];
 				}
 				else if(args[i-1].equalsIgnoreCase("--codingErrorAction"))
 				{
@@ -160,552 +172,774 @@ else if(args[i-1].equalsIgnoreCase("--codingErrorAction"))
 					{
 						if(args[i].equalsIgnoreCase("IGNORE"))
 						{
-							codingErrorAction = CodingErrorAction.IGNORE;
+							params.codingErrorAction = CodingErrorAction.IGNORE;
 						}else if(args[i].equalsIgnoreCase("REPORT"))
 						{
-							codingErrorAction = CodingErrorAction.REPORT;
+							params.codingErrorAction = CodingErrorAction.REPORT;
 						}else if(args[i].equalsIgnoreCase("REPLACE"))
if(args[i].equalsIgnoreCase("REPLACE")) { - codingErrorAction = CodingErrorAction.REPLACE; + params.codingErrorAction = CodingErrorAction.REPLACE; } } }else { printUsage(); - System.err.println("\nERROR: Invalid argument: "+args[i-1]); + System.out.println("\nERROR: Invalid argument: "+args[i-1]); System.exit(-1); } - }//end for - }else - { - printUsage(); - System.exit(-1); + if(params.endpoint == null || params.endpoint.isEmpty()) + { + params.endpoint = defaultEndpoint; + } } + + printBanner(); - if (action == null) + + if(params.sessionId==null) { - printUsage(); - System.err.println("\nERROR: action must be specified"); - System.exit(-1); - } + if(params.username == null || params.username.trim().isEmpty()) + { + params.username = getInputFromUser("Enter salesforce username: ", true, false); + } - if(sessionId != null) + if(params.username.equals("-1")) + { + params.sessionId = getInputFromUser("Enter salesforce sessionId: ", true, false); + params.username = null; + params.password = null; + }else + { + if(params.password == null || params.password.trim().isEmpty()) + { + params.password = getInputFromUser("Enter salesforce password: ", true, true); + } + } + } + + + if(params.sessionId != null && !params.sessionId.isEmpty()) { - if(endpoint == null) + while(params.endpoint == null || params.endpoint.trim().isEmpty()) { - printUsage(); - System.err.println("\nERROR: endpoint must be specified when sessionId is specified"); - System.exit(-1); + params.endpoint = getInputFromUser("Enter salesforce instance url: ", true, false); + if(params.endpoint==null || params.endpoint.trim().isEmpty()) + System.out.println("\nERROR: endpoint must be specified when sessionId is specified"); } - if(endpoint.equals(defaultEndpoint)) - { - printUsage(); - System.err.println("\nERROR: endpoint must be the actual serviceURL and not the login url"); - System.exit(-1); - } + while(params.endpoint.toLowerCase().contains("login.salesforce.com") || params.endpoint.toLowerCase().contains("test.salesforce.com") || params.endpoint.toLowerCase().contains("test") || params.endpoint.toLowerCase().contains("prod") || params.endpoint.toLowerCase().contains("sandbox")) + { + System.out.println("\nERROR: endpoint must be the actual serviceURL and not the login url"); + params.endpoint = getInputFromUser("Enter salesforce instance url: ", true, false); + } }else { - if(endpoint == null) + if(params.endpoint == null || params.endpoint.isEmpty()) + { + params.endpoint = getInputFromUser("Enter salesforce instance url (default=prod): ", false, false); + if(params.endpoint == null || params.endpoint.trim().isEmpty()) { - endpoint = defaultEndpoint; + params.endpoint = defaultEndpoint; } + } } - if(endpoint!=null) - { try { - if(endpoint.equalsIgnoreCase("PROD") || endpoint.equalsIgnoreCase("PRODUCTION")) + if(params.endpoint.equalsIgnoreCase("PROD") || params.endpoint.equalsIgnoreCase("PRODUCTION")) { - endpoint = defaultEndpoint; - }else if(endpoint.equalsIgnoreCase("TEST") || endpoint.equalsIgnoreCase("SANDBOX")) + params.endpoint = defaultEndpoint; + }else if(params.endpoint.equalsIgnoreCase("TEST") || params.endpoint.equalsIgnoreCase("SANDBOX")) { - endpoint = defaultEndpoint.replace("login", "test"); + params.endpoint = defaultEndpoint.replace("login", "test"); } - URI uri = new URI(endpoint); - String scheme = uri.getScheme(); + URL uri = new URL(params.endpoint); + String protocol = uri.getProtocol(); String host = uri.getHost(); - if(!scheme.equalsIgnoreCase("https")) + if(protocol == null || 
!protocol.equalsIgnoreCase("https")) { - if(!host.toLowerCase().endsWith("internal.salesforce.com") && !host.toLowerCase().endsWith("localhost")) + if(host == null || !host.toLowerCase().endsWith("internal.salesforce.com") && !host.toLowerCase().endsWith("localhost")) { - System.err.println("\nERROR: UNSUPPORTED_CLIENT: HTTPS Required in endpoint"); + System.out.println("\nERROR: Invalid endpoint. UNSUPPORTED_CLIENT: HTTPS Required in endpoint"); System.exit(-1); } } if(uri.getPath() == null || uri.getPath().isEmpty()) { - uri = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), "/services/Soap/u/31.0", uri.getQuery(), uri.getFragment()); + uri = new URL(uri.getProtocol(), uri.getHost(), uri.getPort(), "/services/Soap/u/32.0"); } - endpoint = uri.toString(); - } catch (URISyntaxException e) { + params.endpoint = uri.toString(); + } catch (MalformedURLException e) { e.printStackTrace(); - System.err.println("\nERROR: endpoint is not a valid URL"); + System.out.println("\nERROR: endpoint is not a valid URL"); System.exit(-1); } + + + PartnerConnection partnerConnection = null; + if(params.username!=null || params.sessionId != null) + { + try { + partnerConnection = DatasetUtils.login(0, params.username, params.password, params.token, params.endpoint, params.sessionId); + } catch (ConnectionException e) { + e.printStackTrace(); + System.exit(-1); + } + } + + if(args.length==0) + { + System.out.println("\n*******************************************************************************"); + FileListenerUtil.startAllListener(partnerConnection); + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + } + System.out.println("*******************************************************************************\n"); + System.out.println(); + while(true) + { + action = getActionFromUser(); + if(action==null || action.isEmpty()) + { + System.exit(-1); + } + params = new DatasetUtilParams(); + getRequiredParams(action, partnerConnection, params); + boolean status = doAction(action, partnerConnection, params); + if(status) + { + if(action.equalsIgnoreCase("load")) + createListener(partnerConnection, params); + } + } + }else + { + doAction(action, partnerConnection, params); } - if (inputFile!=null) + } + + + public static void printUsage() + { + System.out.println("\n*******************************************************************************"); + System.out.println("Usage:"); + System.out.print("java -jar datasetutil.jar --action load --u userName --p password "); + System.out.println("--dataset datasetAlias --inputFile inputFile --endpoint endPoint"); + System.out.println("--action : Use either:load,extract,augment,downloadxmd,uploadxmd,detectEncoding"); + System.out.println(" : Use load for loading csv, augment for augmenting existing dataset"); + System.out.println("--u : Salesforce.com login"); + System.out.println("--p : (Optional) Salesforce.com password,if omitted you will be prompted"); + System.out.println("--token : (Optional) Salesforce.com token"); + System.out.println("--endpoint: (Optional) The salesforce soap api endpoint (test/prod)"); + System.out.println(" : Default: https://login.salesforce.com/services/Soap/u/31.0"); + System.out.println("--dataset : (Optional) the dataset alias. required if action=load"); + System.out.println("--app : (Optional) the app name for the dataset"); + System.out.println("--inputFile : (Optional) the input csv file. 
required if action=load"); + System.out.println("--rootObject: (Optional) the root SObject for the extract"); + System.out.println("--rowLimit: (Optional) the number of rows to extract, -1=all, deafult=1000"); + System.out.println("--sessionId : (Optional) the salesforce sessionId. if specified,specify endpoint"); + System.out.println("--fileEncoding : (Optional) the encoding of the inputFile default UTF-8"); + System.out.println("--uploadFormat : (Optional) the whether to upload as binary or csv. default binary"); +// System.out.println("jsonConfig: (Optional) the dataflow definition json file"); + System.out.println("*******************************************************************************\n"); + System.out.println("Usage Example 1: Upload a csv to a dataset"); + System.out.println("java -jar datasetutil.jar --action load --u pgupta@force.com --p @#@#@# --inputFile Opportunity.csv --dataset test"); + System.out.println(""); + System.out.println("Usage Example 2: Download dataset xmd files"); + System.out.println("java -jar datasetloader.jar --action downloadxmd --u pgupta@force.com --p @#@#@# --dataset test"); + System.out.println(""); + System.out.println("Usage Example 3: Upload user.xmd.json"); + System.out.println("java -jar datasetutil.jar --action uploadxmd --u pgupta@force.com --p @#@#@# --inputFile user.xmd.json --dataset test"); + System.out.println(""); + System.out.println("Usage Example 4: Augment 2 datasets"); + System.out.println("java -jar datasetutil.jar --action augment --u pgupta@force.com --p @#@#@#"); + System.out.println(""); + System.out.println("Usage Example 5: Generate the schema file from CSV"); + System.out.println("java -jar datasetutil.jar --action load --inputFile Opportunity.csv"); + System.out.println(""); + System.out.println("Usage Example 6: Extract salesforce data"); + System.out.println("java -jar datasetutil.jar --action extract --u pgupta@force.com --p @#@#@# --rootObject OpportunityLineItem"); + System.out.println(""); + } + + static boolean printlneula() + { + try { - File temp = new File(inputFile); - if(!temp.exists() && !temp.canRead()) + String userHome = System.getProperty("user.home"); + File lic = new File(userHome, ".ac.datautils.lic"); + if(!lic.exists()) { - System.err.println("\nERROR: inputFile {"+temp+"} not found"); - System.exit(-1); + System.out.println(eula); + System.out.println(); + while(true) + { + String response = DatasetUtils.readInputFromConsole("Do you agree to the above license agreement (Yes/No): "); + if(response!=null && (response.equalsIgnoreCase("yes") || response.equalsIgnoreCase("y"))) + { + FileUtils.writeStringToFile(lic, eula); + return true; + }else if(response!=null && (response.equalsIgnoreCase("no") || response.equalsIgnoreCase("n"))) + { + return false; + } + } + }else + { + return true; } - - String ext = FilenameUtils.getExtension(temp.getName()); - if(ext == null || !(ext.equalsIgnoreCase("csv") || ext.equalsIgnoreCase("bin") || ext.equalsIgnoreCase("gz") || ext.equalsIgnoreCase("json"))) + }catch(Throwable t) + { + t.printStackTrace(); + } + return false; + } + + public static String eula = "/*\n" + +"* Copyright (c) 2014, salesforce.com, inc.\n" + +"* All rights reserved.\n" + +"*\n" + +"* Redistribution and use in source and binary forms, with or without modification, are permitted provided\n" + +"* that the following conditions are met:\n" + +"* \n" + +"* Redistributions of source code must retain the above copyright notice, this list of conditions and the \n" + +"* following disclaimer.\n" + 
+"* \n" + +"* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and \n" + +"* the following disclaimer in the documentation and/or other materials provided with the distribution. \n" + +"* \n" + +"* Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or \n" + +"* promote products derived from this software without specific prior written permission." + +"* \n" + +"* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED \n" + +"* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A \n" + +"* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR \n" + +"* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED \n" + +"* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) \n" + +"* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING \n" + +"* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE \n" + +"* POSSIBILITY OF SUCH DAMAGE. \n" + +"*/\n"; + + public static String getInputFromUser(String message, boolean isRequired, boolean isPassword) + { + String input = null; + while(true) + { + try { - System.err.println("\nERROR: inputFile does not have valid extension"); - System.exit(-1); + if(!isPassword) + input = DatasetUtils.readInputFromConsole(message); + else + input = DatasetUtils.readPasswordFromConsole(message); + + if(input==null || input.isEmpty()) + { + if(!isRequired) + break; + }else + { + break; + } + }catch(Throwable me) + { + input = null; } - - if(action.equalsIgnoreCase("load")) + } + return input; + } + + public static String getActionFromUser() + { + System.out.println(); + String selectedAction = "load"; + int cnt=1; + DecimalFormat df = new DecimalFormat("00"); + df.setMinimumIntegerDigits(2); + for(String action:validActions) { - byte[] binHeader = new byte[5]; - if(ext.equalsIgnoreCase("bin") || ext.equalsIgnoreCase("gz")) + if(cnt==1) + System.out.println("Available Datasetutil Actions: "); + System.out.print(" "); + System.out.println(DatasetUtils.padLeft(cnt+"",3)+". 
"+action); + cnt++; + } + System.out.println(); + + while(true) + { + try + { + String tmp = DatasetUtils.readInputFromConsole("Enter Action number (0 = Exit): "); + if(tmp==null) + return null; + if(tmp.trim().isEmpty()) + continue; + long choice = Long.parseLong(tmp.trim()); + if(choice==0) + return null; + cnt = 1; + if(choice>0 && choice <= validActions.length) { - try { - InputStream fis = new FileInputStream(temp); - if(ext.equalsIgnoreCase("gz")) - fis = new GzipCompressorInputStream(new FileInputStream(temp)); - int cnt = fis.read(binHeader); - if(fis!=null) - { - IOUtils.closeQuietly(fis); - } - if(cnt<5) - { - System.err.println("\nERROR: inputFile {"+temp+"} in not valid"); - System.exit(-1); - } - } catch (FileNotFoundException e) { - e.printStackTrace(); - System.err.println("\nERROR: inputFile {"+temp+"} not found"); - System.exit(-1); - } catch (IOException e) { - e.printStackTrace(); - System.err.println("\nERROR: inputFile {"+temp+"} in not valid"); - System.exit(-1); - } - - if(!EbinFormatWriter.isValidBin(binHeader)) + for(String action:validActions) { - if(ext.equalsIgnoreCase("bin")) + if(cnt==choice) { - System.err.println("\nERROR: inputFile {"+temp+"} in not valid binary file"); - System.exit(-1); - }else - { - uploadFormat = "csv"; //Assume the user is uploading a .gz csv + selectedAction = action; + return selectedAction; } + cnt++; } } - } - - if(ext.equalsIgnoreCase("json")) - { - try - { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - mapper.readValue(temp, Object.class); - }catch(Throwable t) - { - System.err.println("\nERROR: inputFile {"+temp+"} is not valid json, Error: " + t.getMessage()); - System.exit(-1); + }catch(Throwable me) + { } } - + } + + public static boolean doAction(String action, PartnerConnection partnerConnection, DatasetUtilParams params) + { + + if (params.inputFile!=null) + { + File tempFile = validateInputFile(params.inputFile, action); + if(tempFile == null) + { + System.out.println("Inputfile {"+params.inputFile+"} is not valid"); + return false; + } } - if(dataset!=null) + if(params.dataset!=null && !params.dataset.isEmpty()) { - datasetLabel = dataset; - dataset = ExternalFileSchema.createDevName(dataset, "Dataset", 1); - if(!dataset.equals(datasetLabel)) + params.datasetLabel = params.dataset; + params.dataset = ExternalFileSchema.createDevName(params.dataset, "Dataset", 1); + if(!params.dataset.equals(params.datasetLabel)) { - System.err.println("\n Warning: dataset name can only contain alpha-numeric or '_', must start with alpha, and cannot end in '__c'"); - System.err.println("\n changing dataset name to: {"+dataset+"}"); + System.out.println("\n Warning: dataset name can only contain alpha-numeric or '_', must start with alpha, and cannot end in '__c'"); + System.out.println("\n changing dataset name to: {"+params.dataset+"}"); } } + Charset fileCharset = null; try { - fileCharset = Charset.forName(fileEncoding); + if(params.fileEncoding!=null && !params.fileEncoding.trim().isEmpty()) + fileCharset = Charset.forName(params.fileEncoding); + else + fileCharset = Charset.forName("UTF-8"); } catch (Throwable e) { e.printStackTrace(); - System.err.println("\nERROR: Invalid fileEncoding {"+fileEncoding+"}"); - System.exit(-1); + System.out.println("\nERROR: Invalid fileEncoding {"+params.fileEncoding+"}"); + return false; } if(action.equalsIgnoreCase("load")) { - if (inputFile==null) + if (params.inputFile==null || params.inputFile.isEmpty()) { - 
-				System.err.println("\nERROR: inputFile must be specified");
-				System.exit(-1);
+				System.out.println("\nERROR: inputFile must be specified");
+				return false;
 			}
 
-			if(dataset!=null && ((username == null) && (sessionId==null)))
+			if (params.dataset==null || params.dataset.isEmpty())
 			{
-				printUsage();
-				System.err.println("\nERROR: username must be specified");
-				System.exit(-1);
+				System.out.println("\nERROR: dataset name must be specified");
+				return false;
+			}
+
+			try {
+				return DatasetLoader.uploadDataset(params.inputFile, params.uploadFormat, params.codingErrorAction, fileCharset, params.dataset, params.app, params.datasetLabel, params.Operation, params.useBulkAPI, partnerConnection, System.out);
+			} catch (Exception e) {
+				System.out.println();
+				e.printStackTrace();
+				return false;
+			}
+		} else if(action.equalsIgnoreCase("detectEncoding"))
+		{
+			if (params.inputFile==null)
+			{
+				System.out.println("\nERROR: inputFile must be specified");
+				return false;
 			}
 
-			if(sessionId==null)
+			try {
+				CharsetChecker.detectCharset(new File(params.inputFile));
+			} catch (Exception e) {
+				e.printStackTrace();
+				return false;
+			}
+		}else if(action.equalsIgnoreCase("uploadxmd"))
 			{
-				if((username != null) && dataset==null)
+			if (params.inputFile==null)
 				{
-					printUsage();
-					System.err.println("\nERROR: dataset must be specified");
-					System.exit(-1);
+				System.out.println("\nERROR: inputFile must be specified");
+				return false;
 				}
 
-				if(dataset!=null && (password == null || password.isEmpty()))
+			if(params.dataset==null)
 				{
-					try {
-						password = DatasetUtils.readPasswordFromConsole("Enter password: ");
-					} catch (IOException e) {
-					}
-				}
-
-				if(dataset!=null && (password == null || password.isEmpty()))
-				{
-					printUsage();
-					System.err.println("\nERROR: password must be specified");
-					System.exit(-1);
-				}
-			}else
-			{
-				if(dataset==null)
-				{
-					printUsage();
-					System.err.println("\nERROR: dataset must be specified");
-					System.exit(-1);
-				}
-			}
-
-			PartnerConnection partnerConnection = null;
-			if(username!=null || sessionId != null)
-			{
-				try {
-					partnerConnection = DatasetUtils.login(0, username, password, token, endpoint, sessionId);
-				} catch (ConnectionException e) {
-					e.printStackTrace();
-					System.exit(-1);
-				}
-			}
-
-			try {
-
-				DatasetLoader.uploadDataset(inputFile, uploadFormat, codingErrorAction, fileCharset, dataset, app, datasetLabel, Operation, useBulkAPI, partnerConnection);
-//				DatasetLoader.uploadDataset(inputFile, dataset, app, username, password, endpoint, token, sessionId, useBulkAPI, uploadFormat, codingErrorAction, fileCharset);
-			} catch (Exception e) {
-				System.err.println();
-				e.printStackTrace();
-				System.exit(-1);
-			}
-		} else if(action.equalsIgnoreCase("detectEncoding"))
-		{
-			if (inputFile==null)
-			{
-				printUsage();
-				System.err.println("\nERROR: inputFile must be specified");
-				System.exit(-1);
-			}
-
-			try {
-				CharsetChecker.detectCharset(new File(inputFile));
-			} catch (Exception e) {
-				e.printStackTrace();
-				System.exit(-1);
-			}
-		}else if(action.equalsIgnoreCase("uploadxmd"))
-		{
-			if (inputFile==null)
-			{
-				printUsage();
-				System.err.println("\nERROR: inputFile must be specified");
-				System.exit(-1);
-			}
-
-			if(dataset==null)
-			{
-				printUsage();
-				System.err.println("\nERROR: dataset must be specified");
-				System.exit(-1);
-			}else
-			{
-
+				System.out.println("\nERROR: dataset must be specified");
+				return false;
 			}
 
-			if(sessionId==null)
-			{
-				if(username == null)
-				{
-					printUsage();
-					System.err.println("\nERROR: username must be specified");
-					System.exit(-1);
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					try {
-						password = DatasetUtils.readPasswordFromConsole("Enter password: ");
-					} catch (IOException e) {
-					}
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					printUsage();
-					System.err.println("\nERROR: password must be specified");
-					System.exit(-1);
-				}
-			}
 			try {
-				XmdUploader.uploadXmd(inputFile, dataset, username, password, endpoint, token, sessionId);
+				XmdUploader.uploadXmd(params.inputFile, params.dataset, partnerConnection);
 			} catch (Exception e) {
 				e.printStackTrace();
-				System.exit(-1);
+				return false;
 			}
 		} else if(action.equalsIgnoreCase("downloadxmd"))
 		{
-			if (dataset==null)
+			if (params.dataset==null)
 			{
-				printUsage();
-				System.err.println("\nERROR: dataset alias must be specified");
-				System.exit(-1);
-			}
-
-			if(sessionId==null)
-			{
-				if(username == null)
-				{
-					printUsage();
-					System.err.println("\nERROR: username must be specified");
-					System.exit(-1);
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					try {
-						password = DatasetUtils.readPasswordFromConsole("Enter password: ");
-					} catch (IOException e) {
-					}
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					printUsage();
-					System.err.println("\nERROR: password must be specified");
-					System.exit(-1);
-				}
+				System.out.println("\nERROR: dataset alias must be specified");
+				return false;
 			}
 
 			try {
-				DatasetDownloader.downloadEM(dataset, username, password, endpoint, token, sessionId);
+				DatasetDownloader.downloadEM(params.dataset, partnerConnection);
 			} catch (Exception e) {
 				e.printStackTrace();
-				System.exit(-1);
+				return false;
 			}
 		}else if(action.equalsIgnoreCase("augment"))
 		{
-			if(sessionId==null)
-			{
-				if(username == null)
-				{
-					printUsage();
-					System.err.println("\nERROR: username must be specified");
-					System.exit(-1);
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					try {
-						password = DatasetUtils.readPasswordFromConsole("Enter password: ");
-					} catch (IOException e) {
-					}
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					printUsage();
-					System.err.println("\nERROR: password must be specified");
-					System.exit(-1);
-				}
-			}
 			try {
-				DatasetAugmenter.augmentEM(username, password, endpoint, token, sessionId);
+				DatasetAugmenter.augmentEM(partnerConnection);
 			} catch (Exception e) {
 				e.printStackTrace();
-				System.exit(-1);
+				return false;
 			}
 		}else if(action.equalsIgnoreCase("extract"))
 		{
-			if(rootObject==null)
+			if(params.rootObject==null)
 			{
-				printUsage();
-				System.err.println("\nERROR: rootObject must be specified");
-				System.exit(-1);
-			}
-
-//			if(dataset==null)
-//			{
-//				System.err.println("\nERROR: dataset must be specified");
-//				printUsage();
-//				System.exit(-1);
-//			}
-
-			if(sessionId==null)
-			{
-				if(username == null)
-				{
-					printUsage();
-					System.err.println("\nERROR: username must be specified");
-					System.exit(-1);
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					try {
-						password = DatasetUtils.readPasswordFromConsole("Enter password: ");
-					} catch (IOException e) {
-					}
-				}
-
-				if(password == null || password.isEmpty())
-				{
-					printUsage();
-					System.err.println("\nERROR: password must be specified");
-					System.exit(-1);
-				}
+				System.out.println("\nERROR: rootObject must be specified");
+				return false;
 			}
 
 			try{
-				SfdcExtracter.extract(rootObject,dataset, username, password, token, endpoint, sessionId, rowLimit);
+				SfdcExtracter.extract(params.rootObject, params.dataset, partnerConnection, params.rowLimit);
 			} catch (Exception e) {
 				e.printStackTrace();
-				System.exit(-1);
+				return false;
+			}
+		}else if(action.equalsIgnoreCase("downloadErrorFile"))
if(action.equalsIgnoreCase("downloadErrorFile")) + { + if (params.dataset==null) + { + System.out.println("\nERROR: dataset alias must be specified"); + return false; } + + try { + DataFlowMonitorUtil.getJobsAndErrorFiles(partnerConnection, params.dataset); + } catch (Exception e) { + System.out.println(); + e.printStackTrace(); + return false; + } + }else { printUsage(); - System.err.println("\nERROR: Invalid action {"+action+"}"); - System.exit(-1); + System.out.println("\nERROR: Invalid action {"+action+"}"); + return false; } + return true; } - public static void printUsage() - { - System.out.println("\n*******************************************************************************"); - System.out.println("Usage:"); - System.out.print("java -jar datasetutil.jar --action load --u userName --p password "); - System.out.println("--dataset datasetAlias --inputFile inputFile --endpoint endPoint"); - System.out.println("--action : Use either:load,extract,augment,downloadxmd,uploadxmd,detectEncoding"); - System.out.println(" : Use load for loading csv, augment for augmenting existing dataset"); - System.out.println("--u : Salesforce.com login"); - System.out.println("--p : (Optional) Salesforce.com password,if omitted you will be prompted"); - System.out.println("--token : (Optional) Salesforce.com token"); - System.out.println("--endpoint: (Optional) The salesforce soap api endpoint (test/prod)"); - System.out.println(" : Default: https://login.salesforce.com/services/Soap/u/31.0"); - System.out.println("--dataset : (Optional) the dataset alias. required if action=load"); - System.out.println("--app : (Optional) the app name for the dataset"); - System.out.println("--inputFile : (Optional) the input csv file. required if action=load"); - System.out.println("--rootObject: (Optional) the root SObject for the extract"); - System.out.println("--rowLimit: (Optional) the number of rows to extract, -1=all, deafult=1000"); - System.out.println("--sessionId : (Optional) the salesforce sessionId. if specified,specify endpoint"); - System.out.println("--fileEncoding : (Optional) the encoding of the inputFile default UTF-8"); - System.out.println("--uploadFormat : (Optional) the whether to upload as binary or csv. 
default binary"); -// System.out.println("jsonConfig: (Optional) the dataflow definition json file"); - System.out.println("*******************************************************************************\n"); - System.out.println("Usage Example 1: Upload a csv to a dataset"); - System.out.println("java -jar datasetutil.jar --action load --u pgupta@force.com --p @#@#@# --inputFile Opportunity.csv --dataset test"); - System.out.println(""); - System.out.println("Usage Example 2: Download dataset xmd files"); - System.out.println("java -jar datasetloader.jar --action downloadxmd --u pgupta@force.com --p @#@#@# --dataset test"); - System.out.println(""); - System.out.println("Usage Example 3: Upload user.xmd.json"); - System.out.println("java -jar datasetutil.jar --action uploadxmd --u pgupta@force.com --p @#@#@# --inputFile user.xmd.json --dataset test"); - System.out.println(""); - System.out.println("Usage Example 4: Augment 2 datasets"); - System.out.println("java -jar datasetutil.jar --action augment --u pgupta@force.com --p @#@#@#"); - System.out.println(""); - System.out.println("Usage Example 5: Generate the schema file from CSV"); - System.out.println("java -jar datasetutil.jar --action load --inputFile Opportunity.csv"); - System.out.println(""); - System.out.println("Usage Example 6: Extract salesforce data"); - System.out.println("java -jar datasetutil.jar --action extract --u pgupta@force.com --p @#@#@# --rootObject OpportunityLineItem"); - System.out.println(""); - } - static boolean printlneula() + public static void getRequiredParams(String action,PartnerConnection partnerConnection, DatasetUtilParams params) { - try + if (action == null || action.trim().isEmpty()) { - String userHome = System.getProperty("user.home"); - File lic = new File(userHome, ".ac.datautils.lic"); - if(!lic.exists()) + System.out.println("\nERROR: Invalid action {"+action+"}"); + System.out.println(); + return; + }else + if(action.equalsIgnoreCase("load")) + { + while (params.inputFile==null || params.inputFile.isEmpty()) { - System.out.println(eula); + String tmp = getInputFromUser("Enter inputFile: ", true, false); + if(tmp!=null) + { + File tempFile = validateInputFile(tmp, action); + if(tempFile !=null) + { + params.inputFile = tempFile.toString(); + break; + } + }else + System.out.println("File {"+tmp+"} not found"); System.out.println(); + } + + if (params.dataset==null || params.dataset.isEmpty()) + { + params.dataset = getInputFromUser("Enter dataset name: ", true, false); + } + + if (params.datasetLabel==null || params.datasetLabel.isEmpty()) + { + params.datasetLabel = getInputFromUser("Enter datasetLabel (Optional): ", false, false); + } + + if (params.app==null || params.app.isEmpty()) + { + params.app = getInputFromUser("Enter datasetFolder (Optional): ", false, false); + if(params.app != null && params.app.isEmpty()) + params.app = null; + } + + if (params.fileEncoding==null || params.fileEncoding.isEmpty()) + { while(true) { - String response = DatasetUtils.readInputFromConsole("Do you agree to the above license agreement (Yes/No): "); - if(response!=null && (response.equalsIgnoreCase("yes") || response.equalsIgnoreCase("y"))) - { - FileUtils.writeStringToFile(lic, eula); - return true; - }else if(response!=null && (response.equalsIgnoreCase("no") || response.equalsIgnoreCase("n"))) + params.fileEncoding = getInputFromUser("Enter fileEncoding (default=UTF-8): ", false, false); + if(params.fileEncoding == null || params.fileEncoding.trim().isEmpty()) + params.fileEncoding = "UTF-8"; + try 
+					{
-						return false;
+						Charset.forName(params.fileEncoding);
+						break;
+					} catch (Throwable e) {
 					}
+					System.out.println("\nERROR: Invalid fileEncoding {"+params.fileEncoding+"}");
+					System.out.println();
 				}
-			}else
+			}
+
+			while (params.uploadFormat==null || params.uploadFormat.isEmpty())
 			{
-				return true;
+				params.uploadFormat = getInputFromUser("Enter uploadFormat (csv or binary) (default=binary): ", false, false);
+				if(params.uploadFormat == null || params.uploadFormat.trim().isEmpty())
+					params.uploadFormat = "binary";
+				if(!params.uploadFormat.equalsIgnoreCase("csv") && !params.uploadFormat.equalsIgnoreCase("binary"))
+				{
+					System.out.println("Invalid upload format {"+params.uploadFormat+"}");
+				}else
+				{
+					break;
+				}
+				System.out.println();
 			}
-		}catch(Throwable t)
+
+		}else if(action.equalsIgnoreCase("downloadErrorFile"))
 		{
-			t.printStackTrace();
+			if (params.dataset==null || params.dataset.isEmpty())
+			{
+				params.dataset = getInputFromUser("Enter dataset name: ", true, false);
+			}
+
+		} else if(action.equalsIgnoreCase("detectEncoding"))
+		{
+			while (params.inputFile==null || params.inputFile.isEmpty())
+			{
+				String tmp = getInputFromUser("Enter inputFile: ", true, false);
+				if(tmp!=null)
+				{
+					File tempFile = validateInputFile(tmp, action);
+					if(tempFile !=null)
+					{
+						params.inputFile = tempFile.toString();
+						break;
+					}
+				} else
+					System.out.println("File {"+tmp+"} not found");
+				System.out.println();
+			}
+
+		}else if(action.equalsIgnoreCase("uploadxmd"))
+		{
+			while (params.inputFile==null || params.inputFile.isEmpty())
+			{
+				String tmp = getInputFromUser("Enter inputFile: ", true, false);
+				if(tmp!=null)
+				{
+					File tempFile = validateInputFile(tmp, action);
+					if(tempFile !=null)
+					{
+						params.inputFile = tempFile.toString();
+						break;
+					}
+				}else
+					System.out.println("File {"+tmp+"} not found");
+				System.out.println();
+			}
+
+			if (params.dataset==null || params.dataset.isEmpty())
+			{
+				params.dataset = getInputFromUser("Enter dataset name: ", true, false);
+			}
+		} else if(action.equalsIgnoreCase("downloadxmd"))
+		{
+			if (params.dataset==null || params.dataset.isEmpty())
+			{
+				params.dataset = getInputFromUser("Enter dataset name: ", true, false);
+			}
+		}else if(action.equalsIgnoreCase("augment"))
+		{
+
+		}else if(action.equalsIgnoreCase("extract"))
+		{
+			while (params.rootObject==null || params.rootObject.isEmpty())
+			{
+				String tmp = getInputFromUser("Enter root SObject name for Extract: ", true, false);
+				Map objectList = null;
+				try {
+					objectList = SfdcUtils.getObjectList(partnerConnection, Pattern.compile("\\b"+tmp+"\\b"), false);
+				} catch (ConnectionException e) {
+				}
+				if(objectList==null || objectList.size()==0)
+				{
+					System.out.println("\nError: Object {"+tmp+"} not found");
+					System.out.println();
+				}else
+				{
+					params.rootObject = tmp;
+					break;
+				}
+			}
+
+		}else
+		{
+			printUsage();
+			System.out.println("\nERROR: Invalid action {"+action+"}");
 		}
-		return false;
 	}
 
-	public static String eula = "/*\n"
-		+"* Copyright (c) 2014, salesforce.com, inc.\n"
-		+"* All rights reserved.\n"
-		+"*\n"
-		+"* Redistribution and use in source and binary forms, with or without modification, are permitted provided\n"
-		+"* that the following conditions are met:\n"
-		+"* \n"
-		+"* Redistributions of source code must retain the above copyright notice, this list of conditions and the \n"
-		+"* following disclaimer.\n"
disclaimer.\n" - +"* \n" - +"* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and \n" - +"* the following disclaimer in the documentation and/or other materials provided with the distribution. \n" - +"* \n" - +"* Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or \n" - +"* promote products derived from this software without specific prior written permission." - +"* \n" - +"* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED \n" - +"* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A \n" - +"* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR \n" - +"* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED \n" - +"* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) \n" - +"* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING \n" - +"* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE \n" - +"* POSSIBILITY OF SUCH DAMAGE. \n" - +"*/\n"; + public static File validateInputFile(String inputFile, String action) + { + File temp = null; + if (inputFile!=null) + { + temp = new File(inputFile); + if(!temp.exists() && !temp.canRead()) + { + System.out.println("\nERROR: inputFile {"+temp+"} not found"); + return null; + } + + String ext = FilenameUtils.getExtension(temp.getName()); + if(ext == null || !(ext.equalsIgnoreCase("csv") || ext.equalsIgnoreCase("bin") || ext.equalsIgnoreCase("gz") || ext.equalsIgnoreCase("json"))) + { + System.out.println("\nERROR: inputFile does not have valid extension"); + return null; + } + + if(action.equalsIgnoreCase("load")) + { + byte[] binHeader = new byte[5]; + if(ext.equalsIgnoreCase("bin") || ext.equalsIgnoreCase("gz")) + { + try { + InputStream fis = new FileInputStream(temp); + if(ext.equalsIgnoreCase("gz")) + fis = new GzipCompressorInputStream(new FileInputStream(temp)); + int cnt = fis.read(binHeader); + if(fis!=null) + { + IOUtils.closeQuietly(fis); + } + if(cnt<5) + { + System.out.println("\nERROR: inputFile {"+temp+"} in not valid"); + return null; + } + } catch (FileNotFoundException e) { + e.printStackTrace(); + System.out.println("\nERROR: inputFile {"+temp+"} not found"); + return null; + } catch (IOException e) { + e.printStackTrace(); + System.out.println("\nERROR: inputFile {"+temp+"} in not valid"); + return null; + } + + if(!EbinFormatWriter.isValidBin(binHeader)) + { + if(ext.equalsIgnoreCase("bin")) + { + System.out.println("\nERROR: inputFile {"+temp+"} in not valid binary file"); + return null; + } + } + } + } + + if(ext.equalsIgnoreCase("json")) + { + try + { + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + mapper.readValue(temp, Object.class); + }catch(Throwable t) + { + System.out.println("\nERROR: inputFile {"+temp+"} is not valid json, Error: " + t.getMessage()); + return null; + } + } + + } + return temp; + + } + + + private static void createListener(PartnerConnection partnerConnection, + DatasetUtilParams params) { + String response = getInputFromUser("Do you want to create a FileListener for above file upload (Yes/No): ", false, false); + if(response!=null && (response.equalsIgnoreCase("Y") || 
response.equalsIgnoreCase("yes"))) + { + FileListener fileListener = new FileListener(); + fileListener.setApp(params.app); + fileListener.setCea(params.codingErrorAction); + fileListener.setDataset(params.dataset); + fileListener.setDatasetLabel(params.datasetLabel); + fileListener.setFilecharset(params.fileEncoding); + fileListener.setInputFileDirectory(params.inputFile); +// fileListener.setInputFilePattern(inputFilePattern); + fileListener.setOperation(params.Operation); + fileListener.setUploadFormat(params.uploadFormat); + fileListener.setUseBulkAPI(params.useBulkAPI); +// fileListener.setFileAge(fileAge); +// fileListener.setPollingInterval(); + try { + FileListenerThread.moveInputFile(new File(params.inputFile), System.currentTimeMillis(), true); + FileListenerUtil.addAndStartListener(fileListener, partnerConnection); + } catch (Throwable e) { + e.printStackTrace(); + } + } + } + + + public static void printBanner() + { + for(int i=0;i<5;i++) + System.out.println(); + System.out.println("\n\t\t****************************************"); + System.out.println("\t\tSalesforce Analytics Cloud Dataset Utils"); + System.out.println("\t\t****************************************\n"); + } + + } diff --git a/src/main/java/com/sforce/dataset/DatasetUtilParams.java b/src/main/java/com/sforce/dataset/DatasetUtilParams.java new file mode 100644 index 0000000..a8e6919 --- /dev/null +++ b/src/main/java/com/sforce/dataset/DatasetUtilParams.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2014, salesforce.com, inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and + * the following disclaimer in the documentation and/or other materials provided with the distribution. + * + * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +package com.sforce.dataset; + +import java.nio.charset.CodingErrorAction; + +public class DatasetUtilParams { + String dataset = null; + String datasetLabel = null; + String app = null; + String username = null; + String password = null; + String token = null; + String sessionId = null; + String endpoint = null; + String inputFile = null; + String jsonConfig = null; + String rootObject = null; + String fileEncoding = null; + String uploadFormat = null; + String Operation = null; + int rowLimit = 0; + boolean useBulkAPI = false; + CodingErrorAction codingErrorAction = CodingErrorAction.REPORT; +} diff --git a/src/main/java/com/sforce/dataset/flow/DataFlow.java b/src/main/java/com/sforce/dataset/flow/DataFlow.java index 7de1afd..c1b6e33 100644 --- a/src/main/java/com/sforce/dataset/flow/DataFlow.java +++ b/src/main/java/com/sforce/dataset/flow/DataFlow.java @@ -31,17 +31,17 @@ public class DataFlow { public String _type = null; public Map _lastModifiedBy; -// : { -// "_type": "user", -// "profilePhotoUrl": "https://c.gus.content.force.com/profilephoto/005/T", -// "name": "Integration User", -// "_uid": "005B00000010TFBIA2" +// { +// String "_type" +// String "profilePhotoUrl" +// String "name" +// String "_uid" // }, public String nextRun; - public String _url;//: "/insights/internal_api/v1.0/esObject/workflow/0BwB00000004CClKAM/json", - public String name;//: "SalesEdgeEltWorkflow", - public String MasterLabel;//: "SalesEdge ELT Workflow", - public int RefreshFrequencySec;//": 86400, + public String _url; + public String name; + public String MasterLabel; + public int RefreshFrequencySec; public String _uid; public String WorkflowType; @SuppressWarnings("rawtypes") diff --git a/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java b/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java index 2d41b26..1f00a23 100644 --- a/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java +++ b/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java @@ -94,8 +94,6 @@ public static void uploadDataFlow(PartnerConnection partnerConnection, DataFlow URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), df._url, null,null); HttpPatch httpPatch = new HttpPatch(patchURI); -// httpPatch.addHeader("Accept", "*/*"); -// httpPatch.addHeader("Content-Type", "application/json"); Map map = new LinkedHashMap(); map.put("workflowDefinition", df.workflowDefinition); @@ -142,13 +140,6 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w URI listEMURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), "/insights/internal_api/v1.0/esObject/workflow", null,null); HttpGet listEMPost = new HttpGet(listEMURI); -// MultipartEntityBuilder builder = MultipartEntityBuilder.create(); -// builder.addTextBody("jsonMetadata", "{\"_alias\":\""+EM_NAME+"\",\"_type\":\"ebin\"}", ContentType.TEXT_PLAIN); -// builder.addBinaryBody(binFile.getName(), binFile, -// ContentType.APPLICATION_OCTET_STREAM, binFile.getName()); -// HttpEntity multipart = builder.build(); -// -// uploadFile.setEntity(multipart); listEMPost.setConfig(requestConfig); listEMPost.addHeader("Authorization","OAuth "+sessionID); CloseableHttpResponse emresponse = httpClient.execute(listEMPost); @@ -165,8 +156,6 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w if(emList!=null && !emList.isEmpty()) { -// try -// { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); Map res = 
diff --git a/src/main/java/com/sforce/dataset/flow/DataFlow.java b/src/main/java/com/sforce/dataset/flow/DataFlow.java
index 7de1afd..c1b6e33 100644
--- a/src/main/java/com/sforce/dataset/flow/DataFlow.java
+++ b/src/main/java/com/sforce/dataset/flow/DataFlow.java
@@ -31,17 +31,17 @@ public class DataFlow {
 	public String _type = null;
 	public Map _lastModifiedBy;
-//	: {
-//		"_type": "user",
-//		"profilePhotoUrl": "https://c.gus.content.force.com/profilephoto/005/T",
-//		"name": "Integration User",
-//		"_uid": "005B00000010TFBIA2"
+//	{
+//		String "_type"
+//		String "profilePhotoUrl"
+//		String "name"
+//		String "_uid"
 //	},
 	public String nextRun;
-	public String _url;//: "/insights/internal_api/v1.0/esObject/workflow/0BwB00000004CClKAM/json",
-	public String name;//: "SalesEdgeEltWorkflow",
-	public String MasterLabel;//: "SalesEdge ELT Workflow",
-	public int RefreshFrequencySec;//": 86400,
+	public String _url;
+	public String name;
+	public String MasterLabel;
+	public int RefreshFrequencySec;
 	public String _uid;
 	public String WorkflowType;
 	@SuppressWarnings("rawtypes")
diff --git a/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java b/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java
index 2d41b26..1f00a23 100644
--- a/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java
+++ b/src/main/java/com/sforce/dataset/flow/DataFlowUtil.java
@@ -94,8 +94,6 @@ public static void uploadDataFlow(PartnerConnection partnerConnection, DataFlow
 		URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), df._url, null,null);
 		HttpPatch httpPatch = new HttpPatch(patchURI);
-//		httpPatch.addHeader("Accept", "*/*");
-//		httpPatch.addHeader("Content-Type", "application/json");
 
 		Map map = new LinkedHashMap();
 		map.put("workflowDefinition", df.workflowDefinition);
@@ -142,13 +140,6 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 		URI listEMURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), "/insights/internal_api/v1.0/esObject/workflow", null,null);
 		HttpGet listEMPost = new HttpGet(listEMURI);
-//		MultipartEntityBuilder builder = MultipartEntityBuilder.create();
-//		builder.addTextBody("jsonMetadata", "{\"_alias\":\""+EM_NAME+"\",\"_type\":\"ebin\"}", ContentType.TEXT_PLAIN);
-//		builder.addBinaryBody(binFile.getName(), binFile,
-//				ContentType.APPLICATION_OCTET_STREAM, binFile.getName());
-//		HttpEntity multipart = builder.build();
-//
-//		uploadFile.setEntity(multipart);
 		listEMPost.setConfig(requestConfig);
 		listEMPost.addHeader("Authorization","OAuth "+sessionID);
 		CloseableHttpResponse emresponse = httpClient.execute(listEMPost);
@@ -165,8 +156,6 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 		if(emList!=null && !emList.isEmpty())
 		{
-//			try
-//			{
 			ObjectMapper mapper = new ObjectMapper();
 			mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 			Map res = mapper.readValue(emList, Map.class);
@@ -200,14 +189,12 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 			{
 				t.printStackTrace();
 			}
-//			File dataflowFile = new File(dataDir,df.name+".json");
-//			File dataFlowFile = new File(dataDir,df.name+"_"+sdf.format(new Date())+".json");
 
 			CloseableHttpClient httpClient1 = HttpClients.createDefault();
 			URI listEMURI1 = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), df._url, null,null);
 			HttpGet listEMPost1 = new HttpGet(listEMURI1);
-			// System.out.println("Downloading file {"+filename+"} from url {"+listEMURI1+"}");
+//			System.out.println("Downloading file {"+filename+"} from url {"+listEMURI1+"}");
//			System.out.println("Downloading file {"+dataFlowFile+"}");
 			listEMPost1.setConfig(requestConfig);
 			listEMPost1.addHeader("Authorization","OAuth "+sessionID);
@@ -217,11 +204,8 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 			reasonPhrase = emresponse1.getStatusLine().getReasonPhrase();
 			statusCode = emresponse1.getStatusLine().getStatusCode();
 			if (statusCode != HttpStatus.SC_OK) {
-//				System.err.println("Method failed: " + reasonPhrase);
 				throw new IOException(String.format("Dataflow %s download failed: %d %s", dataFlowFile,statusCode,reasonPhrase));
 			}
-			// System.out.println(String.format("statusCode: %d %s", statusCode,reasonPhrase));
-			// System.out.println(String.format("reasonPhrase: %s", reasonPhrase));
 
 			HttpEntity emresponseEntity1 = emresponse1.getEntity();
 			InputStream emis1 = emresponseEntity1.getContent();
@@ -232,8 +216,6 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 			if(dataFlowJson!=null && !dataFlowJson.isEmpty())
 			{
-//				try
-//				{
 				Map res2 = mapper.readValue(dataFlowJson, Map.class);
//				mapper.writerWithDefaultPrettyPrinter().writeValue(System.out, res);
 				List flows2 = (List) res2.get("result");
@@ -254,19 +236,10 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 				{
 					throw new IOException(String.format("Dataflow download failed, invalid server response %s",dataFlowJson));
 				}
-//				} catch (Throwable t) {
-//					t.printStackTrace();
-//				}
 			}else
 			{
 				throw new IOException(String.format("Dataflow download failed, invalid server response %s",dataFlowJson));
 			}
-//			BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(dataFlowFile));
-//			JsonNode node = mapper.readTree(df.workflowDefinition);
-//			mapper.writerWithDefaultPrettyPrinter().writeValue(dataFlowFile, node);
-//			df.workflowDefinition = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(node);
-//			IOUtils.copy(emis1, out);
-//			out.close();
 		}else
 		{
 			throw new IOException(String.format("Dataflow download failed, invalid server response %s",emList));
@@ -281,16 +254,11 @@ public static DataFlow getDataFlow(PartnerConnection partnerConnection, String w
 		{
 			throw new IOException(String.format("Dataflow download failed, invalid server response %s",emList));
 		}
-//		} catch (Throwable t) {
-//			t.printStackTrace();
-//		}
-		//System.err.println(emList);
 		}
-		System.err.println("Dataflow {"+workflowName+"} not found");
+		System.out.println("Dataflow {"+workflowName+"} not found");
 		return null;
 	}
 
-	@SuppressWarnings({ "rawtypes", "unchecked" })
 	public static void startDataFlow(PartnerConnection partnerConnection, DataFlow df) throws ConnectionException, IllegalStateException, IOException, URISyntaxException
 	{
 		System.out.println();
@@ -310,16 +278,7 @@ public static void startDataFlow(PartnerConnection partnerConnection, DataFlow d
 		URI patchURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), df._url.replace("json", "start"), null,null);
 		HttpPut httput = new HttpPut(patchURI);
-//		httpPatch.addHeader("Accept", "*/*");
-//		httpPatch.addHeader("Content-Type", "application/json");
-
-//		Map map = new LinkedHashMap();
-//		map.put("_uid", df._uid);
-//		ObjectMapper mapper = new ObjectMapper();
-//		StringEntity entity = new StringEntity(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(map), "UTF-8");
-//		entity.setContentType("application/json");
 		httput.setConfig(requestConfig);
-//		httpPatch.setEntity(entity);
 		httput.addHeader("Authorization","OAuth "+sessionID);
 		CloseableHttpResponse emresponse = httpClient.execute(httput);
 		String reasonPhrase = emresponse.getStatusLine().getReasonPhrase();
@@ -331,7 +290,6 @@ public static void startDataFlow(PartnerConnection partnerConnection, DataFlow d
 		InputStream emis = emresponseEntity.getContent();
 		@SuppressWarnings("unused")
 		String emList = IOUtils.toString(emis, "UTF-8");
-//		System.out.println(emList);
 		System.out.println("Dataflow {"+df.name+"} succesfully started");
 		emis.close();
 		httpClient.close();
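DataFlowUtil and the monitor class added below share one request pattern: derive the instance host from the partner SOAP endpoint, then call an /insights/internal_api/v1.0/... path with the session id sent as an OAuth header. A condensed sketch of just that pattern, listing workflows here — the helper name is hypothetical and error handling is trimmed:

```java
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

import com.sforce.soap.partner.PartnerConnection;
import com.sforce.ws.ConnectorConfig;

public class InternalApiSketch {
    // partnerConnection must already be authenticated.
    public static String listWorkflows(PartnerConnection partnerConnection) throws Exception {
        ConnectorConfig config = partnerConnection.getConfig();
        URI u = new URI(config.getServiceEndpoint());
        // Same host as the SOAP endpoint, different path.
        URI listURI = new URI(u.getScheme(), u.getUserInfo(), u.getHost(), u.getPort(),
                "/insights/internal_api/v1.0/esObject/workflow", null, null);
        CloseableHttpClient httpClient = HttpClients.createDefault();
        try {
            HttpGet get = new HttpGet(listURI);
            // The SOAP session id doubles as an OAuth bearer token for the REST call.
            get.addHeader("Authorization", "OAuth " + config.getSessionId());
            CloseableHttpResponse response = httpClient.execute(get);
            try {
                return IOUtils.toString(response.getEntity().getContent(), "UTF-8");
            } finally {
                response.close();
            }
        } finally {
            httpClient.close();
        }
    }
}
```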
+ */ +package com.sforce.dataset.flow.monitor; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpStatus; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.sforce.soap.partner.PartnerConnection; +import com.sforce.ws.ConnectionException; +import com.sforce.ws.ConnectorConfig; + +public class DataFlowMonitorUtil { + + + public static void getJobsAndErrorFiles(PartnerConnection partnerConnection, String datasetName) throws ConnectionException, IllegalStateException, IOException, URISyntaxException + { + List jobs = getDataFlowJobs(partnerConnection, datasetName); + if(jobs!=null) + { + for(JobEntry job:jobs) + { + getJobErrorFile(partnerConnection, datasetName, job._uid); + } + } + } + + + @SuppressWarnings({ "rawtypes", "unchecked" }) + public static List getDataFlowJobs(PartnerConnection partnerConnection, String datasetName) throws ConnectionException, URISyntaxException, ClientProtocolException, IOException + { + List jobsList = new LinkedList(); + System.out.println(); + ConnectorConfig config = partnerConnection.getConfig(); + String sessionID = config.getSessionId(); + String serviceEndPoint = config.getServiceEndpoint(); + CloseableHttpClient httpClient = HttpClients.createDefault(); + + RequestConfig requestConfig = RequestConfig.custom() + .setSocketTimeout(60000) + .setConnectTimeout(60000) + .setConnectionRequestTimeout(60000) + .build(); + + URI u = new URI(serviceEndPoint); + + URI listEMURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), "/insights/internal_api/v1.0/esObject/jobs", null,null); + HttpGet listEMPost = new HttpGet(listEMURI); + + listEMPost.setConfig(requestConfig); + listEMPost.addHeader("Authorization","OAuth "+sessionID); + System.out.println("Fetching job list from server, this may take a minute..."); + CloseableHttpResponse emresponse = httpClient.execute(listEMPost); + String reasonPhrase = emresponse.getStatusLine().getReasonPhrase(); + int statusCode = emresponse.getStatusLine().getStatusCode(); + if (statusCode != HttpStatus.SC_OK) { + throw new IOException(String.format("getDataFlowJobs failed: %d %s", datasetName,statusCode,reasonPhrase)); + } + HttpEntity emresponseEntity = emresponse.getEntity(); + InputStream emis = emresponseEntity.getContent(); + String emList = IOUtils.toString(emis, "UTF-8"); + emis.close(); + httpClient.close(); + + if(emList!=null && !emList.isEmpty()) + { + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + Map res = mapper.readValue(emList, Map.class); +// mapper.writerWithDefaultPrettyPrinter().writeValue(System.out, res); + List jobs = (List) res.get("result"); + if(jobs != null && !jobs.isEmpty()) + { + for(Map job:jobs) + { + String _type = (String) job.get("_type"); + if(_type != null && 
_type.equals("jobs")) + { + String workflowName = (String) job.get("workflowName"); + if(workflowName != null && (datasetName == null || datasetName.isEmpty() || workflowName.startsWith(datasetName))) + { + JobEntry jobEntry = new JobEntry(); + jobEntry._createdDateTime = (Integer) job.get("_createdDateTime"); + jobEntry._type = (String) job.get("_type"); + jobEntry._uid = (String) job.get("_uid"); + jobEntry.duration = (Integer) job.get("duration"); + jobEntry.endTime = (String) job.get("endTime"); + jobEntry.endTimeEpoch = (Long) job.get("endTimeEpoch"); + jobEntry.errorMessage = (String) job.get("errorMessage"); + jobEntry.nodeUrl = (String) job.get("nodeUrl"); + jobEntry.startTime = (String) job.get("startTime"); + jobEntry.startTimeEpoch = (Long) job.get("startTimeEpoch"); + jobEntry.status = (Integer) job.get("status"); + jobEntry.type = (String) job.get("type"); + jobEntry.workflowName = (String) job.get("workflowName"); + jobsList.add(jobEntry); + } + }else + { + throw new IOException(String.format("Dataflow job list download failed, invalid server response %s",emList)); + } + } //end for + }else + { + throw new IOException(String.format("Dataflow job list download failed, invalid server response %s",emList)); + } + } + System.out.println("Found {"+jobsList.size()+"} jobs for dataset {"+datasetName+"}"); + return jobsList; + } + + public static boolean getJobErrorFile(PartnerConnection partnerConnection, String datasetName, String jobTrackerid) throws ConnectionException, URISyntaxException, ClientProtocolException, IOException + { + if(jobTrackerid == null || jobTrackerid.trim().isEmpty()) + { + System.out.println("Job TrackerId cannot be null"); + return false; + } + + System.out.println(); + ConnectorConfig config = partnerConnection.getConfig(); + String sessionID = config.getSessionId(); + String serviceEndPoint = config.getServiceEndpoint(); + CloseableHttpClient httpClient = HttpClients.createDefault(); + + RequestConfig requestConfig = RequestConfig.custom() + .setSocketTimeout(60000) + .setConnectTimeout(60000) + .setConnectionRequestTimeout(60000) + .build(); + + URI u = new URI(serviceEndPoint); + + URI listEMURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), "/insights/internal_api/v1.0/jobTrackerHeartbeat/{0}/nodes/digest/nodeerrorlog".replace("{0}", jobTrackerid), null,null); + HttpGet listEMPost = new HttpGet(listEMURI); + + listEMPost.setConfig(requestConfig); + listEMPost.addHeader("Authorization","OAuth "+sessionID); + System.out.println("Fetching error log for job {"+jobTrackerid+"} from server..."); + CloseableHttpResponse emresponse = httpClient.execute(listEMPost); + String reasonPhrase = emresponse.getStatusLine().getReasonPhrase(); + int statusCode = emresponse.getStatusLine().getStatusCode(); + if (statusCode != HttpStatus.SC_OK) { + throw new IOException(String.format("getDataFlowJobs failed: %d %s", datasetName,statusCode,reasonPhrase)); + } + HttpEntity emresponseEntity = emresponse.getEntity(); + InputStream emis = emresponseEntity.getContent(); + File outfile = new File(datasetName+"_"+jobTrackerid+"_error.csv"); + System.out.println("fetching file {"+outfile+"}. Content-length {"+emresponseEntity.getContentLength()+"}"); + BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(outfile)); + IOUtils.copy(emis, out); + out.close(); + emis.close(); + httpClient.close(); + System.out.println("file {"+outfile+"} downloaded. 
Size{"+outfile.length()+"}\n"); + return true; + +// if(emList!=null && !emList.isEmpty()) +// { +// ObjectMapper mapper = new ObjectMapper(); +// mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); +// Map res = mapper.readValue(emList, Map.class); +// mapper.writerWithDefaultPrettyPrinter().writeValue(System.out, res); +// List jobs = (List) res.get("result"); +// if(jobs != null && !jobs.isEmpty()) +// { +// for(Map job:jobs) +// { +// String _type = (String) job.get("_type"); +// if(_type != null && _type.equals("jobs")) +// { +// String workflowName = (String) job.get("workflowName"); +// if(workflowName != null && (datasetName == null || datasetName.isEmpty() || workflowName.startsWith(datasetName))) +// { +// JobEntry jobEntry = new JobEntry(); +// jobEntry._createdDateTime = (Integer) job.get("_createdDateTime"); +// jobEntry._type = (String) job.get("_type"); +// jobEntry._uid = (String) job.get("_uid"); +// jobEntry.duration = (Integer) job.get("duration"); +// jobEntry.endTime = (String) job.get("endTime"); +// jobEntry.endTimeEpoch = (Long) job.get("endTimeEpoch"); +// jobEntry.errorMessage = (String) job.get("errorMessage"); +// jobEntry.nodeUrl = (String) job.get("nodeUrl"); +// jobEntry.startTime = (String) job.get("startTime"); +// jobEntry.startTimeEpoch = (Long) job.get("startTimeEpoch"); +// jobEntry.status = (Integer) job.get("status"); +// jobEntry.type = (String) job.get("type"); +// jobEntry.workflowName = (String) job.get("workflowName"); +// jobsList.add(jobEntry); +// } +// }else +// { +// throw new IOException(String.format("Dataflow job list download failed, invalid server response %s",emList)); +// } +// } //end for +// }else +// { +// throw new IOException(String.format("Dataflow job list download failed, invalid server response %s",emList)); +// } +// } + + + } + + +} diff --git a/src/main/java/com/sforce/dataset/flow/monitor/JobEntry.java b/src/main/java/com/sforce/dataset/flow/monitor/JobEntry.java new file mode 100644 index 0000000..ae656c0 --- /dev/null +++ b/src/main/java/com/sforce/dataset/flow/monitor/JobEntry.java @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2014, salesforce.com, inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and + * the following disclaimer in the documentation and/or other materials provided with the distribution. + * + * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +package com.sforce.dataset.flow.monitor; + +public class JobEntry { + String errorMessage = null; + long startTimeEpoch = 0; + int status = 0; + long endTimeEpoch = 0; + String _uid = null; + String type = null; + String endTime = null; + String startTime = null; + String _type = null; + long duration= 0; + long _createdDateTime = 0; + String workflowName = null; + String nodeUrl = null; +} diff --git a/src/main/java/com/sforce/dataset/loader/DatasetLoader.java b/src/main/java/com/sforce/dataset/loader/DatasetLoader.java index d46801d..7e4ca16 100644 --- a/src/main/java/com/sforce/dataset/loader/DatasetLoader.java +++ b/src/main/java/com/sforce/dataset/loader/DatasetLoader.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.PrintStream; import java.nio.charset.Charset; import java.nio.charset.CodingErrorAction; import java.nio.charset.MalformedInputException; @@ -101,6 +102,8 @@ public class DatasetLoader { public static final NumberFormat nf = NumberFormat.getIntegerInstance(); private static int MAX_NUM_UPLOAD_THREADS = 3; + +// PrintStream logger = null; @SuppressWarnings("deprecation") @@ -108,7 +111,7 @@ public static boolean uploadDataset(String inputFileString, String uploadFormat, CodingErrorAction codingErrorAction, Charset inputFileCharset, String datasetAlias, String datasetFolder,String datasetLabel, String Operation, boolean useBulkAPI, - PartnerConnection partnerConnection) + PartnerConnection partnerConnection, PrintStream logger) { File archiveDir = null; File datasetArchiveDir = null; @@ -126,36 +129,44 @@ public static boolean uploadDataset(String inputFileString, if(codingErrorAction==null) codingErrorAction = CodingErrorAction.REPORT; + + if(inputFileCharset==null) + inputFileCharset = Charset.forName("UTF-8"); + + if(Operation == null) + { + Operation = "Overwrite"; + } try { inputFile = new File(inputFileString); if(!inputFile.exists()) { - System.err.println("Error: File {"+inputFile.getAbsolutePath()+"} not found"); + logger.println("Error: File {"+inputFile.getAbsolutePath()+"} not found"); return false; } ExternalFileSchema schema = null; - System.out.println("\n*******************************************************************************"); + logger.println("\n*******************************************************************************"); if(FilenameUtils.getExtension(inputFile.getName()).equalsIgnoreCase("csv")) { - schema = ExternalFileSchema.init(inputFile, inputFileCharset); + schema = ExternalFileSchema.init(inputFile, inputFileCharset, logger); if(schema==null) { - System.err.println("Failed to parse schema file {"+ ExternalFileSchema.getSchemaFile(inputFile) +"}"); + logger.println("Failed to parse schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}"); return false; } }else { - schema = ExternalFileSchema.load(inputFile, inputFileCharset); + schema = ExternalFileSchema.load(inputFile, inputFileCharset, logger); 
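
Throughout DatasetLoader this patch replaces System.out/System.err with an injected java.io.PrintStream, so the caller decides where each upload's log output goes. A minimal caller-side sketch of the new uploadDataset signature (the class name, alias, paths and log-file name here are illustrative, not part of the patch):

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.nio.charset.CodingErrorAction;

import com.sforce.dataset.loader.DatasetLoader;
import com.sforce.soap.partner.PartnerConnection;

public class UploadWithLogExample {
    // Sketch: run one upload, sending its progress output to <alias>_upload.log
    // instead of the console.
    public static boolean run(PartnerConnection conn, String csvPath, String alias) throws Exception {
        PrintStream logger = new PrintStream(
                new FileOutputStream(new File(alias + "_upload.log"), true), true, "UTF-8");
        try {
            return DatasetLoader.uploadDataset(csvPath, "binary", CodingErrorAction.REPORT,
                    Charset.forName("UTF-8"), alias, null /* datasetFolder */, alias /* datasetLabel */,
                    "Overwrite", false /* useBulkAPI */, conn, logger);
        } finally {
            logger.close();
        }
    }
}
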
if(schema==null) { - System.err.println("Failed to load schema file {"+ ExternalFileSchema.getSchemaFile(inputFile) +"}"); + logger.println("Failed to load schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}"); return false; } } - System.out.println("*******************************************************************************\n"); + logger.println("*******************************************************************************\n"); if(datasetAlias==null||datasetAlias.trim().isEmpty()) @@ -172,9 +183,9 @@ public static boolean uploadDataset(String inputFileString, //Validate access to the API before going any further - if(!DatasetLoader.checkAPIAccess(partnerConnection)) + if(!DatasetLoader.checkAPIAccess(partnerConnection, logger)) { - System.err.println("Error: you do not have access to Analytics Cloud API. Please contact salesforce support"); + logger.println("Error: you do not have access to Analytics Cloud API. Please contact salesforce support"); return false; } @@ -197,29 +208,29 @@ public static boolean uploadDataset(String inputFileString, } //Insert header - File metadataJsonFile = ExternalFileSchema.getSchemaFile(inputFile); + File metadataJsonFile = ExternalFileSchema.getSchemaFile(inputFile, logger); if(metadataJsonFile == null || !metadataJsonFile.canRead()) { - System.err.println("Error: metadata Json file {"+metadataJsonFile+"} not found"); + logger.println("Error: metadata Json file {"+metadataJsonFile+"} not found"); return false; } - String hdrId = getLastIncompleteFileHdr(partnerConnection, datasetAlias); + String hdrId = getLastIncompleteFileHdr(partnerConnection, datasetAlias, logger); if(hdrId==null) { - hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, FileUtils.readFileToByteArray(metadataJsonFile), uploadFormat, Operation); + hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, FileUtils.readFileToByteArray(metadataJsonFile), uploadFormat, Operation, logger); }else { - System.out.println("Record {"+hdrId+"} is being reused from InsightsExternalData"); + logger.println("Record {"+hdrId+"} is being reused from InsightsExternalData"); updateHdrJson = true; } if(hdrId ==null || hdrId.isEmpty()) { - System.err.println("Error: failed to insert header row into the saleforce SObject"); + logger.println("Error: failed to insert header row into the saleforce SObject"); return false; } - inputFile = CsvExternalSort.sortFile(inputFile, inputFileCharset, false, 1); + inputFile = CsvExternalSort.sortFile(inputFile, inputFileCharset, false, 1, schema); //Create the Bin file // File binFile = new File(csvFile.getParent(), datasetName + ".bin"); @@ -228,9 +239,9 @@ public static boolean uploadDataset(String inputFileString, { if(!FilenameUtils.getExtension(inputFile.getName()).equalsIgnoreCase("gz") || !FilenameUtils.getExtension(inputFile.getName()).equalsIgnoreCase("zip")) { - System.out.println("\n*******************************************************************************"); - System.out.println("Input file does not have '.csv' extension. Assuming input file is 'ebin' format"); - System.out.println("*******************************************************************************\n"); + logger.println("\n*******************************************************************************"); + logger.println("Input file does not have '.csv' extension. 
Assuming input file is 'ebin' format"); + logger.println("*******************************************************************************\n"); } } @@ -256,23 +267,21 @@ public static boolean uploadDataset(String inputFileString, long successRowCount = 0; long errorRowCount = 0; long startTime = System.currentTimeMillis(); - EbinFormatWriter w = new EbinFormatWriter(out, schema.objects.get(0).fields.toArray(new FieldType[0])); - ErrorWriter ew = new ErrorWriter(inputFile,","); + EbinFormatWriter ebinWriter = new EbinFormatWriter(out, schema.objects.get(0).fields.toArray(new FieldType[0]), logger); + ErrorWriter errorWriter = new ErrorWriter(inputFile,","); CsvListReader reader = new CsvListReader(new InputStreamReader(new BOMInputStream(new FileInputStream(inputFile), false), DatasetUtils.utf8Decoder(codingErrorAction , inputFileCharset )), CsvPreference.STANDARD_PREFERENCE); - WriterThread writer = new WriterThread(q, w, ew); + WriterThread writer = new WriterThread(q, ebinWriter, errorWriter, logger); Thread th = new Thread(writer,"Writer-Thread"); th.setDaemon(true); th.start(); try { - @SuppressWarnings("unused") - String[] header = reader.getHeader(true); boolean hasmore = true; - System.out.println("\n*******************************************************************************"); - System.out.println("File: "+inputFile+", being digested to file: "+gzbinFile); - System.out.println("*******************************************************************************\n"); + logger.println("\n*******************************************************************************"); + logger.println("File: "+inputFile.getName()+", being digested to file: "+gzbinFile.getName()); + logger.println("*******************************************************************************\n"); List row = null; while (hasmore) { @@ -282,6 +291,8 @@ public static boolean uploadDataset(String inputFileString, if(row!=null && !writer.isDone()) { totalRowCount++; + if(totalRowCount==1) + continue; if(row.size()!=0 ) { q.put(row.toArray(new String[row.size()])); @@ -294,76 +305,62 @@ public static boolean uploadDataset(String inputFileString, { // if(errorRowCount==0) // { -// System.err.println(); +// logger.println(); // } - System.err.println("Row {"+totalRowCount+"} has error {"+t+"}"); + logger.println("Line {"+(totalRowCount)+"} has error {"+t+"}"); if(t instanceof MalformedInputException) { - while(!q.isEmpty()) - { - try - { - Thread.sleep(1000); - }catch(InterruptedException in) - { - in.printStackTrace(); - } - } - + q.put(new String[0]); + int retryCount = 0; while(!writer.isDone()) { - q.put(new String[0]); + retryCount++; try { Thread.sleep(1000); + if(retryCount%10==0) + { + q.put(new String[0]); + logger.println("Waiting for writer to finish"); + } }catch(InterruptedException in) { in.printStackTrace(); } } - System.err.println("\n*******************************************************************************"); - System.err.println("The input file is not utf8 encoded. Please save it as UTF8 file first"); - System.err.println("*******************************************************************************\n"); + logger.println("\n*******************************************************************************"); + logger.println("The input file is not utf8 encoded. 
Please save it as UTF8 file first"); + logger.println("*******************************************************************************\n"); status = false; hasmore = false; } } }//end while - while(!q.isEmpty()) - { - try - { - System.out.println("1 Waiting for writer to finish"); - Thread.sleep(1000); - }catch(InterruptedException in) - { - in.printStackTrace(); - } - } - + int retryCount = 0; + q.put(new String[0]); + retryCount = 0; while(!writer.isDone()) { - q.put(new String[0]); + retryCount++; try { - System.out.println("2 Waiting for writer to finish"); Thread.sleep(1000); + if(retryCount%10==0) + { + q.put(new String[0]); + logger.println("Waiting for writer to finish"); + } }catch(InterruptedException in) { in.printStackTrace(); } } -// } - successRowCount = w.getSuccessRowCount(); + successRowCount = ebinWriter.getSuccessRowCount(); errorRowCount = writer.getErrorRowCount(); }finally { if(reader!=null) reader.close(); - if(w!=null) - w.finish(); - if(ew!=null) - ew.finish(); if(out!=null) out.close(); if(gzos!=null) @@ -376,6 +373,7 @@ public static boolean uploadDataset(String inputFileString, gzos = null; bos = null; fos = null; + ebinWriter = null; } long endTime = System.currentTimeMillis(); digestTime = endTime-startTime; @@ -384,14 +382,23 @@ public static boolean uploadDataset(String inputFileString, if(successRowCount<1) { - System.err.println("All rows failed. Please check {" + ew.getErrorFile() + "} for error rows"); + logger.println("\n*******************************************************************************"); + logger.println("All rows failed. Please check {" + errorWriter.getErrorFile() + "} for error rows"); + logger.println("*******************************************************************************\n"); return false; } - System.out.println("\n*******************************************************************************"); - System.out.println("Total Rows: "+nf.format(totalRowCount)+", Success Rows: "+nf.format(successRowCount)+", Eror Rows: "+nf.format(errorRowCount)); - if(gzbinFile.length()>0) - System.out.println("File: "+inputFile+", Size {"+nf.format(inputFile.length())+"} compressed to file: "+gzbinFile+", Size {"+nf.format(gzbinFile.length())+"} % Compression: "+(inputFile.length()/gzbinFile.length())*100 +"%"+", Digest Time {"+nf.format(digestTime) + "} msecs"); - System.out.println("*******************************************************************************\n"); + if(errorRowCount>1) + { + logger.println("\n*******************************************************************************"); + logger.println(nf.format(errorRowCount) + " Rows failed. 
Please check {" + errorWriter.getErrorFile().getName() + "} for error rows"); + logger.println("*******************************************************************************\n"); + } + + logger.println("\n*******************************************************************************"); + logger.println("Total Rows: "+nf.format(totalRowCount-1)+", Success Rows: "+nf.format(successRowCount)+", Error Rows: "+nf.format(errorRowCount) +", % Compression: "+(inputFile.length()/gzbinFile.length())*100 +"%"+", Digest Time {"+nf.format(digestTime) + "} msecs"); +// if(gzbinFile.length()>0) +// logger.println("File: "+inputFile+", Size {"+nf.format(inputFile.length())+"} compressed to file: "+gzbinFile+", Size {"+nf.format(gzbinFile.length())+"} % Compression: "+(inputFile.length()/gzbinFile.length())*100 +"%"+", Digest Time {"+nf.format(digestTime) + "} msecs"); + logger.println("*******************************************************************************\n"); } finally { if (out != null) { try { @@ -437,7 +444,7 @@ public static boolean uploadDataset(String inputFileString, IOUtils.copy(fis, gzOut); long endTime = System.currentTimeMillis(); if(gzbinFile.length()>0) - System.out.println(" Input File, Size {"+nf.format(inputFile.length())+"} compressed to gz file, Size {"+nf.format(gzbinFile.length())+"} % Compression: "+(inputFile.length()/gzbinFile.length())*100 +"%"+", Compression Time {"+nf.format((endTime-startTime)) + "} msecs"); + logger.println(" Input File, Size {"+nf.format(inputFile.length())+"} compressed to gz file, Size {"+nf.format(gzbinFile.length())+"} % Compression: "+(inputFile.length()/gzbinFile.length())*100 +"%"+", Compression Time {"+nf.format((endTime-startTime)) + "} msecs"); }finally { if(gzOut!=null) @@ -461,7 +468,7 @@ public static boolean uploadDataset(String inputFileString, if(!gzbinFile.exists() || gzbinFile.length()<1) { - System.err.println("Error: File {"+gzbinFile.getAbsolutePath()+"} not found or is zero bytes"); + logger.println("Error: File {"+gzbinFile.getAbsolutePath()+"} not found or is zero bytes"); return false; }else { @@ -479,36 +486,36 @@ public static boolean uploadDataset(String inputFileString, } }else { - System.out.println("Recovering process from last file {"+lastgzbinFile+"} upload"); + logger.println("Recovering process from last file {"+lastgzbinFile+"} upload"); updateHdrJson = false; //The file is already digested, we cannot update the hdr now gzbinFile = lastgzbinFile; } long startTime = System.currentTimeMillis(); - status = uploadEM(gzbinFile, uploadFormat, metadataJsonFile, datasetAlias,datasetFolder, useBulkAPI, partnerConnection, hdrId, datasetArchiveDir, "Overwrite", updateHdrJson); + status = uploadEM(gzbinFile, uploadFormat, metadataJsonFile, datasetAlias,datasetFolder, useBulkAPI, partnerConnection, hdrId, datasetArchiveDir, "Overwrite", updateHdrJson, logger); long endTime = System.currentTimeMillis(); uploadTime = endTime-startTime; } catch(MalformedInputException mie) { - System.err.println("\n*******************************************************************************"); - System.err.println("The input file is not valid utf8 encoded. Please save it as UTF8 file first"); + logger.println("\n*******************************************************************************"); + logger.println("The input file is not valid utf8 encoded. 
Please save it as UTF8 file first"); mie.printStackTrace(); status = false; - System.err.println("*******************************************************************************\n"); + logger.println("*******************************************************************************\n"); } catch (Throwable t) { - System.err.println("\n*******************************************************************************"); + logger.println("\n*******************************************************************************"); t.printStackTrace(); status = false; - System.err.println("*******************************************************************************\n"); + logger.println("*******************************************************************************\n"); } - System.err.println("\n*****************************************************************************************************************"); + logger.println("\n*****************************************************************************************************************"); if(status) - System.err.println("Succesfully uploaded {"+inputFile+"} to Dataset {"+datasetAlias+"} uploadTime {"+nf.format(uploadTime)+"} msecs" ); + logger.println("Successfully uploaded {"+inputFile+"} to Dataset {"+datasetAlias+"} uploadTime {"+nf.format(uploadTime)+"} msecs" ); else - System.err.println("Failed to load {"+inputFile+"} to Dataset {"+datasetAlias+"}"); - System.err.println("*****************************************************************************************************************\n"); + logger.println("Failed to load {"+inputFile+"} to Dataset {"+datasetAlias+"}"); + logger.println("*****************************************************************************************************************\n"); return status; } @@ -519,6 +526,7 @@ public static boolean uploadDataset(String inputFileString, * @param metadataJson The metadata of the file * @param datasetAlias The Alias of the dataset * @param datasetArchiveDir + * @param logger * @param username The Salesforce username * @param password The Salesforce password * @param token The Salesforce security token @@ -526,15 +534,15 @@ public static boolean uploadDataset(String inputFileString, * @return boolean status of the upload * @throws Exception */ - public static boolean uploadEM(File dataFile, String dataFormat, File metadataJson, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean updateHdrJson) throws Exception + public static boolean uploadEM(File dataFile, String dataFormat, File metadataJson, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean updateHdrJson, PrintStream logger) throws Exception { byte[] metadataJsonBytes = null; if(metadataJson != null && metadataJson.canRead()) metadataJsonBytes = FileUtils.readFileToByteArray(metadataJson); else - System.err.println("warning: metadata Json file {"+metadataJson+"} not found"); + logger.println("warning: metadata Json file {"+metadataJson+"} not found"); - return uploadEM(dataFile, dataFormat, metadataJsonBytes, datasetAlias, datasetFolder, useBulk, partnerConnection, hdrId, datasetArchiveDir, Operation, updateHdrJson); + return uploadEM(dataFile, dataFormat, metadataJsonBytes, datasetAlias, datasetFolder, useBulk, partnerConnection, hdrId, datasetArchiveDir, Operation, updateHdrJson, logger); } /** @@ -542,7 +550,8 @@ 
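
The reworked digest loop above stops polling q.isEmpty() and instead enqueues a zero-length String[] as an end-of-input marker that WriterThread consumes. A self-contained sketch of that poison-pill contract, assuming the queue carries String[] rows as in DatasetLoader:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class PoisonPillExample {
    public static void main(String[] args) throws InterruptedException {
        final BlockingQueue<String[]> q = new LinkedBlockingQueue<String[]>();
        Thread consumer = new Thread(new Runnable() {
            public void run() {
                try {
                    String[] row = q.take();
                    while (row != null && row.length != 0) { // zero-length row = poison pill
                        System.out.println("processing " + row[0]);
                        row = q.take();
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }, "Writer-Thread");
        consumer.start();
        q.put(new String[] { "row1" });
        q.put(new String[0]); // signal end of input, as the digest loop now does
        consumer.join();
    }
}
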
public static boolean uploadEM(File dataFile, String dataFormat, File metadataJs * @param dataFormat The format of the file (CSV or Binary) * @param metadataJson The metadata of the file * @param datasetAlias The Alias of the dataset - * @param Operation + * @param logger + * @param operation * @param username The Salesforce username * @param password The Salesforce password * @param token The Salesforce security token @@ -550,7 +559,7 @@ public static boolean uploadEM(File dataFile, String dataFormat, File metadataJs * @return boolean status of the upload * @throws Exception */ - public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadataJsonBytes, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean updateHdrJson) throws Exception + public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadataJsonBytes, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean updateHdrJson, PrintStream logger) throws Exception { BlockingQueue> q = new LinkedBlockingQueue>(); LinkedList existingFileParts = new LinkedList(); @@ -563,31 +572,31 @@ public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadata DatasetLoader eu = new DatasetLoader(); - System.out.println("\n*******************************************************************************"); + logger.println("\n*******************************************************************************"); if(datasetFolder != null && datasetFolder.trim().length()!=0) { - System.out.println("Uploading dataset {"+datasetAlias+"} to folder {" + datasetFolder + "}"); + logger.println("Uploading dataset {"+datasetAlias+"} to folder {" + datasetFolder + "}"); }else { - System.out.println("Uploading dataset {"+datasetAlias+"} to folder {" + partnerConnection.getUserInfo().getUserId() +"}"); + logger.println("Uploading dataset {"+datasetAlias+"} to folder {" + partnerConnection.getUserInfo().getUserId() +"}"); } - System.out.println("*******************************************************************************\n"); + logger.println("*******************************************************************************\n"); if(hdrId==null || hdrId.trim().isEmpty()) { - hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, metadataJsonBytes, dataFormat, Operation); + hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, metadataJsonBytes, dataFormat, Operation, logger); }else { existingFileParts = getUploadedFileParts(partnerConnection, hdrId); if(updateHdrJson && existingFileParts.isEmpty()) - updateFileHdr(partnerConnection, hdrId, datasetAlias, datasetFolder, metadataJsonBytes, dataFormat, "None", Operation); + updateFileHdr(partnerConnection, hdrId, datasetAlias, datasetFolder, metadataJsonBytes, dataFormat, "None", Operation, logger); } if(hdrId ==null || hdrId.isEmpty()) { return false; } - Map fileParts = chunkBinary(dataFile, datasetArchiveDir); + Map fileParts = chunkBinary(dataFile, datasetArchiveDir, logger); boolean allPartsUploaded = false; int retryCount=0; int totalErrorCount = 0; @@ -598,7 +607,7 @@ public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadata LinkedList upThreads = new LinkedList(); for(int i = 1;i<=MAX_NUM_UPLOAD_THREADS;i++) { - FilePartsUploaderThread writer = new FilePartsUploaderThread(q, partnerConnection, hdrId); + 
FilePartsUploaderThread writer = new FilePartsUploaderThread(q, partnerConnection, hdrId, logger); Thread th = new Thread(writer,"FilePartsUploaderThread-"+i); th.setDaemon(true); th.start(); @@ -607,8 +616,8 @@ public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadata if(useBulk) { - if(eu.insertFilePartsBulk(partnerConnection, hdrId, createBatchZip(fileParts, hdrId), 0)) - return updateFileHdr(partnerConnection, hdrId, null, null, null, null, "Process", null); + if(eu.insertFilePartsBulk(partnerConnection, hdrId, createBatchZip(fileParts, hdrId, logger), 0, logger)) + return updateFileHdr(partnerConnection, hdrId, null, null, null, null, "Process", null, logger); else return false; }else @@ -670,15 +679,15 @@ public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadata if(totalErrorCount==0 && allPartsUploaded) { - return updateFileHdr(partnerConnection, hdrId, null, null, null, null, "Process", null); + return updateFileHdr(partnerConnection, hdrId, null, null, null, null, "Process", null, logger); }else { - System.err.println("Not all file parts were uploaded to InsightsExternalDataPart, remaining files:"); + logger.println("Not all file parts were uploaded to InsightsExternalDataPart, remaining files:"); for(int i:fileParts.keySet()) { if(!existingFileParts.contains(i)) { - System.err.println(fileParts.get(i)); + logger.println(fileParts.get(i)); } } return false; @@ -686,7 +695,7 @@ public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadata } - private static String insertFileHdr(PartnerConnection partnerConnection, String datasetAlias, String datasetContainer, byte[] metadataJson, String dataFormat, String Operation) throws Exception + private static String insertFileHdr(PartnerConnection partnerConnection, String datasetAlias, String datasetContainer, byte[] metadataJson, String dataFormat, String Operation, PrintStream logger) throws Exception { String rowId = null; long startTime = System.currentTimeMillis(); @@ -715,9 +724,9 @@ private static String insertFileHdr(PartnerConnection partnerConnection, String sobj.setField("MetadataJson",metadataJson); if(Operation!=null) - sobj.setField("Operation",Operation); + sobj.setField("operation",Operation); else - sobj.setField("Operation","Overwrite"); + sobj.setField("operation","Overwrite"); sobj.setField("Action","None"); @@ -728,10 +737,10 @@ private static String insertFileHdr(PartnerConnection partnerConnection, String if(sv.isSuccess()) { rowId = sv.getId(); - System.out.println("Record {"+ sv.getId() + "} Inserted into InsightsExternalData, upload time {"+nf.format(endTime-startTime)+"} msec"); + logger.println("Record {"+ sv.getId() + "} Inserted into InsightsExternalData, upload time {"+nf.format(endTime-startTime)+"} msec"); }else { - System.err.println("Record {"+ sv.getId() + "} Insert Failed: " + (getErrorMessage(sv.getErrors()))); + logger.println("Record {"+ sv.getId() + "} Insert Failed: " + (getErrorMessage(sv.getErrors()))); } } @@ -750,7 +759,7 @@ private boolean insertFileParts(PartnerConnection partnerConnection, String insi { if(existingFileParts.contains(i)) { - System.out.println("Skipping, File Part {"+ fileParts.get(i) + "}, already Inserted into InsightsExternalDataPart"); + logger.println("Skipping, File Part {"+ fileParts.get(i) + "}, already Inserted into InsightsExternalDataPart"); fileParts.get(i).delete(); continue; } @@ -767,7 +776,7 @@ private boolean insertFileParts(PartnerConnection partnerConnection, String insi { if(sv.isSuccess()) { 
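
Both insertFileHdr and the part uploaders follow the same Partner API pattern: build an SObject, call create(), then branch on SaveResult.isSuccess() as in the lines that follow. A stripped-down sketch of the header insert (the Format field value is an assumption for illustration; only the switch to the lower-case operation field name is confirmed by this patch):

import com.sforce.soap.partner.PartnerConnection;
import com.sforce.soap.partner.SaveResult;
import com.sforce.soap.partner.sobject.SObject;

public class HeaderInsertExample {
    // Sketch: create the InsightsExternalData header row and return its id, or null on failure.
    public static String insertHeader(PartnerConnection conn, String alias, byte[] metadataJson) throws Exception {
        SObject sobj = new SObject();
        sobj.setType("InsightsExternalData");
        sobj.setField("EdgemartAlias", alias);
        sobj.setField("Format", "Binary");
        sobj.setField("MetadataJson", metadataJson);
        sobj.setField("operation", "Overwrite"); // lower-case field name, per this change
        sobj.setField("Action", "None");
        SaveResult[] results = conn.create(new SObject[] { sobj });
        return results[0].isSuccess() ? results[0].getId() : null;
    }
}
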
- System.out.println("File Part {"+ fileParts.get(i) + "} Inserted into InsightsExternalDataPart: " +sv.getId() + ", upload time {"+nf.format(endTime-startTime)+"} msec"); + logger.println("File Part {"+ fileParts.get(i) + "} Inserted into InsightsExternalDataPart: " +sv.getId() + ", upload time {"+nf.format(endTime-startTime)+"} msec"); try { fileParts.get(i).delete(); @@ -777,13 +786,13 @@ private boolean insertFileParts(PartnerConnection partnerConnection, String insi } }else { - System.err.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + (getErrorMessage(sv.getErrors()))); + logger.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + (getErrorMessage(sv.getErrors()))); failedFileParts.put(i, fileParts.get(i)); } } } catch (Throwable t) { t.printStackTrace(); - System.err.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + t.toString()); + logger.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + t.toString()); failedFileParts.put(i, fileParts.get(i)); } } @@ -803,18 +812,18 @@ private boolean insertFileParts(PartnerConnection partnerConnection, String insi } */ - private boolean insertFilePartsBulk(PartnerConnection partnerConnection, String insightsExternalDataId, Map fileParts, int retryCount) throws Exception + private boolean insertFilePartsBulk(PartnerConnection partnerConnection, String insightsExternalDataId, Map fileParts, int retryCount, PrintStream logger) throws Exception { BulkConnection bulkConnection = getBulkConnection(partnerConnection.getConfig()); JobInfo job = createJob("InsightsExternalDataPart", bulkConnection); LinkedHashMap batchInfoList = new LinkedHashMap(); for(int i:fileParts.keySet()) { - createBatch(fileParts.get(i), batchInfoList, bulkConnection, job); + createBatch(fileParts.get(i), batchInfoList, bulkConnection, job, logger); } closeJob(bulkConnection, job.getId()); - awaitCompletion(bulkConnection, job, batchInfoList); - checkResults(bulkConnection, job, batchInfoList); + awaitCompletion(bulkConnection, job, batchInfoList, logger); + checkResults(bulkConnection, job, batchInfoList, logger); if(!batchInfoList.isEmpty()) { @@ -844,7 +853,7 @@ private boolean insertFilePartsBulk(PartnerConnection partnerConnection, String retryCount++; Thread.sleep(1000*retryCount); // partnerConnection = DatasetUtils.login(0, username, password, token, endpoint, sessionId); - return insertFilePartsBulk(partnerConnection, insightsExternalDataId, failedFileParts, retryCount); + return insertFilePartsBulk(partnerConnection, insightsExternalDataId, failedFileParts, retryCount, logger); }else { return true; @@ -857,7 +866,7 @@ private boolean insertFilePartsBulk(PartnerConnection partnerConnection, String } - private static boolean updateFileHdr(PartnerConnection partnerConnection, String rowId, String datasetAlias, String datasetContainer, byte[] metadataJson, String dataFormat, String Action, String Operation) throws Exception + private static boolean updateFileHdr(PartnerConnection partnerConnection, String rowId, String datasetAlias, String datasetContainer, byte[] metadataJson, String dataFormat, String Action, String Operation, PrintStream logger) throws Exception { try { @@ -890,7 +899,7 @@ else if(dataFormat.equalsIgnoreCase("Binary")) // "Overwrite" if(Operation!=null && !Operation.isEmpty()) { - sobj.setField("Operation", Operation); + sobj.setField("operation", Operation); } //Process, None @@ -906,10 +915,10 @@ else if(dataFormat.equalsIgnoreCase("Binary")) if(sv.isSuccess()) { rowId = sv.getId(); - 
System.out.println("Record {"+ sv.getId() + "} updated in InsightsExternalData"+", upload time {"+nf.format(endTime-startTime)+"} msec"); + logger.println("Record {"+ sv.getId() + "} updated in InsightsExternalData"+", upload time {"+nf.format(endTime-startTime)+"} msec"); }else { - System.err.println("Record {"+ sv.getId() + "} update Failed: " + (getErrorMessage(sv.getErrors()))); + logger.println("Record {"+ sv.getId() + "} update Failed: " + (getErrorMessage(sv.getErrors()))); return false; } } @@ -922,10 +931,11 @@ else if(dataFormat.equalsIgnoreCase("Binary")) /** * @param inputFile + * @param logger * @return * @throws IOException */ - public static Map chunkBinary(File inputFile, File archiveDir) throws IOException + public static Map chunkBinary(File inputFile, File archiveDir, PrintStream logger) throws IOException { if(inputFile == null) { @@ -939,9 +949,9 @@ public static Map chunkBinary(File inputFile, File archiveDir) thr { throw new IOException("chunkBinary() inputFile {"+inputFile+"} is 0 bytes"); } - System.out.println("\n*******************************************************************************"); - System.out.println("Chunking file {"+inputFile+"} into {"+nf.format(DEFAULT_BUFFER_SIZE)+"} size chunks\n"); - long startTime = System.currentTimeMillis(); +// logger.println("\n*******************************************************************************"); +// logger.println("Chunking file {"+inputFile+"} into {"+nf.format(DEFAULT_BUFFER_SIZE)+"} size chunks\n"); +// long startTime = System.currentTimeMillis(); InputStream input = null; FileOutputStream tmpOut = null; LinkedHashMap fileParts = new LinkedHashMap(); @@ -961,7 +971,7 @@ public static Map chunkBinary(File inputFile, File archiveDir) thr FileUtils.deleteQuietly(tmpFile); if(tmpFile.exists()) { - System.err.println("Failed to cleanup file {"+tmpFile+"}"); + logger.println("Failed to cleanup file {"+tmpFile+"}"); } } tmpOut = new FileOutputStream(tmpFile); @@ -970,7 +980,7 @@ public static Map chunkBinary(File inputFile, File archiveDir) thr tmpOut.close(); tmpOut = null; fileParts.put(Integer.valueOf(filePartNumber),tmpFile); - System.out.println("Creating File part {"+tmpFile+"}, size {"+nf.format(tmpFile.length())+"}"); +// logger.println("Creating File part {"+tmpFile+"}, size {"+nf.format(tmpFile.length())+"}"); count = ((count == -1) ? 
n : (count + n)); } if(count == -1) @@ -987,13 +997,13 @@ public static Map chunkBinary(File inputFile, File archiveDir) thr tmpOut.close(); } catch (IOException e) {e.printStackTrace();} } - long endTime = System.currentTimeMillis(); - System.out.println("\nChunked file {"+inputFile+"} into {"+fileParts.size()+"} chunks in {"+nf.format(endTime-startTime)+"} msecs"); - System.out.println("*******************************************************************************\n"); +// long endTime = System.currentTimeMillis(); +// logger.println("\nChunked file {"+inputFile+"} into {"+fileParts.size()+"} chunks in {"+nf.format(endTime-startTime)+"} msecs"); +// logger.println("*******************************************************************************\n"); return fileParts; } - private static Map createBatchZip(Map fileParts,String insightsExternalDataId) throws IOException + private static Map createBatchZip(Map fileParts,String insightsExternalDataId, PrintStream logger) throws IOException { LinkedHashMap zipParts = new LinkedHashMap(); for(int i:fileParts.keySet()) @@ -1004,7 +1014,7 @@ private static Map createBatchZip(Map fileParts,Stri FileUtils.deleteQuietly(requestFile); if(requestFile.exists()) { - System.out.println("createBatchZip(): Failed to cleanup file {"+requestFile+"}"); + logger.println("createBatchZip(): Failed to cleanup file {"+requestFile+"}"); } } String[] row = new String[3]; @@ -1042,10 +1052,10 @@ private static Map createBatchZip(Map fileParts,Stri FileUtils.deleteQuietly(zipFile); if(zipFile.exists()) { - System.out.println("createBatchZip(): Failed to cleanup file {"+zipFile+"}"); + logger.println("createBatchZip(): Failed to cleanup file {"+zipFile+"}"); } } - createZip(zipFile, new File[]{requestFile,fileParts.get(i)}); + createZip(zipFile, new File[]{requestFile,fileParts.get(i)}, logger); zipParts.put(i,zipFile); } return zipParts; @@ -1122,9 +1132,10 @@ private static String replaceString(String original, String pattern, String repl /** * Gets the results of the operation and checks for errors. 
+ * @param logger */ private static void checkResults(BulkConnection connection, JobInfo job, - LinkedHashMap batchInfoList) + LinkedHashMap batchInfoList, PrintStream logger) throws AsyncApiException, IOException { @SuppressWarnings("unchecked") LinkedHashMap tmp = (LinkedHashMap) batchInfoList.clone(); @@ -1144,8 +1155,8 @@ private static void checkResults(BulkConnection connection, JobInfo job, boolean created = Boolean.valueOf(resultInfo.get("Created")); if (success && created) { String id = resultInfo.get("Id"); -// System.out.println("Created row with id " + id); - System.out.println("File Part {"+ batchInfoList.get(b) + "} Inserted into InsightsExternalDataPart: " +id); +// logger.println("Created row with id " + id); + logger.println("File Part {"+ batchInfoList.get(b) + "} Inserted into InsightsExternalDataPart: " +id); File f = batchInfoList.remove(b); try { @@ -1154,18 +1165,18 @@ private static void checkResults(BulkConnection connection, JobInfo job, f.delete(); if(f.exists()) { - System.out.println("Failed to cleanup file {"+f+"}"); + logger.println("Failed to cleanup file {"+f+"}"); } } }catch(Throwable t) { - System.out.println("Failed to cleanup file {"+f+"}"); + logger.println("Failed to cleanup file {"+f+"}"); t.printStackTrace(); } } else if (!success) { String error = resultInfo.get("Error"); -// System.out.println("Failed with error: " + error); - System.err.println("File Part {"+ batchInfoList.get(b) + "} Insert Failed: " + error); +// logger.println("Failed with error: " + error); + logger.println("File Part {"+ batchInfoList.get(b) + "} Insert Failed: " + error); } } } @@ -1192,10 +1203,11 @@ private static void closeJob(BulkConnection connection, String jobId) * The job awaiting completion. * @param batchInfoList * List of batches for this job. + * @param logger * @throws AsyncApiException */ private static void awaitCompletion(BulkConnection connection, JobInfo job, - LinkedHashMap batchInfoList) + LinkedHashMap batchInfoList, PrintStream logger) throws AsyncApiException { long sleepTime = 0L; Set incomplete = new LinkedHashSet(); @@ -1206,7 +1218,7 @@ private static void awaitCompletion(BulkConnection connection, JobInfo job, try { Thread.sleep(sleepTime); } catch (InterruptedException e) {} - System.out.println("Awaiting Async Batch results. Incomplete Batch Size {" + incomplete.size() + "}"); + logger.println("Awaiting Async Batch results. 
Incomplete Batch Size {" + incomplete.size() + "}"); sleepTime = 10000L; BatchInfo[] statusList = connection.getBatchInfoList(job.getId()).getBatchInfo(); @@ -1214,7 +1226,7 @@ private static void awaitCompletion(BulkConnection connection, JobInfo job, if (b.getState() == BatchStateEnum.Completed || b.getState() == BatchStateEnum.Failed) { if (incomplete.remove(b.getId())) { -// System.out.println("BATCH STATUS:\n" + b); +// logger.println("BATCH STATUS:\n" + b); } } } @@ -1240,7 +1252,7 @@ private static JobInfo createJob(String sobjectType, BulkConnection connection) job.setOperation(OperationEnum.insert); job.setContentType(ContentType.ZIP_CSV); job = connection.createJob(job); -// System.out.println(job); +// logger.println(job); return job; } @@ -1270,13 +1282,13 @@ private static BulkConnection getBulkConnection(ConnectorConfig partnerConfig) private void createBatch(File zipFile, - LinkedHashMap batchInfos, BulkConnection connection, JobInfo jobInfo) + LinkedHashMap batchInfos, BulkConnection connection, JobInfo jobInfo, PrintStream logger) throws IOException, AsyncApiException { FileInputStream zipFileStream = new FileInputStream(zipFile); try { - System.out.println("creating bulk api batch for file {"+zipFile+"}"); + logger.println("creating bulk api batch for file {"+zipFile+"}"); BatchInfo batchInfo = connection.createBatchFromZipStream(jobInfo, zipFileStream); -// System.out.println(batchInfo); +// logger.println(batchInfo); batchInfos.put(batchInfo, zipFile); } finally { zipFileStream.close(); @@ -1284,7 +1296,7 @@ private void createBatch(File zipFile, } - private static void createZip(File zipfile,File[] files) throws IOException + private static void createZip(File zipfile,File[] files, PrintStream logger) throws IOException { if(zipfile == null) { @@ -1294,7 +1306,7 @@ private static void createZip(File zipfile,File[] files) throws IOException { throw new IOException("createZip(): called with null files parameter"); } - System.out.println("creating batch request zip file {"+zipfile+"}"); + logger.println("creating batch request zip file {"+zipfile+"}"); BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(zipfile)); ZipOutputStream zip = new ZipOutputStream(bos); for(File file:files) @@ -1328,12 +1340,12 @@ private static void createZip(File zipfile,File[] files) throws IOException file.delete(); if(file.exists()) { - System.out.println("createZip(): Failed to cleanup file {"+file+"}"); + logger.println("createZip(): Failed to cleanup file {"+file+"}"); } } }catch(Throwable t) { - System.out.println("createZip(): Failed to cleanup file {"+file+"}"); + logger.println("createZip(): Failed to cleanup file {"+file+"}"); t.printStackTrace(); } } @@ -1349,15 +1361,15 @@ public static boolean checkAPIAccess(String username2, String password2, Map objectList = SfdcUtils.getObjectList(partnerConnection, Pattern.compile("\\b"+"InsightsExternalData"+"\\b"), false); if(objectList==null || objectList.size()==0) { - System.err.println("\n"); - System.err.println("Error: Object {"+"InsightsExternalData"+"} not found"); + logger.println("\n"); + logger.println("Error: Object {"+"InsightsExternalData"+"} not found"); return false; } objectList = SfdcUtils.getObjectList(partnerConnection, Pattern.compile("\\b"+"InsightsExternalDataPart"+"\\b"), false); if(objectList==null || objectList.size()==0) { - System.err.println("\n"); - System.err.println("Error: Object {"+"InsightsExternalDataPart"+"} not found"); + logger.println("\n"); + logger.println("Error: Object 
{"+"InsightsExternalDataPart"+"} not found"); return false; } return true; @@ -1368,20 +1380,20 @@ public static boolean checkAPIAccess(String username2, String password2, } */ - private static boolean checkAPIAccess(PartnerConnection partnerConnection) { + private static boolean checkAPIAccess(PartnerConnection partnerConnection, PrintStream logger) { try { Map objectList = SfdcUtils.getObjectList(partnerConnection, Pattern.compile("\\b"+"InsightsExternalData"+"\\b"), false); if(objectList==null || objectList.size()==0) { - System.err.println("\n"); - System.err.println("Error: Object {"+"InsightsExternalData"+"} not found"); + logger.println("\n"); + logger.println("Error: Object {"+"InsightsExternalData"+"} not found"); return false; } objectList = SfdcUtils.getObjectList(partnerConnection, Pattern.compile("\\b"+"InsightsExternalDataPart"+"\\b"), false); if(objectList==null || objectList.size()==0) { - System.err.println("\n"); - System.err.println("Error: Object {"+"InsightsExternalDataPart"+"} not found"); + logger.println("\n"); + logger.println("Error: Object {"+"InsightsExternalDataPart"+"} not found"); return false; } return true; @@ -1392,7 +1404,7 @@ private static boolean checkAPIAccess(PartnerConnection partnerConnection) { } - private static String getLastIncompleteFileHdr(PartnerConnection partnerConnection, String datasetAlias) throws Exception + private static String getLastIncompleteFileHdr(PartnerConnection partnerConnection, String datasetAlias, PrintStream logger) throws Exception { String rowId = null; String soqlQuery = String.format("SELECT Id FROM InsightsExternalData WHERE EdgemartAlias = '%s' AND Status = 'New' AND Action = 'None' ORDER BY CreatedDate DESC LIMIT 1",datasetAlias); @@ -1424,7 +1436,7 @@ private static String getLastIncompleteFileHdr(PartnerConnection partnerConnecti } if(rowsSoFar>1) { - System.err.println("getLastIncompleteFileHdr() returned more than one row"); + logger.println("getLastIncompleteFileHdr() returned more than one row"); } return rowId; } diff --git a/src/main/java/com/sforce/dataset/loader/EbinFormatWriter.java b/src/main/java/com/sforce/dataset/loader/EbinFormatWriter.java index 65d6e80..4d323fb 100644 --- a/src/main/java/com/sforce/dataset/loader/EbinFormatWriter.java +++ b/src/main/java/com/sforce/dataset/loader/EbinFormatWriter.java @@ -26,6 +26,7 @@ package com.sforce.dataset.loader; import java.io.IOException; import java.io.OutputStream; +import java.io.PrintStream; import java.math.BigDecimal; import java.text.DecimalFormat; import java.text.NumberFormat; @@ -71,6 +72,7 @@ public class EbinFormatWriter { private int numColumns = 0; private OutputStream out; + private final PrintStream logger; int interval = 10; @@ -84,17 +86,17 @@ public class EbinFormatWriter { public static final NumberFormat nf = NumberFormat.getIntegerInstance(); long startTime = 0L; - public EbinFormatWriter(OutputStream out, List dataTypes) + public EbinFormatWriter(OutputStream out, List dataTypes,PrintStream logger) throws IOException { - this(out,dataTypes.toArray(new FieldType[0])); + this(out,dataTypes.toArray(new FieldType[0]), logger); } - public EbinFormatWriter(OutputStream out, FieldType[] dataTypes) + public EbinFormatWriter(OutputStream out, FieldType[] dataTypes,PrintStream logger) throws IOException { - this(out); + this(out, logger); // this.numColumns = dataTypes.length; for (FieldType dataType: dataTypes) { @@ -119,9 +121,10 @@ public EbinFormatWriter(OutputStream out, FieldType[] dataTypes) df.setMinimumIntegerDigits(2); } - 
protected EbinFormatWriter(OutputStream out) throws IOException + protected EbinFormatWriter(OutputStream out,PrintStream logger) throws IOException { this.out = out; + this.logger = logger; totalRowCount = 0; out.write(magic, 0, 3); out.write(version_high); @@ -151,7 +154,7 @@ public void addrow(String[] values) throws IOException,NumberFormatException, P long newStartTime = System.currentTimeMillis(); if(startTime==0) startTime = newStartTime; - System.out.println("Processing row {"+nf.format(totalRowCount) +"} time {"+nf.format(newStartTime-startTime)+"}"); + logger.println("Processing row {"+nf.format(totalRowCount) +"} time {"+nf.format(newStartTime-startTime)+"}"); startTime = newStartTime; } @@ -185,7 +188,7 @@ public void addrow(String[] values) throws IOException,NumberFormatException, P } }catch(Throwable t) { - System.out.println("Field {"+_dataTypes.get(key_value_count).name+"} has Invalid Expression {"+_dataTypes.get(key_value_count).getComputedFieldExpression()+"}"); + logger.println("Field {"+_dataTypes.get(key_value_count).name+"} has Invalid Expression {"+_dataTypes.get(key_value_count).getComputedFieldExpression()+"}"); t.printStackTrace(); } }else @@ -468,15 +471,15 @@ public void finish() throws IOException out.close(); } out = null; - if(totalRowCount==0) - { - throw new IOException("Atleast one row must be written"); - }else + if(totalRowCount!=0) { +// throw new IOException("Atleast one row must be written"); +// }else +// { long newStartTime = System.currentTimeMillis(); if(startTime==0) startTime = newStartTime; - System.out.println("Processed last row {"+nf.format(totalRowCount) +"} time {"+nf.format(newStartTime-startTime)+"}"); + logger.println("Processed last row {"+nf.format(totalRowCount) +"} time {"+nf.format(newStartTime-startTime)+"}"); startTime = newStartTime; } } diff --git a/src/main/java/com/sforce/dataset/loader/ErrorWriter.java b/src/main/java/com/sforce/dataset/loader/ErrorWriter.java index 9d58da5..451c270 100644 --- a/src/main/java/com/sforce/dataset/loader/ErrorWriter.java +++ b/src/main/java/com/sforce/dataset/loader/ErrorWriter.java @@ -55,9 +55,6 @@ public class ErrorWriter { private CsvPreference preference = CsvPreference.STANDARD_PREFERENCE; private DefaultCsvEncoder csvEncoder = new DefaultCsvEncoder(); - //private int numColumns; - - public static final char LF = '\n'; public static final char CR = '\r'; @@ -83,11 +80,6 @@ public ErrorWriter(File inputCsv,String delimiter) this.delimiter = delimiter; -// CsvReader reader = new CsvReader(new InputStreamReader(new BOMInputStream(new FileInputStream(inputCsv), false), DatasetUtils.utf8Decoder(null, null))); -// reader.readHeaders(); -// String header = reader.getRawRecord(); -// headerColumns = reader.getHeaders(); - CsvListReader reader = new CsvListReader(new InputStreamReader(new BOMInputStream(new FileInputStream(inputCsv), false), DatasetUtils.utf8Decoder(CodingErrorAction.IGNORE, null)), CsvPreference.STANDARD_PREFERENCE); headerColumns = reader.getHeader(true); reader.close(); @@ -152,9 +144,6 @@ public void finish() throws IOException if (fWriter != null) { fWriter.flush(); fWriter.close(); - System.out.println("\n*******************************************************************************"); - System.out.println("Error rows written to: "+this.errorCsv.getAbsolutePath()); - System.out.println("*******************************************************************************\n"); } fWriter = null; } diff --git a/src/main/java/com/sforce/dataset/loader/FilePartsUploaderThread.java 
b/src/main/java/com/sforce/dataset/loader/FilePartsUploaderThread.java index 6c03869..159d76f 100644 --- a/src/main/java/com/sforce/dataset/loader/FilePartsUploaderThread.java +++ b/src/main/java/com/sforce/dataset/loader/FilePartsUploaderThread.java @@ -26,6 +26,7 @@ package com.sforce.dataset.loader; import java.io.File; +import java.io.PrintStream; import java.text.NumberFormat; import java.util.Map; import java.util.concurrent.BlockingQueue; @@ -46,10 +47,11 @@ public class FilePartsUploaderThread implements Runnable { private volatile boolean isDone = false; private volatile int errorRowCount = 0; private volatile int totalRowCount = 0; + private final PrintStream logger; public static final NumberFormat nf = NumberFormat.getIntegerInstance(); -FilePartsUploaderThread(BlockingQueue> q,PartnerConnection partnerConnection, String insightsExternalDataId) +FilePartsUploaderThread(BlockingQueue> q,PartnerConnection partnerConnection, String insightsExternalDataId, PrintStream logger) { if(partnerConnection==null || insightsExternalDataId == null || q == null) { @@ -58,12 +60,13 @@ public class FilePartsUploaderThread implements Runnable { queue = q; this.partnerConnection = partnerConnection; this.insightsExternalDataId = insightsExternalDataId; + this.logger = logger; } public void run() { try { Map row = queue.take(); - System.out.println("Start: " + Thread.currentThread().getName()); + logger.println("Start: " + Thread.currentThread().getName()); while (!row.isEmpty()) { try { @@ -78,9 +81,9 @@ public void run() { row = queue.take(); } }catch (Throwable t) { - System.out.println (Thread.currentThread().getName() + " " + t.getMessage()); + logger.println (Thread.currentThread().getName() + " " + t.getMessage()); } - System.out.println("END: " + Thread.currentThread().getName()); + logger.println("END: " + Thread.currentThread().getName()); isDone = true; } @@ -114,16 +117,16 @@ private boolean insertFileParts(PartnerConnection partnerConnection, String insi { if(sv.isSuccess()) { - System.out.println("File Part {"+ fileParts.get(i) + "} Inserted into InsightsExternalDataPart: " +sv.getId() + ", upload time {"+nf.format(endTime-startTime)+"} msec"); + logger.println("File Part {"+ fileParts.get(i) + "} Inserted into InsightsExternalDataPart: " +sv.getId() + ", upload time {"+nf.format(endTime-startTime)+"} msec"); return true; }else { - System.err.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + (DatasetLoader.getErrorMessage(sv.getErrors()))); + logger.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + (DatasetLoader.getErrorMessage(sv.getErrors()))); } } } catch (Throwable t) { t.printStackTrace(); - System.err.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + t.toString()); + logger.println("File Part {"+ fileParts.get(i) + "} Insert Failed: " + t.toString()); } } // if(retryCount<3) diff --git a/src/main/java/com/sforce/dataset/loader/WriterThread.java b/src/main/java/com/sforce/dataset/loader/WriterThread.java index 55f6d7a..3bb289e 100644 --- a/src/main/java/com/sforce/dataset/loader/WriterThread.java +++ b/src/main/java/com/sforce/dataset/loader/WriterThread.java @@ -25,6 +25,8 @@ */ package com.sforce.dataset.loader; +import java.io.IOException; +import java.io.PrintStream; import java.util.concurrent.BlockingQueue; public class WriterThread implements Runnable { @@ -32,48 +34,53 @@ public class WriterThread implements Runnable { private static final int max_error_threshhold = 10000; private final BlockingQueue queue; - private final 
EbinFormatWriter w; - private final ErrorWriter ew; + @SuppressWarnings("deprecation") +private final EbinFormatWriter ebinWriter; + private final ErrorWriter errorwriter; + private final PrintStream logger; private volatile boolean isDone = false; private volatile int errorRowCount = 0; private volatile int totalRowCount = 0; -WriterThread(BlockingQueue<String[]> q,EbinFormatWriter w,ErrorWriter ew) +@SuppressWarnings("deprecation") +WriterThread(BlockingQueue<String[]> q,EbinFormatWriter w,ErrorWriter ew, PrintStream logger) { if(q==null || w == null || ew == null) { throw new IllegalArgumentException("Constructor input cannot be null"); } queue = q; - this.w = w; - this.ew = ew; + this.ebinWriter = w; + this.errorwriter = ew; + this.logger = logger; } - public void run() { + @SuppressWarnings("deprecation") +public void run() { + logger.println("Start: " + Thread.currentThread().getName()); try { String[] row = queue.take(); - System.out.println("Start: " + Thread.currentThread().getName()); while (row != null && row.length!=0) { try { totalRowCount++; - w.addrow(row); + ebinWriter.addrow(row); }catch(Throwable t) { if(errorRowCount==0) { - System.err.println(); + logger.println(); } - System.err.println("Row {"+totalRowCount+"} has error {"+t+"}"); + logger.println("Row {"+totalRowCount+"} has error {"+t+"}"); if(row!=null) { - ew.addError(row, t.getMessage()); + errorwriter.addError(row, t.getMessage()); errorRowCount++; if(errorRowCount>=max_error_threshhold) { - System.err.println("Max error threshold reached. Aborting processing"); + logger.println("Max error threshold reached. Aborting processing"); break; } } @@ -82,9 +89,19 @@ public void run() { row = queue.take(); } }catch (Throwable t) { - System.out.println (Thread.currentThread().getName() + " " + t.getMessage()); + logger.println (Thread.currentThread().getName() + " " + t.getMessage()); } - System.out.println("END: " + Thread.currentThread().getName()); + try { + ebinWriter.finish(); + } catch (IOException e) { + e.printStackTrace(); + } + try { + errorwriter.finish(); + } catch (IOException e) { + e.printStackTrace(); + } + logger.println("END: " + Thread.currentThread().getName()); isDone = true; } diff --git a/src/main/java/com/sforce/dataset/loader/file/listener/FileListener.java b/src/main/java/com/sforce/dataset/loader/file/listener/FileListener.java new file mode 100644 index 0000000..146c070 --- /dev/null +++ b/src/main/java/com/sforce/dataset/loader/file/listener/FileListener.java @@ -0,0 +1,195 @@ +package com.sforce.dataset.loader.file.listener; + +import java.io.File; +/* + * Copyright (c) 2014, salesforce.com, inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and + * the following disclaimer in the documentation and/or other materials provided with the distribution. + * + * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission.
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +import java.nio.charset.Charset; +import java.nio.charset.CodingErrorAction; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +public class FileListener { + String dataset = null; + String datasetLabel = null; + String app = null; + String filecharset = "UTF-8"; + String uploadFormat = "binary"; + String codingErrorAction = "REPORT"; + String operation = "Overwrite"; + boolean useBulkAPI = false; + String inputFileDirectory = null; + String inputFilePattern = null; + int pollingInterval = 10000; + int fileAge = 10000; + + @JsonIgnore + Charset charset = Charset.forName("UTF-8"); + + @JsonIgnore + File fileDir = null; + + @JsonIgnore + CodingErrorAction cea = CodingErrorAction.REPORT; + + + @JsonIgnore + public CodingErrorAction getCea() { + return cea; + } + + @JsonIgnore + public void setCea(CodingErrorAction cea) { + this.cea = cea; + } + + public String getDataset() { + return dataset; + } + public void setDataset(String dataset) { + this.dataset = dataset; + } + public String getDatasetLabel() { + return datasetLabel; + } + public void setDatasetLabel(String datasetLabel) { + this.datasetLabel = datasetLabel; + } + public String getApp() { + return app; + } + public void setApp(String app) { + this.app = app; + } + public String getFilecharset() { + return filecharset; + } + public void setFilecharset(String filecharset) { + if(filecharset!=null) + { + charset = Charset.forName(filecharset); + this.filecharset = filecharset; + } + } + public String getUploadFormat() { + return uploadFormat; + } + public void setUploadFormat(String uploadFormat) { + if(uploadFormat !=null) + { + if(uploadFormat.equalsIgnoreCase("CSV") || uploadFormat.equalsIgnoreCase("BINARY")) + this.uploadFormat = uploadFormat; + } + } + public String getCodingErrorAction() { + return codingErrorAction; + } + public void setCodingErrorAction(String codingErrorAction) { + if(codingErrorAction != null) + { + if(codingErrorAction.equalsIgnoreCase("REPLACE")) + { + cea = CodingErrorAction.REPLACE; + this.codingErrorAction = codingErrorAction; + } + else if(codingErrorAction.equalsIgnoreCase("IGNORE")) + { + cea = CodingErrorAction.IGNORE; + this.codingErrorAction = codingErrorAction; + } + } + } + public String getOperation() { + return operation; + } + public void setOperation(String operation) { + if(operation != null) + { + if(operation.equalsIgnoreCase("OVERWRITE")) + { + this.operation = "OVERWRITE"; + } + else if(operation.equalsIgnoreCase("APPEND")) + { + this.operation = "APPEND"; + } + else if(operation.equalsIgnoreCase("UPSERT")) + { + this.operation = "UPSERT"; + } + else if(operation.equalsIgnoreCase("DELETE")) + { + this.operation = "DELETE"; + } + } + } + public boolean isUseBulkAPI() { + return useBulkAPI; + } + 
public void setUseBulkAPI(boolean useBulkAPI) { + this.useBulkAPI = useBulkAPI; + } + public String getInputFileDirectory() { + return inputFileDirectory; + } + public void setInputFileDirectory(String inputFileDirectory) { + if(inputFileDirectory!=null) + { + File temp = new File(inputFileDirectory); + if(temp.exists()) + { + if(temp.isDirectory()) + { + this.inputFileDirectory = inputFileDirectory; + this.fileDir = temp; + }else + { + this.fileDir = temp.getParentFile(); + this.inputFileDirectory = this.fileDir.toString(); + this.inputFilePattern = temp.getName(); + } + } + } + } + public String getInputFilePattern() { + return inputFilePattern; + } + public void setInputFilePattern(String inputFilePattern) { + if(inputFilePattern!=null) + this.inputFilePattern = inputFilePattern; + } + public int getPollingInterval() { + return pollingInterval; + } + public void setPollingInterval(int pollingInterval) { + this.pollingInterval = pollingInterval; + } + public int getFileAge() { + return fileAge; + } + public void setFileAge(int fileAge) { + this.fileAge = fileAge; + } +} diff --git a/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerSettings.java b/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerSettings.java new file mode 100644 index 0000000..4b829a8 --- /dev/null +++ b/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerSettings.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2014, salesforce.com, inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and + * the following disclaimer in the documentation and/or other materials provided with the distribution. + * + * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +package com.sforce.dataset.loader.file.listener; + +import java.util.LinkedHashMap; + +public class FileListenerSettings { + public LinkedHashMap<String, FileListener> fileListeners; +}
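FileListenerSettings is the entire persistence model: FileListenerUtil (later in this patch) serializes this single map, keyed by dataset name, to .sfdc_file_listeners.json with Jackson. An abridged, illustrative example of what a saved settings file could look like (the values shown are the FileListener defaults plus made-up paths, not output of this patch):

{
  "fileListeners" : {
    "mydataset" : {
      "dataset" : "mydataset",
      "uploadFormat" : "binary",
      "operation" : "Overwrite",
      "useBulkAPI" : false,
      "inputFileDirectory" : "incoming",
      "inputFilePattern" : "mydataset.csv",
      "pollingInterval" : 10000,
      "fileAge" : 10000
    }
  }
}

diff --git a/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerThread.java b/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerThread.java new file mode 100644 index 0000000..032d07e --- /dev/null +++ b/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerThread.java @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2014, salesforce.com, inc.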
+ * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and + * the following disclaimer in the documentation and/or other materials provided with the distribution. + * + * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +package com.sforce.dataset.loader.file.listener; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.io.IOCase; +import org.apache.commons.io.filefilter.FileFilterUtils; +import org.apache.commons.io.filefilter.IOFileFilter; + +import com.sforce.dataset.loader.DatasetLoader; +import com.sforce.soap.partner.PartnerConnection; + +public class FileListenerThread implements Runnable { + + + private volatile boolean isDone = false; + + private final FileListener fileListener; + private final PartnerConnection partnerConnection; + private static final File errorDir = new File("error"); + private static final File successDir = new File("success"); + private static final File logsDir = new File("logs"); + + FileListenerThread(FileListener fileListener, PartnerConnection partnerConnection) throws IOException + { + if(fileListener==null) + { + throw new IllegalArgumentException("Constructor input cannot be null"); + } + this.fileListener = fileListener; + this.partnerConnection = partnerConnection; +// errorDir = new File("Error"); +// successDir = new File("Success"); + FileUtils.forceMkdir(errorDir); + FileUtils.forceMkdir(successDir); + FileUtils.forceMkdir(logsDir); + } + +public void run() { + System.out.println("Starting FileListener for Dataset {"+fileListener.dataset+"} "); + + try { + while (!isDone) { + try + { + long cutOff = System.currentTimeMillis() - (1000*fileListener.fileAge); + IOFileFilter ageFilter = FileFilterUtils.ageFileFilter(cutOff); + IOFileFilter nameFilter = FileFilterUtils.nameFileFilter(fileListener.inputFilePattern, IOCase.INSENSITIVE); +// IOFileFilter suffixFileFilter1 = FileFilterUtils.suffixFileFilter(".zip", IOCase.INSENSITIVE); +// IOFileFilter suffixFileFilter2 = FileFilterUtils.suffixFileFilter(".csv", 
IOCase.INSENSITIVE); +// IOFileFilter orFilter = FileFilterUtils.and(suffixFileFilter1, suffixFileFilter2); + IOFileFilter andFilter = FileFilterUtils.and(nameFilter, ageFilter); + + File[] files = getFiles(fileListener.fileDir, andFilter); + if (files == null) + { + try + { + Thread.sleep(fileListener.pollingInterval); + }catch(Throwable t) + { + t.printStackTrace(); + } + if(isDone) + break; + else + continue; + } + + for(File file:files) + { + PrintStream logger = null; + try + { + long timeStamp = System.currentTimeMillis(); + File logFile = new File(logsDir,FilenameUtils.getBaseName(file.getName())+timeStamp+".log"); + logger = new PrintStream(new FileOutputStream(logFile), true, "UTF-8"); + boolean status = DatasetLoader.uploadDataset(file.toString(), fileListener.uploadFormat, fileListener.cea, fileListener.charset, fileListener.dataset, fileListener.app, fileListener.datasetLabel, fileListener.operation, fileListener.useBulkAPI, partnerConnection, logger); + moveInputFile(file, timeStamp, status); + }catch(Throwable t) + { + if(logger!=null) + t.printStackTrace(logger); + else + t.printStackTrace(); + }finally + { + if(logger!=null) + logger.close(); + logger = null; + } + } + + }catch(Throwable t) + { + t.printStackTrace(); + } + } + }catch (Throwable t) { + System.out.println (Thread.currentThread().getName() + " " + t.getMessage()); + } + System.out.println("Stopping FileListener for Dataset {"+fileListener.dataset+"} "); + isDone = true; + } + +public boolean isDone() { + return isDone; +} + + public static File[] getFiles(File directory, IOFileFilter fileFilter) { +// File[] files = directory.listFiles(fileFilter); +// Collection list = FileUtils.listFiles(directory, fileFilter,TrueFileFilter.INSTANCE); + Collection<File> list = FileUtils.listFiles(directory, fileFilter, null); + + File[] files = list.toArray(new File[0]); + + if (files != null && files.length > 0) { + Arrays.sort(files, new Comparator<File>() { + public int compare(File a, File b) { + long diff = (a.lastModified() - b.lastModified()); + if(diff>0L) + return 1; + else if(diff<0L) + return -1; + else + return 0; + } + }); + + //for (File file : files) { + // Date lastMod = new Date(file.lastModified()); + // System.out.println("Found File {" + file.getName() + "}, lastModified {"+ lastMod + "}"); + //} + return files; + } else { + return null; + } + } + + + public static void moveInputFile(File inputFile, long timeStamp, boolean isSuccess) + { + File directory = successDir; + if(!isSuccess) + directory = errorDir; + + File doneFile = new File(directory,timeStamp+"."+inputFile.getName()); + try { + FileUtils.moveFile(inputFile, doneFile); + } catch (IOException e) { + e.printStackTrace(); + } + File sortedFile = new File(inputFile.getParent(), FilenameUtils.getBaseName(inputFile.getName())+ "_sorted." + FilenameUtils.getExtension(inputFile.getName())); + if(sortedFile.exists()) + { + File sortedDoneFile = new File(directory,timeStamp+"."+sortedFile.getName()); + try { + FileUtils.moveFile(sortedFile, sortedDoneFile); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + + + +} \ No newline at end of file
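The polling loop above composes two commons-io filters: an exact, case-insensitive name match and a quiet-period check, so a file is only picked up after it has stopped changing. Note the units: pollingInterval is passed straight to Thread.sleep() in milliseconds, while fileAge is multiplied by 1000 before the lastModified comparison, so it is effectively in seconds. A self-contained sketch of the same filter composition (directory and file names are illustrative):

import java.io.File;
import java.util.Collection;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOCase;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.io.filefilter.IOFileFilter;

public class PollFilterSketch {
    public static void main(String[] args) {
        long fileAge = 10000; // default from FileListener
        long cutOff = System.currentTimeMillis() - (1000 * fileAge); // scaled exactly as in the patch
        IOFileFilter ready = FileFilterUtils.and(
                FileFilterUtils.nameFileFilter("mydataset.csv", IOCase.INSENSITIVE), // exact name, not a glob
                FileFilterUtils.ageFileFilter(cutOff)); // only files older than the cutoff
        // null directory filter: subdirectories are not searched
        Collection<File> files = FileUtils.listFiles(new File("incoming"), ready, null);
        for (File f : files)
            System.out.println(f);
    }
}

diff --git a/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerUtil.java b/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerUtil.java new file mode 100644 index 0000000..a1da4e4 --- /dev/null +++ b/src/main/java/com/sforce/dataset/loader/file/listener/FileListenerUtil.java @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2014, salesforce.com, inc. + * All rights reserved.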
+ * + * Redistribution and use in source and binary forms, with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and + * the following disclaimer in the documentation and/or other materials provided with the distribution. + * + * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +package com.sforce.dataset.loader.file.listener; + +import java.io.File; +import java.io.IOException; +import java.util.LinkedHashMap; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.sforce.soap.partner.PartnerConnection; + +public class FileListenerUtil { + + public static final File listenerSettingsFile = new File(".sfdc_file_listeners.json"); +// static LinkedHashMap listeners = null; + + public static FileListenerSettings getFileListeners() throws JsonParseException, JsonMappingException, IOException + { + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + if(listenerSettingsFile.exists() && listenerSettingsFile.length()>0) + { + FileListenerSettings listenerSettings = mapper.readValue(listenerSettingsFile, FileListenerSettings.class); + return listenerSettings; + } + return null; + + } + + public static void saveFileListeners(FileListenerSettings listeners) throws JsonParseException, JsonMappingException, IOException + { + ObjectMapper mapper = new ObjectMapper(); + mapper.writerWithDefaultPrettyPrinter().writeValue(listenerSettingsFile, listeners); + } + + public static boolean addListener(FileListener listener) throws JsonParseException, JsonMappingException, IOException + { + FileListenerSettings listeners = getFileListeners(); + if(listeners==null) + { + listeners = new FileListenerSettings(); + } + if(listeners.fileListeners==null ) + { + listeners.fileListeners = new LinkedHashMap(); + } + if(!listeners.fileListeners.containsKey(listener.getDataset())) + { + listeners.fileListeners.put(listener.getDataset(), listener); + saveFileListeners(listeners); + return true; + }else + { + System.out.println("\nERROR: FileListener for dataset {"+listener.getDataset()+"} already exists"); + } + return false; + } + + public static boolean 
startListener(FileListener listener, PartnerConnection partnerConnection) throws IOException + { + FileListenerThread fileListenerThread = new FileListenerThread(listener, partnerConnection); + Thread th = new Thread(fileListenerThread,"FileListener-"+listener.dataset); + th.setDaemon(true); + th.start(); + return true; + } + + public static boolean addAndStartListener(FileListener listener, PartnerConnection partnerConnection) throws IOException + { + if(addListener(listener)) + return startListener(listener, partnerConnection); + else + return false; + } + + public static void startAllListener(PartnerConnection partnerConnection) + { + try + { + FileListenerSettings listeners = getFileListeners(); + if(listeners!=null && listeners.fileListeners != null && !listeners.fileListeners.isEmpty()) + { + for(String dataset:listeners.fileListeners.keySet()) + { + startListener(listeners.fileListeners.get(dataset), partnerConnection); + } + } + } catch (Throwable t) { + t.printStackTrace(); + } + } + + +} diff --git a/src/main/java/com/sforce/dataset/loader/file/schema/DetectFieldTypes.java b/src/main/java/com/sforce/dataset/loader/file/schema/DetectFieldTypes.java index 0c7b8c0..2bb9b27 100644 --- a/src/main/java/com/sforce/dataset/loader/file/schema/DetectFieldTypes.java +++ b/src/main/java/com/sforce/dataset/loader/file/schema/DetectFieldTypes.java @@ -29,6 +29,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; +import java.io.PrintStream; import java.math.BigDecimal; import java.math.RoundingMode; import java.nio.charset.Charset; @@ -56,7 +57,7 @@ public class DetectFieldTypes { public static final String[] additionalDatePatterns ={"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'","yyyy-MM-dd'T'HH:mm:ss'Z'","yyyy-MM-dd'T'HH:mm:ss.SSS","yyyy-MM-dd'T'HH:mm:ss","M/d/yyyy HH:mm:ss","M/d/yy HH:mm:ss","M-d-yyyy HH:mm:ss","M-d-yy HH:mm:ss","d/M/yyyy HH:mm:ss","d/M/yy HH:mm:ss","d-M-yyyy HH:mm:ss","d-M-yy HH:mm:ss", "M/d/yy", "d/M/yy","M-d-yy", "d-M-yy"}; // - public LinkedList<FieldType> detect(File inputCsv, ExternalFileSchema userSchema, Charset fileCharset) throws IOException + public LinkedList<FieldType> detect(File inputCsv, ExternalFileSchema userSchema, Charset fileCharset, PrintStream logger) throws IOException { CsvListReader reader = null; LinkedList<FieldType> types = null; @@ -109,14 +110,14 @@ public LinkedList<FieldType> detect(File inputCsv, ExternalFileSchema userSchema if(first) { - System.out.println("Detecting schema from csv file {"+ inputCsv +"} ..."); + logger.println("Detecting schema from csv file {"+ inputCsv +"} ..."); first = false; } LinkedList<String> columnValues = new LinkedList<String>(); int rowCount = 0; - System.out.print("Column: "+ header[i]); + logger.print("Column: "+ header[i]); try { reader = new CsvListReader(new InputStreamReader(new BOMInputStream(new FileInputStream(inputCsv), false), DatasetUtils.utf8Decoder(null , fileCharset)), CsvPreference.STANDARD_PREFERENCE); @@ -148,25 +149,25 @@ public LinkedList<FieldType> detect(File inputCsv, ExternalFileSchema userSchema if(bd!=null) { newField = FieldType.GetMeasureKeyDataType(devNames[i], 0, bd.scale(), 0L); - System.out.println(", Type: Numeric, Scale: "+ bd.scale()); + logger.println(", Type: Numeric, Scale: "+ bd.scale()); }else { SimpleDateFormat sdf = detectDate(columnValues); if(sdf!= null) { newField = FieldType.GetDateKeyDataType(devNames[i], sdf.toPattern(), null); - System.out.println(", Type: Date, Format: "+ sdf.toPattern()); + logger.println(", Type: Date, Format: "+ sdf.toPattern()); }else { newField = 
FieldType.GetStringKeyDataType(devNames[i], null, null); int prec = detectTextPrecision(columnValues); if(prec>255) { - System.out.println(", Type: Text, Precision: "+255+" (Column will be truncated to 255 characters)"); + logger.println(", Type: Text, Precision: "+255+" (Column will be truncated to 255 characters)"); } else { - System.out.println(", Type: Text, Precision: "+prec); + logger.println(", Type: Text, Precision: "+prec); } newField.setPrecision(255); //Assume upper limit for precision of text fields even if the values may be smaller } @@ -184,11 +185,11 @@ public LinkedList<FieldType> detect(File inputCsv, ExternalFileSchema userSchema if(!first) { - System.out.println("Schema file {"+ ExternalFileSchema.getSchemaFile(inputCsv) +"} successfully generated..."); + logger.println("Schema file {"+ ExternalFileSchema.getSchemaFile(inputCsv, logger) +"} successfully generated..."); } } finally { - System.out.println(""); + logger.println(""); if(reader!=null) reader.close(); } @@ -215,7 +216,7 @@ public BigDecimal detectNumeric(LinkedList<String> columnValues) try { bd = new BigDecimal(columnValue); - //System.out.println("Value: {"+columnValue+"} Scale: {"+bd.scale()+"}"); + //logger.println("Value: {"+columnValue+"} Scale: {"+bd.scale()+"}"); if(maxScale == null || bd.scale() > maxScale.scale()) maxScale = bd; success++; @@ -274,8 +275,8 @@ public SimpleDateFormat detectDate(LinkedList<String> columnValues) { // if(dtf.toPattern().equals("MM/dd/yyyy hh:mm:ss a")) // { -// System.out.println(i + "| " + locales[i].getDisplayCountry()+"| "+dtf.toPattern()); -// System.out.println(columnValue.trim()); +// logger.println(i + "| " + locales[i].getDisplayCountry()+"| "+dtf.toPattern()); +// logger.println(columnValue.trim()); // t.printStackTrace(); // } } diff --git a/src/main/java/com/sforce/dataset/loader/file/schema/ExternalFileSchema.java b/src/main/java/com/sforce/dataset/loader/file/schema/ExternalFileSchema.java index 5e6497d..cba0b06 100644 --- a/src/main/java/com/sforce/dataset/loader/file/schema/ExternalFileSchema.java +++ b/src/main/java/com/sforce/dataset/loader/file/schema/ExternalFileSchema.java @@ -28,6 +28,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; +import java.io.PrintStream; import java.nio.charset.Charset; import java.util.HashSet; import java.util.LinkedList; @@ -90,27 +91,27 @@ public boolean equals(Object obj) { return true; } - public static ExternalFileSchema init(File csvFile, Charset fileCharset) throws JsonParseException, JsonMappingException, IOException + public static ExternalFileSchema init(File csvFile, Charset fileCharset, PrintStream logger) throws JsonParseException, JsonMappingException, IOException { ExternalFileSchema newSchema = null; //try //{ - ExternalFileSchema userSchema = ExternalFileSchema.load(csvFile, fileCharset); - ExternalFileSchema autoSchema = ExternalFileSchema.createAutoSchema(csvFile, userSchema, fileCharset); + ExternalFileSchema userSchema = ExternalFileSchema.load(csvFile, fileCharset, logger); + ExternalFileSchema autoSchema = ExternalFileSchema.createAutoSchema(csvFile, userSchema, fileCharset, logger); if(userSchema==null) { - ExternalFileSchema.save(csvFile, autoSchema); + ExternalFileSchema.save(csvFile, autoSchema, logger); userSchema = autoSchema; } if(userSchema!=null && !userSchema.equals(autoSchema)) { - ExternalFileSchema schema = ExternalFileSchema.merge(userSchema, autoSchema); + ExternalFileSchema schema = ExternalFileSchema.merge(userSchema, autoSchema, logger); 
if(!schema.equals(userSchema)) { - System.err.println("Saving merged schema"); - ExternalFileSchema.save(csvFile, schema); + logger.println("Saving merged schema"); + ExternalFileSchema.save(csvFile, schema, logger); } // newSchema = ExternalFileSchema.load(csvFile); newSchema = schema; @@ -121,13 +122,13 @@ public static ExternalFileSchema init(File csvFile, Charset fileCharset) throws //} catch (Throwable t) { // t.printStackTrace(); //} - validateSchema(newSchema); + validateSchema(newSchema, logger); return newSchema; } - public static ExternalFileSchema createAutoSchema(File csvFile, ExternalFileSchema userSchema, Charset fileCharset) throws IOException + public static ExternalFileSchema createAutoSchema(File csvFile, ExternalFileSchema userSchema, Charset fileCharset, PrintStream logger) throws IOException { ExternalFileSchema emd = null; String baseName = FilenameUtils.getBaseName(csvFile.getName()); @@ -146,7 +147,7 @@ public static ExternalFileSchema createAutoSchema(File csvFile, ExternalFileSche } DetectFieldTypes detEFT = new DetectFieldTypes(); - LinkedList fields = detEFT.detect(csvFile, userSchema, fileCharset); + LinkedList fields = detEFT.detect(csvFile, userSchema, fileCharset, logger); FileFormat fileFormat = new FileFormat(); ObjectType od = new ObjectType(); @@ -169,12 +170,12 @@ public static ExternalFileSchema createAutoSchema(File csvFile, ExternalFileSche //} catch (Throwable t) { // t.printStackTrace(); //} - validateSchema(emd); + validateSchema(emd, logger); return emd; } - public static void save(File schemaFile,ExternalFileSchema emd) + public static void save(File schemaFile,ExternalFileSchema emd, PrintStream logger) { ObjectMapper mapper = new ObjectMapper(); try @@ -190,7 +191,7 @@ public static void save(File schemaFile,ExternalFileSchema emd) } } - public static ExternalFileSchema load(File inputCSV, Charset fileCharset) throws JsonParseException, JsonMappingException, IOException + public static ExternalFileSchema load(File inputCSV, Charset fileCharset, PrintStream logger) throws JsonParseException, JsonMappingException, IOException { File schemaFile = inputCSV; ObjectMapper mapper = new ObjectMapper(); @@ -203,7 +204,7 @@ public static ExternalFileSchema load(File inputCSV, Charset fileCharset) throws ExternalFileSchema userSchema = null; if(schemaFile.exists()) { - System.out.println("Loading existing schema from file {"+ schemaFile +"}"); + logger.println("Loading existing schema from file {"+ schemaFile +"}"); userSchema = mapper.readValue(schemaFile, ExternalFileSchema.class); } @@ -280,7 +281,7 @@ public static ExternalFileSchema load(File inputCSV, Charset fileCharset) throws } if(!found) { - System.err.println("Field {"+field.getName()+"} not found in schema file {"+ schemaFile +"}"); + logger.println("Field {"+field.getName()+"} not found in schema file {"+ schemaFile +"}"); fields.remove(field); } @@ -289,12 +290,12 @@ public static ExternalFileSchema load(File inputCSV, Charset fileCharset) throws } } } - validateSchema(userSchema); + validateSchema(userSchema, logger); return userSchema; } - private static void validateSchema(ExternalFileSchema schema) throws IllegalArgumentException + private static void validateSchema(ExternalFileSchema schema, PrintStream logger) throws IllegalArgumentException { StringBuffer message = new StringBuffer(); if(schema!=null) @@ -413,7 +414,7 @@ private static void validateSchema(ExternalFileSchema schema) throws IllegalArgu } } - public static ExternalFileSchema merge(ExternalFileSchema userSchema, 
ExternalFileSchema autoSchema) + public static ExternalFileSchema merge(ExternalFileSchema userSchema, ExternalFileSchema autoSchema, PrintStream logger) { ExternalFileSchema mergedSchema = null; try @@ -457,7 +458,7 @@ public static ExternalFileSchema merge(ExternalFileSchema userSchema, ExternalFi found = true; if(!auto_field.equals(user_field)) { - System.err.println("Field {"+user_field+"} has been modified by user"); + logger.println("Field {"+user_field+"} has been modified by user"); merged_field = new FieldType(user_field.name!=null?user_field.name:auto_field.name); merged_field.type = user_field.type!=null?user_field.type:auto_field.type; // merged_field.acl = user_field.acl!=null?user_field.acl:auto_field.acl; @@ -481,7 +482,7 @@ public static ExternalFileSchema merge(ExternalFileSchema userSchema, ExternalFi } if(!found) { - System.err.println("Found new field {"+auto_field+"} in CSV"); + logger.println("Found new field {"+auto_field+"} in CSV"); } if(merged_field==null) { @@ -525,7 +526,7 @@ public static ExternalFileSchema merge(ExternalFileSchema userSchema, ExternalFi } - public static File getSchemaFile(File csvFile) { + public static File getSchemaFile(File csvFile, PrintStream logger) { try { //init(csvFile); diff --git a/src/main/java/com/sforce/dataset/loader/file/sort/CsvExternalSort.java b/src/main/java/com/sforce/dataset/loader/file/sort/CsvExternalSort.java index 9193fed..db823b6 100644 --- a/src/main/java/com/sforce/dataset/loader/file/sort/CsvExternalSort.java +++ b/src/main/java/com/sforce/dataset/loader/file/sort/CsvExternalSort.java @@ -53,7 +53,7 @@ public class CsvExternalSort extends ExternalSort { - public static File sortFile(File inputCsv, final Charset cs, final boolean distinct,final int headersize) throws IOException + public static File sortFile(File inputCsv, final Charset cs, final boolean distinct,final int headersize, ExternalFileSchema schema) throws IOException { if(inputCsv==null || !inputCsv.canRead()) { @@ -62,10 +62,10 @@ public static File sortFile(File inputCsv, final Charset cs, final boolean disti File outputFile = new File(inputCsv.getParent(), FilenameUtils.getBaseName(inputCsv.getName())+ "_sorted." 
+ FilenameUtils.getExtension(inputCsv.getName())); - ExternalFileSchema schema = ExternalFileSchema.load(inputCsv, cs); +// ExternalFileSchema schema = ExternalFileSchema.load(inputCsv, cs); if(schema==null || schema.objects == null || schema.objects.size()==0 || schema.objects.get(0).fields == null) { - throw new IOException("File does not have valid metadata json {"+ExternalFileSchema.getSchemaFile(inputCsv)+"}"); + throw new IOException("File does not have valid metadata json {"+ExternalFileSchema.getSchemaFile(inputCsv, System.out)+"}"); } CsvRowComparator cmp = null; @@ -81,7 +81,7 @@ public static File sortFile(File inputCsv, final Charset cs, final boolean disti // } List l = sortInBatch(inputCsv, cs, cmp, distinct, headersize); - System.out.println("CsvExternalSort created " + l.size() + " tmp files"); +// System.out.println("CsvExternalSort created " + l.size() + " tmp files"); mergeSortedFiles(l, outputFile, cmp, cs, distinct, inputCsv, headersize); return outputFile; } diff --git a/src/main/java/com/sforce/dataset/util/DatasetAugmenter.java b/src/main/java/com/sforce/dataset/util/DatasetAugmenter.java index a5b4c52..83a2afe 100644 --- a/src/main/java/com/sforce/dataset/util/DatasetAugmenter.java +++ b/src/main/java/com/sforce/dataset/util/DatasetAugmenter.java @@ -79,14 +79,13 @@ public DatasetAugmenter(String username,String password, String token, String en */ @SuppressWarnings({ "rawtypes", "unchecked" }) - public static void augmentEM(String username, String password,String endpoint,String token, String sessionId) throws Exception + public static void augmentEM(PartnerConnection partnerConnection) throws Exception { - PartnerConnection partnerConnection = DatasetUtils.login(0,username, password, token, endpoint, sessionId); Map map = DatasetUtils.listPublicDataset(partnerConnection); System.out.println("\n"); if(map==null || map.size()==0) { - System.err.println("No dataset found in org"); + System.out.println("No dataset found in org"); return; } int cnt = 1; @@ -162,7 +161,7 @@ public static void augmentEM(String username, String password,String endpoint,St System.out.println("\n"); if(leftDims==null || leftDims.size()==0) { - System.err.println("No Dimensions found in Datasets {"+leftDataSet+"}"); + System.out.println("No Dimensions found in Datasets {"+leftDataSet+"}"); return; } cnt = 1; @@ -202,7 +201,7 @@ public static void augmentEM(String username, String password,String endpoint,St System.out.println("\n"); if(rightDims==null || rightDims.size()==0) { - System.err.println("No Dimensions found in Datasets {"+rightDataSet+"}"); + System.out.println("No Dimensions found in Datasets {"+rightDataSet+"}"); return; } cnt = 1; @@ -403,7 +402,7 @@ public static void downloadEMJson(Map resp, PartnerConnection connection) throws String reasonPhrase = emresponse1.getStatusLine().getReasonPhrase(); int statusCode = emresponse1.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { - System.err.println("Method failed: " + reasonPhrase); + System.out.println("Method failed: " + reasonPhrase); continue; } // System.out.println(String.format("statusCode: %d", statusCode)); diff --git a/src/main/java/com/sforce/dataset/util/DatasetDownloader.java b/src/main/java/com/sforce/dataset/util/DatasetDownloader.java index b0094b4..bafe28e 100644 --- a/src/main/java/com/sforce/dataset/util/DatasetDownloader.java +++ b/src/main/java/com/sforce/dataset/util/DatasetDownloader.java @@ -59,9 +59,8 @@ public class DatasetDownloader { * @throws Exception */ @SuppressWarnings({ 
"rawtypes", "unchecked" }) - public static boolean downloadEM(String EM_NAME, String username, String password,String endpoint,String token,String sessionId) throws Exception { + public static boolean downloadEM(String EM_NAME, PartnerConnection connection) throws Exception { - PartnerConnection connection = DatasetUtils.login(0, username, password, token, endpoint, sessionId); ConnectorConfig config = connection.getConfig(); String sessionID = config.getSessionId(); String _alias = null; @@ -113,7 +112,7 @@ public static boolean downloadEM(String EM_NAME, String username, String passwor { _alias = (String) resp.get("_alias"); Integer _createdDateTime = (Integer) resp.get("_createdDateTime"); - //System.err.println("_createdDateTime: "+ _createdDateTime); + //System.out.println("_createdDateTime: "+ _createdDateTime); if(_createdDateTime != null) { createdDateTime = new Date(1000L*_createdDateTime); @@ -145,7 +144,6 @@ public static boolean downloadEM(String EM_NAME, String username, String passwor URI listEMURI1 = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), url, null,null); HttpGet listEMPost1 = new HttpGet(listEMURI1); -// System.out.println("Downloading file {"+filename+"} from url {"+listEMURI1+"}"); System.out.println("Downloading file {"+filename+"}"); listEMPost1.setConfig(requestConfig); listEMPost1.addHeader("Authorization","OAuth "+sessionID); @@ -155,7 +153,7 @@ public static boolean downloadEM(String EM_NAME, String username, String passwor String reasonPhrase = emresponse1.getStatusLine().getReasonPhrase(); int statusCode = emresponse1.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { - System.err.println("Method failed: " + reasonPhrase); + System.out.println("Method failed: " + reasonPhrase); System.out.println(String.format("%s download failed: %d %s", filename,statusCode,reasonPhrase)); continue; } @@ -190,7 +188,7 @@ public static boolean downloadEM(String EM_NAME, String username, String passwor t.printStackTrace(); } - //System.err.println(emList); + //System.out.println(emList); } diff --git a/src/main/java/com/sforce/dataset/util/DatasetUtils.java b/src/main/java/com/sforce/dataset/util/DatasetUtils.java index 5fbcda7..6f232f8 100644 --- a/src/main/java/com/sforce/dataset/util/DatasetUtils.java +++ b/src/main/java/com/sforce/dataset/util/DatasetUtils.java @@ -169,7 +169,7 @@ public static Map listDataset(PartnerConnection connection) throws E t.printStackTrace(); } - //System.err.println(emList); + //System.out.println(emList); } @@ -240,7 +240,7 @@ public static boolean downloadEM(String EM_NAME, PartnerConnection connection) t { _alias = (String) resp.get("_alias"); Integer _createdDateTime = (Integer) resp.get("_createdDateTime"); - //System.err.println("_createdDateTime: "+ _createdDateTime); + //System.out.println("_createdDateTime: "+ _createdDateTime); if(_createdDateTime != null) { createdDateTime = new Date(1000L*_createdDateTime); @@ -278,7 +278,7 @@ public static boolean downloadEM(String EM_NAME, PartnerConnection connection) t String reasonPhrase = emresponse1.getStatusLine().getReasonPhrase(); int statusCode = emresponse1.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { - System.err.println("Method failed: " + reasonPhrase); + System.out.println("Method failed: " + reasonPhrase); continue; } System.out.println(String.format("statusCode: %d", statusCode)); @@ -308,7 +308,7 @@ public static boolean downloadEM(String EM_NAME, PartnerConnection connection) t t.printStackTrace(); } - 
//System.err.println(emList); + //System.out.println(emList); } @@ -357,6 +357,7 @@ public static PartnerConnection login(int retryCount,String username,String pass @SuppressWarnings("unused") GetUserInfoResult userInfo = connection.getUserInfo(); System.out.println("Service Endpoint: " + config.getServiceEndpoint()); + System.out.println("Session Id: " + config.getSessionId()); // System.out.println("User Id: " + userInfo.getUserName()); // System.out.println("User Email: " + userInfo.getUserEmail()); System.out.println(); @@ -364,7 +365,7 @@ public static PartnerConnection login(int retryCount,String username,String pass } return connection; }catch (ConnectionException e) { - System.err.println(e.getClass().getCanonicalName()); + System.out.println(e.getClass().getCanonicalName()); e.printStackTrace(); boolean retryError = true; if(e instanceof LoginFault || sessionId != null) @@ -425,7 +426,7 @@ public int compare( Map.Entry o1, Map.Entry o2 ) } - public static String readInputFromConsole(String prompt) throws IOException { + public static String readInputFromConsole(String prompt) { String line = null; Console c = System.console(); if (c != null) { @@ -443,7 +444,7 @@ public static String readInputFromConsole(String prompt) throws IOException { return line; } - public static String readPasswordFromConsole(String prompt) throws IOException { + public static String readPasswordFromConsole(String prompt) { String line = null; Console c = System.console(); if (c != null) { diff --git a/src/main/java/com/sforce/dataset/util/SfdcExtracter.java b/src/main/java/com/sforce/dataset/util/SfdcExtracter.java index 54b6872..cf004f5 100644 --- a/src/main/java/com/sforce/dataset/util/SfdcExtracter.java +++ b/src/main/java/com/sforce/dataset/util/SfdcExtracter.java @@ -59,27 +59,26 @@ public class SfdcExtracter { public static final NumberFormat nf = NumberFormat.getIntegerInstance(); @SuppressWarnings("rawtypes") - public static void extract(String rootSObject,String datasetAlias, String username, String password,String token, String endpoint, String sessionId, int rowLimit) throws Exception + public static void extract(String rootSObject,String datasetAlias, PartnerConnection partnerConnection, int rowLimit) throws Exception { if(SfdcUtils.excludedObjects.contains(rootSObject)) { - System.err.println("Error: Object {"+rootSObject+"} not supported"); + System.out.println("Error: Object {"+rootSObject+"} not supported"); return; } Map selectedObjectList = new LinkedHashMap(); - PartnerConnection partnerConnection = DatasetUtils.login(0,username, password, token, endpoint, sessionId); Map objectList = SfdcUtils.getObjectList(partnerConnection, Pattern.compile("\\b"+rootSObject+"\\b"), false); System.out.println("\n"); if(objectList==null || objectList.size()==0) { - System.err.println("Error: Object {"+rootSObject+"} not found"); + System.out.println("Error: Object {"+rootSObject+"} not found"); return; } if( objectList.size()>1) { - System.err.println("Error: More than one Object found {"+objectList.keySet()+"}"); + System.out.println("Error: More than one Object found {"+objectList.keySet()+"}"); return; } selectedObjectList.putAll(objectList); @@ -191,7 +190,7 @@ public static void extract(String rootSObject,String datasetAlias, String userna canWrite = true; }catch(Throwable t) { -// System.err.println(t.getMessage()); +// System.out.println(t.getMessage()); canWrite = false; DatasetUtils.readInputFromConsole("file {"+csvFile+"} is open in excel please close it first, press enter when done: 
"); } @@ -235,7 +234,7 @@ public static void extract(String rootSObject,String datasetAlias, String userna } }else { - System.err.println("The Dataflow file is > 100K consider removing fields and upload the file manually"); + System.out.println("The Dataflow file is > 100K consider removing fields and upload the file manually"); } } @@ -252,7 +251,7 @@ public static LinkedHashMap createWF(Map selectedObjectList, Part { if(SfdcUtils.excludedObjects.contains(selectedObjectList.get(alias))) { - System.err.println("Skipping object {"+selectedObjectList.get(alias)+"}"); + System.out.println("Skipping object {"+selectedObjectList.get(alias)+"}"); continue; } @@ -305,7 +304,7 @@ public static LinkedHashMap createWF(Map selectedObjectList, Part { if(labels.containsKey(fld.getLabel())) { - System.err.println("field {"+fld.getName()+"} has duplicate label matching field {"+labels.get(fld.getLabel())+"}"); + System.out.println("field {"+fld.getName()+"} has duplicate label matching field {"+labels.get(fld.getLabel())+"}"); // continue; } labels.put(fld.getLabel(), fld.getName()); @@ -315,7 +314,7 @@ public static LinkedHashMap createWF(Map selectedObjectList, Part flds.add(temp); }else { - System.err.println("user has skipped field:"+selectedObjectList.get(alias)+"."+fld.getName()); + System.out.println("user has skipped field:"+selectedObjectList.get(alias)+"."+fld.getName()); } } // SfdcUtils.read(partnerConnection, selectedObjectList.get(alias), fields, 1000,dataDir); diff --git a/src/main/java/com/sforce/dataset/util/SfdcUtils.java b/src/main/java/com/sforce/dataset/util/SfdcUtils.java index a95335c..f090ef7 100644 --- a/src/main/java/com/sforce/dataset/util/SfdcUtils.java +++ b/src/main/java/com/sforce/dataset/util/SfdcUtils.java @@ -155,7 +155,7 @@ public static Map getObjectList(PartnerConnection partnerConnecti if(excludedObjects.contains(sObjectResult.getName())) { -// System.err.println("Skipping object {"+sObjectResult.getName()+"}"); +// System.out.println("Skipping object {"+sObjectResult.getName()+"}"); continue; } @@ -229,7 +229,7 @@ public static Map getRelatedObjectList( // if(excludedObjects.contains(relatedSObjectType)) { - System.err.println("Skipping object {"+relatedSObjectType+"}"); + System.out.println("Skipping object {"+relatedSObjectType+"}"); continue; } @@ -318,7 +318,7 @@ public static List getFieldList if(labels.containsKey(field.getLabel())) { - System.err.println("{"+field.getName()+"} has duplicate label matching field {"+labels.get(field.getLabel()).getName()+"}"); + System.out.println("{"+field.getName()+"} has duplicate label matching field {"+labels.get(field.getLabel()).getName()+"}"); // continue; } @@ -355,7 +355,7 @@ public static List getFieldList // Set the Business Name (Name used in UI) if(bField==null) { - System.err.println("field: "+ field); + System.out.println("field: "+ field); } bField.setLabel(field.getLabel()); @@ -393,14 +393,13 @@ public static List getFieldList * @param pagesize * @param dataDir * @return - * @throws ReflectiveOperationException * @throws ConnectionException * @throws UnsupportedEncodingException */ public static boolean read(PartnerConnection partnerConnection,String recordInfo, List fieldList, long pagesize, File dataDir) throws - ReflectiveOperationException, ConnectionException, UnsupportedEncodingException, IOException + ConnectionException, UnsupportedEncodingException, IOException { // These debug statements should help you understand what is being // passed back to your calls. 
You can comment these out if you like @@ -431,7 +430,7 @@ public static boolean read(PartnerConnection partnerConnection,String recordInfo bos = null; }catch(Throwable t) { -// System.err.println(t.getMessage()); +// System.out.println(t.getMessage()); canWrite = false; DatasetUtils.readInputFromConsole("file {"+csvFile+"} is open in excel please close it first, press enter when done: "); } @@ -661,7 +660,7 @@ private static String generateSOQL(String recordInfo, if((soql.length()+(", " + field.name).length())>(20000-varLen)) { - System.err.println("Too many fields in object {"+topLevelSObjectName+"} truncating query to 20,000 chars"); + System.out.println("Too many fields in object {"+topLevelSObjectName+"} truncating query to 20,000 chars"); break; } diff --git a/src/main/java/com/sforce/dataset/util/XmdUploader.java b/src/main/java/com/sforce/dataset/util/XmdUploader.java index 616ee57..8f1b2e6 100644 --- a/src/main/java/com/sforce/dataset/util/XmdUploader.java +++ b/src/main/java/com/sforce/dataset/util/XmdUploader.java @@ -64,8 +64,7 @@ public class XmdUploader { * @throws Exception */ @SuppressWarnings({ "rawtypes", "unchecked" }) - public static boolean uploadXmd(String userXmdFile, String datasetAlias, - String username, String password, String endpoint, String token, String sessionId) throws URISyntaxException, ClientProtocolException, IOException, ConnectionException + public static boolean uploadXmd(String userXmdFile, String datasetAlias, PartnerConnection connection) throws URISyntaxException, ClientProtocolException, IOException, ConnectionException { if(datasetAlias==null||datasetAlias.trim().isEmpty()) { @@ -95,7 +94,6 @@ public static boolean uploadXmd(String userXmdFile, String datasetAlias, // t.printStackTrace(); } - PartnerConnection connection = DatasetUtils.login(0,username, password, token, endpoint, sessionId); System.out.println(); @@ -145,7 +143,7 @@ public static boolean uploadXmd(String userXmdFile, String datasetAlias, edgemartId = (String) resp.get("_uid"); Integer _createdDateTime = (Integer) resp.get("_createdDateTime"); - //System.err.println("_createdDateTime: "+ _createdDateTime); + //System.out.println("_createdDateTime: "+ _createdDateTime); if(_createdDateTime != null) { createdDateTime = new Date(1000L*_createdDateTime); @@ -179,7 +177,7 @@ public static boolean uploadXmd(String userXmdFile, String datasetAlias, t.printStackTrace(); } - //System.err.println(emList); + //System.out.println(emList); } if(_alias != null && _alias.equals(datasetAlias)) @@ -187,7 +185,7 @@ public static boolean uploadXmd(String userXmdFile, String datasetAlias, System.out.println("Found existing Dataset {"+_alias+"} version {"+versionID+"}, created on {"+createdDateTime+"}, in folder {"+folderID+"}"); }else { - System.err.println("Dataset {"+_alias+"} not found"); + System.out.println("Dataset {"+_alias+"} not found"); return false; } @@ -237,14 +235,14 @@ public static boolean uploadXmd(String userXmdFile, String datasetAlias, return true; }else { - System.err.println("User XMD uploaded to Dataset {"+_alias+"} failed"); - System.err.println(resp); + System.out.println("User XMD uploaded to Dataset {"+_alias+"} failed"); + System.out.println(resp); return false; } }else { - System.err.println("User XMD uploaded to Dataset {"+_alias+"} failed"); - System.err.println(res); + System.out.println("User XMD uploaded to Dataset {"+_alias+"} failed"); + System.out.println(res); return false; } // if(name_param.equals("name")) @@ -253,8 +251,8 @@ public static boolean 
uploadXmd(String userXmdFile, String datasetAlias, // System.out.println("EM {"+emName+"} : is being updated"); }else { - System.err.println("User XMD uploaded to Dataset {"+_alias+"} failed"); - System.err.println(responseString); + System.out.println("User XMD uploaded to Dataset {"+_alias+"} failed"); + System.out.println(responseString); return false; } } catch (Throwable t) { @@ -263,54 +261,6 @@ public static boolean uploadXmd(String userXmdFile, String datasetAlias, } return false; } - - /* - public static PartnerConnection login(final String username, - String password, String token, String endpoint) throws Exception { - - if (username == null || username.isEmpty()) { - throw new Exception("username is required"); - } - - if (password == null || password.isEmpty()) { - throw new Exception("password is required"); - } - - if (endpoint == null || endpoint.isEmpty()) { - throw new Exception("endpoint is required"); - } - - if (token == null) - token = ""; - - password = password + token; - - try { - ConnectorConfig config = new ConnectorConfig(); - config.setUsername(username); - config.setPassword(password); - config.setAuthEndpoint(endpoint); - config.setSessionRenewer(new SessionRenewerImpl(username, password, null, endpoint)); - - PartnerConnection connection = new PartnerConnection(config); - GetUserInfoResult userInfo = connection.getUserInfo(); - - System.out.println("\nLogging in ...\n"); - //System.out.println("UserID: " + userInfo.getUserId()); - //System.out.println("User Full Name: " + userInfo.getUserFullName()); - System.out.println("User Email: " + userInfo.getUserEmail()); - //System.out.println("SessionID: " + config.getSessionId()); - //System.out.println("Auth End Point: " + config.getAuthEndpoint()); - //System.out.println("Service End Point: " + config.getServiceEndpoint()); - - return connection; - - } catch (ConnectionException e) { - e.printStackTrace(); - throw new Exception(e.getLocalizedMessage()); - } - } - */ @SuppressWarnings("rawtypes") static Map getAlias(List emarts, String alias) diff --git a/src/main/resources/appIcon.icns b/src/main/resources/appIcon.icns new file mode 100644 index 0000000..75075fd Binary files /dev/null and b/src/main/resources/appIcon.icns differ
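Taken together, the listener classes added by this patch give the loader a daemon mode. A hypothetical caller (this wiring is not part of the patch; the dataset name, app, and paths are illustrative) would register a listener once and let the polling thread pick up file drops:

import com.sforce.dataset.loader.file.listener.FileListener;
import com.sforce.dataset.loader.file.listener.FileListenerUtil;
import com.sforce.soap.partner.PartnerConnection;

public class ListenerWiringSketch {
    public static void start(PartnerConnection partnerConnection) throws Exception {
        FileListener listener = new FileListener();
        listener.setDataset("mydataset");
        listener.setApp("MyApp");
        listener.setInputFileDirectory("incoming");    // the setter ignores a non-existent directory
        listener.setInputFilePattern("mydataset.csv"); // matched exactly (case-insensitive), not a glob
        listener.setOperation("APPEND");
        // Persists the listener to .sfdc_file_listeners.json and starts a daemon
        // polling thread, so the caller must keep the JVM alive for uploads to run.
        FileListenerUtil.addAndStartListener(listener, partnerConnection);
    }
}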