From b535c9d76063a2ada56414b2eef9c69b61ffb8ea Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Wed, 8 Jan 2020 16:24:58 -0500
Subject: [PATCH 1/2] formatting

---
 .../sead/uploader/dataverse/DVUploader.java | 628 +++++++++---------
 1 file changed, 314 insertions(+), 314 deletions(-)

diff --git a/src/main/java/org/sead/uploader/dataverse/DVUploader.java b/src/main/java/org/sead/uploader/dataverse/DVUploader.java
index caceb84..b2e8810 100644
--- a/src/main/java/org/sead/uploader/dataverse/DVUploader.java
+++ b/src/main/java/org/sead/uploader/dataverse/DVUploader.java
@@ -64,96 +64,96 @@
  */
 public class DVUploader extends AbstractUploader {
-	private static String apiKey = null;
-	private static String datasetPID = null;
-	private static boolean oldServer = false;
-	private static int maxWaitTime = 60;
-	private static boolean recurse = false;
-	private static boolean directUpload = false;
-
-	public static void main(String args[]) throws Exception {
-
-		setUploader(new DVUploader());
-		uploader.createLogFile("DVUploaderLog_");
-		uploader.setSpaceType("Dataverse");
-		println("\n----------------------------------------------------------------------------------\n");
-		println("TTTTT DDD L Texas");
-		println(" T D D L Digital");
-		println(" T DDD LLL Library");
-		println("");
-		println("DVUploader - a command-line application to upload files to any Dataverse Dataset");
-		println("Developed for the Dataverse Community");
-		println("\n----------------------------------------------------------------------------------\n");
-		println("\n***Parsing arguments:***\n");
-		uploader.parseArgs(args);
-		if (server == null || datasetPID == null || apiKey == null || requests.isEmpty()) {
-			println("\n***Required arguments not found.***");
-			usage();
-		} else {
-			println("\n***Starting to Process Upload Requests:***\n");
-			uploader.processRequests();
-		}
-		println("\n***Execution Complete.***");
-	}
-
-	private static void usage() {
-		println("\nUsage:");
-		println(" java -jar DVUploader-1.0.1.jar -server=<serverURL> -key=<apiKey> -did=<dataset DOI> <file or directory list>");
-
-		println("\n where:");
-		println(" <serverURL> = the URL of the server to upload to, e.g. https://datverse.tdl.org");
-		println(" <apiKey> = your personal apikey, created in the dataverse server at <serverURL>");
-		println(" <dataset DOI> = the Dataset DOI you are uploading to, e.g. doi:10.5072/A1B2C3");
-		println(" <file or directory list> = a space separated list of files to upload or directory name(s) where the files to upload are");
-		println("\n Optional Arguments:");
-		println(" -directupload - Use Dataverse's direct upload capability to send files directly to their final location (only works if this is enabled on the server)");
-
-		println(" -listonly - Scan the Dataset and local files and list what would be uploaded (does not upload with this flag)");
-		println(" -limit=<n> - Specify a maximum number of files to upload per invocation.");
-		println(" -verify - Check both the file name and checksum in comparing with current Dataset entries.");
-		println(" -skip=<n> - a number of files to skip before starting processing (saves time when you know the first n files have been uploaded before)");
-		println(" -recurse - recurse into subdirectories");
-		println(" -maxlockwait - the maximum time to wait (in seconds) for a Dataset lock (i.e. while the last file is ingested) to expire (default 60 seconds)");
-		println("");
-
-	}
-
-	@Override
-	public boolean parseCustomArg(String arg) {
-
-		if (arg.startsWith("-key")) {
-			apiKey = arg.substring(arg.indexOf(argSeparator) + 1);
-			println("Using apiKey: " + apiKey);
-			return true;
-		} else if (arg.startsWith("-did")) {
-			datasetPID = arg.substring(arg.indexOf(argSeparator) + 1);
-			println("Adding content to: " + datasetPID);
-			return true;
-		} else if (arg.equals("-recurse")) {
-			recurse = true;
-			println("Will recurse into subdirectories");
-			return true;
-		} else if (arg.equals("-directupload")) {
-			directUpload = true;
-			println("Will use direct upload of files (if configured on this server)");
-			return true;
-		} else if (arg.startsWith("-maxlockwait")) {
-			try {
-				maxWaitTime = Integer.parseInt(arg.substring(arg.indexOf(argSeparator) + 1));
-				println("Setting max wait time for locks to :" + maxWaitTime + " seconds");
-			} catch (NumberFormatException nfe) {
-				println("Unable to parse max wait time for locks, using default (60 seconds)");
-			}
-			return true;
-		}
-		return false;
-	}
-
-	@Override
-	public HttpClientContext authenticate() {
-		return new HttpClientContext();
-	}
-
+    private static String apiKey = null;
+    private static String datasetPID = null;
+    private static boolean oldServer = false;
+    private static int maxWaitTime = 60;
+    private static boolean recurse = false;
+    private static boolean directUpload = false;
+
+    public static void main(String args[]) throws Exception {
+
+        setUploader(new DVUploader());
+        uploader.createLogFile("DVUploaderLog_");
+        uploader.setSpaceType("Dataverse");
+        println("\n----------------------------------------------------------------------------------\n");
+        println("TTTTT DDD L Texas");
+        println(" T D D L Digital");
+        println(" T DDD LLL Library");
+        println("");
+        println("DVUploader - a command-line application to upload files to any Dataverse Dataset");
+        println("Developed for the Dataverse Community");
+        println("\n----------------------------------------------------------------------------------\n");
+        println("\n***Parsing arguments:***\n");
+        uploader.parseArgs(args);
+        if (server == null || datasetPID == null || apiKey == null || requests.isEmpty()) {
+            println("\n***Required arguments not found.***");
+            usage();
+        } else {
+            println("\n***Starting to Process Upload Requests:***\n");
+            uploader.processRequests();
+        }
+        println("\n***Execution Complete.***");
+    }
+
+    private static void usage() {
+        println("\nUsage:");
+        println(" java -jar DVUploader-1.0.1.jar -server=<serverURL> -key=<apiKey> -did=<dataset DOI> <file or directory list>");
+
+        println("\n where:");
+        println(" <serverURL> = the URL of the server to upload to, e.g. https://datverse.tdl.org");
+        println(" <apiKey> = your personal apikey, created in the dataverse server at <serverURL>");
+        println(" <dataset DOI> = the Dataset DOI you are uploading to, e.g. doi:10.5072/A1B2C3");
+        println(" <file or directory list> = a space separated list of files to upload or directory name(s) where the files to upload are");
+        println("\n Optional Arguments:");
+        println(" -directupload - Use Dataverse's direct upload capability to send files directly to their final location (only works if this is enabled on the server)");
+
+        println(" -listonly - Scan the Dataset and local files and list what would be uploaded (does not upload with this flag)");
+        println(" -limit=<n> - Specify a maximum number of files to upload per invocation.");
+        println(" -verify - Check both the file name and checksum in comparing with current Dataset entries.");
+        println(" -skip=<n> - a number of files to skip before starting processing (saves time when you know the first n files have been uploaded before)");
+        println(" -recurse - recurse into subdirectories");
+        println(" -maxlockwait - the maximum time to wait (in seconds) for a Dataset lock (i.e. while the last file is ingested) to expire (default 60 seconds)");
+        println("");
+
+    }
+
+    @Override
+    public boolean parseCustomArg(String arg) {
+
+        if (arg.startsWith("-key")) {
+            apiKey = arg.substring(arg.indexOf(argSeparator) + 1);
+            println("Using apiKey: " + apiKey);
+            return true;
+        } else if (arg.startsWith("-did")) {
+            datasetPID = arg.substring(arg.indexOf(argSeparator) + 1);
+            println("Adding content to: " + datasetPID);
+            return true;
+        } else if (arg.equals("-recurse")) {
+            recurse = true;
+            println("Will recurse into subdirectories");
+            return true;
+        } else if (arg.equals("-directupload")) {
+            directUpload = true;
+            println("Will use direct upload of files (if configured on this server)");
+            return true;
+        } else if (arg.startsWith("-maxlockwait")) {
+            try {
+                maxWaitTime = Integer.parseInt(arg.substring(arg.indexOf(argSeparator) + 1));
+                println("Setting max wait time for locks to :" + maxWaitTime + " seconds");
+            } catch (NumberFormatException nfe) {
+                println("Unable to parse max wait time for locks, using default (60 seconds)");
+            }
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public HttpClientContext authenticate() {
+        return new HttpClientContext();
+    }
+
     public CloseableHttpClient getSharedHttpClient() {
         if (httpclient == null) {
             // use the TrustSelfSignedStrategy to allow Self Signed Certificates
@@ -173,8 +173,8 @@ public CloseableHttpClient getSharedHttpClient() {
                     .custom()
                     .setSSLSocketFactory(connectionFactory)
                     .setUserAgent("curl/7.61.1")
-					.setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build())
-					.build();
+                    .setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build())
+                    .build();
             } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) {
                 Logger.getLogger(DVUploader.class.getName()).log(Level.SEVERE, null, ex);
             }
@@ -182,192 +182,192 @@ public CloseableHttpClient getSharedHttpClient() {
         return httpclient;
     }
 
-	HashMap<String, JSONObject> existingItems = null;
-	boolean datasetMDRetrieved = false;
-
-	CloseableHttpClient httpclient = null;
-
-	/**
-	 *
-	 * @param path - the current path to the item
-	 * @param item - the local item to find
-	 * @return
-	 */
-	@Override
-	public String itemExists(String path, Resource item) {
-		String tagId = null;
-
-		String relPath = path;
-		if (importRO) {
-			// remove the '//data' prefix on imported paths to make
-			// it match the file upload paths
-			relPath = relPath.substring(relPath.substring(1).indexOf("/") + 1);
-			relPath = relPath.substring(relPath.substring(1).indexOf("/") + 1);
-		}
-
-		String sourcepath = item.getName();
-
-		// One-time: get metadata for dataset to see if it exists and what files it
-		// contains
-		if (!datasetMDRetrieved) {
-			httpclient = getSharedHttpClient();
-
-			try {
-				// This api call will find the dataset and, if found, retrieve the list of files
-				// in the current version (the only one we can add to)
-				// http://$SERVER/api/datasets/$id/versions/$versionId/files?key=$apiKey
-
-				String serviceUrl = server + "/api/datasets/:persistentId/versions/:latest/files?key=" + apiKey
-						+ "&persistentId=" + datasetPID;
-				HttpGet httpget = new HttpGet(serviceUrl);
-
-				CloseableHttpResponse response = httpclient.execute(httpget, getLocalContext());
-				JSONArray datafileList = null;
-				try {
-					switch (response.getStatusLine().getStatusCode()) {
-					case 200:
-						HttpEntity resEntity = response.getEntity();
-						if (resEntity != null) {
-							String res = EntityUtils.toString(resEntity);
-							datafileList = (new JSONObject(res)).getJSONArray("data");
-							existingItems = new HashMap<>();
-						}
-						break;
-					case 404:
-						println("Dataset Not Found: " + datasetPID);
-						break;
-					default:
-						// Report unexpected errors and assume dataset doesn't exist
-						println("Error response when checking for existing item at " + sourcepath + " : "
-								+ response.getStatusLine().getReasonPhrase());
-						break;
-					}
-				} finally {
-					response.close();
-				}
-				boolean convertedFiles = false;
-				if (datafileList != null) {
-					for (int i = 0; i < datafileList.length(); i++) {
-						JSONObject df = datafileList.getJSONObject(i).getJSONObject("dataFile");
-						if (df.has("originalFileFormat")
-								&& (!df.getString("contentType").equals(df.getString("originalFileFormat")))) {
-							println("The file named " + df.getString("filename")
-									+ " on the server was created by Dataverse's ingest process from an original uploaded file");
-							convertedFiles = true;
-						}
-						existingItems.put(df.getString("filename"), df.getJSONObject("checksum"));
-					}
-					if (convertedFiles) {
-						println("***** DVUploader cannot detect attempts to re-upload files to Dataverse when Dataverse has created a derived file during ingest such as those listed above.");
-						println("***** You may see upload errors for any file where ingest would re-create one of these files.");
-					}
-
-				}
-
-			} catch (IOException e) {
-				println("Error processing check on " + sourcepath + " : " + e.getMessage());
-			} finally {
-				datasetMDRetrieved = true;
-			}
-
-		}
-
-		if (relPath.equals("/")) {
-			// existingItems will exist if we found the dataset
-			if (existingItems != null) {
-				if (item.isDirectory()) {
-					// Looking for the dataset itself
-					tagId = datasetPID;
-				} else {
-					// A single file on the command line
-					if (existingItems.containsKey(sourcepath)) {
-						JSONObject checksum = existingItems.get(sourcepath);
-						tagId = checksum.getString("type") + ":" + checksum.getString("value");
-					}
-
-				}
-			}
-		} else {
-			// Looking for an item in the local directory structure
-			if (item.isDirectory()) {
-				// Directories aren't yet represented in Dataverse
-				return null;
-			} else {
-				// A file within the local directory
-				if (existingItems.containsKey(sourcepath)) {
-					JSONObject checksum = existingItems.get(sourcepath);
-					tagId = checksum.getString("type") + ":" + checksum.getString("value");
-				}
-			}
-		}
-		if (verify && (tagId != null) && (!item.isDirectory())) {
-			tagId = verifyDataByHash(tagId, path, item);
-		}
-		return (tagId);
-	}
-
-	static HashMap<String, String> hashIssues = new HashMap<String, String>();
-
-	@Override
-	protected String verifyDataByHash(String tagId, String path, Resource item) {
-		JSONObject checksum = existingItems.get(item.getName());
-		if (!checksum.getString("value").equals(item.getHash(checksum.getString("type")))) {
-			hashIssues.put(path + item.getName(), "!!!: A different version of this item exists with ID: " + tagId);
-			return null;
-		} // else it matches!
-		return tagId;
-	}
-
-	@Override
-	public void addDatasetMetadata(String newSubject, String type, JSONObject relationships) {
-		// TBD
-		// println("DVUploader does not yet add metadata to a Dataset");
-	}
-
-	@Override
-	protected void postProcessChildren() {
-		// TBD
-		// println("DVUploader does not need to post-process after files are uploaded");
-	}
-
-	@Override
-	protected void postProcessCollection() {
-		// TBD
-		// println("DVUploader does not yet support creation of datasets or uploading
-		// sub-directories and their contents");
-
-	}
-
-	@Override
-	protected String preprocessCollection(Resource dir, String path, String parentId, String collectionId)
-			throws UploaderException {
-		if (!path.equals("/" + dir.getName().trim()) && !recurse) {
-			throw new UploaderException(" DVUploader is not configured to recurse into sub-directories.");
-		}
-		return null;
-	}
-
-	@Override
-	protected String postProcessChild(Resource dir, String path, String parentId, String collectionId) {
-		// TBD
-		// println("DVUploader does not need to post-process newly created items");
-		return null;
-	}
-
-	@Override
-	protected void postProcessDatafile(String newUri, String existingUri, String collectionId, Resource file,
-			Resource dir) throws ClientProtocolException, IOException {
-		// TBD
-		// println("DVUploader does not need to post-process data files");
-
-	}
-
-	@Override
-	protected HttpClientContext reauthenticate(long startTime) {
-		// TBD
-		// println("DVUploader does not need to reauthenticate");
-		return getLocalContext();
-	}
+    HashMap<String, JSONObject> existingItems = null;
+    boolean datasetMDRetrieved = false;
+
+    CloseableHttpClient httpclient = null;
+
+    /**
+     *
+     * @param path - the current path to the item
+     * @param item - the local item to find
+     * @return
+     */
+    @Override
+    public String itemExists(String path, Resource item) {
+        String tagId = null;
+
+        String relPath = path;
+        if (importRO) {
+            // remove the '//data' prefix on imported paths to make
+            // it match the file upload paths
+            relPath = relPath.substring(relPath.substring(1).indexOf("/") + 1);
+            relPath = relPath.substring(relPath.substring(1).indexOf("/") + 1);
+        }
+
+        String sourcepath = item.getName();
+
+        // One-time: get metadata for dataset to see if it exists and what files it
+        // contains
+        if (!datasetMDRetrieved) {
+            httpclient = getSharedHttpClient();
+
+            try {
+                // This api call will find the dataset and, if found, retrieve the list of files
+                // in the current version (the only one we can add to)
+                // http://$SERVER/api/datasets/$id/versions/$versionId/files?key=$apiKey
+
+                String serviceUrl = server + "/api/datasets/:persistentId/versions/:latest/files?key=" + apiKey
+                        + "&persistentId=" + datasetPID;
+                HttpGet httpget = new HttpGet(serviceUrl);
+
+                CloseableHttpResponse response = httpclient.execute(httpget, getLocalContext());
+                JSONArray datafileList = null;
+                try {
+                    switch (response.getStatusLine().getStatusCode()) {
+                        case 200:
+                            HttpEntity resEntity = response.getEntity();
+                            if (resEntity != null) {
+                                String res = EntityUtils.toString(resEntity);
+                                datafileList = (new JSONObject(res)).getJSONArray("data");
+                                existingItems = new HashMap<>();
+                            }
+                            break;
+                        case 404:
+                            println("Dataset Not Found: " + datasetPID);
+                            break;
+                        default:
+                            // Report unexpected errors and assume dataset doesn't exist
+                            println("Error response when checking for existing item at " + sourcepath + " : "
+                                    + response.getStatusLine().getReasonPhrase());
+                            break;
+                    }
+                } finally {
+                    response.close();
+                }
+                boolean convertedFiles = false;
+                if (datafileList != null) {
+                    for (int i = 0; i < datafileList.length(); i++) {
+                        JSONObject df = datafileList.getJSONObject(i).getJSONObject("dataFile");
+                        if (df.has("originalFileFormat")
+                                && (!df.getString("contentType").equals(df.getString("originalFileFormat")))) {
+                            println("The file named " + df.getString("filename")
+                                    + " on the server was created by Dataverse's ingest process from an original uploaded file");
+                            convertedFiles = true;
+                        }
+                        existingItems.put(df.getString("filename"), df.getJSONObject("checksum"));
+                    }
+                    if (convertedFiles) {
+                        println("***** DVUploader cannot detect attempts to re-upload files to Dataverse when Dataverse has created a derived file during ingest such as those listed above.");
+                        println("***** You may see upload errors for any file where ingest would re-create one of these files.");
+                    }
+
+                }
+
+            } catch (IOException e) {
+                println("Error processing check on " + sourcepath + " : " + e.getMessage());
+            } finally {
+                datasetMDRetrieved = true;
+            }
+
+        }
+
+        if (relPath.equals("/")) {
+            // existingItems will exist if we found the dataset
+            if (existingItems != null) {
+                if (item.isDirectory()) {
+                    // Looking for the dataset itself
+                    tagId = datasetPID;
+                } else {
+                    // A single file on the command line
+                    if (existingItems.containsKey(sourcepath)) {
+                        JSONObject checksum = existingItems.get(sourcepath);
+                        tagId = checksum.getString("type") + ":" + checksum.getString("value");
+                    }
+
+                }
+            }
+        } else {
+            // Looking for an item in the local directory structure
+            if (item.isDirectory()) {
+                // Directories aren't yet represented in Dataverse
+                return null;
+            } else {
+                // A file within the local directory
+                if (existingItems.containsKey(sourcepath)) {
+                    JSONObject checksum = existingItems.get(sourcepath);
+                    tagId = checksum.getString("type") + ":" + checksum.getString("value");
+                }
+            }
+        }
+        if (verify && (tagId != null) && (!item.isDirectory())) {
+            tagId = verifyDataByHash(tagId, path, item);
+        }
+        return (tagId);
+    }
+
+    static HashMap<String, String> hashIssues = new HashMap<String, String>();
+
+    @Override
+    protected String verifyDataByHash(String tagId, String path, Resource item) {
+        JSONObject checksum = existingItems.get(item.getName());
+        if (!checksum.getString("value").equals(item.getHash(checksum.getString("type")))) {
+            hashIssues.put(path + item.getName(), "!!!: A different version of this item exists with ID: " + tagId);
+            return null;
+        } // else it matches!
+        return tagId;
+    }
+
+    @Override
+    public void addDatasetMetadata(String newSubject, String type, JSONObject relationships) {
+        // TBD
+        // println("DVUploader does not yet add metadata to a Dataset");
+    }
+
+    @Override
+    protected void postProcessChildren() {
+        // TBD
+        // println("DVUploader does not need to post-process after files are uploaded");
+    }
+
+    @Override
+    protected void postProcessCollection() {
+        // TBD
+        // println("DVUploader does not yet support creation of datasets or uploading
+        // sub-directories and their contents");
+
+    }
+
+    @Override
+    protected String preprocessCollection(Resource dir, String path, String parentId, String collectionId)
+            throws UploaderException {
+        if (!path.equals("/" + dir.getName().trim()) && !recurse) {
+            throw new UploaderException(" DVUploader is not configured to recurse into sub-directories.");
+        }
+        return null;
+    }
+
+    @Override
+    protected String postProcessChild(Resource dir, String path, String parentId, String collectionId) {
+        // TBD
+        // println("DVUploader does not need to post-process newly created items");
+        return null;
+    }
+
+    @Override
+    protected void postProcessDatafile(String newUri, String existingUri, String collectionId, Resource file,
+            Resource dir) throws ClientProtocolException, IOException {
+        // TBD
+        // println("DVUploader does not need to post-process data files");
+
+    }
+
+    @Override
+    protected HttpClientContext reauthenticate(long startTime) {
+        // TBD
+        // println("DVUploader does not need to reauthenticate");
+        return getLocalContext();
+    }
 
     @Override
     protected String uploadDatafile(Resource file, String path) {
@@ -643,40 +643,40 @@ protected String uploadDatafile(Resource file, String path) {
 
     }
 
-	private boolean isLocked() {
-		if (httpclient == null) {
-			httpclient = getSharedHttpClient();
-		}
-		try {
-			String urlString = server + "/api/datasets/:persistentId/locks";
-			urlString = urlString + "?persistentId=" + datasetPID + "&key=" + apiKey;
-			HttpGet httpget = new HttpGet(urlString);
-
-			CloseableHttpResponse response = httpclient.execute(httpget, getLocalContext());
-			try {
-				if (response.getStatusLine().getStatusCode() == 200) {
-					HttpEntity resEntity = response.getEntity();
-					if (resEntity != null) {
-						String res = EntityUtils.toString(resEntity);
-						boolean locked = (new JSONObject(res)).getJSONArray("data").length() > 0;
-						if (locked) {
-							println("Dataset locked - waiting...");
-						}
-						return locked;
-					}
-				} else {
-					oldServer = true;
-					TimeUnit.SECONDS.sleep(1);
-					return false;
-				}
-			} catch (InterruptedException e) {
-				e.printStackTrace();
-			} finally {
-				EntityUtils.consumeQuietly(response.getEntity());
-			}
-		} catch (IOException e) {
-			println(e.getMessage());
-		}
-		return false;
-	}
+    private boolean isLocked() {
+        if (httpclient == null) {
+            httpclient = getSharedHttpClient();
+        }
+        try {
+            String urlString = server + "/api/datasets/:persistentId/locks";
+            urlString = urlString + "?persistentId=" + datasetPID + "&key=" + apiKey;
+            HttpGet httpget = new HttpGet(urlString);
+
+            CloseableHttpResponse response = httpclient.execute(httpget, getLocalContext());
+            try {
+                if (response.getStatusLine().getStatusCode() == 200) {
+                    HttpEntity resEntity = response.getEntity();
+                    if (resEntity != null) {
+                        String res = EntityUtils.toString(resEntity);
+                        boolean locked = (new JSONObject(res)).getJSONArray("data").length() > 0;
+                        if (locked) {
+                            println("Dataset locked - waiting...");
+                        }
+                        return locked;
+                    }
+                } else {
+                    oldServer = true;
+                    TimeUnit.SECONDS.sleep(1);
+                    return false;
+                }
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            } finally {
+                EntityUtils.consumeQuietly(response.getEntity());
+            }
+        } catch (IOException e) {
+            println(e.getMessage());
+        }
+        return false;
+    }
 }
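For reference, the command-line interface assembled in usage() and parseCustomArg() above can be driven as in the following minimal sketch. All argument values (the server URL, API key, dataset DOI, and the local directory name "mydata") are placeholders for illustration, not values taken from the patch:

    // Hypothetical driver; every argument value below is a placeholder.
    public class DVUploaderExample {
        public static void main(String[] args) throws Exception {
            DVUploader.main(new String[] {
                    "-server=https://demo.dataverse.org", // target Dataverse installation
                    "-key=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", // personal API token
                    "-did=doi:10.5072/FK2/EXAMPLE", // dataset to add files to
                    "-recurse", // optional: descend into subdirectories
                    "-maxlockwait=120", // optional: wait up to 120 seconds for ingest locks
                    "mydata" // directory containing the files to upload
            });
        }
    }

The same flags work from a shell via java -jar DVUploader-1.0.1.jar, as the usage() text shows.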
From 5cc0312d10d8aea8331c92e510d663b0ffabe6dd Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Wed, 8 Jan 2020 17:07:29 -0500
Subject: [PATCH 2/2] Add trustall certs flag and do cleanup

---
 .../sead/uploader/dataverse/DVUploader.java | 61 ++++++++++---------
 1 file changed, 32 insertions(+), 29 deletions(-)

diff --git a/src/main/java/org/sead/uploader/dataverse/DVUploader.java b/src/main/java/org/sead/uploader/dataverse/DVUploader.java
index ce640ba..7e423e3 100644
--- a/src/main/java/org/sead/uploader/dataverse/DVUploader.java
+++ b/src/main/java/org/sead/uploader/dataverse/DVUploader.java
@@ -17,7 +17,6 @@
 import java.io.IOException;
 import java.io.InputStream;
-import java.net.URL;
 import java.security.DigestInputStream;
 import java.security.KeyManagementException;
 import java.security.KeyStoreException;
@@ -70,6 +69,7 @@ public class DVUploader extends AbstractUploader {
     private static int maxWaitTime = 60;
     private static boolean recurse = false;
     private static boolean directUpload = false;
+    private static boolean trustCerts = false;
 
     public static void main(String args[]) throws Exception {
 
@@ -114,6 +114,7 @@ private static void usage() {
         println(" -skip=<n> - a number of files to skip before starting processing (saves time when you know the first n files have been uploaded before)");
         println(" -recurse - recurse into subdirectories");
         println(" -maxlockwait - the maximum time to wait (in seconds) for a Dataset lock (i.e. while the last file is ingested) to expire (default 60 seconds)");
+        println(" -trustall - trust all server certificates (i.e. for use when testing with self-signed certificates)");
         println("");
 
     }
@@ -137,6 +138,10 @@ public boolean parseCustomArg(String arg) {
             directUpload = true;
             println("Will use direct upload of files (if configured on this server)");
             return true;
+        } else if (arg.equals("-trustall")) {
+            trustCerts = true;
+            println("Will trust all certificates");
+            return true;
         } else if (arg.startsWith("-maxlockwait")) {
             try {
                 maxWaitTime = Integer.parseInt(arg.substring(arg.indexOf(argSeparator) + 1));
@@ -158,23 +163,30 @@ public CloseableHttpClient getSharedHttpClient() {
         if (httpclient == null) {
             // use the TrustSelfSignedStrategy to allow Self Signed Certificates
             SSLContext sslContext;
+            SSLConnectionSocketFactory connectionFactory;
             try {
-                sslContext = SSLContextBuilder
-                        .create()
-                        .loadTrustMaterial(new TrustAllStrategy())
-                        .build();
-
-                // create an SSL Socket Factory to use the SSLContext with the trust self signed certificate strategy
-                // and allow all hosts verifier.
-                SSLConnectionSocketFactory connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE);
-
-                // finally create the HttpClient using HttpClient factory methods and assign the ssl socket factory
-                httpclient = HttpClients
-                        .custom()
-                        .setSSLSocketFactory(connectionFactory)
-                        .setUserAgent("curl/7.61.1")
-                        .setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build())
-                        .build();
+                if (trustCerts) {
+                    sslContext = SSLContextBuilder
+                            .create()
+                            .loadTrustMaterial(new TrustAllStrategy())
+                            .build();
+                    // create an SSL Socket Factory to use the SSLContext with the trust self signed certificate strategy
+                    // and allow all hosts verifier.
+                    connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE);
+                    // finally create the HttpClient using HttpClient factory methods and assign the ssl socket factory
+                    httpclient = HttpClients
+                            .custom()
+                            .setSSLSocketFactory(connectionFactory)
+                            .setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build())
+                            .build();
+                } else {
+                    httpclient = HttpClients
+                            .custom()
+                            .setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build())
+                            .build();
+
+                }
+
             } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) {
                 Logger.getLogger(DVUploader.class.getName()).log(Level.SEVERE, null, ex);
             }
@@ -398,32 +410,23 @@ protected String uploadDatafile(Resource file, String path) {
                     JSONObject data = (new JSONObject(jsonResponse)).getJSONObject("data");
                     uploadUrl = data.getString("url");
                     String storageIdentifier = data.getString("storageIdentifier");
-                    println("Put to: " + uploadUrl);
-                    println("storageId: " + storageIdentifier);
                     HttpPut httpput = new HttpPut(uploadUrl);
 
                     MessageDigest messageDigest = MessageDigest.getInstance("MD5");
-                    println(file.getAbsolutePath() + " " + file.length());
                     try (InputStream inStream = file.getInputStream();
                             DigestInputStream digestInputStream = new DigestInputStream(inStream, messageDigest)) {
-                        println("Set S3 entity");
                         httpput.setEntity(new BufferedHttpEntity(new InputStreamEntity(digestInputStream, file.length())));
-                        println("Calling S3");
                         CloseableHttpResponse putResponse = httpclient.execute(httpput);
                         try {
                             int putStatus = putResponse.getStatusLine().getStatusCode();
-                            println("Status " + putStatus);
                             String putRes = null;
                             HttpEntity putEntity = putResponse.getEntity();
                             if (putEntity != null) {
                                 putRes = EntityUtils.toString(putEntity);
-                                println(putRes);
                             }
                             if (putStatus == 200) {
-                                println("S3 Success");
                                 String localchecksum = Hex.encodeHexString(digestInputStream.getMessageDigest().digest());
-                                println("Checksum: " + localchecksum);
                                 // Now post data
                                 urlString = server + "/api/datasets/:persistentId/add";
                                 urlString = urlString + "?persistentId=" + datasetPID + "&key=" + apiKey;
@@ -511,7 +514,7 @@ protected String uploadDatafile(Resource file, String path) {
 
                 } catch (IOException e) {
                     e.printStackTrace(System.out);
-                    println("Error processing 1" + file.getAbsolutePath() + " : " + e.getMessage());
+                    println("Error processing POST to Dataverse" + file.getAbsolutePath() + " : " + e.getMessage());
                     retry = 0;
                 }
             }
@@ -525,7 +528,7 @@ protected String uploadDatafile(Resource file, String path) {
 
             } catch (IOException e) {
                 e.printStackTrace(System.out);
-                println("Error processing 2 " + file.getAbsolutePath() + " : " + e.getMessage());
+                println("Error processing file upload " + file.getAbsolutePath() + " : " + e.getMessage());
                 retry = 0;
             } catch (NoSuchAlgorithmException e1) {
                 // TODO Auto-generated catch block
@@ -533,7 +536,7 @@
             }
 
         } catch (IOException e) {
-            println("Error processing 3" + file.getAbsolutePath() + " : " + e.getMessage());
+            println("Error processing request for storage id" + file.getAbsolutePath() + " : " + e.getMessage());
             retry = 0;
         }
     }
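A note on the direct-upload path trimmed by this patch: even with the debug printlns removed, the code still streams each file through a DigestInputStream while PUTting the bytes to storage, so the MD5 checksum is computed in the same single pass and then reported to Dataverse via /api/datasets/:persistentId/add. The following self-contained sketch shows just that hashing pattern; the command-line file argument and the 8 KB buffer are illustrative assumptions, not code from DVUploader:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;
    import org.apache.commons.codec.binary.Hex;

    public class StreamingChecksum {
        public static void main(String[] args) throws Exception {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            // Wrap the file stream so every byte read also updates the digest;
            // the data is traversed exactly once, as in the uploader's S3 PUT.
            try (InputStream in = Files.newInputStream(Paths.get(args[0]));
                    DigestInputStream din = new DigestInputStream(in, md5)) {
                byte[] buf = new byte[8192];
                while (din.read(buf) != -1) {
                    // a real consumer (e.g. the HTTP request entity) would use these bytes
                }
            }
            // Hex-encode the digest, as the uploader does with Hex.encodeHexString(...)
            System.out.println("MD5: " + Hex.encodeHexString(md5.digest()));
        }
    }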