diff --git a/README.md b/README.md
index 4b6f05262..1febb0aa9 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,7 @@ The SDK can also be included directly into a Maven or Gradle build. There is als
com.spectralogic.ds3
ds3-sdk
- 3.2.7
+ 3.2.8
...
@@ -64,8 +64,8 @@ repositories {
dependencies {
...
- compile 'com.spectralogic.ds3:ds3-sdk:3.2.7'
- // compile 'com.spectralogic.ds3:ds3-sdk:3.2.7:all'
+ compile 'com.spectralogic.ds3:ds3-sdk:3.2.8'
+ // compile 'com.spectralogic.ds3:ds3-sdk:3.2.8:all'
...
}
diff --git a/build.gradle b/build.gradle
index 3076c86d1..0a5686168 100644
--- a/build.gradle
+++ b/build.gradle
@@ -15,7 +15,7 @@
allprojects {
group = 'com.spectralogic.ds3'
- version = '3.2.7'
+ version = '3.2.8'
}
subprojects {
diff --git a/ds3-sdk-integration/src/test/java/com/spectralogic/ds3client/integration/Regression_Test.java b/ds3-sdk-integration/src/test/java/com/spectralogic/ds3client/integration/Regression_Test.java
index 91ef01980..6c978c354 100644
--- a/ds3-sdk-integration/src/test/java/com/spectralogic/ds3client/integration/Regression_Test.java
+++ b/ds3-sdk-integration/src/test/java/com/spectralogic/ds3client/integration/Regression_Test.java
@@ -18,16 +18,20 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.spectralogic.ds3client.Ds3Client;
-import com.spectralogic.ds3client.commands.spectrads3.CancelJobSpectraS3Request;
-import com.spectralogic.ds3client.commands.spectrads3.CancelJobSpectraS3Response;
-import com.spectralogic.ds3client.commands.spectrads3.GetJobChunksReadyForClientProcessingSpectraS3Request;
+import com.spectralogic.ds3client.commands.PutObjectRequest;
+import com.spectralogic.ds3client.commands.PutObjectResponse;
+import com.spectralogic.ds3client.commands.spectrads3.*;
import com.spectralogic.ds3client.helpers.Ds3ClientHelpers;
+import com.spectralogic.ds3client.helpers.JobRecoveryException;
import com.spectralogic.ds3client.integration.test.helpers.TempStorageIds;
import com.spectralogic.ds3client.integration.test.helpers.TempStorageUtil;
+import com.spectralogic.ds3client.models.BulkObject;
import com.spectralogic.ds3client.models.ChecksumType;
import com.spectralogic.ds3client.models.Contents;
+import com.spectralogic.ds3client.models.Objects;
import com.spectralogic.ds3client.models.bulk.Ds3Object;
import com.spectralogic.ds3client.networking.FailedRequestException;
+import com.spectralogic.ds3client.utils.ResourceUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -35,13 +39,18 @@
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.net.URISyntaxException;
import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.notNullValue;
+import static com.spectralogic.ds3client.integration.Util.RESOURCE_BASE_NAME;
+import static com.spectralogic.ds3client.integration.Util.deleteAllContents;
+import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
public class Regression_Test {
@@ -95,7 +104,7 @@ public void testMarkerWithSpaces() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
- Util.deleteAllContents(client, bucketName);
+ deleteAllContents(client, bucketName);
}
}
@@ -131,7 +140,7 @@ public void testPrefixWithSpaces() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
- Util.deleteAllContents(client, bucketName);
+ deleteAllContents(client, bucketName);
}
}
@@ -180,7 +189,7 @@ public void testPrefixForDirectoriesWithSpaces() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
- Util.deleteAllContents(client, bucketName);
+ deleteAllContents(client, bucketName);
}
}
@@ -228,7 +237,7 @@ public void testPrefixForNestedDirectories() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
- Util.deleteAllContents(client, bucketName);
+ deleteAllContents(client, bucketName);
}
}
@@ -261,8 +270,56 @@ public SeekableByteChannel buildChannel(final String key) throws IOException {
}
});
} finally {
- Util.deleteAllContents(client, bucketName);
+ deleteAllContents(client, bucketName);
}
+ }
+
+ @Test
+ public void testRecoverWriteJobWithHelper() throws IOException, JobRecoveryException, URISyntaxException {
+ final String bucketName = "test_recover_write_job_bucket";
+ final String book1 = "beowulf.txt";
+ final String book2 = "ulysses.txt";
+
+ try {
+ HELPERS.ensureBucketExists(bucketName, envDataPolicyId);
+ final Path objPath1 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book1);
+ final Path objPath2 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book2);
+ final Ds3Object obj1 = new Ds3Object(book1, Files.size(objPath1));
+ final Ds3Object obj2 = new Ds3Object(book2, Files.size(objPath2));
+
+ final Ds3ClientHelpers.Job job = Ds3ClientHelpers.wrap(client).startWriteJob(bucketName, Lists.newArrayList(obj1, obj2));
+
+ final PutObjectResponse putResponse1 = client.putObject(new PutObjectRequest(
+ job.getBucketName(),
+ book1,
+ new ResourceObjectPutter(RESOURCE_BASE_NAME).buildChannel(book1),
+ job.getJobId().toString(),
+ 0,
+ Files.size(objPath1)));
+ assertThat(putResponse1, is(notNullValue()));
+ assertThat(putResponse1.getStatusCode(), is(equalTo(200)));
+
+ // Interruption...
+ final Ds3ClientHelpers.Job recoverJob = HELPERS.recoverWriteJob(job.getJobId());
+
+ recoverJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
+ @Override
+ public SeekableByteChannel buildChannel(final String key) throws IOException {
+ return Files.newByteChannel(objPath2, StandardOpenOption.READ);
+ }
+ });
+
+ final GetJobSpectraS3Response finishedJob = client.getJobSpectraS3(new GetJobSpectraS3Request(job.getJobId()));
+
+ for (final Objects objects : finishedJob.getMasterObjectListResult().getObjects()) {
+ for (final BulkObject bulkObject : objects.getObjects()) {
+ assertTrue(bulkObject.getInCache());
+ }
+ }
+
+ } finally {
+ deleteAllContents(client, bucketName);
+ }
}
}
diff --git a/ds3-sdk/src/main/java/com/spectralogic/ds3client/helpers/strategy/PutStreamerStrategy.java b/ds3-sdk/src/main/java/com/spectralogic/ds3client/helpers/strategy/PutStreamerStrategy.java
index a47587c0a..835dad484 100644
--- a/ds3-sdk/src/main/java/com/spectralogic/ds3client/helpers/strategy/PutStreamerStrategy.java
+++ b/ds3-sdk/src/main/java/com/spectralogic/ds3client/helpers/strategy/PutStreamerStrategy.java
@@ -16,6 +16,7 @@
package com.spectralogic.ds3client.helpers.strategy;
import com.google.common.base.Function;
+import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.spectralogic.ds3client.Ds3Client;
@@ -60,7 +61,12 @@ public Iterable getWork() throws IOException, InterruptedException {
final Objects nextChunk = allocateChunk(filteredChunkIterator.next());
LOG.debug("Allocating chunk: {}", nextChunk.getChunkId().toString());
- return FluentIterable.from(nextChunk.getObjects()).transform(new Function() {
+ return FluentIterable.from(nextChunk.getObjects()).filter(new Predicate() {
+ @Override
+ public boolean apply(@Nullable final BulkObject input) {
+ return !input.getInCache();
+ }
+ }).transform(new Function() {
@Nullable
@Override
public JobPart apply(@Nullable final BulkObject input) {