Skip to content

Commit

Permalink
Merge pull request #376 from rpmoore/in_cache_fix
Browse files Browse the repository at this point in the history
Adding a regression test and fix to validate that items already in cache are not transferred again
  • Loading branch information
RachelTucker authored Nov 2, 2016
2 parents 375c18d + 8e29ad4 commit db010d1
Show file tree
Hide file tree
Showing 4 changed files with 78 additions and 15 deletions.
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ The SDK can also be included directly into a Maven or Gradle build. There is als
<dependency>
<groupId>com.spectralogic.ds3</groupId>
<artifactId>ds3-sdk</artifactId>
<version>3.2.7</version>
<version>3.2.8</version>
<!-- <classifier>all</classifier> -->
</dependency>
...
Expand All @@ -64,8 +64,8 @@ repositories {
dependencies {
...
compile 'com.spectralogic.ds3:ds3-sdk:3.2.7'
// compile 'com.spectralogic.ds3:ds3-sdk:3.2.7:all'
compile 'com.spectralogic.ds3:ds3-sdk:3.2.8'
// compile 'com.spectralogic.ds3:ds3-sdk:3.2.8:all'
...
}
Expand Down
2 changes: 1 addition & 1 deletion build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

allprojects {
group = 'com.spectralogic.ds3'
version = '3.2.7'
version = '3.2.8'
}

subprojects {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,30 +18,39 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.spectralogic.ds3client.Ds3Client;
import com.spectralogic.ds3client.commands.spectrads3.CancelJobSpectraS3Request;
import com.spectralogic.ds3client.commands.spectrads3.CancelJobSpectraS3Response;
import com.spectralogic.ds3client.commands.spectrads3.GetJobChunksReadyForClientProcessingSpectraS3Request;
import com.spectralogic.ds3client.commands.PutObjectRequest;
import com.spectralogic.ds3client.commands.PutObjectResponse;
import com.spectralogic.ds3client.commands.spectrads3.*;
import com.spectralogic.ds3client.helpers.Ds3ClientHelpers;
import com.spectralogic.ds3client.helpers.JobRecoveryException;
import com.spectralogic.ds3client.integration.test.helpers.TempStorageIds;
import com.spectralogic.ds3client.integration.test.helpers.TempStorageUtil;
import com.spectralogic.ds3client.models.BulkObject;
import com.spectralogic.ds3client.models.ChecksumType;
import com.spectralogic.ds3client.models.Contents;
import com.spectralogic.ds3client.models.Objects;
import com.spectralogic.ds3client.models.bulk.Ds3Object;
import com.spectralogic.ds3client.networking.FailedRequestException;
import com.spectralogic.ds3client.utils.ResourceUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.List;
import java.util.UUID;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static com.spectralogic.ds3client.integration.Util.RESOURCE_BASE_NAME;
import static com.spectralogic.ds3client.integration.Util.deleteAllContents;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

public class Regression_Test {
Expand Down Expand Up @@ -95,7 +104,7 @@ public void testMarkerWithSpaces() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
Util.deleteAllContents(client, bucketName);
deleteAllContents(client, bucketName);
}
}

Expand Down Expand Up @@ -131,7 +140,7 @@ public void testPrefixWithSpaces() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
Util.deleteAllContents(client, bucketName);
deleteAllContents(client, bucketName);
}
}

Expand Down Expand Up @@ -180,7 +189,7 @@ public void testPrefixForDirectoriesWithSpaces() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
Util.deleteAllContents(client, bucketName);
deleteAllContents(client, bucketName);
}
}

Expand Down Expand Up @@ -228,7 +237,7 @@ public void testPrefixForNestedDirectories() throws IOException {
.cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
assertEquals(204, cancelJobResponse.getStatusCode());
} finally {
Util.deleteAllContents(client, bucketName);
deleteAllContents(client, bucketName);
}
}

Expand Down Expand Up @@ -261,8 +270,56 @@ public SeekableByteChannel buildChannel(final String key) throws IOException {
}
});
} finally {
Util.deleteAllContents(client, bucketName);
deleteAllContents(client, bucketName);
}
}

@Test
public void testRecoverWriteJobWithHelper() throws IOException, JobRecoveryException, URISyntaxException {
    final String bucketName = "test_recover_write_job_bucket";
    final String beowulf = "beowulf.txt";
    final String ulysses = "ulysses.txt";

    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        // Resolve the two fixture files and describe them as objects for the bulk job.
        final Path beowulfPath = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + beowulf);
        final Path ulyssesPath = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + ulysses);
        final Ds3Object beowulfObj = new Ds3Object(beowulf, Files.size(beowulfPath));
        final Ds3Object ulyssesObj = new Ds3Object(ulysses, Files.size(ulyssesPath));

        final Ds3ClientHelpers.Job job =
                Ds3ClientHelpers.wrap(client).startWriteJob(bucketName, Lists.newArrayList(beowulfObj, ulyssesObj));

        // Upload only the first object directly, leaving the job half finished.
        final PutObjectResponse putResponse = client.putObject(new PutObjectRequest(
                job.getBucketName(),
                beowulf,
                new ResourceObjectPutter(RESOURCE_BASE_NAME).buildChannel(beowulf),
                job.getJobId().toString(),
                0,
                Files.size(beowulfPath)));
        assertThat(putResponse, is(notNullValue()));
        assertThat(putResponse.getStatusCode(), is(equalTo(200)));

        // Simulated interruption: recover the write job and transfer the remaining object.
        final Ds3ClientHelpers.Job recoveredJob = HELPERS.recoverWriteJob(job.getJobId());
        recoveredJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(ulyssesPath, StandardOpenOption.READ);
            }
        });

        // After recovery completes, every object in the job should report as in cache
        // (regression check for the inCache filtering fix).
        final GetJobSpectraS3Response finishedJob =
                client.getJobSpectraS3(new GetJobSpectraS3Request(job.getJobId()));
        for (final Objects chunk : finishedJob.getMasterObjectListResult().getObjects()) {
            for (final BulkObject bulkObject : chunk.getObjects()) {
                assertTrue(bulkObject.getInCache());
            }
        }
    } finally {
        deleteAllContents(client, bucketName);
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
package com.spectralogic.ds3client.helpers.strategy;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.spectralogic.ds3client.Ds3Client;
Expand Down Expand Up @@ -60,7 +61,12 @@ public Iterable<JobPart> getWork() throws IOException, InterruptedException {
final Objects nextChunk = allocateChunk(filteredChunkIterator.next());

LOG.debug("Allocating chunk: {}", nextChunk.getChunkId().toString());
return FluentIterable.from(nextChunk.getObjects()).transform(new Function<BulkObject, JobPart>() {
return FluentIterable.from(nextChunk.getObjects()).filter(new Predicate<BulkObject>() {
@Override
public boolean apply(@Nullable final BulkObject input) {
return !input.getInCache();
}
}).transform(new Function<BulkObject, JobPart>() {
@Nullable
@Override
public JobPart apply(@Nullable final BulkObject input) {
Expand Down

0 comments on commit db010d1

Please sign in to comment.