Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Stop using SDK v1 #338

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions examples/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,18 @@

<dependencies>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-kinesis</artifactId>
<groupId>software.amazon.kinesis</groupId>
<artifactId>amazon-kinesis-client</artifactId>
<!-- Because KCL 2.2.9 works on a different protobuf version (2.6.1) than KPL's protobuf (3.x),-->
<!-- we exclude it so that KPL's protobuf version is honored for testing and parsing messages.-->
<!-- This means we are not yet utilizing the integration of the GSR Library with KCL. -->
<exclusions>
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
</exclusions>
Comment on lines +66 to +74
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,26 +14,24 @@
*/
package com.amazonaws.services.schemaregistry.examples.kds;

import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
import com.amazonaws.services.kinesis.model.DescribeStreamRequest;
import com.amazonaws.services.kinesis.model.DescribeStreamResult;
import com.amazonaws.services.kinesis.model.GetRecordsRequest;
import com.amazonaws.services.kinesis.model.GetRecordsResult;
import com.amazonaws.services.kinesis.model.GetShardIteratorRequest;
import com.amazonaws.services.kinesis.model.GetShardIteratorResult;
import com.amazonaws.services.kinesis.model.PutRecordsRequest;
import com.amazonaws.services.kinesis.model.PutRecordsRequestEntry;
import com.amazonaws.services.kinesis.model.PutRecordsResult;
import com.amazonaws.services.kinesis.model.Record;
import com.amazonaws.services.kinesis.model.Shard;
import com.amazonaws.services.schemaregistry.common.Schema;
import com.amazonaws.services.schemaregistry.common.configs.GlueSchemaRegistryConfiguration;
import com.amazonaws.services.schemaregistry.deserializers.GlueSchemaRegistryDeserializer;
import com.amazonaws.services.schemaregistry.deserializers.GlueSchemaRegistryDeserializerImpl;
import com.amazonaws.services.schemaregistry.serializers.GlueSchemaRegistrySerializer;
import com.amazonaws.services.schemaregistry.serializers.GlueSchemaRegistrySerializerImpl;
import com.amazonaws.services.schemaregistry.utils.AVROUtils;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
Expand All @@ -49,19 +47,22 @@
import org.joda.time.DateTime;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.glue.model.DataFormat;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import software.amazon.awssdk.services.kinesis.KinesisClient;
import software.amazon.awssdk.services.kinesis.model.DescribeStreamRequest;
import software.amazon.awssdk.services.kinesis.model.DescribeStreamResponse;
import software.amazon.awssdk.services.kinesis.model.GetRecordsRequest;
import software.amazon.awssdk.services.kinesis.model.GetRecordsResponse;
import software.amazon.awssdk.services.kinesis.model.GetShardIteratorRequest;
import software.amazon.awssdk.services.kinesis.model.GetShardIteratorResponse;
import software.amazon.awssdk.services.kinesis.model.PutRecordsRequest;
import software.amazon.awssdk.services.kinesis.model.PutRecordsRequestEntry;
import software.amazon.awssdk.services.kinesis.model.PutRecordsResponse;
import software.amazon.awssdk.services.kinesis.model.Record;
import software.amazon.awssdk.services.kinesis.model.Shard;
import software.amazon.awssdk.services.kinesis.model.ShardIteratorType;

/**
* This is an example of how to use Glue Schema Registry (GSR) with Kinesis Data Streams Get / Put Record APIs.
Expand All @@ -70,7 +71,7 @@
*/
public class PutRecordGetRecordExample {
private static final String AVRO_USER_SCHEMA_FILE = "src/main/resources/user.avsc";
private static AmazonKinesis kinesisClient;
private static KinesisClient kinesisClient;
private static final Logger LOGGER = Logger.getLogger(PutRecordGetRecordExample.class.getSimpleName());
private static AwsCredentialsProvider awsCredentialsProvider =
DefaultCredentialsProvider
Expand All @@ -97,7 +98,7 @@ public static void main(final String[] args) throws Exception {
int numOfRecords = Integer.parseInt(cmd.getOptionValue("numRecords", "10"));

//Kinesis data streams client initialization.
kinesisClient = AmazonKinesisClientBuilder.standard().withRegion(regionName).build();
kinesisClient = KinesisClient.builder().region(Region.of(regionName)).build();

//Glue Schema Registry serializer initialization for the producer.
glueSchemaRegistrySerializer =
Expand Down Expand Up @@ -129,65 +130,68 @@ public static void main(final String[] args) throws Exception {
private static void getRecordsWithSchema(String streamName, Date timestamp) throws IOException {
//Standard Kinesis code to getRecords from a Kinesis Data Stream.
String shardIterator;
DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest();
describeStreamRequest.setStreamName(streamName);
DescribeStreamRequest describeStreamRequest = DescribeStreamRequest.builder()
.streamName(streamName)
.build();
List<Shard> shards = new ArrayList<>();

DescribeStreamResult streamRes;
DescribeStreamResponse streamRes;
do {
streamRes = kinesisClient.describeStream(describeStreamRequest);
shards.addAll(streamRes.getStreamDescription().getShards());
shards.addAll(streamRes.streamDescription().shards());

if (shards.size() > 0) {
shards.get(shards.size() - 1).getShardId();
shards.get(shards.size() - 1).shardId();
}
Comment on lines 143 to 145
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These don't seem to do anything. I kept them because they were this way originally.

} while (streamRes.getStreamDescription().getHasMoreShards());
} while (streamRes.streamDescription().hasMoreShards());

GetShardIteratorRequest itReq = new GetShardIteratorRequest();
itReq.setStreamName(streamName);
itReq.setShardId(shards.get(0).getShardId());
itReq.setTimestamp(timestamp);
itReq.setShardIteratorType("AT_TIMESTAMP");
GetShardIteratorRequest itReq = GetShardIteratorRequest.builder()
.streamName(streamName)
.shardId(shards.get(0).shardId())
.timestamp(timestamp.toInstant())
.shardIteratorType(ShardIteratorType.AT_TIMESTAMP)
.build();

GetShardIteratorResult shardIteratorResult = kinesisClient.getShardIterator(itReq);
shardIterator = shardIteratorResult.getShardIterator();
GetShardIteratorResponse shardIteratorResult = kinesisClient.getShardIterator(itReq);
shardIterator = shardIteratorResult.shardIterator();

// Create new GetRecordsRequest with existing shardIterator.
GetRecordsRequest recordsRequest = new GetRecordsRequest();
recordsRequest.setShardIterator(shardIterator);
recordsRequest.setLimit(1000);
GetRecordsRequest recordsRequest = GetRecordsRequest.builder()
.shardIterator(shardIterator)
.limit(1000)
.build();

GetRecordsResult result = kinesisClient.getRecords(recordsRequest);
GetRecordsResponse result = kinesisClient.getRecords(recordsRequest);

for (Record record : result.getRecords()) {
ByteBuffer recordAsByteBuffer = record.getData();
for (Record record : result.records()) {
ByteBuffer recordAsByteBuffer = record.data().asByteBuffer();
GenericRecord decodedRecord = decodeRecord(recordAsByteBuffer);
LOGGER.info("Decoded Record: " + decodedRecord);
}
}

private static void putRecordsWithSchema(String streamName, int numOfRecords, Schema gsrSchema, Date timestamp) {
//Standard Kinesis code to putRecords into a Kinesis Data Stream.
PutRecordsRequest putRecordsRequest = new PutRecordsRequest();
putRecordsRequest.setStreamName(streamName);
PutRecordsRequest.Builder putRecordsRequest = PutRecordsRequest.builder();
putRecordsRequest.streamName(streamName);

List<PutRecordsRequestEntry> recordsRequestEntries = new ArrayList<>();

LOGGER.info("Putting " + numOfRecords + " into " + streamName + " with schema" + gsrSchema);
for (int i = 0; i < numOfRecords; i++) {
GenericRecord record = (GenericRecord) getTestRecord(i);
byte[] recordWithSchema = encodeRecord(record, streamName, gsrSchema);
PutRecordsRequestEntry entry = new PutRecordsRequestEntry();
entry.setData(ByteBuffer.wrap(recordWithSchema));
entry.setPartitionKey(String.valueOf(timestamp.toInstant()
PutRecordsRequestEntry.Builder entry = PutRecordsRequestEntry.builder();
entry.data(SdkBytes.fromByteBuffer(ByteBuffer.wrap(recordWithSchema)));
entry.partitionKey(String.valueOf(timestamp.toInstant()
.toEpochMilli()));

recordsRequestEntries.add(entry);
recordsRequestEntries.add(entry.build());
}

putRecordsRequest.setRecords(recordsRequestEntries);
putRecordsRequest.records(recordsRequestEntries);

PutRecordsResult putRecordResult = kinesisClient.putRecords(putRecordsRequest);
PutRecordsResponse putRecordResult = kinesisClient.putRecords(putRecordsRequest.build());

LOGGER.info("Successfully put records: " + putRecordResult);
}
Expand Down
6 changes: 0 additions & 6 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,6 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<glue.schema.registry.groupId>software.amazon.glue</glue.schema.registry.groupId>
<aws.sdk.v2.version>2.22.12</aws.sdk.v2.version>
<aws.sdk.v1.version>1.12.660</aws.sdk.v1.version>
<kafka.scala.version>2.12</kafka.scala.version>
<kafka.version>3.6.1</kafka.version>
<avro.version>1.11.3</avro.version>
Expand Down Expand Up @@ -252,11 +251,6 @@
<artifactId>everit-json-schema</artifactId>
<version>${everit.json.schema.version}</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-kinesis</artifactId>
<version>${aws.sdk.v1.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-cbor</artifactId>
Expand Down
5 changes: 0 additions & 5 deletions serializer-deserializer/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -60,11 +60,6 @@
</scm>

<dependencies>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-sts</artifactId>
<version>${aws.sdk.v1.version}</version>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>sts</artifactId>
Expand Down