SNOW-926149 Fix issues while using snowflake-jdbc-fips
sfc-gh-lsembera committed Oct 6, 2023
1 parent 02d06c9 commit bc4c02f
Showing 21 changed files with 623 additions and 35 deletions.
23 changes: 23 additions & 0 deletions .github/workflows/End2EndTest.yml
@@ -67,3 +67,26 @@ jobs:
      - name: Unit & Integration Test (Windows)
        continue-on-error: false
        run: mvn -DghActionsIT verify --batch-mode
  build-e2e-jar-test:
    name: E2E JAR Test - ${{ matrix.java }}, Cloud ${{ matrix.snowflake_cloud }}
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        java: [ 8 ]
        snowflake_cloud: [ 'AWS' ]
    steps:
      - name: Checkout Code
        uses: actions/checkout@v2
      - name: Install Java ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          distribution: temurin
          java-version: ${{ matrix.java }}
          cache: maven
      - name: Decrypt profile.json for Cloud ${{ matrix.snowflake_cloud }}
        env:
          DECRYPTION_PASSPHRASE: ${{ secrets.PROFILE_JSON_DECRYPT_PASSPHRASE }}
        run: ./scripts/decrypt_secret.sh ${{ matrix.snowflake_cloud }}
      - name: Run E2E JAR Test
        run: ./e2e-jar-test/run_e2e_jar_test.sh
19 changes: 19 additions & 0 deletions README.md
@@ -65,6 +65,25 @@ dependencies {
}
```

## Jar Versions

The Snowflake Ingest SDK provides shaded and unshaded versions of its jar. The shaded version bundles its dependencies into the jar itself,
whereas the unshaded version declares its dependencies in `pom.xml`, and they are fetched as standard transitive dependencies by build systems like Maven or Gradle.
The shaded jar can help avoid dependency conflicts, whereas the unshaded jar provides finer-grained control over transitive dependencies.

## Using with snowflake-jdbc-fips

For use cases that need `snowflake-jdbc-fips` instead of the default `snowflake-jdbc`, we recommend the following steps (a minimal Maven sketch follows the list):

- Use the unshaded version of the Ingest SDK.
- Exclude these transitive dependencies:
- `net.snowflake:snowflake-jdbc`
- `org.bouncycastle:bcpkix-jdk18on`
- `org.bouncycastle:bcprov-jdk18on`
- Add a dependency on `snowflake-jdbc-fips`.
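
The sketch below illustrates these steps, assuming the unshaded SDK artifact; the `${ingest.sdk.version}` and `${jdbc.fips.version}` properties are placeholders for versions appropriate to your project:

```xml
<dependency>
    <groupId>net.snowflake</groupId>
    <artifactId>snowflake-ingest-sdk</artifactId>
    <version>${ingest.sdk.version}</version>
    <exclusions>
        <exclusion>
            <groupId>net.snowflake</groupId>
            <artifactId>snowflake-jdbc</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.bouncycastle</groupId>
            <artifactId>bcpkix-jdk18on</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.bouncycastle</groupId>
            <artifactId>bcprov-jdk18on</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<!-- FIPS-compliant driver used instead of the excluded default -->
<dependency>
    <groupId>net.snowflake</groupId>
    <artifactId>snowflake-jdbc-fips</artifactId>
    <version>${jdbc.fips.version}</version>
</dependency>
```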

See [this test](https://github.com/snowflakedb/snowflake-ingest-java/tree/master/e2e-jar-test/fips) for an example of how to use the Snowflake Ingest SDK together with the Snowflake FIPS JDBC driver.

# Example

## Snowpipe
38 changes: 38 additions & 0 deletions e2e-jar-test/core/pom.xml
@@ -0,0 +1,38 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>net.snowflake.snowflake-ingest-java-e2e-jar-test</groupId>
        <artifactId>parent</artifactId>
        <version>1.0-SNAPSHOT</version>
    </parent>

    <artifactId>core</artifactId>
    <name>core</name>
    <packaging>jar</packaging>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <dependencies>
        <!-- Provided scope, because submodules define how they pull in the SDK (e.g. with or without snowflake-jdbc-fips) -->
        <dependency>
            <groupId>net.snowflake</groupId>
            <artifactId>snowflake-ingest-sdk</artifactId>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
        </dependency>

        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
    </dependencies>
</project>
169 changes: 169 additions & 0 deletions e2e-jar-test/core/src/main/java/net/snowflake/IngestTestUtils.java
@@ -0,0 +1,169 @@
package net.snowflake;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Base64;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.UUID;

import net.snowflake.ingest.streaming.OpenChannelRequest;
import net.snowflake.ingest.streaming.SnowflakeStreamingIngestChannel;
import net.snowflake.ingest.streaming.SnowflakeStreamingIngestClient;
import net.snowflake.ingest.streaming.SnowflakeStreamingIngestClientFactory;

public class IngestTestUtils {
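  // profile.json is parsed as a flat JSON object of string fields. The two fields read
  // explicitly below are "private_key" (a base64-encoded PKCS#8 key) and "connect_string"
  // (the JDBC connection URL); all remaining fields are passed through unchanged to the
  // streaming client and the JDBC driver.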
  private static final String PROFILE_PATH = "profile.json";

  private final Connection connection;

  private final String database;
  private final String schema;
  private final String table;

  private final String testId;

  private final SnowflakeStreamingIngestClient client;

  private final SnowflakeStreamingIngestChannel channel;

  private final ObjectMapper objectMapper = new ObjectMapper();

  private final Random random = new Random();

  private final Base64.Decoder base64Decoder = Base64.getDecoder();

  public IngestTestUtils(String testName)
      throws SQLException,
          IOException,
          ClassNotFoundException,
          NoSuchAlgorithmException,
          InvalidKeySpecException {
    testId = String.format("%s_%s", testName, UUID.randomUUID().toString().replace("-", "_"));
    connection = getConnection();
    database = String.format("database_%s", testId);
    schema = String.format("schema_%s", testId);
    table = String.format("table_%s", testId);

    connection.createStatement().execute(String.format("create database %s", database));
    connection.createStatement().execute(String.format("create schema %s", schema));
    connection.createStatement().execute(String.format("create table %s (c1 int, c2 varchar, c3 binary)", table));

    client =
        SnowflakeStreamingIngestClientFactory.builder("TestClient01")
            .setProperties(loadProperties())
            .build();

    channel = client.openChannel(
        OpenChannelRequest.builder(String.format("channel_%s", this.testId))
            .setDBName(database)
            .setSchemaName(schema)
            .setTableName(table)
            .setOnErrorOption(OpenChannelRequest.OnErrorOption.CONTINUE)
            .build());
  }

  private Properties loadProperties() throws IOException {
    Properties props = new Properties();
    Iterator<Map.Entry<String, JsonNode>> propIt =
        objectMapper.readTree(new String(Files.readAllBytes(Paths.get(PROFILE_PATH)))).fields();
    while (propIt.hasNext()) {
      Map.Entry<String, JsonNode> prop = propIt.next();
      props.put(prop.getKey(), prop.getValue().asText());
    }
    return props;
  }

  private Connection getConnection()
      throws IOException, ClassNotFoundException, SQLException, NoSuchAlgorithmException,
          InvalidKeySpecException {
    Class.forName("net.snowflake.client.jdbc.SnowflakeDriver");

    Properties loadedProps = loadProperties();

    byte[] decoded = base64Decoder.decode(loadedProps.getProperty("private_key"));
    KeyFactory kf = KeyFactory.getInstance("RSA");

    PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(decoded);
    PrivateKey privateKey = kf.generatePrivate(keySpec);

    Properties props = new Properties();
    props.putAll(loadedProps);
    props.put("client_session_keep_alive", "true");
    props.put("privateKey", privateKey);

    return DriverManager.getConnection(loadedProps.getProperty("connect_string"), props);
  }

  private Map<String, Object> createRow() {
    Map<String, Object> row = new HashMap<>();

    byte[] bytes = new byte[1024];
    random.nextBytes(bytes);

    row.put("c1", random.nextInt());
    row.put("c2", String.valueOf(random.nextInt()));
    row.put("c3", bytes);

    return row;
  }

  /**
   * Given a channel and an expected offset, this method waits up to 60 seconds until the last
   * committed offset is equal to the passed offset.
   */
  private void waitForOffset(SnowflakeStreamingIngestChannel channel, String expectedOffset)
      throws InterruptedException {
    int counter = 0;
    String lastCommittedOffset = null;
    while (counter < 600) {
      String currentOffset = channel.getLatestCommittedOffsetToken();
      if (expectedOffset.equals(currentOffset)) {
        return;
      }
      System.out.printf("Waiting for offset expected=%s actual=%s%n", expectedOffset, currentOffset);
      lastCommittedOffset = currentOffset;
      counter++;
      Thread.sleep(100);
    }
    throw new RuntimeException(
        String.format(
            "Timeout exceeded while waiting for offset %s. Last committed offset: %s",
            expectedOffset, lastCommittedOffset));
  }

  public void test() throws InterruptedException {
    // Insert a few rows one by one
    for (int offset = 0; offset < 1000; offset++) {
      channel.insertRow(createRow(), String.valueOf(offset));
    }

    // Insert a batch of rows
    String offset = "final-offset";
    channel.insertRows(
        Arrays.asList(createRow(), createRow(), createRow(), createRow(), createRow()), offset);

    waitForOffset(channel, offset);
  }

  public void close() throws Exception {
    connection.close();
    channel.close().get();
    client.close();
  }
}
53 changes: 53 additions & 0 deletions e2e-jar-test/fips/pom.xml
@@ -0,0 +1,53 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>net.snowflake.snowflake-ingest-java-e2e-jar-test</groupId>
        <artifactId>parent</artifactId>
        <version>1.0-SNAPSHOT</version>
    </parent>

    <artifactId>fips</artifactId>
    <packaging>jar</packaging>
    <name>fips</name>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>net.snowflake.snowflake-ingest-java-e2e-jar-test</groupId>
            <artifactId>core</artifactId>
        </dependency>

        <dependency>
            <groupId>net.snowflake</groupId>
            <artifactId>snowflake-ingest-sdk</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>net.snowflake</groupId>
                    <artifactId>snowflake-jdbc</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.bouncycastle</groupId>
                    <artifactId>bcpkix-jdk18on</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.bouncycastle</groupId>
                    <artifactId>bcprov-jdk18on</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>net.snowflake</groupId>
            <artifactId>snowflake-jdbc-fips</artifactId>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
        </dependency>
    </dependencies>
</project>
31 changes: 31 additions & 0 deletions e2e-jar-test/fips/src/test/java/net/snowflake/FipsIngestE2ETest.java
@@ -0,0 +1,31 @@
package net.snowflake;

import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.security.Security;

public class FipsIngestE2ETest {

  private IngestTestUtils ingestTestUtils;

  @Before
  public void setUp() throws Exception {
    // Add the FIPS provider; the SDK does not do this by default
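    // As we read the BC-FJA configuration-string syntax: "HYBRID" selects the hybrid
    // DRBG entropy mode and "ENABLE{All}" enables all algorithm sets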
    Security.addProvider(new BouncyCastleFipsProvider("C:HYBRID;ENABLE{All};"));

    ingestTestUtils = new IngestTestUtils("fips_ingest");
  }

  @After
  public void tearDown() throws Exception {
    ingestTestUtils.close();
  }

  @Test
  public void name() throws InterruptedException {
    ingestTestUtils.test();
  }
}