diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 72c788c36..b83a77291 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,7 +1,3 @@
-**JDBC Driver 3.19.1**
-
-- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc
-
**JDBC Driver 3.19.0**
- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc
diff --git a/FIPS/pom.xml b/FIPS/pom.xml
index 503f75cbc..425d263df 100644
--- a/FIPS/pom.xml
+++ b/FIPS/pom.xml
@@ -5,29 +5,17 @@
net.snowflake
snowflake-jdbc-parent
- 3.19.1
+ 3.19.1-SNAPSHOT
../parent-pom.xml
snowflake-jdbc-fips
- 3.19.1
+ 3.19.1-SNAPSHOT
jar
snowflake-jdbc-fips
http://maven.apache.org
-
-
- Central
- Internal Central Repo2
- default
- https://artifactory.ci1.us-west-2.aws-dev.app.snowflake.com/artifactory/development-maven-virtual/
-
- false
-
-
-
-
3.3.9
@@ -608,15 +596,15 @@
-
+
-
+
-
+
diff --git a/FIPS/public_pom.xml b/FIPS/public_pom.xml
index d180e4a57..00bc9738c 100644
--- a/FIPS/public_pom.xml
+++ b/FIPS/public_pom.xml
@@ -32,8 +32,8 @@
- 1.0.2.4
- 1.0.5
+ 1.0.2.5
+ 1.0.7
5.13.0
diff --git a/dependencies/Readme.md b/dependencies/Readme.md
index 7b4a4c73c..5abaea2ae 100644
--- a/dependencies/Readme.md
+++ b/dependencies/Readme.md
@@ -1,2 +1,2 @@
-Arrow dependencies are built from internal branch `upgradeto17.0.0`. This build was applied the AIX fix.
+Arrow dependencies are built from internal branch `upgradeTo17.0.0-v2`. This build includes the AIX fix and uses the customer's logger instead of the slf4j logger.
diff --git a/dependencies/arrow-format-17.0.0.jar b/dependencies/arrow-format-17.0.0.jar
index 349272113..b4a34e86f 100644
Binary files a/dependencies/arrow-format-17.0.0.jar and b/dependencies/arrow-format-17.0.0.jar differ
diff --git a/dependencies/arrow-memory-core-17.0.0.jar b/dependencies/arrow-memory-core-17.0.0.jar
index a218df5db..12d9c9116 100644
Binary files a/dependencies/arrow-memory-core-17.0.0.jar and b/dependencies/arrow-memory-core-17.0.0.jar differ
diff --git a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar
index 2004a461e..72d374247 100644
Binary files a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar and b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar differ
diff --git a/dependencies/arrow-memory-unsafe-17.0.0.jar b/dependencies/arrow-memory-unsafe-17.0.0.jar
index b9897fe47..65aac2a7b 100644
Binary files a/dependencies/arrow-memory-unsafe-17.0.0.jar and b/dependencies/arrow-memory-unsafe-17.0.0.jar differ
diff --git a/dependencies/arrow-vector-17.0.0.jar b/dependencies/arrow-vector-17.0.0.jar
index 69ccfaf0d..29055d603 100644
Binary files a/dependencies/arrow-vector-17.0.0.jar and b/dependencies/arrow-vector-17.0.0.jar differ
diff --git a/linkage-checker-exclusion-rules.xml b/linkage-checker-exclusion-rules.xml
index 8bad89714..65affa44a 100644
--- a/linkage-checker-exclusion-rules.xml
+++ b/linkage-checker-exclusion-rules.xml
@@ -44,6 +44,16 @@
?
+
+
+
+ ?
+
+
+
+
+ ?
+
+ org.bouncycastle
+ bcutil-jdk18on
+ ${bouncycastle.version}
+
org.bouncycastle
diff --git a/pom.xml b/pom.xml
index 4bc83e067..2fd7f9216 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,13 +6,13 @@
net.snowflake
snowflake-jdbc-parent
- 3.19.1
+ 3.19.1-SNAPSHOT
./parent-pom.xml
${artifactId}
- 3.19.1
+ 3.19.1-SNAPSHOT
jar
${artifactId}
@@ -36,6 +36,10 @@
org.bouncycastle
bcprov-jdk18on
+
+ org.bouncycastle
+ bcutil-jdk18on
+
@@ -1031,6 +1035,7 @@
+
@@ -1105,7 +1110,7 @@
diff --git a/src/main/java/net/snowflake/client/core/CancellationReason.java b/src/main/java/net/snowflake/client/core/CancellationReason.java
new file mode 100644
index 000000000..e3ae4e308
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/CancellationReason.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.core;
+
+@SnowflakeJdbcInternalApi
+public enum CancellationReason {
+ UNKNOWN,
+ CLIENT_REQUESTED,
+ TIMEOUT
+}
diff --git a/src/main/java/net/snowflake/client/core/SFBaseStatement.java b/src/main/java/net/snowflake/client/core/SFBaseStatement.java
index 17b2fd1b6..8a6136434 100644
--- a/src/main/java/net/snowflake/client/core/SFBaseStatement.java
+++ b/src/main/java/net/snowflake/client/core/SFBaseStatement.java
@@ -116,9 +116,23 @@ public abstract SFBaseResultSet asyncExecute(
*
* @throws SFException if the statement is already closed.
* @throws SQLException if there are server-side errors from trying to abort.
+ * @deprecated use {@link #cancel(CancellationReason)} instead
*/
+ @Deprecated
public abstract void cancel() throws SFException, SQLException;
+ /**
+ * Aborts the statement.
+ *
+ * @param cancellationReason reason for the cancellation
+ * @throws SFException if the statement is already closed.
+ * @throws SQLException if there are server-side errors from trying to abort.
+ */
+ @SnowflakeJdbcInternalApi
+ public void cancel(CancellationReason cancellationReason) throws SFException, SQLException {
+ cancel(); // default cancel is called to keep interface backward compatibility
+ }
+
/**
* Sets a property within session properties, i.e., if the sql is using set-sf-property
*
diff --git a/src/main/java/net/snowflake/client/core/SFLoginOutput.java b/src/main/java/net/snowflake/client/core/SFLoginOutput.java
index 8daf81f10..3470076b9 100644
--- a/src/main/java/net/snowflake/client/core/SFLoginOutput.java
+++ b/src/main/java/net/snowflake/client/core/SFLoginOutput.java
@@ -18,6 +18,7 @@ public class SFLoginOutput {
private int databaseMajorVersion;
private int databaseMinorVersion;
private Duration httpClientSocketTimeout;
+ private Duration httpClientConnectionTimeout;
private String sessionDatabase;
private String sessionSchema;
private String sessionRole;
@@ -53,6 +54,7 @@ public class SFLoginOutput {
this.databaseMajorVersion = databaseMajorVersion;
this.databaseMinorVersion = databaseMinorVersion;
this.httpClientSocketTimeout = Duration.ofMillis(httpClientSocketTimeout);
+ this.httpClientConnectionTimeout = Duration.ofMillis(httpClientConnectionTimeout);
this.sessionDatabase = sessionDatabase;
this.sessionSchema = sessionSchema;
this.sessionRole = sessionRole;
@@ -113,7 +115,7 @@ Duration getHttpClientSocketTimeout() {
}
Duration getHttpClientConnectionTimeout() {
- return httpClientSocketTimeout;
+ return httpClientConnectionTimeout;
}
Map getCommonParams() {
diff --git a/src/main/java/net/snowflake/client/core/SFStatement.java b/src/main/java/net/snowflake/client/core/SFStatement.java
index 6142b8eb9..f3a0f8a09 100644
--- a/src/main/java/net/snowflake/client/core/SFStatement.java
+++ b/src/main/java/net/snowflake/client/core/SFStatement.java
@@ -298,7 +298,7 @@ private TimeBombTask(SFStatement statement) {
@Override
public Void call() throws SQLException {
try {
- statement.cancel();
+ statement.cancel(CancellationReason.TIMEOUT);
} catch (SFException ex) {
throw new SnowflakeSQLLoggedException(
session, ex.getSqlState(), ex.getVendorCode(), ex, ex.getParams());
@@ -711,10 +711,11 @@ private void reauthenticate() throws SFException, SnowflakeSQLException {
*
* @param sql sql statement
* @param mediaType media type
+ * @param cancellationReason reason for the cancellation
* @throws SnowflakeSQLException if failed to cancel the statement
* @throws SFException if statement is already closed
*/
- private void cancelHelper(String sql, String mediaType)
+ private void cancelHelper(String sql, String mediaType, CancellationReason cancellationReason)
throws SnowflakeSQLException, SFException {
synchronized (this) {
if (isClosed) {
@@ -734,7 +735,7 @@ private void cancelHelper(String sql, String mediaType)
.setMaxRetries(session.getMaxHttpRetries())
.setHttpClientSettingsKey(session.getHttpClientKey());
- StmtUtil.cancel(stmtInput);
+ StmtUtil.cancel(stmtInput, cancellationReason);
synchronized (this) {
/*
@@ -842,6 +843,12 @@ public void close() {
@Override
public void cancel() throws SFException, SQLException {
logger.trace("void cancel()", false);
+ cancel(CancellationReason.UNKNOWN);
+ }
+
+ @Override
+ public void cancel(CancellationReason cancellationReason) throws SFException, SQLException {
+ logger.trace("void cancel(CancellationReason)", false);
if (canceling.get()) {
logger.debug("Query is already cancelled", false);
@@ -866,7 +873,7 @@ public void cancel() throws SFException, SQLException {
}
// cancel the query on the server side if it has been issued
- cancelHelper(this.sqlText, StmtUtil.SF_MEDIA_TYPE);
+ cancelHelper(this.sqlText, StmtUtil.SF_MEDIA_TYPE, cancellationReason);
}
}
diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java
index 96fefe5dc..3d8055d1f 100644
--- a/src/main/java/net/snowflake/client/core/StmtUtil.java
+++ b/src/main/java/net/snowflake/client/core/StmtUtil.java
@@ -681,8 +681,23 @@ protected static JsonNode getQueryResultJSON(String queryId, SFSession session)
* @param stmtInput input statement
* @throws SFException if there is an internal exception
* @throws SnowflakeSQLException if failed to cancel the statement
+ * @deprecated use {@link #cancel(StmtInput, CancellationReason)} instead
*/
+ @Deprecated
public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLException {
+ cancel(stmtInput, CancellationReason.UNKNOWN);
+ }
+
+ /**
+ * Cancel a statement identifiable by a request id
+ *
+ * @param stmtInput input statement
+ * @param cancellationReason reason for the cancellation
+ * @throws SFException if there is an internal exception
+ * @throws SnowflakeSQLException if failed to cancel the statement
+ */
+ public static void cancel(StmtInput stmtInput, CancellationReason cancellationReason)
+ throws SFException, SnowflakeSQLException {
HttpPost httpRequest = null;
AssertUtil.assertTrue(
@@ -701,7 +716,7 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE
try {
URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl);
-
+ logger.warn("Cancelling query {} with reason {}", stmtInput.requestId, cancellationReason);
logger.debug("Aborting query: {}", stmtInput.sql);
uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1);
diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java
index 5be46c5de..c753c87de 100644
--- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java
+++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java
@@ -283,7 +283,14 @@ public static CloseableHttpResponse execute(
// if an SSL issue occurs like an SSLHandshakeException then fail
// immediately and stop retrying the requests
- throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, ex.getMessage());
+ String formattedMsg =
+ ex.getMessage()
+ + "\n"
+ + "Verify that the hostnames and portnumbers in SYSTEM$ALLOWLIST are added to your firewall's allowed list.\n"
+ + "To troubleshoot your connection further, you can refer to this article:\n"
+ + "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview";
+
+ throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, formattedMsg);
} catch (Exception ex) {
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java
index bd5a3945e..895275cef 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java
@@ -1649,6 +1649,7 @@ private void uploadStream() throws SnowflakeSQLException {
/** Download a file from remote, and return an input stream */
@Override
public InputStream downloadStream(String fileName) throws SnowflakeSQLException {
+ logger.debug("Downloading file as stream: {}", fileName);
if (stageInfo.getStageType() == StageInfo.StageType.LOCAL_FS) {
logger.error("downloadStream function doesn't support local file system", false);
@@ -1662,14 +1663,32 @@ public InputStream downloadStream(String fileName) throws SnowflakeSQLException
remoteLocation remoteLocation = extractLocationAndPath(stageInfo.getLocation());
- String stageFilePath = fileName;
+ // when downloading files as stream there should be only one file in source files
+ String sourceLocation =
+ sourceFiles.stream()
+ .findFirst()
+ .orElseThrow(
+ () ->
+ new SnowflakeSQLException(
+ queryID,
+ SqlState.NO_DATA,
+ ErrorCode.FILE_NOT_FOUND.getMessageCode(),
+ session,
+ "File not found: " + fileName));
+
+ if (!fileName.equals(sourceLocation)) {
+ // filename may be different from source location e.g. in git repositories
+ logger.debug("Changing file to download location from {} to {}", fileName, sourceLocation);
+ }
+ String stageFilePath = sourceLocation;
if (!remoteLocation.path.isEmpty()) {
- stageFilePath = SnowflakeUtil.concatFilePathNames(remoteLocation.path, fileName, "/");
+ stageFilePath = SnowflakeUtil.concatFilePathNames(remoteLocation.path, sourceLocation, "/");
}
+ logger.debug("Stage file path for {} is {}", sourceLocation, stageFilePath);
- RemoteStoreFileEncryptionMaterial encMat = srcFileToEncMat.get(fileName);
- String presignedUrl = srcFileToPresignedUrl.get(fileName);
+ RemoteStoreFileEncryptionMaterial encMat = srcFileToEncMat.get(sourceLocation);
+ String presignedUrl = srcFileToPresignedUrl.get(sourceLocation);
return storageFactory
.createClient(stageInfo, parallel, encMat, session)
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java
index 5016c175b..08cb3fac7 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java
@@ -20,6 +20,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import net.snowflake.client.core.CancellationReason;
import net.snowflake.client.core.ExecTimeTelemetryData;
import net.snowflake.client.core.ParameterBindingDTO;
import net.snowflake.client.core.ResultUtil;
@@ -952,7 +953,7 @@ public void cancel() throws SQLException {
raiseSQLExceptionIfStatementIsClosed();
try {
- sfBaseStatement.cancel();
+ sfBaseStatement.cancel(CancellationReason.CLIENT_REQUESTED);
} catch (SFException ex) {
throw new SnowflakeSQLException(ex, ex.getSqlState(), ex.getVendorCode(), ex.getParams());
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
index 1485249b3..05dad6292 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
@@ -4,6 +4,7 @@
package net.snowflake.client.jdbc;
+import static java.util.Arrays.stream;
import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -32,10 +33,12 @@
import java.util.Optional;
import java.util.Properties;
import java.util.Random;
+import java.util.TreeMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
import net.snowflake.client.core.Constants;
import net.snowflake.client.core.HttpClientSettingsKey;
import net.snowflake.client.core.OCSPMode;
@@ -53,6 +56,7 @@
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
+import org.apache.http.NameValuePair;
/**
* @author jhuang
@@ -835,4 +839,29 @@ public static String getJsonNodeStringValue(JsonNode node) throws SFException {
}
return node.isValueNode() ? node.asText() : node.toString();
}
+
+ /**
+ * Method introduced to avoid inconsistencies in custom-header handling, since headers are defined
+ * on the driver side; e.g., some drivers might internally convert headers to canonical form.
+ */
+ @SnowflakeJdbcInternalApi
+ public static Map createCaseInsensitiveMap(Map input) {
+ Map caseInsensitiveMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ if (input != null) {
+ caseInsensitiveMap.putAll(input);
+ }
+ return caseInsensitiveMap;
+ }
+
+ /** Variant of createCaseInsensitiveMap adjusted to the Header[] argument type */
+ @SnowflakeJdbcInternalApi
+ public static Map createCaseInsensitiveMap(Header[] headers) {
+ if (headers != null) {
+ return createCaseInsensitiveMap(
+ stream(headers)
+ .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue)));
+ } else {
+ return new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ }
+ }
}
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
index 93646e104..c3602fcf7 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
@@ -3,8 +3,9 @@
*/
package net.snowflake.client.jdbc.cloud.storage;
-import java.util.HashMap;
import java.util.Map;
+import java.util.TreeMap;
+import net.snowflake.client.jdbc.SnowflakeUtil;
/**
* Implements platform-independent interface Azure BLOB and GCS object metadata
@@ -16,11 +17,11 @@
*/
public class CommonObjectMetadata implements StorageObjectMetadata {
private long contentLength;
- private Map userDefinedMetadata;
+ private final Map userDefinedMetadata;
private String contentEncoding;
CommonObjectMetadata() {
- userDefinedMetadata = new HashMap<>();
+ userDefinedMetadata = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
}
/*
@@ -31,7 +32,7 @@ public class CommonObjectMetadata implements StorageObjectMetadata {
long contentLength, String contentEncoding, Map userDefinedMetadata) {
this.contentEncoding = contentEncoding;
this.contentLength = contentLength;
- this.userDefinedMetadata = userDefinedMetadata;
+ this.userDefinedMetadata = SnowflakeUtil.createCaseInsensitiveMap(userDefinedMetadata);
}
/**
@@ -41,7 +42,6 @@ public class CommonObjectMetadata implements StorageObjectMetadata {
public Map getUserMetadata() {
return userDefinedMetadata;
}
- ;
/**
* @return returns the size of object in bytes
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
index ec54508f9..38f20cf65 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
@@ -5,6 +5,7 @@
import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.Map;
+import net.snowflake.client.jdbc.SnowflakeUtil;
/**
* s3 implementation of platform independent StorageObjectMetadata interface, wraps an S3
@@ -28,7 +29,7 @@ public class S3ObjectMetadata implements StorageObjectMetadata {
@Override
public Map getUserMetadata() {
- return objectMetadata.getUserMetadata();
+ return SnowflakeUtil.createCaseInsensitiveMap(objectMetadata.getUserMetadata());
}
@Override
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
index 3bb209c48..853d461b5 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
@@ -5,6 +5,7 @@
import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.Map;
+import net.snowflake.client.jdbc.SnowflakeUtil;
/**
* Implementation of StorageObjectMetadata for S3 for remote storage object metadata.
@@ -26,7 +27,7 @@ public S3StorageObjectMetadata(ObjectMetadata s3Metadata) {
*/
@Override
public Map getUserMetadata() {
- return this.s3Metadata.getUserMetadata();
+ return SnowflakeUtil.createCaseInsensitiveMap(this.s3Metadata.getUserMetadata());
}
/**
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
index cdf303bbd..4bec46ca7 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
@@ -4,6 +4,8 @@
package net.snowflake.client.jdbc.cloud.storage;
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
+import static net.snowflake.client.core.HttpUtil.setProxyForAzure;
+import static net.snowflake.client.core.HttpUtil.setSessionlessProxyForAzure;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import com.fasterxml.jackson.core.JsonFactory;
@@ -41,7 +43,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import net.snowflake.client.core.HttpUtil;
import net.snowflake.client.core.ObjectMapperFactory;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFSession;
@@ -154,9 +155,9 @@ private void setupAzureClient(
this.azStorageClient = new CloudBlobClient(storageEndpoint, azCreds);
opContext = new OperationContext();
if (session != null) {
- HttpUtil.setProxyForAzure(session.getHttpClientKey(), opContext);
+ setProxyForAzure(session.getHttpClientKey(), opContext);
} else {
- HttpUtil.setSessionlessProxyForAzure(stage.getProxyProperties(), opContext);
+ setSessionlessProxyForAzure(stage.getProxyProperties(), opContext);
}
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("invalid_azure_credentials");
@@ -273,7 +274,8 @@ public StorageObjectMetadata getObjectMetadata(String remoteStorageLocation, Str
blob.downloadAttributes(null, null, opContext);
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
// Get the BLOB system properties we care about
BlobProperties properties = blob.getProperties();
@@ -348,7 +350,8 @@ public void download(
blob.downloadAttributes(null, transferOptions, opContext);
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);
@@ -447,13 +450,11 @@ public InputStream downloadToStream(
InputStream stream = blob.openInputStream(null, null, opContext);
stopwatch.stop();
long downloadMillis = stopwatch.elapsedMillis();
- Map userDefinedMetadata = blob.getMetadata();
-
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);
-
String key = encryptionData.getKey();
-
String iv = encryptionData.getValue();
if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
@@ -574,7 +575,7 @@ public void upload(
CloudBlockBlob blob = container.getBlockBlobReference(destFileName);
// Set the user-defined/Snowflake metadata and upload the BLOB
- blob.setMetadata((HashMap) meta.getUserMetadata());
+ blob.setMetadata(new HashMap<>(meta.getUserMetadata()));
BlobRequestOptions transferOptions = new BlobRequestOptions();
transferOptions.setConcurrentRequestCount(parallelism);
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
index d907973ac..003d894ae 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
@@ -4,6 +4,10 @@
package net.snowflake.client.jdbc.cloud.storage;
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
+import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemPropertyToBooleanValue;
+import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap;
+import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause;
+import static net.snowflake.client.jdbc.SnowflakeUtil.isBlank;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import com.fasterxml.jackson.core.JsonFactory;
@@ -62,7 +66,6 @@
import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial;
import net.snowflake.common.core.SqlState;
import org.apache.commons.io.IOUtils;
-import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.HttpGet;
@@ -310,18 +313,14 @@ public void download(
outStream.close();
bodyStream.close();
if (isEncrypting()) {
- for (Header header : response.getAllHeaders()) {
- if (header
- .getName()
- .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) {
- AbstractMap.SimpleEntry encryptionData =
- parseEncryptionData(header.getValue(), queryId);
-
- key = encryptionData.getKey();
- iv = encryptionData.getValue();
- break;
- }
- }
+ Map userDefinedHeaders =
+ createCaseInsensitiveMap(response.getAllHeaders());
+ AbstractMap.SimpleEntry encryptionData =
+ parseEncryptionData(
+ userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP),
+ queryId);
+ key = encryptionData.getKey();
+ iv = encryptionData.getValue();
}
stopwatch.stop();
downloadMillis = stopwatch.elapsedMillis();
@@ -355,9 +354,10 @@ public void download(
logger.debug("Download successful", false);
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
if (isEncrypting()) {
- if (userDefinedMetadata != null) {
+ if (!userDefinedMetadata.isEmpty()) {
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId);
@@ -499,18 +499,14 @@ public InputStream downloadToStream(
inputStream = response.getEntity().getContent();
if (isEncrypting()) {
- for (Header header : response.getAllHeaders()) {
- if (header
- .getName()
- .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) {
- AbstractMap.SimpleEntry encryptionData =
- parseEncryptionData(header.getValue(), queryId);
-
- key = encryptionData.getKey();
- iv = encryptionData.getValue();
- break;
- }
- }
+ Map userDefinedHeaders =
+ createCaseInsensitiveMap(response.getAllHeaders());
+ AbstractMap.SimpleEntry encryptionData =
+ parseEncryptionData(
+ userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP),
+ queryId);
+ key = encryptionData.getKey();
+ iv = encryptionData.getValue();
}
stopwatch.stop();
downloadMillis = stopwatch.elapsedMillis();
@@ -538,7 +534,8 @@ public InputStream downloadToStream(
inputStream = Channels.newInputStream(blob.reader());
if (isEncrypting()) {
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId);
@@ -1121,7 +1118,7 @@ public void handleStorageException(
// If there is no space left in the download location, java.io.IOException is thrown.
// Don't retry.
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) {
+ if (getRootCause(ex) instanceof IOException) {
SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId);
}
@@ -1181,7 +1178,7 @@ public void handleStorageException(
}
}
} else if (ex instanceof InterruptedException
- || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) {
+ || getRootCause(ex) instanceof SocketTimeoutException) {
if (retryCount > getMaxRetries()) {
throw new SnowflakeSQLLoggedException(
queryId,
@@ -1278,7 +1275,7 @@ private AbstractMap.SimpleEntry parseEncryptionData(
/** Adds digest metadata to the StorageObjectMetadata object */
@Override
public void addDigestMetadata(StorageObjectMetadata meta, String digest) {
- if (!SnowflakeUtil.isBlank(digest)) {
+ if (!isBlank(digest)) {
meta.addUserMetadata("sfc-digest", digest);
}
}
@@ -1355,7 +1352,7 @@ private void setupGCSClient(
private static boolean areDisabledGcsDefaultCredentials(SFSession session) {
return session != null && session.getDisableGcsDefaultCredentials()
- || SnowflakeUtil.convertSystemPropertyToBooleanValue(
+ || convertSystemPropertyToBooleanValue(
DISABLE_GCS_DEFAULT_CREDENTIALS_PROPERTY_NAME, false);
}
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java
index 3b33b60f0..bdede5843 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java
@@ -5,6 +5,8 @@
package net.snowflake.client.jdbc.cloud.storage;
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
+import static net.snowflake.client.jdbc.SnowflakeUtil.createDefaultExecutorService;
+import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import com.amazonaws.AmazonClientException;
@@ -368,7 +370,7 @@ public void download(
new ExecutorFactory() {
@Override
public ExecutorService newExecutor() {
- return SnowflakeUtil.createDefaultExecutorService(
+ return createDefaultExecutorService(
"s3-transfer-manager-downloader-", parallelism);
}
})
@@ -379,7 +381,8 @@ public ExecutorService newExecutor() {
// Pull object metadata from S3
ObjectMetadata meta = amazonClient.getObjectMetadata(remoteStorageLocation, stageFilePath);
- Map metaMap = meta.getUserMetadata();
+ Map metaMap =
+ SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata());
String key = metaMap.get(AMZ_KEY);
String iv = metaMap.get(AMZ_IV);
@@ -481,7 +484,8 @@ public InputStream downloadToStream(
InputStream stream = file.getObjectContent();
stopwatch.stop();
long downloadMillis = stopwatch.elapsedMillis();
- Map metaMap = meta.getUserMetadata();
+ Map metaMap =
+ SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata());
String key = metaMap.get(AMZ_KEY);
String iv = metaMap.get(AMZ_IV);
@@ -611,7 +615,7 @@ public void upload(
new ExecutorFactory() {
@Override
public ExecutorService newExecutor() {
- return SnowflakeUtil.createDefaultExecutorService(
+ return createDefaultExecutorService(
"s3-transfer-manager-uploader-", parallelism);
}
})
@@ -821,7 +825,7 @@ private static void handleS3Exception(
// If there is no space left in the download location, java.io.IOException is thrown.
// Don't retry.
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) {
+ if (getRootCause(ex) instanceof IOException) {
SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId);
}
@@ -912,7 +916,7 @@ private static void handleS3Exception(
}
} else {
if (ex instanceof InterruptedException
- || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) {
+ || getRootCause(ex) instanceof SocketTimeoutException) {
if (retryCount > s3Client.getMaxRetries()) {
throw new SnowflakeSQLLoggedException(
queryId,
diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java
index efed33896..4dbbcb021 100644
--- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java
+++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java
@@ -12,6 +12,7 @@
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.AnyOf.anyOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -49,6 +50,7 @@
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
+import javax.net.ssl.SSLHandshakeException;
import net.snowflake.client.ConditionalIgnoreRule;
import net.snowflake.client.RunningNotOnAWS;
import net.snowflake.client.RunningOnGithubAction;
@@ -1313,7 +1315,7 @@ public void testDownloadStreamWithFileNotFoundException() throws SQLException {
.unwrap(SnowflakeConnection.class)
.downloadStream("@testDownloadStream_stage", "/fileNotExist.gz", true);
} catch (SQLException ex) {
- assertThat(ex.getErrorCode(), is(ErrorCode.S3_OPERATION_ERROR.getMessageCode()));
+ assertThat(ex.getErrorCode(), is(ErrorCode.FILE_NOT_FOUND.getMessageCode()));
}
long endDownloadTime = System.currentTimeMillis();
// S3Client retries some exception for a default timeout of 5 minutes
@@ -1618,4 +1620,29 @@ public void shouldGetOverridenConnectionAndSocketTimeouts() throws Exception {
assertEquals(Duration.ofMillis(200), HttpUtil.getSocketTimeout());
}
}
+
+ /** Added in > 3.19.0 */
+ @Test
+ public void shouldFailOnSslExceptionWithLinkToTroubleShootingGuide() throws InterruptedException {
+ Properties properties = new Properties();
+ properties.put("user", "fakeuser");
+ properties.put("password", "testpassword");
+ properties.put("ocspFailOpen", Boolean.FALSE.toString());
+
+ try {
+ DriverManager.getConnection("jdbc:snowflake://expired.badssl.com/", properties);
+ fail("should fail");
+ } catch (SQLException e) {
+ // *.badssl.com may fail with timeout
+ if (!(e.getCause() instanceof SSLHandshakeException)
+ && e.getCause().getMessage().toLowerCase().contains("timed out")) {
+ return;
+ }
+ assertThat(e.getCause(), instanceOf(SSLHandshakeException.class));
+ assertTrue(
+ e.getMessage()
+ .contains(
+ "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview"));
+ }
+ }
}
diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java
index 04c9c9311..49c6c6d10 100644
--- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java
+++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java
@@ -109,7 +109,7 @@ public void testValidityExpiredOCSPResponseFailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -147,7 +147,7 @@ public void testNoOCSPResponderURLFailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -184,7 +184,7 @@ public void testValidityExpiredOCSPResponseInsecure() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -199,7 +199,7 @@ public void testCertAttachedInvalidFailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -235,7 +235,7 @@ public void testUnknownOCSPCertFailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -294,7 +294,7 @@ public void testOCSPCacheServerTimeoutFailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -333,7 +333,7 @@ public void testOCSPResponderTimeoutFailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -369,7 +369,7 @@ public void testOCSPResponder403FailOpen() {
} catch (SQLException ex) {
assertThat(ex, instanceOf(SnowflakeSQLException.class));
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode()));
- assertThat(ex.getMessage(), httpStatus403Or513());
+ assertThat(ex.getMessage(), httpStatus403Or404Or513());
assertNull(ex.getCause());
}
}
@@ -412,26 +412,39 @@ public void testExpiredCert() {
/** Test Wrong host. Will fail in both FAIL_OPEN and FAIL_CLOSED. */
@Test
- public void testWrongHost() {
+ public void testWrongHost() throws InterruptedException {
try {
DriverManager.getConnection(
"jdbc:snowflake://wrong.host.badssl.com/", OCSPFailClosedProperties());
fail("should fail");
} catch (SQLException ex) {
+ // *.badssl.com may fail with timeout
+ if (!(ex.getCause() instanceof SSLPeerUnverifiedException)
+ && !(ex.getCause() instanceof SSLHandshakeException)
+ && ex.getCause().getMessage().toLowerCase().contains("timed out")) {
+ return;
+ }
assertThat(ex, instanceOf(SnowflakeSQLException.class));
// The certificates used by badssl.com expired around 05/17/2022,
- // https://github.com/chromium/badssl.com/issues/504. After the certificates had been updated,
- // the exception seems to be changed from SSLPeerUnverifiedException to SSLHandshakeException.
+      // https://github.com/chromium/badssl.com/issues/504. After the certificates had
+      // been updated, the exception seems to have changed from
+      // SSLPeerUnverifiedException to SSLHandshakeException, so either cause type is
+      // accepted below.
assertThat(
ex.getCause(),
anyOf(
instanceOf(SSLPeerUnverifiedException.class),
instanceOf(SSLHandshakeException.class)));
+ return;
}
+ fail("All retries failed");
}
- private static Matcher httpStatus403Or513() {
- return anyOf(containsString("HTTP status=403"), containsString("HTTP status=513"));
+ private static Matcher httpStatus403Or404Or513() {
+ return anyOf(
+ containsString("HTTP status=403"),
+ containsString("HTTP status=404"),
+ containsString("HTTP status=513"));
}
}
diff --git a/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java b/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java
new file mode 100644
index 000000000..b720591de
--- /dev/null
+++ b/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.jdbc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+import net.snowflake.client.ConditionalIgnoreRule;
+import net.snowflake.client.RunningOnGithubAction;
+import net.snowflake.client.category.TestCategoryOthers;
+import org.apache.commons.io.IOUtils;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(TestCategoryOthers.class)
+public class GitRepositoryDownloadLatestIT extends BaseJDBCTest {
+
+ /**
+ * Test needs to set up git integration which is not available in GH Action tests and needs
+ * accountadmin role. Added in > 3.19.0
+ */
+ @Test
+ @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
+ public void shouldDownloadFileAndStreamFromGitRepository() throws Exception {
+ try (Connection connection = getConnection()) {
+ prepareJdbcRepoInSnowflake(connection);
+
+ String stageName =
+ String.format("@%s.%s.JDBC", connection.getCatalog(), connection.getSchema());
+ String fileName = ".pre-commit-config.yaml";
+ String filePathInGitRepo = "branches/master/" + fileName;
+
+ List fetchedFileContent =
+ getContentFromFile(connection, stageName, filePathInGitRepo, fileName);
+
+ List fetchedStreamContent =
+ getContentFromStream(connection, stageName, filePathInGitRepo);
+
+ assertFalse("File content cannot be empty", fetchedFileContent.isEmpty());
+ assertFalse("Stream content cannot be empty", fetchedStreamContent.isEmpty());
+ assertEquals(fetchedFileContent, fetchedStreamContent);
+ }
+ }
+
+ private static void prepareJdbcRepoInSnowflake(Connection connection) throws SQLException {
+ try (Statement statement = connection.createStatement()) {
+ statement.execute("use role accountadmin");
+ statement.execute(
+ "CREATE OR REPLACE API INTEGRATION gh_integration\n"
+ + " API_PROVIDER = git_https_api\n"
+ + " API_ALLOWED_PREFIXES = ('https://github.com/snowflakedb/snowflake-jdbc.git')\n"
+ + " ENABLED = TRUE;");
+ statement.execute(
+ "CREATE OR REPLACE GIT REPOSITORY jdbc\n"
+ + "ORIGIN = 'https://github.com/snowflakedb/snowflake-jdbc.git'\n"
+ + "API_INTEGRATION = gh_integration;");
+ }
+ }
+
+ private static List getContentFromFile(
+ Connection connection, String stageName, String filePathInGitRepo, String fileName)
+ throws IOException, SQLException {
+ Path tempDir = Files.createTempDirectory("git");
+ String stagePath = stageName + "/" + filePathInGitRepo;
+ Path downloadedFile = tempDir.resolve(fileName);
+ String command = String.format("GET '%s' '%s'", stagePath, tempDir.toUri());
+
+ try (Statement statement = connection.createStatement();
+ ResultSet rs = statement.executeQuery(command); ) {
+ // then
+ assertTrue("has result", rs.next());
+ return Files.readAllLines(downloadedFile);
+ } finally {
+ Files.delete(downloadedFile);
+ Files.delete(tempDir);
+ }
+ }
+
+ private static List getContentFromStream(
+ Connection connection, String stageName, String filePathInGitRepo)
+ throws SQLException, IOException {
+ SnowflakeConnection unwrap = connection.unwrap(SnowflakeConnection.class);
+ try (InputStream inputStream = unwrap.downloadStream(stageName, filePathInGitRepo, false)) {
+ return IOUtils.readLines(inputStream, StandardCharsets.UTF_8);
+ }
+ }
+}
diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java
index 989f1211a..91052fd7c 100644
--- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java
+++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java
@@ -1470,54 +1470,6 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException {
}
}
- @Test
- @Ignore // ignored until SNOW-1616480 is resolved
- public void testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable {
- File destFolder = tmpFolder.newFolder();
- String destFolderCanonicalPath = destFolder.getCanonicalPath();
- // set parameter for presignedUrl upload instead of downscoped token
- Properties paramProperties = new Properties();
- paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", false);
- try (Connection connection = getConnection("gcpaccount", paramProperties);
- Statement statement = connection.createStatement()) {
- try {
- // create a stage to put the file in
- statement.execute("CREATE OR REPLACE STAGE " + testStageName);
-
- SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession();
-
- // Test put file with internal compression
- String putCommand = "put file:///dummy/path/file1.gz @" + testStageName;
- SnowflakeFileTransferAgent sfAgent =
- new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession));
- List metadata = sfAgent.getFileTransferMetadatas();
-
- String srcPath = getFullPathFileInResource(TEST_DATA_FILE);
- for (SnowflakeFileTransferMetadata oneMetadata : metadata) {
- InputStream inputStream = new FileInputStream(srcPath);
-
- assertTrue(oneMetadata.isForOneFile());
- SnowflakeFileTransferAgent.uploadWithoutConnection(
- SnowflakeFileTransferConfig.Builder.newInstance()
- .setSnowflakeFileTransferMetadata(oneMetadata)
- .setUploadStream(inputStream)
- .setRequireCompress(true)
- .setNetworkTimeoutInMilli(0)
- .setOcspMode(OCSPMode.FAIL_OPEN)
- .build());
- }
-
- assertTrue(
- "Failed to get files",
- statement.execute(
- "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8"));
- assertTrue(isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true));
- } finally {
- statement.execute("DROP STAGE if exists " + testStageName);
- }
- }
- }
-
@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testUploadWithGCSDownscopedCredentialWithoutConnection() throws Throwable {
diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java
index 6e61d82dc..703e55b7c 100644
--- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java
+++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java
@@ -3,6 +3,8 @@
*/
package net.snowflake.client.jdbc;
+import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap;
+import static net.snowflake.client.jdbc.SnowflakeUtil.extractColumnMetadata;
import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -16,8 +18,13 @@
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeMap;
import net.snowflake.client.category.TestCategoryCore;
import net.snowflake.client.core.ObjectMapperFactory;
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -39,8 +46,7 @@ public void testCreateMetadata() throws Throwable {
SnowflakeColumnMetadata expectedColumnMetadata =
createExpectedMetadata(rootNode, fieldOne, fieldTwo);
// when
- SnowflakeColumnMetadata columnMetadata =
- SnowflakeUtil.extractColumnMetadata(rootNode, false, null);
+ SnowflakeColumnMetadata columnMetadata = extractColumnMetadata(rootNode, false, null);
// then
assertNotNull(columnMetadata);
assertEquals(
@@ -62,8 +68,7 @@ public void testCreateFieldsMetadataForObject() throws Throwable {
rootNode.putIfAbsent("fields", fields);
// when
- SnowflakeColumnMetadata columnMetadata =
- SnowflakeUtil.extractColumnMetadata(rootNode, false, null);
+ SnowflakeColumnMetadata columnMetadata = extractColumnMetadata(rootNode, false, null);
// then
assertNotNull(columnMetadata);
assertEquals("OBJECT", columnMetadata.getTypeName());
@@ -82,6 +87,36 @@ public void testCreateFieldsMetadataForObject() throws Throwable {
assertTrue(secondField.isNullable());
}
+ @Test
+ public void shouldConvertCreateCaseInsensitiveMap() {
+ Map map = new HashMap<>();
+ map.put("key1", "value1");
+
+ map = SnowflakeUtil.createCaseInsensitiveMap(map);
+ assertTrue(map instanceof TreeMap);
+ assertEquals(String.CASE_INSENSITIVE_ORDER, ((TreeMap) map).comparator());
+ assertEquals("value1", map.get("key1"));
+ assertEquals("value1", map.get("Key1"));
+ assertEquals("value1", map.get("KEy1"));
+
+ map.put("KEY1", "changed_value1");
+ assertEquals("changed_value1", map.get("KEY1"));
+ }
+
+ @Test
+ public void shouldConvertHeadersCreateCaseInsensitiveMap() {
+ Header[] headers =
+ new Header[] {new BasicHeader("key1", "value1"), new BasicHeader("key2", "value2")};
+
+ Map map = createCaseInsensitiveMap(headers);
+ assertTrue(map instanceof TreeMap);
+ assertEquals(String.CASE_INSENSITIVE_ORDER, ((TreeMap) map).comparator());
+ assertEquals("value1", map.get("key1"));
+ assertEquals("value2", map.get("key2"));
+ assertEquals("value1", map.get("Key1"));
+ assertEquals("value2", map.get("Key2"));
+ }
+
private static SnowflakeColumnMetadata createExpectedMetadata(
JsonNode rootNode, JsonNode fieldOne, JsonNode fieldTwo) throws SnowflakeSQLLoggedException {
ColumnTypeInfo columnTypeInfo =
diff --git a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java
index 3ab179b70..093c2de27 100644
--- a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java
+++ b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java
@@ -119,7 +119,7 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException {
assertTrue(ex instanceof SQLException);
assertTrue(
"Wrong exception message: " + ex.getMessage(),
- ex.getMessage().matches(".*Blob.*not found in bucket.*"));
+ ex.getMessage().contains("File not found"));
} finally {
statement.execute("rm @~/" + DEST_PREFIX);
}
diff --git a/thin_public_pom.xml b/thin_public_pom.xml
index 8069afd9d..08ab73da6 100644
--- a/thin_public_pom.xml
+++ b/thin_public_pom.xml
@@ -37,7 +37,7 @@
4.4.16
1.12.655
5.0.0
- 1.74
+ 1.78.1
1.17.0
2.11.0
1.2
@@ -117,6 +117,11 @@
bcprov-jdk18on
${bouncycastle.version}
+
+ org.bouncycastle
+ bcutil-jdk18on
+ ${bouncycastle.version}
+
com.amazonaws
aws-java-sdk-core