diff --git a/dependencies/Readme.md b/dependencies/Readme.md index 7b4a4c73c..5abaea2ae 100644 --- a/dependencies/Readme.md +++ b/dependencies/Readme.md @@ -1,2 +1,2 @@ -Arrow dependencies are built from internal branch `upgradeto17.0.0`. This build was applied the AIX fix. +Arrow dependencies are built from internal branch `upgradeTo17.0.0-v2`. This build has the AIX fix applied and uses the custom logger instead of the slf4j logger. diff --git a/dependencies/arrow-format-17.0.0.jar b/dependencies/arrow-format-17.0.0.jar index 349272113..b4a34e86f 100644 Binary files a/dependencies/arrow-format-17.0.0.jar and b/dependencies/arrow-format-17.0.0.jar differ diff --git a/dependencies/arrow-memory-core-17.0.0.jar b/dependencies/arrow-memory-core-17.0.0.jar index a218df5db..12d9c9116 100644 Binary files a/dependencies/arrow-memory-core-17.0.0.jar and b/dependencies/arrow-memory-core-17.0.0.jar differ diff --git a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar index 2004a461e..72d374247 100644 Binary files a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar and b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar differ diff --git a/dependencies/arrow-memory-unsafe-17.0.0.jar b/dependencies/arrow-memory-unsafe-17.0.0.jar index b9897fe47..65aac2a7b 100644 Binary files a/dependencies/arrow-memory-unsafe-17.0.0.jar and b/dependencies/arrow-memory-unsafe-17.0.0.jar differ diff --git a/dependencies/arrow-vector-17.0.0.jar b/dependencies/arrow-vector-17.0.0.jar index 69ccfaf0d..29055d603 100644 Binary files a/dependencies/arrow-vector-17.0.0.jar and b/dependencies/arrow-vector-17.0.0.jar differ diff --git a/src/main/java/net/snowflake/client/core/SFLoginOutput.java b/src/main/java/net/snowflake/client/core/SFLoginOutput.java index 8daf81f10..3470076b9 100644 --- a/src/main/java/net/snowflake/client/core/SFLoginOutput.java +++ b/src/main/java/net/snowflake/client/core/SFLoginOutput.java @@ -18,6 +18,7 @@
public class SFLoginOutput { private int databaseMajorVersion; private int databaseMinorVersion; private Duration httpClientSocketTimeout; + private Duration httpClientConnectionTimeout; private String sessionDatabase; private String sessionSchema; private String sessionRole; @@ -53,6 +54,7 @@ public class SFLoginOutput { this.databaseMajorVersion = databaseMajorVersion; this.databaseMinorVersion = databaseMinorVersion; this.httpClientSocketTimeout = Duration.ofMillis(httpClientSocketTimeout); + this.httpClientConnectionTimeout = Duration.ofMillis(httpClientConnectionTimeout); this.sessionDatabase = sessionDatabase; this.sessionSchema = sessionSchema; this.sessionRole = sessionRole; @@ -113,7 +115,7 @@ Duration getHttpClientSocketTimeout() { } Duration getHttpClientConnectionTimeout() { - return httpClientSocketTimeout; + return httpClientConnectionTimeout; } Map getCommonParams() { diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java index 5be46c5de..c753c87de 100644 --- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java +++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java @@ -283,7 +283,14 @@ public static CloseableHttpResponse execute( // if an SSL issue occurs like an SSLHandshakeException then fail // immediately and stop retrying the requests - throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, ex.getMessage()); + String formattedMsg = + ex.getMessage() + + "\n" + + "Verify that the hostnames and portnumbers in SYSTEM$ALLOWLIST are added to your firewall's allowed list.\n" + + "To troubleshoot your connection further, you can refer to this article:\n" + + "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview"; + + throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, formattedMsg); } catch (Exception ex) { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 1485249b3..05dad6292 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -4,6 +4,7 @@ package net.snowflake.client.jdbc; +import static java.util.Arrays.stream; import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY; import com.fasterxml.jackson.core.JsonProcessingException; @@ -32,10 +33,12 @@ import java.util.Optional; import java.util.Properties; import java.util.Random; +import java.util.TreeMap; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import net.snowflake.client.core.Constants; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; @@ -53,6 +56,7 @@ import org.apache.commons.io.IOUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; +import org.apache.http.NameValuePair; /** * @author jhuang @@ -835,4 +839,29 @@ public static String getJsonNodeStringValue(JsonNode node) throws SFException { } return node.isValueNode() ? node.asText() : node.toString(); } + + /** + * Method introduced to avoid inconsistencies in custom headers handling, since these are defined + * on drivers side e.g. some drivers might internally convert headers to canonical form. 
+ */ + @SnowflakeJdbcInternalApi + public static Map createCaseInsensitiveMap(Map input) { + Map caseInsensitiveMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + if (input != null) { + caseInsensitiveMap.putAll(input); + } + return caseInsensitiveMap; + } + + /** toCaseInsensitiveMap, but adjusted to Headers[] argument type */ + @SnowflakeJdbcInternalApi + public static Map createCaseInsensitiveMap(Header[] headers) { + if (headers != null) { + return createCaseInsensitiveMap( + stream(headers) + .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue))); + } else { + return new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + } + } } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java index 93646e104..c3602fcf7 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java @@ -3,8 +3,9 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import java.util.HashMap; import java.util.Map; +import java.util.TreeMap; +import net.snowflake.client.jdbc.SnowflakeUtil; /** * Implements platform-independent interface Azure BLOB and GCS object metadata @@ -16,11 +17,11 @@ */ public class CommonObjectMetadata implements StorageObjectMetadata { private long contentLength; - private Map userDefinedMetadata; + private final Map userDefinedMetadata; private String contentEncoding; CommonObjectMetadata() { - userDefinedMetadata = new HashMap<>(); + userDefinedMetadata = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); } /* @@ -31,7 +32,7 @@ public class CommonObjectMetadata implements StorageObjectMetadata { long contentLength, String contentEncoding, Map userDefinedMetadata) { this.contentEncoding = contentEncoding; this.contentLength = contentLength; - this.userDefinedMetadata = userDefinedMetadata; + this.userDefinedMetadata = 
SnowflakeUtil.createCaseInsensitiveMap(userDefinedMetadata); } /** @@ -41,7 +42,6 @@ public class CommonObjectMetadata implements StorageObjectMetadata { public Map getUserMetadata() { return userDefinedMetadata; } - ; /** * @return returns the size of object in bytes diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java index ec54508f9..38f20cf65 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java @@ -5,6 +5,7 @@ import com.amazonaws.services.s3.model.ObjectMetadata; import java.util.Map; +import net.snowflake.client.jdbc.SnowflakeUtil; /** * s3 implementation of platform independent StorageObjectMetadata interface, wraps an S3 @@ -28,7 +29,7 @@ public class S3ObjectMetadata implements StorageObjectMetadata { @Override public Map getUserMetadata() { - return objectMetadata.getUserMetadata(); + return SnowflakeUtil.createCaseInsensitiveMap(objectMetadata.getUserMetadata()); } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java index 3bb209c48..853d461b5 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java @@ -5,6 +5,7 @@ import com.amazonaws.services.s3.model.ObjectMetadata; import java.util.Map; +import net.snowflake.client.jdbc.SnowflakeUtil; /** * Implementation of StorageObjectMetadata for S3 for remote storage object metadata. 
@@ -26,7 +27,7 @@ public S3StorageObjectMetadata(ObjectMetadata s3Metadata) { */ @Override public Map getUserMetadata() { - return this.s3Metadata.getUserMetadata(); + return SnowflakeUtil.createCaseInsensitiveMap(this.s3Metadata.getUserMetadata()); } /** diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java index cdf303bbd..4bec46ca7 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java @@ -4,6 +4,8 @@ package net.snowflake.client.jdbc.cloud.storage; import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED; +import static net.snowflake.client.core.HttpUtil.setProxyForAzure; +import static net.snowflake.client.core.HttpUtil.setSessionlessProxyForAzure; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import com.fasterxml.jackson.core.JsonFactory; @@ -41,7 +43,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFSession; @@ -154,9 +155,9 @@ private void setupAzureClient( this.azStorageClient = new CloudBlobClient(storageEndpoint, azCreds); opContext = new OperationContext(); if (session != null) { - HttpUtil.setProxyForAzure(session.getHttpClientKey(), opContext); + setProxyForAzure(session.getHttpClientKey(), opContext); } else { - HttpUtil.setSessionlessProxyForAzure(stage.getProxyProperties(), opContext); + setSessionlessProxyForAzure(stage.getProxyProperties(), opContext); } } catch (URISyntaxException ex) { throw new IllegalArgumentException("invalid_azure_credentials"); @@ -273,7 +274,8 @@ public StorageObjectMetadata getObjectMetadata(String remoteStorageLocation, Str 
blob.downloadAttributes(null, null, opContext); // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); // Get the BLOB system properties we care about BlobProperties properties = blob.getProperties(); @@ -348,7 +350,8 @@ public void download( blob.downloadAttributes(null, transferOptions, opContext); // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId); @@ -447,13 +450,11 @@ public InputStream downloadToStream( InputStream stream = blob.openInputStream(null, null, opContext); stopwatch.stop(); long downloadMillis = stopwatch.elapsedMillis(); - Map userDefinedMetadata = blob.getMetadata(); - + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId); - String key = encryptionData.getKey(); - String iv = encryptionData.getValue(); if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) { @@ -574,7 +575,7 @@ public void upload( CloudBlockBlob blob = container.getBlockBlobReference(destFileName); // Set the user-defined/Snowflake metadata and upload the BLOB - blob.setMetadata((HashMap) meta.getUserMetadata()); + blob.setMetadata(new HashMap<>(meta.getUserMetadata())); BlobRequestOptions transferOptions = new BlobRequestOptions(); transferOptions.setConcurrentRequestCount(parallelism); diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java index d907973ac..003d894ae 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java @@ -4,6 +4,10 @@ package net.snowflake.client.jdbc.cloud.storage; import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED; +import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemPropertyToBooleanValue; +import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap; +import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause; +import static net.snowflake.client.jdbc.SnowflakeUtil.isBlank; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import com.fasterxml.jackson.core.JsonFactory; @@ -62,7 +66,6 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; -import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.HttpResponseException; import org.apache.http.client.methods.HttpGet; @@ -310,18 +313,14 @@ public void download( outStream.close(); bodyStream.close(); if (isEncrypting()) { - for (Header header : response.getAllHeaders()) { - if (header - .getName() - .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) { - AbstractMap.SimpleEntry encryptionData = - parseEncryptionData(header.getValue(), queryId); - - key = encryptionData.getKey(); - iv = encryptionData.getValue(); - break; - } - } + Map userDefinedHeaders = + createCaseInsensitiveMap(response.getAllHeaders()); + AbstractMap.SimpleEntry encryptionData = + parseEncryptionData( + userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP), + queryId); + key = encryptionData.getKey(); + iv = encryptionData.getValue(); } stopwatch.stop(); downloadMillis = stopwatch.elapsedMillis(); @@ -355,9 +354,10 @@ public void download( logger.debug("Download successful", false); // Get the user-defined BLOB metadata - 
Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); if (isEncrypting()) { - if (userDefinedMetadata != null) { + if (!userDefinedMetadata.isEmpty()) { AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId); @@ -499,18 +499,14 @@ public InputStream downloadToStream( inputStream = response.getEntity().getContent(); if (isEncrypting()) { - for (Header header : response.getAllHeaders()) { - if (header - .getName() - .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) { - AbstractMap.SimpleEntry encryptionData = - parseEncryptionData(header.getValue(), queryId); - - key = encryptionData.getKey(); - iv = encryptionData.getValue(); - break; - } - } + Map userDefinedHeaders = + createCaseInsensitiveMap(response.getAllHeaders()); + AbstractMap.SimpleEntry encryptionData = + parseEncryptionData( + userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP), + queryId); + key = encryptionData.getKey(); + iv = encryptionData.getValue(); } stopwatch.stop(); downloadMillis = stopwatch.elapsedMillis(); @@ -538,7 +534,8 @@ public InputStream downloadToStream( inputStream = Channels.newInputStream(blob.reader()); if (isEncrypting()) { // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId); @@ -1121,7 +1118,7 @@ public void handleStorageException( // If there is no space left in the download location, java.io.IOException is thrown. // Don't retry. 
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) { + if (getRootCause(ex) instanceof IOException) { SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId); } @@ -1181,7 +1178,7 @@ public void handleStorageException( } } } else if (ex instanceof InterruptedException - || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) { + || getRootCause(ex) instanceof SocketTimeoutException) { if (retryCount > getMaxRetries()) { throw new SnowflakeSQLLoggedException( queryId, @@ -1278,7 +1275,7 @@ private AbstractMap.SimpleEntry parseEncryptionData( /** Adds digest metadata to the StorageObjectMetadata object */ @Override public void addDigestMetadata(StorageObjectMetadata meta, String digest) { - if (!SnowflakeUtil.isBlank(digest)) { + if (!isBlank(digest)) { meta.addUserMetadata("sfc-digest", digest); } } @@ -1355,7 +1352,7 @@ private void setupGCSClient( private static boolean areDisabledGcsDefaultCredentials(SFSession session) { return session != null && session.getDisableGcsDefaultCredentials() - || SnowflakeUtil.convertSystemPropertyToBooleanValue( + || convertSystemPropertyToBooleanValue( DISABLE_GCS_DEFAULT_CREDENTIALS_PROPERTY_NAME, false); } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java index 3b33b60f0..bdede5843 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java @@ -5,6 +5,8 @@ package net.snowflake.client.jdbc.cloud.storage; import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED; +import static net.snowflake.client.jdbc.SnowflakeUtil.createDefaultExecutorService; +import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import com.amazonaws.AmazonClientException; 
@@ -368,7 +370,7 @@ public void download( new ExecutorFactory() { @Override public ExecutorService newExecutor() { - return SnowflakeUtil.createDefaultExecutorService( + return createDefaultExecutorService( "s3-transfer-manager-downloader-", parallelism); } }) @@ -379,7 +381,8 @@ public ExecutorService newExecutor() { // Pull object metadata from S3 ObjectMetadata meta = amazonClient.getObjectMetadata(remoteStorageLocation, stageFilePath); - Map metaMap = meta.getUserMetadata(); + Map metaMap = + SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata()); String key = metaMap.get(AMZ_KEY); String iv = metaMap.get(AMZ_IV); @@ -481,7 +484,8 @@ public InputStream downloadToStream( InputStream stream = file.getObjectContent(); stopwatch.stop(); long downloadMillis = stopwatch.elapsedMillis(); - Map metaMap = meta.getUserMetadata(); + Map metaMap = + SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata()); String key = metaMap.get(AMZ_KEY); String iv = metaMap.get(AMZ_IV); @@ -611,7 +615,7 @@ public void upload( new ExecutorFactory() { @Override public ExecutorService newExecutor() { - return SnowflakeUtil.createDefaultExecutorService( + return createDefaultExecutorService( "s3-transfer-manager-uploader-", parallelism); } }) @@ -821,7 +825,7 @@ private static void handleS3Exception( // If there is no space left in the download location, java.io.IOException is thrown. // Don't retry. 
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) { + if (getRootCause(ex) instanceof IOException) { SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId); } @@ -912,7 +916,7 @@ private static void handleS3Exception( } } else { if (ex instanceof InterruptedException - || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) { + || getRootCause(ex) instanceof SocketTimeoutException) { if (retryCount > s3Client.getMaxRetries()) { throw new SnowflakeSQLLoggedException( queryId, diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index efed33896..4d2129a53 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -12,6 +12,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.AnyOf.anyOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -48,7 +49,9 @@ import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.Random; import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLHandshakeException; import net.snowflake.client.ConditionalIgnoreRule; import net.snowflake.client.RunningNotOnAWS; import net.snowflake.client.RunningOnGithubAction; @@ -1618,4 +1621,37 @@ public void shouldGetOverridenConnectionAndSocketTimeouts() throws Exception { assertEquals(Duration.ofMillis(200), HttpUtil.getSocketTimeout()); } } + + /** Added in > 3.19.0 */ + @Test + public void shouldFailOnSslExceptionWithLinkToTroubleShootingGuide() throws InterruptedException { + Properties properties = new Properties(); + properties.put("user", "fakeuser"); + properties.put("password", "testpassword"); + 
properties.put("ocspFailOpen", Boolean.FALSE.toString()); + + int maxRetries = 5; + int retry = 0; + + // *.badssl.com may fail on timeouts + while (retry < maxRetries) { + try { + DriverManager.getConnection("jdbc:snowflake://expired.badssl.com/", properties); + fail("should fail"); + } catch (SQLException e) { + if (!(e.getCause() instanceof SSLHandshakeException)) { + retry++; + Thread.sleep(1000 * new Random().nextInt(3)); + continue; + } + assertThat(e.getCause(), instanceOf(SSLHandshakeException.class)); + assertTrue( + e.getMessage() + .contains( + "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview")); + return; + } + } + fail("All retries failed"); + } } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java index 04c9c9311..025d4c7a4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java @@ -17,6 +17,7 @@ import java.sql.DriverManager; import java.sql.SQLException; import java.util.Properties; +import java.util.Random; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLPeerUnverifiedException; import net.snowflake.client.ConditionalIgnoreRule; @@ -412,22 +413,38 @@ public void testExpiredCert() { /** Test Wrong host. Will fail in both FAIL_OPEN and FAIL_CLOSED. */ @Test - public void testWrongHost() { - try { - DriverManager.getConnection( - "jdbc:snowflake://wrong.host.badssl.com/", OCSPFailClosedProperties()); - fail("should fail"); - } catch (SQLException ex) { - assertThat(ex, instanceOf(SnowflakeSQLException.class)); - - // The certificates used by badssl.com expired around 05/17/2022, - // https://github.com/chromium/badssl.com/issues/504. After the certificates had been updated, - // the exception seems to be changed from SSLPeerUnverifiedException to SSLHandshakeException. 
- assertThat( - ex.getCause(), - anyOf( - instanceOf(SSLPeerUnverifiedException.class), - instanceOf(SSLHandshakeException.class))); + public void testWrongHost() throws InterruptedException { + int maxRetries = 5; + int retry = 0; + + // *.badssl.com may fail on timeouts + while (retry < maxRetries) { + try { + DriverManager.getConnection( + "jdbc:snowflake://wrong.host.badssl.com/", OCSPFailClosedProperties()); + fail("should fail"); + } catch (SQLException ex) { + if (!(ex.getCause() instanceof SSLPeerUnverifiedException) + && !(ex.getCause() instanceof SSLHandshakeException)) { + retry++; + Thread.sleep(1000 * new Random().nextInt(3)); + continue; + } + assertThat(ex, instanceOf(SnowflakeSQLException.class)); + + // The certificates used by badssl.com expired around 05/17/2022, + // https://github.com/chromium/badssl.com/issues/504. After the certificates had been + // updated, + // the exception seems to be changed from SSLPeerUnverifiedException to + // SSLHandshakeException. + assertThat( + ex.getCause(), + anyOf( + instanceOf(SSLPeerUnverifiedException.class), + instanceOf(SSLHandshakeException.class))); + return; + } + fail("All retries failed"); } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 6e61d82dc..703e55b7c 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap; +import static net.snowflake.client.jdbc.SnowflakeUtil.extractColumnMetadata; import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -16,8 +18,13 @@ import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; +import 
java.util.Map; +import java.util.TreeMap; import net.snowflake.client.category.TestCategoryCore; import net.snowflake.client.core.ObjectMapperFactory; +import org.apache.http.Header; +import org.apache.http.message.BasicHeader; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -39,8 +46,7 @@ public void testCreateMetadata() throws Throwable { SnowflakeColumnMetadata expectedColumnMetadata = createExpectedMetadata(rootNode, fieldOne, fieldTwo); // when - SnowflakeColumnMetadata columnMetadata = - SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + SnowflakeColumnMetadata columnMetadata = extractColumnMetadata(rootNode, false, null); // then assertNotNull(columnMetadata); assertEquals( @@ -62,8 +68,7 @@ public void testCreateFieldsMetadataForObject() throws Throwable { rootNode.putIfAbsent("fields", fields); // when - SnowflakeColumnMetadata columnMetadata = - SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + SnowflakeColumnMetadata columnMetadata = extractColumnMetadata(rootNode, false, null); // then assertNotNull(columnMetadata); assertEquals("OBJECT", columnMetadata.getTypeName()); @@ -82,6 +87,36 @@ public void testCreateFieldsMetadataForObject() throws Throwable { assertTrue(secondField.isNullable()); } + @Test + public void shouldConvertCreateCaseInsensitiveMap() { + Map map = new HashMap<>(); + map.put("key1", "value1"); + + map = SnowflakeUtil.createCaseInsensitiveMap(map); + assertTrue(map instanceof TreeMap); + assertEquals(String.CASE_INSENSITIVE_ORDER, ((TreeMap) map).comparator()); + assertEquals("value1", map.get("key1")); + assertEquals("value1", map.get("Key1")); + assertEquals("value1", map.get("KEy1")); + + map.put("KEY1", "changed_value1"); + assertEquals("changed_value1", map.get("KEY1")); + } + + @Test + public void shouldConvertHeadersCreateCaseInsensitiveMap() { + Header[] headers = + new Header[] {new BasicHeader("key1", "value1"), new BasicHeader("key2", "value2")}; + + Map map = 
createCaseInsensitiveMap(headers); + assertTrue(map instanceof TreeMap); + assertEquals(String.CASE_INSENSITIVE_ORDER, ((TreeMap) map).comparator()); + assertEquals("value1", map.get("key1")); + assertEquals("value2", map.get("key2")); + assertEquals("value1", map.get("Key1")); + assertEquals("value2", map.get("Key2")); + } + private static SnowflakeColumnMetadata createExpectedMetadata( JsonNode rootNode, JsonNode fieldOne, JsonNode fieldTwo) throws SnowflakeSQLLoggedException { ColumnTypeInfo columnTypeInfo =