
Commit

Merge branch 'master' into Snow-1213120-Reuse-Connections-3
sfc-gh-ext-simba-jf authored Oct 22, 2024
2 parents e6ae253 + e5beff8 commit 079b9db
Showing 18 changed files with 208 additions and 78 deletions.
2 changes: 1 addition & 1 deletion dependencies/Readme.md
@@ -1,2 +1,2 @@
Arrow dependencies are built from the internal branch `upgradeto17.0.0`. The AIX fix was applied to this build.
Arrow dependencies are built from the internal branch `upgradeTo17.0.0-v2`. The AIX fix was applied to this build, along with the custom logger in place of the slf4j logger.

Binary file modified dependencies/arrow-format-17.0.0.jar
Binary file not shown.
Binary file modified dependencies/arrow-memory-core-17.0.0.jar
Binary file not shown.
Binary file modified dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar
Binary file not shown.
Binary file modified dependencies/arrow-memory-unsafe-17.0.0.jar
Binary file not shown.
Binary file modified dependencies/arrow-vector-17.0.0.jar
Binary file not shown.
4 changes: 3 additions & 1 deletion src/main/java/net/snowflake/client/core/SFLoginOutput.java
@@ -18,6 +18,7 @@ public class SFLoginOutput {
private int databaseMajorVersion;
private int databaseMinorVersion;
private Duration httpClientSocketTimeout;
private Duration httpClientConnectionTimeout;
private String sessionDatabase;
private String sessionSchema;
private String sessionRole;
@@ -53,6 +54,7 @@ public class SFLoginOutput {
this.databaseMajorVersion = databaseMajorVersion;
this.databaseMinorVersion = databaseMinorVersion;
this.httpClientSocketTimeout = Duration.ofMillis(httpClientSocketTimeout);
this.httpClientConnectionTimeout = Duration.ofMillis(httpClientConnectionTimeout);
this.sessionDatabase = sessionDatabase;
this.sessionSchema = sessionSchema;
this.sessionRole = sessionRole;
@@ -113,7 +115,7 @@ Duration getHttpClientSocketTimeout() {
}

Duration getHttpClientConnectionTimeout() {
return httpClientSocketTimeout;
return httpClientConnectionTimeout;
}

Map<String, Object> getCommonParams() {
9 changes: 8 additions & 1 deletion src/main/java/net/snowflake/client/jdbc/RestRequest.java
@@ -283,7 +283,14 @@ public static CloseableHttpResponse execute(
// if an SSL issue occurs like an SSLHandshakeException then fail
// immediately and stop retrying the requests

throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, ex.getMessage());
String formattedMsg =
ex.getMessage()
+ "\n"
+ "Verify that the hostnames and portnumbers in SYSTEM$ALLOWLIST are added to your firewall's allowed list.\n"
+ "To troubleshoot your connection further, you can refer to this article:\n"
+ "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview";

throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, formattedMsg);

} catch (Exception ex) {

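For illustration only, a minimal sketch (not part of this commit) of how the enriched NETWORK_ERROR message surfaces to a caller; the connection URL and credentials are placeholder values.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class NetworkErrorMessageExample {
  public static void main(String[] args) {
    try (Connection con =
        DriverManager.getConnection(
            "jdbc:snowflake://myaccount.snowflakecomputing.com/", "user", "password")) {
      System.out.println("Connected");
    } catch (SQLException e) {
      // When the underlying failure is a network/SSL error, the message now ends with the
      // SYSTEM$ALLOWLIST hint and the troubleshooting link appended in RestRequest.
      System.err.println(e.getMessage());
    }
  }
}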
29 changes: 29 additions & 0 deletions src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
@@ -4,6 +4,7 @@

package net.snowflake.client.jdbc;

import static java.util.Arrays.stream;
import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY;

import com.fasterxml.jackson.core.JsonProcessingException;
@@ -32,10 +33,12 @@
import java.util.Optional;
import java.util.Properties;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import net.snowflake.client.core.Constants;
import net.snowflake.client.core.HttpClientSettingsKey;
import net.snowflake.client.core.OCSPMode;
@@ -53,6 +56,7 @@
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;

/**
* @author jhuang
@@ -835,4 +839,29 @@ public static String getJsonNodeStringValue(JsonNode node) throws SFException {
}
return node.isValueNode() ? node.asText() : node.toString();
}

/**
* Method introduced to avoid inconsistencies in custom header handling, since these headers are defined
* on the driver's side, e.g. some drivers might internally convert headers to canonical form.
*/
@SnowflakeJdbcInternalApi
public static Map<String, String> createCaseInsensitiveMap(Map<String, String> input) {
Map<String, String> caseInsensitiveMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
if (input != null) {
caseInsensitiveMap.putAll(input);
}
return caseInsensitiveMap;
}

/** createCaseInsensitiveMap, but adjusted to the Header[] argument type */
@SnowflakeJdbcInternalApi
public static Map<String, String> createCaseInsensitiveMap(Header[] headers) {
if (headers != null) {
return createCaseInsensitiveMap(
stream(headers)
.collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue)));
} else {
return new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
}
}
}
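A minimal usage sketch of the new helpers (not part of this commit; createCaseInsensitiveMap is an internal API, called here only for illustration, and the header names and values are arbitrary examples assuming the Apache HttpCore classes are on the classpath).

import java.util.HashMap;
import java.util.Map;
import net.snowflake.client.jdbc.SnowflakeUtil;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;

public class CaseInsensitiveHeadersExample {
  public static void main(String[] args) {
    Map<String, String> customHeaders = new HashMap<>();
    customHeaders.put("X-Custom-Header", "value1");

    // Lookups succeed regardless of the casing a particular driver or server happens to use.
    Map<String, String> fromMap = SnowflakeUtil.createCaseInsensitiveMap(customHeaders);
    System.out.println(fromMap.get("x-custom-header")); // value1

    Header[] responseHeaders = {new BasicHeader("Content-Encoding", "gzip")};
    Map<String, String> fromHeaders = SnowflakeUtil.createCaseInsensitiveMap(responseHeaders);
    System.out.println(fromHeaders.get("CONTENT-ENCODING")); // gzip
  }
}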
src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
@@ -3,8 +3,9 @@
*/
package net.snowflake.client.jdbc.cloud.storage;

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import net.snowflake.client.jdbc.SnowflakeUtil;

/**
* Implements platform-independent interface Azure BLOB and GCS object metadata
@@ -16,11 +17,11 @@
*/
public class CommonObjectMetadata implements StorageObjectMetadata {
private long contentLength;
private Map<String, String> userDefinedMetadata;
private final Map<String, String> userDefinedMetadata;
private String contentEncoding;

CommonObjectMetadata() {
userDefinedMetadata = new HashMap<>();
userDefinedMetadata = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
}

/*
@@ -31,7 +32,7 @@ public class CommonObjectMetadata {
long contentLength, String contentEncoding, Map<String, String> userDefinedMetadata) {
this.contentEncoding = contentEncoding;
this.contentLength = contentLength;
this.userDefinedMetadata = userDefinedMetadata;
this.userDefinedMetadata = SnowflakeUtil.createCaseInsensitiveMap(userDefinedMetadata);
}

/**
@@ -41,7 +42,6 @@ public class CommonObjectMetadata {
public Map<String, String> getUserMetadata() {
return userDefinedMetadata;
}
;

/**
* @return returns the size of object in bytes
src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
@@ -5,6 +5,7 @@

import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.Map;
import net.snowflake.client.jdbc.SnowflakeUtil;

/**
* s3 implementation of platform independent StorageObjectMetadata interface, wraps an S3
@@ -28,7 +29,7 @@ public class S3ObjectMetadata {

@Override
public Map<String, String> getUserMetadata() {
return objectMetadata.getUserMetadata();
return SnowflakeUtil.createCaseInsensitiveMap(objectMetadata.getUserMetadata());
}

@Override
src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
@@ -5,6 +5,7 @@

import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.Map;
import net.snowflake.client.jdbc.SnowflakeUtil;

/**
* Implementation of StorageObjectMetadata for S3 for remote storage object metadata.
@@ -26,7 +27,7 @@ public S3StorageObjectMetadata(ObjectMetadata s3Metadata) {
*/
@Override
public Map<String, String> getUserMetadata() {
return this.s3Metadata.getUserMetadata();
return SnowflakeUtil.createCaseInsensitiveMap(this.s3Metadata.getUserMetadata());
}

/**
src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
@@ -4,6 +4,8 @@
package net.snowflake.client.jdbc.cloud.storage;

import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
import static net.snowflake.client.core.HttpUtil.setProxyForAzure;
import static net.snowflake.client.core.HttpUtil.setSessionlessProxyForAzure;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;

import com.fasterxml.jackson.core.JsonFactory;
@@ -41,7 +43,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.snowflake.client.core.HttpUtil;
import net.snowflake.client.core.ObjectMapperFactory;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFSession;
@@ -154,9 +155,9 @@ private void setupAzureClient(
this.azStorageClient = new CloudBlobClient(storageEndpoint, azCreds);
opContext = new OperationContext();
if (session != null) {
HttpUtil.setProxyForAzure(session.getHttpClientKey(), opContext);
setProxyForAzure(session.getHttpClientKey(), opContext);
} else {
HttpUtil.setSessionlessProxyForAzure(stage.getProxyProperties(), opContext);
setSessionlessProxyForAzure(stage.getProxyProperties(), opContext);
}
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("invalid_azure_credentials");
@@ -273,7 +274,8 @@ public StorageObjectMetadata getObjectMetadata(String remoteStorageLocation, Str
blob.downloadAttributes(null, null, opContext);

// Get the user-defined BLOB metadata
Map<String, String> userDefinedMetadata = blob.getMetadata();
Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());

// Get the BLOB system properties we care about
BlobProperties properties = blob.getProperties();
@@ -348,7 +350,8 @@ public void download(
blob.downloadAttributes(null, transferOptions, opContext);

// Get the user-defined BLOB metadata
Map<String, String> userDefinedMetadata = blob.getMetadata();
Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

@@ -447,13 +450,11 @@ public InputStream downloadToStream(
InputStream stream = blob.openInputStream(null, null, opContext);
stopwatch.stop();
long downloadMillis = stopwatch.elapsedMillis();
Map<String, String> userDefinedMetadata = blob.getMetadata();

Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

String key = encryptionData.getKey();

String iv = encryptionData.getValue();

if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
@@ -574,7 +575,7 @@ public void upload(
CloudBlockBlob blob = container.getBlockBlobReference(destFileName);

// Set the user-defined/Snowflake metadata and upload the BLOB
blob.setMetadata((HashMap<String, String>) meta.getUserMetadata());
blob.setMetadata(new HashMap<>(meta.getUserMetadata()));

BlobRequestOptions transferOptions = new BlobRequestOptions();
transferOptions.setConcurrentRequestCount(parallelism);
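A short sketch (not part of this commit) of why the cast above was replaced with a copy: getUserMetadata() may now return a case-insensitive TreeMap, so casting it to HashMap would fail at runtime, while the Azure SDK's setMetadata(...) is assumed to still take a HashMap. The metadata key below mirrors the sfc-digest key used elsewhere in this diff.

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class MetadataCopyExample {
  public static void main(String[] args) {
    // After this change, user-defined metadata can be backed by a case-insensitive TreeMap.
    Map<String, String> userMetadata = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    userMetadata.put("sfc-digest", "abc123");

    // Old approach: a direct cast no longer works once the backing map is not a HashMap.
    // HashMap<String, String> metadata = (HashMap<String, String>) userMetadata; // ClassCastException

    // New approach: copy the entries into the HashMap that blob.setMetadata(...) expects.
    HashMap<String, String> metadata = new HashMap<>(userMetadata);
    System.out.println(metadata); // {sfc-digest=abc123}
  }
}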
src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
@@ -4,6 +4,10 @@
package net.snowflake.client.jdbc.cloud.storage;

import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemPropertyToBooleanValue;
import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap;
import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause;
import static net.snowflake.client.jdbc.SnowflakeUtil.isBlank;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;

import com.fasterxml.jackson.core.JsonFactory;
@@ -62,7 +66,6 @@
import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial;
import net.snowflake.common.core.SqlState;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.HttpGet;
@@ -310,18 +313,14 @@ public void download(
outStream.close();
bodyStream.close();
if (isEncrypting()) {
for (Header header : response.getAllHeaders()) {
if (header
.getName()
.equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) {
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(header.getValue(), queryId);

key = encryptionData.getKey();
iv = encryptionData.getValue();
break;
}
}
Map<String, String> userDefinedHeaders =
createCaseInsensitiveMap(response.getAllHeaders());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(
userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP),
queryId);
key = encryptionData.getKey();
iv = encryptionData.getValue();
}
stopwatch.stop();
downloadMillis = stopwatch.elapsedMillis();
@@ -355,9 +354,10 @@ public void download(
logger.debug("Download successful", false);

// Get the user-defined BLOB metadata
Map<String, String> userDefinedMetadata = blob.getMetadata();
Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
if (isEncrypting()) {
if (userDefinedMetadata != null) {
if (!userDefinedMetadata.isEmpty()) {
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId);

@@ -499,18 +499,14 @@ public InputStream downloadToStream(
inputStream = response.getEntity().getContent();

if (isEncrypting()) {
for (Header header : response.getAllHeaders()) {
if (header
.getName()
.equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) {
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(header.getValue(), queryId);

key = encryptionData.getKey();
iv = encryptionData.getValue();
break;
}
}
Map<String, String> userDefinedHeaders =
createCaseInsensitiveMap(response.getAllHeaders());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(
userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP),
queryId);
key = encryptionData.getKey();
iv = encryptionData.getValue();
}
stopwatch.stop();
downloadMillis = stopwatch.elapsedMillis();
@@ -538,7 +534,8 @@ public InputStream downloadToStream(
inputStream = Channels.newInputStream(blob.reader());
if (isEncrypting()) {
// Get the user-defined BLOB metadata
Map<String, String> userDefinedMetadata = blob.getMetadata();
Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId);

@@ -1121,7 +1118,7 @@ public void handleStorageException(

// If there is no space left in the download location, java.io.IOException is thrown.
// Don't retry.
if (SnowflakeUtil.getRootCause(ex) instanceof IOException) {
if (getRootCause(ex) instanceof IOException) {
SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId);
}

@@ -1181,7 +1178,7 @@ public void handleStorageException(
}
}
} else if (ex instanceof InterruptedException
|| SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) {
|| getRootCause(ex) instanceof SocketTimeoutException) {
if (retryCount > getMaxRetries()) {
throw new SnowflakeSQLLoggedException(
queryId,
@@ -1278,7 +1275,7 @@ private AbstractMap.SimpleEntry<String, String> parseEncryptionData(
/** Adds digest metadata to the StorageObjectMetadata object */
@Override
public void addDigestMetadata(StorageObjectMetadata meta, String digest) {
if (!SnowflakeUtil.isBlank(digest)) {
if (!isBlank(digest)) {
meta.addUserMetadata("sfc-digest", digest);
}
}
@@ -1355,7 +1352,7 @@ private void setupGCSClient(

private static boolean areDisabledGcsDefaultCredentials(SFSession session) {
return session != null && session.getDisableGcsDefaultCredentials()
|| SnowflakeUtil.convertSystemPropertyToBooleanValue(
|| convertSystemPropertyToBooleanValue(
DISABLE_GCS_DEFAULT_CREDENTIALS_PROPERTY_NAME, false);
}

