nullables;
@@ -143,6 +145,7 @@ public SFResultSetMetaData(
this.columnTypeNames = new ArrayList<>(this.columnCount);
this.columnTypes = new ArrayList<>(this.columnCount);
this.precisions = new ArrayList<>(this.columnCount);
+ this.dimensions = new ArrayList<>(this.columnCount);
this.scales = new ArrayList<>(this.columnCount);
this.nullables = new ArrayList<>(this.columnCount);
this.columnSrcDatabases = new ArrayList<>(this.columnCount);
@@ -156,6 +159,7 @@ public SFResultSetMetaData(
columnNames.add(columnMetadata.get(colIdx).getName());
columnTypeNames.add(columnMetadata.get(colIdx).getTypeName());
precisions.add(calculatePrecision(columnMetadata.get(colIdx)));
+ dimensions.add(calculateDimension(columnMetadata.get(colIdx)));
columnTypes.add(columnMetadata.get(colIdx).getType());
scales.add(columnMetadata.get(colIdx).getScale());
nullables.add(
@@ -200,6 +204,14 @@ private Integer calculatePrecision(SnowflakeColumnMetadata columnMetadata) {
}
}
+ private Integer calculateDimension(SnowflakeColumnMetadata columnMetadata) {
+ int columnType = columnMetadata.getType();
+ if (columnType == SnowflakeUtil.EXTRA_TYPES_VECTOR) {
+ return columnMetadata.getDimension();
+ }
+ return 0;
+ }
+
private Integer calculateDisplaySize(SnowflakeColumnMetadata columnMetadata) {
int columnType = columnMetadata.getType();
switch (columnType) {
@@ -403,6 +415,14 @@ public int getPrecision(int column) {
}
}
+ public int getDimension(int column) {
+ if (dimensions != null && dimensions.size() >= column && column > 0) {
+ return dimensions.get(column - 1);
+ } else {
+ return 0;
+ }
+ }
+
public boolean isSigned(int column) {
return (columnTypes.get(column - 1) == Types.INTEGER
|| columnTypes.get(column - 1) == Types.DECIMAL
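
Reviewer note: a minimal sketch of how the new VECTOR dimension metadata could be consumed. It assumes an `SFResultSetMetaData` instance is available from an executed query; the wrapper method below is hypothetical, and only `getDimension(int)` itself comes from this hunk.

```java
import net.snowflake.client.core.SFResultSetMetaData;

public class VectorDimensionExample {
  // Returns the dimension recorded for a VECTOR column (1-based index) and 0 for any
  // other type, mirroring calculateDimension()/getDimension() added above.
  public static int vectorDimension(SFResultSetMetaData metaData, int columnIndex) {
    return metaData.getDimension(columnIndex);
  }
}
```
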
diff --git a/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java b/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java
index bbd1e1c14..aca26a272 100644
--- a/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java
+++ b/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java
@@ -23,7 +23,8 @@
/** Snowflake custom SSLConnectionSocketFactory */
public class SFSSLConnectionSocketFactory extends SSLConnectionSocketFactory {
- static final SFLogger logger = SFLoggerFactory.getLogger(SFSSLConnectionSocketFactory.class);
+ private static final SFLogger logger =
+ SFLoggerFactory.getLogger(SFSSLConnectionSocketFactory.class);
private static final String SSL_VERSION = "TLSv1.2";
diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java
index d7ee69a07..bb0b2b2a8 100644
--- a/src/main/java/net/snowflake/client/core/SFSession.java
+++ b/src/main/java/net/snowflake/client/core/SFSession.java
@@ -21,6 +21,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
@@ -40,11 +41,14 @@
import net.snowflake.client.jdbc.SnowflakeSQLException;
import net.snowflake.client.jdbc.SnowflakeSQLLoggedException;
import net.snowflake.client.jdbc.SnowflakeUtil;
+import net.snowflake.client.jdbc.diagnostic.DiagnosticContext;
import net.snowflake.client.jdbc.telemetry.Telemetry;
import net.snowflake.client.jdbc.telemetry.TelemetryClient;
import net.snowflake.client.jdbc.telemetryOOB.TelemetryService;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
+import net.snowflake.client.log.SFLoggerUtil;
+import net.snowflake.client.util.Stopwatch;
import net.snowflake.common.core.ClientAuthnDTO;
import net.snowflake.common.core.SqlState;
import org.apache.http.HttpHeaders;
@@ -58,7 +62,7 @@ public class SFSession extends SFBaseSession {
public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION;
public static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake";
public static final String SF_HEADER_TOKEN_TAG = "Token";
- static final SFLogger logger = SFLoggerFactory.getLogger(SFSession.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(SFSession.class);
private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper();
private static final String SF_PATH_SESSION_HEARTBEAT = "/session/heartbeat";
private static final String SF_PATH_QUERY_MONITOR = "/monitoring/queries/";
@@ -94,6 +98,7 @@ public class SFSession extends SFBaseSession {
* Default:300 seconds
*/
private int loginTimeout = 300;
+
/**
* Amount of milliseconds a user is willing to tolerate for network related issues (e.g. HTTP
* 503/504) or database transient issues (e.g. GS not responding)
@@ -213,7 +218,7 @@ private JsonNode getQueryMetadata(String queryID) throws SQLException {
loginTimeout,
authTimeout,
(int) httpClientSocketTimeout.toMillis(),
- 0,
+ maxHttpRetries,
getHttpClientKey());
jsonNode = OBJECT_MAPPER.readTree(response);
} catch (Exception e) {
@@ -224,7 +229,7 @@ private JsonNode getQueryMetadata(String queryID) throws SQLException {
// Get response as JSON and parse it to get the query status
// check the success field first
if (!jsonNode.path("success").asBoolean()) {
- logger.debug("response = {}", response);
+ logger.debug("Response: {}", response);
int errorCode = jsonNode.path("code").asInt();
// If the error is due to an expired session token, try renewing the session and trying
@@ -511,21 +516,23 @@ public boolean containProperty(String key) {
* @throws SnowflakeSQLException exception raised from Snowflake components
*/
public synchronized void open() throws SFException, SnowflakeSQLException {
+ Stopwatch stopwatch = new Stopwatch();
+ stopwatch.start();
performSanityCheckOnProperties();
Map<SFSessionProperty, Object> connectionPropertiesMap = getConnectionPropertiesMap();
- logger.debug(
- "input: server={}, account={}, user={}, password={}, role={}, database={}, schema={},"
- + " warehouse={}, validate_default_parameters={}, authenticator={}, ocsp_mode={},"
- + " passcode_in_password={}, passcode={}, private_key={}, disable_socks_proxy={},"
- + " application={}, app_id={}, app_version={}, login_timeout={}, retry_timeout={}, network_timeout={},"
- + " query_timeout={}, tracing={}, private_key_file={}, private_key_file_pwd={}."
- + " session_parameters: client_store_temporary_credential={}, gzip_disabled={}",
+ logger.info(
+ "Opening session with server: {}, account: {}, user: {}, password is {}, role: {}, database: {}, schema: {},"
+ + " warehouse: {}, validate default parameters: {}, authenticator: {}, ocsp mode: {},"
+ + " passcode in password: {}, passcode is {}, private key is {}, disable socks proxy: {},"
+ + " application: {}, app id: {}, app version: {}, login timeout: {}, retry timeout: {}, network timeout: {},"
+ + " query timeout: {}, tracing: {}, private key file: {}, private key file pwd is {},"
+ + " enable_diagnostics: {}, diagnostics_allowlist_path: {},"
+ + " session parameters: client store temporary credential: {}, gzip disabled: {}",
connectionPropertiesMap.get(SFSessionProperty.SERVER_URL),
connectionPropertiesMap.get(SFSessionProperty.ACCOUNT),
connectionPropertiesMap.get(SFSessionProperty.USER),
- !Strings.isNullOrEmpty((String) connectionPropertiesMap.get(SFSessionProperty.PASSWORD))
- ? "***"
- : "(empty)",
+ SFLoggerUtil.isVariableProvided(
+ (String) connectionPropertiesMap.get(SFSessionProperty.PASSWORD)),
connectionPropertiesMap.get(SFSessionProperty.ROLE),
connectionPropertiesMap.get(SFSessionProperty.DATABASE),
connectionPropertiesMap.get(SFSessionProperty.SCHEMA),
@@ -534,12 +541,9 @@ public synchronized void open() throws SFException, SnowflakeSQLException {
connectionPropertiesMap.get(SFSessionProperty.AUTHENTICATOR),
getOCSPMode().name(),
connectionPropertiesMap.get(SFSessionProperty.PASSCODE_IN_PASSWORD),
- !Strings.isNullOrEmpty((String) connectionPropertiesMap.get(SFSessionProperty.PASSCODE))
- ? "***"
- : "(empty)",
- connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY) != null
- ? "(not null)"
- : "(null)",
+ SFLoggerUtil.isVariableProvided(
+ (String) connectionPropertiesMap.get(SFSessionProperty.PASSCODE)),
+ SFLoggerUtil.isVariableProvided(connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY)),
connectionPropertiesMap.get(SFSessionProperty.DISABLE_SOCKS_PROXY),
connectionPropertiesMap.get(SFSessionProperty.APPLICATION),
connectionPropertiesMap.get(SFSessionProperty.APP_ID),
@@ -550,22 +554,22 @@ public synchronized void open() throws SFException, SnowflakeSQLException {
connectionPropertiesMap.get(SFSessionProperty.QUERY_TIMEOUT),
connectionPropertiesMap.get(SFSessionProperty.TRACING),
connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE),
- !Strings.isNullOrEmpty(
- (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD))
- ? "***"
- : "(empty)",
+ SFLoggerUtil.isVariableProvided(
+ (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD)),
+ connectionPropertiesMap.get(SFSessionProperty.ENABLE_DIAGNOSTICS),
+ connectionPropertiesMap.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE),
sessionParametersMap.get(CLIENT_STORE_TEMPORARY_CREDENTIAL),
connectionPropertiesMap.get(SFSessionProperty.GZIP_DISABLED));
HttpClientSettingsKey httpClientSettingsKey = getHttpClientKey();
logger.debug(
- "connection proxy parameters: use_proxy={}, proxy_host={}, proxy_port={}, proxy_user={},"
- + " proxy_password={}, non_proxy_hosts={}, proxy_protocol={}",
+ "Connection proxy parameters: use proxy: {}, proxy host: {}, proxy port: {}, proxy user: {},"
+ + " proxy password is {}, non proxy hosts: {}, proxy protocol: {}",
httpClientSettingsKey.usesProxy(),
httpClientSettingsKey.getProxyHost(),
httpClientSettingsKey.getProxyPort(),
httpClientSettingsKey.getProxyUser(),
- !Strings.isNullOrEmpty(httpClientSettingsKey.getProxyPassword()) ? "***" : "(empty)",
+ SFLoggerUtil.isVariableProvided(httpClientSettingsKey.getProxyPassword()),
httpClientSettingsKey.getNonProxyHosts(),
httpClientSettingsKey.getProxyHttpProtocol());
@@ -608,19 +612,26 @@ public synchronized void open() throws SFException, SnowflakeSQLException {
connectionPropertiesMap.get(SFSessionProperty.DISABLE_CONSOLE_LOGIN) != null
? getBooleanValue(
connectionPropertiesMap.get(SFSessionProperty.DISABLE_CONSOLE_LOGIN))
- : true);
+ : true)
+ .setDisableSamlURLCheck(
+ connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK) != null
+ ? getBooleanValue(
+ connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK))
+ : false);
- // Enable or disable OOB telemetry based on connection parameter. Default is disabled.
- // The value may still change later when session parameters from the server are read.
- if (getBooleanValue(
- connectionPropertiesMap.get(SFSessionProperty.CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED))) {
- TelemetryService.enable();
- } else {
- TelemetryService.disable();
- }
+ logger.info(
+ "Connecting to {} Snowflake domain",
+ loginInput.getHostFromServerUrl().toLowerCase().endsWith(".cn") ? "CHINA" : "GLOBAL");
+
+ // we ignore the parameters CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED and htapOOBTelemetryEnabled
+ // OOB telemetry is disabled
+ TelemetryService.disableOOBTelemetry();
// propagate OCSP mode to SFTrustManager. Note OCSP setting is global on JVM.
HttpUtil.initHttpClient(httpClientSettingsKey, null);
+
+ runDiagnosticsIfEnabled();
+
SFLoginOutput loginOutput =
SessionUtil.openSession(loginInput, connectionPropertiesMap, tracingLevel.toString());
isClosed = false;
@@ -644,13 +655,7 @@ public synchronized void open() throws SFException, SnowflakeSQLException {
// Update common parameter values for this session
SessionUtil.updateSfDriverParamValues(loginOutput.getCommonParams(), this);
- // Enable or disable HTAP OOB telemetry based on connection parameter. Default is disabled.
- if (getBooleanValue(
- connectionPropertiesMap.get(SFSessionProperty.HTAP_OOB_TELEMETRY_ENABLED))) {
- TelemetryService.enableHTAP();
- } else {
- TelemetryService.disableHTAP();
- }
+
String loginDatabaseName = (String) connectionPropertiesMap.get(SFSessionProperty.DATABASE);
String loginSchemaName = (String) connectionPropertiesMap.get(SFSessionProperty.SCHEMA);
String loginRole = (String) connectionPropertiesMap.get(SFSessionProperty.ROLE);
@@ -702,6 +707,8 @@ public synchronized void open() throws SFException, SnowflakeSQLException {
// start heartbeat for this session so that the master token will not expire
startHeartbeatForThisSession();
+ stopwatch.stop();
+ logger.info("Session {} opened in {} ms.", getSessionId(), stopwatch.elapsedMillis());
}
/**
@@ -762,10 +769,14 @@ boolean isUsernamePasswordMFAAuthenticator() {
synchronized void renewSession(String prevSessionToken)
throws SFException, SnowflakeSQLException {
if (sessionToken != null && !sessionToken.equals(prevSessionToken)) {
- logger.debug("not renew session because session token has not been updated.", false);
+ logger.debug(
+ "Not renewing session {} because session token has not been updated.", getSessionId());
return;
}
+ Stopwatch stopwatch = new Stopwatch();
+ stopwatch.start();
+ logger.debug("Renewing session {}", getSessionId());
SFLoginInput loginInput = new SFLoginInput();
loginInput
.setServerUrl(getServerUrl())
@@ -786,6 +797,9 @@ synchronized void renewSession(String prevSessionToken)
sessionToken = loginOutput.getSessionToken();
masterToken = loginOutput.getMasterToken();
+ stopwatch.stop();
+ logger.debug(
+ "Session {} renewed successfully in {} ms", getSessionId(), stopwatch.elapsedMillis());
}
/**
@@ -805,14 +819,17 @@ public String getSessionToken() {
*/
@Override
public void close() throws SFException, SnowflakeSQLException {
- logger.debug(" public void close()", false);
+ logger.debug("Closing session {}", getSessionId());
// stop heartbeat for this session
stopHeartbeatForThisSession();
if (isClosed) {
+ logger.debug("Session {} is already closed", getSessionId());
return;
}
+ Stopwatch stopwatch = new Stopwatch();
+ stopwatch.start();
SFLoginInput loginInput = new SFLoginInput();
loginInput
@@ -832,6 +849,11 @@ public void close() throws SFException, SnowflakeSQLException {
qcc.clearCache();
}
+ stopwatch.stop();
+ logger.info(
+ "Session {} has been successfully closed in {} ms",
+ getSessionId(),
+ stopwatch.elapsedMillis());
isClosed = true;
}
@@ -887,23 +909,26 @@ public Void call() throws SQLException {
/** Start heartbeat for this session */
protected void startHeartbeatForThisSession() {
if (getEnableHeartbeat() && !Strings.isNullOrEmpty(masterToken)) {
- logger.debug("start heartbeat, master token validity: " + masterTokenValidityInSeconds);
+ logger.debug(
+ "Session {} start heartbeat, master token validity: {} s",
+ getSessionId(),
+ masterTokenValidityInSeconds);
HeartbeatBackground.getInstance()
.addSession(this, masterTokenValidityInSeconds, heartbeatFrequency);
} else {
- logger.debug("heartbeat not enabled for the session", false);
+ logger.debug("Heartbeat not enabled for the session {}", getSessionId());
}
}
/** Stop heartbeat for this session */
protected void stopHeartbeatForThisSession() {
if (getEnableHeartbeat() && !Strings.isNullOrEmpty(masterToken)) {
- logger.debug("stop heartbeat", false);
+ logger.debug("Session {} stop heartbeat", getSessionId());
HeartbeatBackground.getInstance().removeSession(this);
} else {
- logger.debug("heartbeat not enabled for the session", false);
+ logger.debug("Heartbeat not enabled for the session {}", getSessionId());
}
}
@@ -914,12 +939,15 @@ protected void stopHeartbeatForThisSession() {
* @throws SQLException exception raised from SQL generic layers
*/
protected void heartbeat() throws SFException, SQLException {
- logger.debug(" public void heartbeat()", false);
+ logger.debug("Session {} heartbeat", getSessionId());
if (isClosed) {
return;
}
+ Stopwatch stopwatch = new Stopwatch();
+ stopwatch.start();
+
HttpPost postRequest = null;
String requestId = UUIDUtils.getUUID().toString();
@@ -969,14 +997,14 @@ protected void heartbeat() throws SFException, SQLException {
JsonNode rootNode;
- logger.debug("connection heartbeat response: {}", theResponse);
+ logger.debug("Connection heartbeat response: {}", theResponse);
rootNode = OBJECT_MAPPER.readTree(theResponse);
// check the response to see if it is session expiration response
if (rootNode != null
&& (Constants.SESSION_EXPIRED_GS_CODE == rootNode.path("code").asInt())) {
- logger.debug("renew session and retry", false);
+ logger.debug("Renew session and retry", false);
this.renewSession(prevSessionToken);
retry = true;
continue;
@@ -992,12 +1020,15 @@ protected void heartbeat() throws SFException, SQLException {
throw (SnowflakeSQLException) ex;
}
- logger.error("unexpected exception", ex);
+ logger.error("Unexpected exception", ex);
throw new SFException(
ErrorCode.INTERNAL_ERROR, IncidentUtil.oneLiner("unexpected exception", ex));
}
} while (retry);
+ stopwatch.stop();
+ logger.debug(
+ "Session {} heartbeat successful in {} ms", getSessionId(), stopwatch.elapsedMillis());
}
void injectedDelay() {
@@ -1248,4 +1279,45 @@ public SFClientConfig getSfClientConfig() {
public void setSfClientConfig(SFClientConfig sfClientConfig) {
this.sfClientConfig = sfClientConfig;
}
+
+ /**
+ * If the JDBC driver is started in diagnostics mode, this method runs the connectivity tests and
+ * logs their results. A SQLException is then thrown with a message indicating that the driver is
+ * running in diagnostics mode and that no connection was created.
+ */
+ private void runDiagnosticsIfEnabled() throws SnowflakeSQLException {
+ Map<SFSessionProperty, Object> connectionPropertiesMap = getConnectionPropertiesMap();
+ boolean isDiagnosticsEnabled =
+ Optional.ofNullable(connectionPropertiesMap.get(SFSessionProperty.ENABLE_DIAGNOSTICS))
+ .map(b -> (Boolean) b)
+ .orElse(false);
+
+ if (!isDiagnosticsEnabled) {
+ return;
+ }
+ logger.info("Running diagnostics tests");
+ String allowListFile =
+ (String) connectionPropertiesMap.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE);
+
+ if (allowListFile == null || allowListFile.isEmpty()) {
+ logger.error(
+ "Diagnostics was enabled but an allowlist file was not provided."
+ + " Please provide an allowlist JSON file using the connection parameter {}",
+ SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE);
+ throw new SnowflakeSQLException(
+ "Diagnostics was enabled but an allowlist file was not provided. "
+ + "Please provide an allowlist JSON file using the connection parameter "
+ + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE);
+ } else {
+ DiagnosticContext diagnosticContext =
+ new DiagnosticContext(allowListFile, connectionPropertiesMap);
+ diagnosticContext.runDiagnostics();
+ }
+
+ throw new SnowflakeSQLException(
+ "A connection was not created because the driver is running in diagnostics mode."
+ + " If this is unintended then disable diagnostics check by removing the "
+ + SFSessionProperty.ENABLE_DIAGNOSTICS
+ + " connection parameter");
+ }
}
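
Reviewer note: a sketch of driving the new diagnostics mode end to end, assuming the property keys `ENABLE_DIAGNOSTICS` and `DIAGNOSTICS_ALLOWLIST_FILE` declared in `SFSessionProperty` below; the account URL, credentials, and allowlist path are placeholders.

```java
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

public class DiagnosticsModeExample {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("user", "jdbc_user");
    props.put("password", "jdbc_password");
    props.put("ENABLE_DIAGNOSTICS", "true");
    props.put("DIAGNOSTICS_ALLOWLIST_FILE", "/tmp/allowlist.json");
    try {
      DriverManager.getConnection("jdbc:snowflake://myaccount.snowflakecomputing.com", props);
    } catch (SQLException e) {
      // Expected while diagnostics mode is on: runDiagnosticsIfEnabled() logs the
      // connectivity test results and then throws, so no connection is returned.
      System.out.println(e.getMessage());
    }
  }
}
```
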
diff --git a/src/main/java/net/snowflake/client/core/SFSessionProperty.java b/src/main/java/net/snowflake/client/core/SFSessionProperty.java
index 0ca91809c..3dcb09602 100644
--- a/src/main/java/net/snowflake/client/core/SFSessionProperty.java
+++ b/src/main/java/net/snowflake/client/core/SFSessionProperty.java
@@ -77,12 +77,15 @@ public enum SFSessionProperty {
PUT_GET_MAX_RETRIES("putGetMaxRetries", false, Integer.class),
RETRY_TIMEOUT("retryTimeout", false, Integer.class),
+ ENABLE_DIAGNOSTICS("ENABLE_DIAGNOSTICS", false, Boolean.class),
+ DIAGNOSTICS_ALLOWLIST_FILE("DIAGNOSTICS_ALLOWLIST_FILE", false, String.class),
ENABLE_PATTERN_SEARCH("enablePatternSearch", false, Boolean.class),
DISABLE_GCS_DEFAULT_CREDENTIALS("disableGcsDefaultCredentials", false, Boolean.class),
- JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class);
+ JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class),
+ DISABLE_SAML_URL_CHECK("disableSamlURLCheck", false, Boolean.class);
// property key in string
private String propertyKey;
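
Reviewer note: the new `DISABLE_SAML_URL_CHECK` entry maps to the `disableSamlURLCheck` connection property consumed by `SessionUtil.validateSAML` further down. A hedged example of opting out of the check for a federated (Okta) login; all values other than the property key are placeholders.

```java
import java.util.Properties;

public class SamlUrlCheckOptOut {
  public static Properties oktaProperties() {
    Properties props = new Properties();
    props.put("user", "okta_user");
    props.put("password", "okta_password");
    props.put("authenticator", "https://example.okta.com"); // federated/Okta authenticator
    // Matches the new DISABLE_SAML_URL_CHECK enum entry; the default is false,
    // i.e. the SAML post-back URL check stays enabled.
    props.put("disableSamlURLCheck", "true");
    return props;
  }
}
```
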
diff --git a/src/main/java/net/snowflake/client/core/SFSqlInput.java b/src/main/java/net/snowflake/client/core/SFSqlInput.java
index b3efa6893..2b3d6ba95 100644
--- a/src/main/java/net/snowflake/client/core/SFSqlInput.java
+++ b/src/main/java/net/snowflake/client/core/SFSqlInput.java
@@ -4,7 +4,6 @@
package net.snowflake.client.core;
import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLInput;
import java.util.List;
import java.util.Map;
@@ -31,8 +30,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) {
* @param tz timezone to consider.
* @return the attribute; if the value is SQL NULL, returns null
* @exception SQLException if a database access error occurs
- * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method
- * @since 1.2
*/
java.sql.Timestamp readTimestamp(TimeZone tz) throws SQLException;
/**
@@ -43,8 +40,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) {
* @return the attribute at the head of the stream as an {@code Object} in the Java programming
* language;{@code null} if the attribute is SQL {@code NULL}
* @exception SQLException if a database access error occurs
- * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method
- * @since 1.8
*/
<T> T readObject(Class<T> type, TimeZone tz) throws SQLException;
/**
@@ -55,8 +50,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) {
* @return the attribute at the head of the stream as an {@code List} in the Java programming
* language;{@code null} if the attribute is SQL {@code NULL}
* @exception SQLException if a database access error occurs
- * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method
- * @since 1.8
*/
<T> List<T> readList(Class<T> type) throws SQLException;
@@ -68,8 +61,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) {
* @return the attribute at the head of the stream as an {@code Map} in the Java programming
* language;{@code null} if the attribute is SQL {@code NULL}
* @exception SQLException if a database access error occurs
- * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method
- * @since 1.8
*/
<T> Map<String, T> readMap(Class<T> type) throws SQLException;
/**
@@ -80,8 +71,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) {
* @return the attribute at the head of the stream as an {@code Array} in the Java programming
* language;{@code null} if the attribute is SQL {@code NULL}
* @exception SQLException if a database access error occurs
- * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method
- * @since 1.8
*/
<T> T[] readArray(Class<T> type) throws SQLException;
}
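
Reviewer note: a sketch of a `SQLData` mapping that uses the timezone-aware reader whose javadoc is trimmed above. Only `SFSqlInput.unwrap(SQLInput)` and `readTimestamp(TimeZone)` come from this interface; the type name and field are illustrative.

```java
import java.sql.SQLData;
import java.sql.SQLException;
import java.sql.SQLInput;
import java.sql.SQLOutput;
import java.sql.Timestamp;
import java.util.TimeZone;
import net.snowflake.client.core.SFSqlInput;

public class EventRecord implements SQLData {
  private Timestamp createdAt;

  @Override
  public String getSQLTypeName() {
    return "EVENT_RECORD";
  }

  @Override
  public void readSQL(SQLInput stream, String typeName) throws SQLException {
    // Unwrap to the Snowflake-specific input to read the timestamp in an explicit timezone.
    SFSqlInput sfInput = SFSqlInput.unwrap(stream);
    createdAt = sfInput.readTimestamp(TimeZone.getTimeZone("UTC"));
  }

  @Override
  public void writeSQL(SQLOutput stream) throws SQLException {
    stream.writeTimestamp(createdAt);
  }
}
```
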
diff --git a/src/main/java/net/snowflake/client/core/SFStatement.java b/src/main/java/net/snowflake/client/core/SFStatement.java
index 1dd555bb5..6142b8eb9 100644
--- a/src/main/java/net/snowflake/client/core/SFStatement.java
+++ b/src/main/java/net/snowflake/client/core/SFStatement.java
@@ -44,7 +44,7 @@
/** Snowflake statement */
public class SFStatement extends SFBaseStatement {
- static final SFLogger logger = SFLoggerFactory.getLogger(SFStatement.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(SFStatement.class);
private SFSession session;
@@ -80,7 +80,7 @@ public class SFStatement extends SFBaseStatement {
private long conservativeMemoryLimit; // in bytes
public SFStatement(SFSession session) {
- logger.debug(" public SFStatement(SFSession session)", false);
+ logger.trace("SFStatement(SFSession session)", false);
this.session = session;
Integer queryTimeout = session == null ? null : session.getQueryTimeout();
@@ -91,7 +91,7 @@ public SFStatement(SFSession session) {
private void verifyArrowSupport() {
if (SnowflakeDriver.isDisableArrowResultFormat()) {
logger.debug(
- "disable arrow support: {}", SnowflakeDriver.getDisableArrowResultFormatMessage());
+ "Disable arrow support: {}", SnowflakeDriver.getDisableArrowResultFormatMessage());
statementParametersMap.put("JDBC_QUERY_RESULT_FORMAT", "JSON");
}
}
@@ -205,7 +205,7 @@ SFBaseResultSet executeQueryInternal(
throws SQLException, SFException {
resetState();
- logger.debug("executeQuery: {}", sql);
+ logger.debug("ExecuteQuery: {}", sql);
if (session == null || session.isClosed()) {
throw new SQLException("connection is closed");
@@ -771,9 +771,9 @@ public SFBaseResultSet execute(
session.injectedDelay();
if (session.getPreparedStatementLogging()) {
- logger.info("execute: {}", sql);
+ logger.info("Execute: {}", sql);
} else {
- logger.debug("execute: {}", sql);
+ logger.debug("Execute: {}", sql);
}
String trimmedSql = sql.trim();
@@ -798,7 +798,7 @@ private SFBaseResultSet executeFileTransfer(String sql) throws SQLException, SFE
try {
transferAgent.execute();
- logger.debug("setting result set", false);
+ logger.debug("Setting result set", false);
resultSet = (SFFixedViewResultSet) transferAgent.getResultSet();
childResults = Collections.emptyList();
@@ -814,7 +814,7 @@ private SFBaseResultSet executeFileTransfer(String sql) throws SQLException, SFE
@Override
public void close() {
- logger.debug("public void close()", false);
+ logger.trace("void close()", false);
if (requestId != null) {
EventUtil.triggerStateTransition(
@@ -827,7 +827,7 @@ public void close() {
isClosed = true;
if (httpRequest != null) {
- logger.debug("releasing connection for the http request", false);
+ logger.debug("Releasing connection for the http request", false);
httpRequest.releaseConnection();
httpRequest = null;
@@ -841,7 +841,7 @@ public void close() {
@Override
public void cancel() throws SFException, SQLException {
- logger.debug("public void cancel()", false);
+ logger.trace("void cancel()", false);
if (canceling.get()) {
logger.debug("Query is already cancelled", false);
diff --git a/src/main/java/net/snowflake/client/core/SFTrustManager.java b/src/main/java/net/snowflake/client/core/SFTrustManager.java
index bd05729c3..740c70fe3 100644
--- a/src/main/java/net/snowflake/client/core/SFTrustManager.java
+++ b/src/main/java/net/snowflake/client/core/SFTrustManager.java
@@ -140,7 +140,7 @@ public class SFTrustManager extends X509ExtendedTrustManager {
/** OCSP response cache file name. Should be identical to other driver's cache file name. */
static final String CACHE_FILE_NAME = "ocsp_response_cache.json";
- private static final SFLogger LOGGER = SFLoggerFactory.getLogger(SFTrustManager.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManager.class);
private static final ASN1ObjectIdentifier OIDocsp =
new ASN1ObjectIdentifier("1.3.6.1.5.5.7.48.1").intern();
private static final ASN1ObjectIdentifier SHA1RSA =
@@ -167,8 +167,10 @@ public class SFTrustManager extends X509ExtendedTrustManager {
private static final int DEFAULT_OCSP_CACHE_SERVER_CONNECTION_TIMEOUT = 5000;
/** Default OCSP responder connection timeout */
private static final int DEFAULT_OCSP_RESPONDER_CONNECTION_TIMEOUT = 10000;
+ /** Default OCSP Cache server host name prefix */
+ private static final String DEFAULT_OCSP_CACHE_HOST_PREFIX = "http://ocsp.snowflakecomputing.";
/** Default OCSP Cache server host name */
- private static final String DEFAULT_OCSP_CACHE_HOST = "http://ocsp.snowflakecomputing.com";
+ private static final String DEFAULT_OCSP_CACHE_HOST = DEFAULT_OCSP_CACHE_HOST_PREFIX + "com";
/** OCSP response file cache directory */
private static final FileCacheManager fileCacheManager;
@@ -200,7 +202,7 @@ public class SFTrustManager extends X509ExtendedTrustManager {
/** OCSP Response Cache server Retry URL pattern */
static String SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN;
/** OCSP response cache server URL. */
- private static String SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE;
+ static String SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE;
private static JcaX509CertificateConverter CONVERTER_X509 = new JcaX509CertificateConverter();
/** RootCA cache */
@@ -283,6 +285,9 @@ public class SFTrustManager extends X509ExtendedTrustManager {
JsonNode res = fileCacheManager.readCacheFile();
readJsonStoreCache(res);
}
+
+ logger.debug(
+ "Initializing trust manager with OCSP mode: {}, cache file: {}", ocspMode, cacheFile);
}
/** Deletes OCSP response cache file from disk. */
@@ -312,7 +317,7 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I
return;
}
SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheServerUrl;
- if (!SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE.startsWith(DEFAULT_OCSP_CACHE_HOST)) {
+ if (!SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE.startsWith(DEFAULT_OCSP_CACHE_HOST_PREFIX)) {
URL url = new URL(SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE);
if (url.getPort() > 0) {
SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN =
@@ -322,10 +327,13 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I
SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN =
String.format("%s://%s/retry/%s", url.getProtocol(), url.getHost(), "%s/%s");
}
+ logger.debug(
+ "Reset OCSP response cache server URL to: {}",
+ SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN);
}
}
- private static void setOCSPResponseCacheServerURL() {
+ private static void setOCSPResponseCacheServerURL(String topLevelDomain) {
String ocspCacheUrl = systemGetProperty(SF_OCSP_RESPONSE_CACHE_SERVER_URL);
if (ocspCacheUrl != null) {
SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheUrl;
@@ -336,30 +344,31 @@ private static void setOCSPResponseCacheServerURL() {
SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheUrl;
}
} catch (Throwable ex) {
- LOGGER.debug(
+ logger.debug(
"Failed to get environment variable " + SF_OCSP_RESPONSE_CACHE_SERVER_URL + ". Ignored",
true);
}
if (SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE == null) {
SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE =
- String.format("%s/%s", DEFAULT_OCSP_CACHE_HOST, CACHE_FILE_NAME);
+ String.format("%s%s/%s", DEFAULT_OCSP_CACHE_HOST_PREFIX, topLevelDomain, CACHE_FILE_NAME);
}
+ logger.debug("Set OCSP response cache server to: {}", SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE);
}
private static boolean useOCSPResponseCacheServer() {
String ocspCacheServerEnabled = systemGetProperty(SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED);
if (Boolean.FALSE.toString().equalsIgnoreCase(ocspCacheServerEnabled)) {
- LOGGER.debug("No OCSP Response Cache Server is used.", false);
+ logger.debug("No OCSP Response Cache Server is used.", false);
return false;
}
try {
ocspCacheServerEnabled = systemGetEnv(SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED);
if (Boolean.FALSE.toString().equalsIgnoreCase(ocspCacheServerEnabled)) {
- LOGGER.debug("No OCSP Response Cache Server is used.", false);
+ logger.debug("No OCSP Response Cache Server is used.", false);
return false;
}
} catch (Throwable ex) {
- LOGGER.debug(
+ logger.debug(
"Failed to get environment variable "
+ SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED
+ ". Ignored",
@@ -383,7 +392,7 @@ private static String encodeCacheKey(OcspResponseCacheKey ocsp_cache_key) {
CertID cid = new CertID(algo, nameHash, keyHash, snumber);
return Base64.encodeBase64String(cid.toASN1Primitive().getEncoded());
} catch (Exception ex) {
- LOGGER.debug("Failed to encode cache key to base64 encoded cert id", false);
+ logger.debug("Failed to encode cache key to base64 encoded cert id", false);
}
return null;
}
@@ -423,7 +432,7 @@ private static SFPair<OcspResponseCacheKey, SFPair<Long, String>> decodeCacheFro
JsonNode ocspRespBase64 = elem.getValue();
if (!ocspRespBase64.isArray() || ocspRespBase64.size() != 2) {
- LOGGER.debug("Invalid cache file format. Ignored", false);
+ logger.debug("Invalid cache file format. Ignored", false);
return null;
}
long producedAt = ocspRespBase64.get(0).asLong();
@@ -465,14 +474,14 @@ private static ObjectNode encodeCacheToJSON() {
}
return out;
} catch (IOException ex) {
- LOGGER.debug("Failed to encode ASN1 object.", false);
+ logger.debug("Failed to encode ASN1 object.", false);
}
return null;
}
private static synchronized void readJsonStoreCache(JsonNode m) {
if (m == null || !m.getNodeType().equals(JsonNodeType.OBJECT)) {
- LOGGER.debug("Invalid cache file format.", false);
+ logger.debug("Invalid cache file format.", false);
return;
}
try {
@@ -489,7 +498,7 @@ private static synchronized void readJsonStoreCache(JsonNode m) {
}
}
} catch (IOException ex) {
- LOGGER.debug("Failed to decode the cache file", false);
+ logger.debug("Failed to decode the cache file", false);
}
}
@@ -669,7 +678,7 @@ private void checkNewOCSPEndpointAvailability() {
try {
new_ocsp_ept = systemGetEnv("SF_OCSP_ACTIVATE_NEW_ENDPOINT");
} catch (Throwable ex) {
- LOGGER.debug(
+ logger.debug(
"Could not get environment variable to check for New OCSP Endpoint Availability", false);
new_ocsp_ept = systemGetProperty("net.snowflake.jdbc.ocsp_activate_new_endpoint");
}
@@ -784,22 +793,23 @@ void validateRevocationStatus(X509Certificate[] chain, String peerHost)
ocspCacheServer.resetOCSPResponseCacheServer(peerHost);
}
- setOCSPResponseCacheServerURL();
+ String topLevelDomain = peerHost.substring(peerHost.lastIndexOf(".") + 1);
+ setOCSPResponseCacheServerURL(topLevelDomain);
boolean isCached = isCached(pairIssuerSubjectList);
if (useOCSPResponseCacheServer() && !isCached) {
if (!ocspCacheServer.new_endpoint_enabled) {
- LOGGER.debug(
+ logger.debug(
"Downloading OCSP response cache from the server. URL: {}",
SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE);
} else {
- LOGGER.debug(
+ logger.debug(
"Downloading OCSP response cache from the server. URL: {}",
ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER);
}
try {
readOcspResponseCacheServer();
} catch (SFOCSPException ex) {
- LOGGER.debug(
+ logger.debug(
"Error downloading OCSP Response from cache server : {}."
+ "OCSP Responses will be fetched directly from the CA OCSP"
+ "Responder ",
@@ -900,7 +910,7 @@ private void executeOneRevocationStatusCheck(
telemetryData.setCacheHit(true);
}
} catch (Throwable ex) {
- LOGGER.debug(
+ logger.debug(
"Exception occurred while trying to fetch OCSP Response - {}", ex.getMessage());
throw new SFOCSPException(
OCSPErrorCode.OCSP_RESPONSE_FETCH_FAILURE,
@@ -908,8 +918,8 @@ private void executeOneRevocationStatusCheck(
ex);
}
- LOGGER.debug(
- "validating. {}", CertificateIDToString(req.getRequestList()[0].getCertID()));
+ logger.debug(
+ "Validating. {}", CertificateIDToString(req.getRequestList()[0].getCertID()));
try {
validateRevocationStatusMain(pairIssuerSubject, value0.right);
success = true;
@@ -930,12 +940,12 @@ private void executeOneRevocationStatusCheck(
} catch (CertificateException ex) {
WAS_CACHE_UPDATED.set(OCSP_RESPONSE_CACHE.remove(keyOcspResponse) != null);
if (WAS_CACHE_UPDATED.get()) {
- LOGGER.debug("deleting the invalid OCSP cache.", false);
+ logger.debug("Deleting the invalid OCSP cache.", false);
}
cause = ex;
- LOGGER.debug(
- "Retrying {}/{} after sleeping {}(ms)", retry + 1, maxRetryCounter, sleepTime);
+ logger.debug(
+ "Retrying {}/{} after sleeping {} ms", retry + 1, maxRetryCounter, sleepTime);
try {
if (retry + 1 < maxRetryCounter) {
Thread.sleep(sleepTime);
@@ -950,7 +960,7 @@ private void executeOneRevocationStatusCheck(
error = new CertificateException(ex);
ocspLog =
telemetryData.generateTelemetry(SF_OCSP_EVENT_TYPE_REVOKED_CERTIFICATE_ERROR, error);
- LOGGER.error(ocspLog, false);
+ logger.error(ocspLog, false);
throw error;
}
@@ -960,21 +970,21 @@ private void executeOneRevocationStatusCheck(
error =
new CertificateException(
"Certificate Revocation check failed. Could not retrieve OCSP Response.", cause);
- LOGGER.debug(cause.getMessage(), false);
+ logger.debug(cause.getMessage(), false);
} else {
error =
new CertificateException(
"Certificate Revocation check failed. Could not retrieve OCSP Response.");
- LOGGER.debug(error.getMessage(), false);
+ logger.debug(error.getMessage(), false);
}
ocspLog = telemetryData.generateTelemetry(SF_OCSP_EVENT_TYPE_VALIDATION_ERROR, error);
if (isOCSPFailOpen()) {
// Log includes fail-open warning.
- LOGGER.error(generateFailOpenLog(ocspLog), false);
+ logger.error(generateFailOpenLog(ocspLog), false);
} else {
// still not success, raise an error.
- LOGGER.debug(ocspLog, false);
+ logger.debug(ocspLog, false);
throw error;
}
}
@@ -993,7 +1003,7 @@ private boolean isCached(List<SFPair<Certificate, Certificate>> pairIssuerSubjec
for (SFPair<Certificate, Certificate> pairIssuerSubject : pairIssuerSubjectList) {
OCSPReq req = createRequest(pairIssuerSubject);
CertificateID certificateId = req.getRequestList()[0].getCertID();
- LOGGER.debug(CertificateIDToString(certificateId), false);
+ logger.debug(CertificateIDToString(certificateId), false);
CertID cid = certificateId.toASN1Primitive();
OcspResponseCacheKey k =
new OcspResponseCacheKey(
@@ -1003,18 +1013,18 @@ private boolean isCached(List<SFPair<Certificate, Certificate>> pairIssuerSubjec
SFPair<Long, String> res = OCSP_RESPONSE_CACHE.get(k);
if (res == null) {
- LOGGER.debug("Not all OCSP responses for the certificate is in the cache.", false);
+ logger.debug("Not all OCSP responses for the certificate is in the cache.", false);
isCached = false;
break;
} else if (currentTimeSecond - CACHE_EXPIRATION_IN_SECONDS > res.left) {
- LOGGER.debug("Cache for CertID expired.", false);
+ logger.debug("Cache for CertID expired.", false);
isCached = false;
break;
} else {
try {
validateRevocationStatusMain(pairIssuerSubject, res.right);
} catch (SFOCSPException ex) {
- LOGGER.debug(
+ logger.debug(
"Cache includes invalid OCSPResponse. "
+ "Will download the OCSP cache from Snowflake OCSP server",
false);
@@ -1023,7 +1033,7 @@ private boolean isCached(List> pairIssuerSubjec
}
}
} catch (IOException ex) {
- LOGGER.debug("Failed to encode CertID.", false);
+ logger.debug("Failed to encode CertID.", false);
}
return isCached;
}
@@ -1059,14 +1069,14 @@ private void readOcspResponseCacheServer() throws SFOCSPException {
JsonNode m = OBJECT_MAPPER.readTree(out.toByteArray());
out.close();
readJsonStoreCache(m);
- LOGGER.debug("Successfully downloaded OCSP cache from the server.", false);
+ logger.debug("Successfully downloaded OCSP cache from the server.", false);
} catch (IOException ex) {
- LOGGER.debug(
+ logger.debug(
"Failed to read the OCSP response cache from the server. " + "Server: {}, Err: {}",
ocspCacheServerInUse,
ex);
} catch (URISyntaxException ex) {
- LOGGER.debug("Indicate that a string could not be parsed as a URI reference.", false);
+ logger.debug("Indicate that a string could not be parsed as a URI reference.", false);
throw new SFOCSPException(
OCSPErrorCode.INVALID_CACHE_SERVER_URL, "Invalid OCSP Cache Server URL used", ex);
} finally {
@@ -1141,11 +1151,11 @@ private OCSPResp fetchOcspResponse(
} else {
url = new URL(String.format("%s/%s", ocspUrlStr, urlEncodedOCSPReq));
}
- LOGGER.debug("not hit cache. Fetching OCSP response from CA OCSP server. {}", url);
+ logger.debug("Not hit cache. Fetching OCSP response from CA OCSP server. {}", url);
} else {
url = new URL(ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL);
- LOGGER.debug(
- "not hit cache. Fetching OCSP response from Snowflake OCSP Response Fetcher. {}", url);
+ logger.debug(
+ "Not hit cache. Fetching OCSP response from Snowflake OCSP Response Fetcher. {}", url);
}
long sleepTime = INITIAL_SLEEPING_TIME_IN_MILLISECONDS;
@@ -1180,12 +1190,12 @@ private OCSPResp fetchOcspResponse(
break;
}
} catch (IOException ex) {
- LOGGER.debug("Failed to reach out OCSP responder: {}", ex.getMessage());
+ logger.debug("Failed to reach out OCSP responder: {}", ex.getMessage());
savedEx = ex;
}
IOUtils.closeQuietly(response);
- LOGGER.debug("Retrying {}/{} after sleeping {}(ms)", retry + 1, maxRetryCounter, sleepTime);
+ logger.debug("Retrying {}/{} after sleeping {} ms", retry + 1, maxRetryCounter, sleepTime);
try {
if (retry + 1 < maxRetryCounter) {
Thread.sleep(sleepTime);
@@ -1245,8 +1255,10 @@ private int getOCSPResponderConnectionTimeout() {
private String overrideOCSPURL(String ocspURL) {
String ocspURLInput = systemGetProperty(SF_OCSP_TEST_RESPONDER_URL);
if (ocspURLInput != null) {
+ logger.debug("Overriding OCSP url to: {}", ocspURLInput);
return ocspURLInput;
}
+ logger.debug("Overriding OCSP url to: {}", ocspURL);
return ocspURL;
}
@@ -1272,7 +1284,7 @@ private void validateRevocationStatusMain(
X509CertificateHolder signVerifyCert;
checkInvalidSigningCertTestParameter();
if (attachedCerts.length > 0) {
- LOGGER.debug(
+ logger.debug(
"Certificate is attached for verification. "
+ "Verifying it by the issuer certificate.",
false);
@@ -1296,15 +1308,15 @@ private void validateRevocationStatusMain(
CONVERTER_X509.getCertificate(signVerifyCert).getTBSCertificate(),
signVerifyCert.getSignatureAlgorithm());
} catch (CertificateException ex) {
- LOGGER.debug("OCSP Signing Certificate signature verification failed", false);
+ logger.debug("OCSP Signing Certificate signature verification failed", false);
throw new SFOCSPException(
OCSPErrorCode.INVALID_CERTIFICATE_SIGNATURE,
"OCSP Signing Certificate signature verification failed",
ex);
}
- LOGGER.debug("Verifying OCSP signature by the attached certificate public key.", false);
+ logger.debug("Verifying OCSP signature by the attached certificate public key.", false);
} else {
- LOGGER.debug(
+ logger.debug(
"Certificate is NOT attached for verification. "
+ "Verifying OCSP signature by the issuer public key.",
false);
@@ -1317,7 +1329,7 @@ private void validateRevocationStatusMain(
basicOcspResp.getTBSResponseData(),
basicOcspResp.getSignatureAlgorithmID());
} catch (CertificateException ex) {
- LOGGER.debug("OCSP signature verification failed", false);
+ logger.debug("OCSP signature verification failed", false);
throw new SFOCSPException(
OCSPErrorCode.INVALID_OCSP_RESPONSE_SIGNATURE,
"OCSP signature verification failed",
@@ -1376,7 +1388,7 @@ private void validateBasicOcspResponse(Date currentTime, BasicOCSPResp basicOcsp
Date thisUpdate = singleResps.getThisUpdate();
Date nextUpdate = singleResps.getNextUpdate();
- LOGGER.debug(
+ logger.debug(
"Current Time: {}, This Update: {}, Next Update: {}",
currentTime,
thisUpdate,
@@ -1392,7 +1404,7 @@ private void validateBasicOcspResponse(Date currentTime, BasicOCSPResp basicOcsp
DATE_FORMAT_UTC.format(nextUpdate)));
}
}
- LOGGER.debug("OK. Verified the certificate revocation status.", false);
+ logger.debug("OK. Verified the certificate revocation status.", false);
}
private void checkCertUnknownTestParameter() throws SFOCSPException {
@@ -1516,7 +1528,7 @@ private String ocspResponseToB64(OCSPResp ocspResp) {
try {
return Base64.encodeBase64String(ocspResp.getEncoded());
} catch (Throwable ex) {
- LOGGER.debug("Could not convert OCSP Response to Base64", false);
+ logger.debug("Could not convert OCSP Response to Base64", false);
return null;
}
}
@@ -1525,7 +1537,7 @@ private OCSPResp b64ToOCSPResp(String ocspRespB64) {
try {
return new OCSPResp(Base64.decodeBase64(ocspRespB64));
} catch (Throwable ex) {
- LOGGER.debug("Could not cover OCSP Response from Base64 to OCSPResp object", false);
+ logger.debug("Could not cover OCSP Response from Base64 to OCSPResp object", false);
return null;
}
}
@@ -1537,14 +1549,16 @@ static class OCSPCacheServer {
void resetOCSPResponseCacheServer(String host) {
String ocspCacheServerUrl;
- if (host.indexOf(".global.snowflakecomputing.com") > 0) {
+ if (host.toLowerCase().contains(".global.snowflakecomputing.")) {
ocspCacheServerUrl =
String.format("https://ocspssd%s/%s", host.substring(host.indexOf('-')), "ocsp");
- } else if (host.indexOf(".snowflakecomputing.com") > 0) {
+ } else if (host.toLowerCase().contains(".snowflakecomputing.")) {
ocspCacheServerUrl =
String.format("https://ocspssd%s/%s", host.substring(host.indexOf('.')), "ocsp");
} else {
- ocspCacheServerUrl = "https://ocspssd.snowflakecomputing.com/ocsp";
+ String topLevelDomain = host.substring(host.lastIndexOf(".") + 1);
+ ocspCacheServerUrl =
+ String.format("https://ocspssd.snowflakecomputing.%s/ocsp", topLevelDomain);
}
SF_OCSP_RESPONSE_CACHE_SERVER = String.format("%s/%s", ocspCacheServerUrl, "fetch");
SF_OCSP_RESPONSE_RETRY_URL = String.format("%s/%s", ocspCacheServerUrl, "retry");
@@ -1624,7 +1638,7 @@ public byte[] getDigest() {
String.format(
"Failed to instantiate the algorithm: %s. err=%s",
ALGORITHM_SHA1_NAME, ex.getMessage());
- LOGGER.error(errMsg, false);
+ logger.error(errMsg, false);
throw new RuntimeException(errMsg);
}
}
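
Reviewer note: a standalone illustration of the top-level-domain handling introduced above. It mirrors the `DEFAULT_OCSP_CACHE_HOST_PREFIX` + TLD derivation in `setOCSPResponseCacheServerURL` without calling the package-private driver methods; the host names are placeholders.

```java
public class OcspCacheUrlDerivation {
  // Mirrors: String.format("%s%s/%s", DEFAULT_OCSP_CACHE_HOST_PREFIX, topLevelDomain, CACHE_FILE_NAME)
  static String defaultCacheUrl(String peerHost) {
    String topLevelDomain = peerHost.substring(peerHost.lastIndexOf(".") + 1);
    return "http://ocsp.snowflakecomputing." + topLevelDomain + "/ocsp_response_cache.json";
  }

  public static void main(String[] args) {
    // .com accounts keep the previous default; .cn accounts now resolve to the China domain.
    System.out.println(defaultCacheUrl("myaccount.snowflakecomputing.com"));
    System.out.println(defaultCacheUrl("myaccount.snowflakecomputing.cn"));
  }
}
```
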
diff --git a/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java b/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java
index 144caefec..5030e4603 100644
--- a/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java
+++ b/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java
@@ -22,6 +22,7 @@ private SecureStorageAppleManager() {
}
public static SecureStorageAppleManager builder() {
+ logger.info("Using Apple Keychain as a token cache storage");
return new SecureStorageAppleManager();
}
diff --git a/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java b/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java
index e1f352187..7663147b3 100644
--- a/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java
+++ b/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java
@@ -41,6 +41,8 @@ private SecureStorageLinuxManager() {
.setCacheExpirationInSeconds(CACHE_EXPIRATION_IN_SECONDS)
.setCacheFileLockExpirationInSeconds(CACHE_FILE_LOCK_EXPIRATION_IN_SECONDS)
.build();
+ logger.info(
+ "Using temporary file: {} as a token cache storage", fileCacheManager.getCacheFilePath());
}
private static class SecureStorageLinuxManagerHolder {
diff --git a/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java b/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java
index f43952023..f38c1570b 100644
--- a/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java
+++ b/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java
@@ -33,6 +33,7 @@ private SecureStorageWindowsManager() {
}
public static SecureStorageWindowsManager builder() {
+ logger.info("Using Windows Credential Manager as a token cache storage");
return new SecureStorageWindowsManager();
}
diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java
index a3421e841..6a9db988f 100644
--- a/src/main/java/net/snowflake/client/core/SessionUtil.java
+++ b/src/main/java/net/snowflake/client/core/SessionUtil.java
@@ -37,6 +37,7 @@
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.client.util.SecretDetector;
+import net.snowflake.client.util.Stopwatch;
import net.snowflake.common.core.ClientAuthnDTO;
import net.snowflake.common.core.ClientAuthnParameter;
import net.snowflake.common.core.SqlState;
@@ -71,12 +72,11 @@ public class SessionUtil {
public static final String SF_QUERY_SESSION_DELETE = "delete";
// Headers
- public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION;
+ @Deprecated
+ public static final String SF_HEADER_AUTHORIZATION = SFSession.SF_HEADER_AUTHORIZATION;
// Authentication type
private static final String SF_HEADER_BASIC_AUTHTYPE = "Basic";
- private static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake";
- private static final String SF_HEADER_TOKEN_TAG = "Token";
private static final String CLIENT_STORE_TEMPORARY_CREDENTIAL =
"CLIENT_STORE_TEMPORARY_CREDENTIAL";
private static final String CLIENT_REQUEST_MFA_TOKEN = "CLIENT_REQUEST_MFA_TOKEN";
@@ -343,6 +343,8 @@ private static SFLoginOutput newSession(
Map<SFSessionProperty, Object> connectionPropertiesMap,
String tracingLevel)
throws SFException, SnowflakeSQLException {
+ Stopwatch stopwatch = new Stopwatch();
+ stopwatch.start();
// build URL for login request
URIBuilder uriBuilder;
URI loginURI;
@@ -369,6 +371,18 @@ private static SFLoginOutput newSession(
final ClientAuthnDTO.AuthenticatorType authenticatorType = getAuthenticator(loginInput);
Map<String, Object> commonParams;
+ String oktaUsername = loginInput.getOKTAUserName();
+ logger.debug(
+ "Authenticating user: {}, host: {} with authentication method: {}."
+ + " Login timeout: {} s, auth timeout: {} s, OCSP mode: {}{}",
+ loginInput.getUserName(),
+ loginInput.getHostFromServerUrl(),
+ authenticatorType,
+ loginInput.getLoginTimeout(),
+ loginInput.getAuthTimeout(),
+ loginInput.getOCSPMode(),
+ Strings.isNullOrEmpty(oktaUsername) ? "" : ", okta username: " + oktaUsername);
+
try {
uriBuilder = new URIBuilder(loginInput.getServerUrl());
@@ -629,7 +643,7 @@ private static SFLoginOutput newSession(
* HttpClient should take authorization header from char[] instead of
* String.
*/
- postRequest.setHeader(SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE);
+ postRequest.setHeader(SFSession.SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE);
setServiceNameHeader(loginInput, postRequest);
@@ -639,6 +653,8 @@ private static SFLoginOutput newSession(
int leftsocketTimeout = loginInput.getSocketTimeoutInMillis();
int retryCount = 0;
+ Exception lastRestException = null;
+
while (true) {
try {
theString =
@@ -650,6 +666,7 @@ private static SFLoginOutput newSession(
retryCount,
loginInput.getHttpClientSettingsKey());
} catch (SnowflakeSQLException ex) {
+ lastRestException = ex;
if (ex.getErrorCode() == ErrorCode.AUTHENTICATOR_REQUEST_TIMEOUT.getMessageCode()) {
if (authenticatorType == ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT
|| authenticatorType == ClientAuthnDTO.AuthenticatorType.OKTA) {
@@ -714,16 +731,42 @@ private static SFLoginOutput newSession(
} else {
throw ex;
}
+ } catch (Exception ex) {
+ lastRestException = ex;
}
break;
}
+ if (theString == null) {
+ if (lastRestException != null) {
+ logger.error(
+ "Failed to open new session for user: {}, host: {}. Error: {}",
+ loginInput.getUserName(),
+ loginInput.getHostFromServerUrl(),
+ lastRestException);
+ throw lastRestException;
+ } else {
+ SnowflakeSQLException exception =
+ new SnowflakeSQLException(
+ NO_QUERY_ID,
+ "empty authentication response",
+ SqlState.CONNECTION_EXCEPTION,
+ ErrorCode.CONNECTION_ERROR.getMessageCode());
+ logger.error(
+ "Failed to open new session for user: {}, host: {}. Error: {}",
+ loginInput.getUserName(),
+ loginInput.getHostFromServerUrl(),
+ exception);
+ throw exception;
+ }
+ }
+
// general method, same as with data binding
JsonNode jsonNode = mapper.readTree(theString);
// check the success field first
if (!jsonNode.path("success").asBoolean()) {
- logger.debug("response = {}", theString);
+ logger.debug("Response: {}", theString);
int errorCode = jsonNode.path("code").asInt();
if (errorCode == Constants.ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE) {
@@ -741,9 +784,16 @@ private static SFLoginOutput newSession(
deleteMfaTokenCache(loginInput.getHostFromServerUrl(), loginInput.getUserName());
}
+ String errorMessage = jsonNode.path("message").asText();
+
+ logger.error(
+ "Failed to open new session for user: {}, host: {}. Error: {}",
+ loginInput.getUserName(),
+ loginInput.getHostFromServerUrl(),
+ errorMessage);
throw new SnowflakeSQLException(
NO_QUERY_ID,
- jsonNode.path("message").asText(),
+ errorMessage,
SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION,
errorCode);
}
@@ -769,7 +819,7 @@ private static SFLoginOutput newSession(
commonParams = SessionUtil.getCommonParams(jsonNode.path("data").path("parameters"));
if (serverVersion != null) {
- logger.debug("server version = {}", serverVersion);
+ logger.debug("Server version: {}", serverVersion);
if (serverVersion.indexOf(" ") > 0) {
databaseVersion = serverVersion.substring(0, serverVersion.indexOf(" "));
@@ -777,7 +827,7 @@ private static SFLoginOutput newSession(
databaseVersion = serverVersion;
}
} else {
- logger.debug("server version is null", false);
+ logger.debug("Server version is null", false);
}
if (databaseVersion != null) {
@@ -800,13 +850,13 @@ private static SFLoginOutput newSession(
if (!jsonNode.path("data").path("newClientForUpgrade").isNull()) {
newClientForUpgrade = jsonNode.path("data").path("newClientForUpgrade").asText();
- logger.debug("new client: {}", newClientForUpgrade);
+ logger.debug("New client: {}", newClientForUpgrade);
}
// get health check interval and adjust network timeouts if different
int healthCheckIntervalFromGS = jsonNode.path("data").path("healthCheckInterval").asInt();
- logger.debug("health check interval = {}", healthCheckIntervalFromGS);
+ logger.debug("Health check interval: {}", healthCheckIntervalFromGS);
if (healthCheckIntervalFromGS > 0 && healthCheckIntervalFromGS != healthCheckInterval) {
// add health check interval to socket timeout
@@ -821,9 +871,9 @@ private static SFLoginOutput newSession(
HttpUtil.setRequestConfig(requestConfig);
- logger.debug("adjusted connection timeout to = {}", loginInput.getConnectionTimeout());
+ logger.debug("Adjusted connection timeout to: {}", loginInput.getConnectionTimeout());
- logger.debug("adjusted socket timeout to = {}", httpClientSocketTimeout);
+ logger.debug("Adjusted socket timeout to: {}", httpClientSocketTimeout);
}
} catch (SnowflakeSQLException ex) {
throw ex; // must catch here to avoid Throwable to get the exception
@@ -873,6 +923,13 @@ && asBoolean(loginInput.getSessionParameters().get(CLIENT_STORE_TEMPORARY_CREDEN
CredentialManager.getInstance().writeMfaToken(loginInput, ret);
}
+ stopwatch.stop();
+ logger.debug(
+ "User: {}, host: {} with authentication method: {} authenticated successfully in {} ms",
+ loginInput.getUserName(),
+ loginInput.getHostFromServerUrl(),
+ authenticatorType,
+ stopwatch.elapsedMillis());
return ret;
}
@@ -974,13 +1031,18 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT
postRequest.addHeader("accept", "application/json");
postRequest.setHeader(
- SF_HEADER_AUTHORIZATION,
- SF_HEADER_SNOWFLAKE_AUTHTYPE + " " + SF_HEADER_TOKEN_TAG + "=\"" + headerToken + "\"");
+ SFSession.SF_HEADER_AUTHORIZATION,
+ SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE
+ + " "
+ + SFSession.SF_HEADER_TOKEN_TAG
+ + "=\""
+ + headerToken
+ + "\"");
setServiceNameHeader(loginInput, postRequest);
logger.debug(
- "request type: {}, old session token: {}, " + "master token: {}",
+ "Request type: {}, old session token: {}, " + "master token: {}",
requestType.value,
(ArgSupplier) () -> loginInput.getSessionToken() != null ? "******" : null,
(ArgSupplier) () -> loginInput.getMasterToken() != null ? "******" : null);
@@ -999,7 +1061,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT
// check the success field first
if (!jsonNode.path("success").asBoolean()) {
- logger.debug("response = {}", theString);
+ logger.debug("Response: {}", theString);
String errorCode = jsonNode.path("code").asText();
String message = jsonNode.path("message").asText();
@@ -1037,7 +1099,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT
* @throws SFException if failed to close session
*/
static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeSQLException {
- logger.debug(" public void close() throws SFException");
+ logger.trace("void close() throws SFException");
// assert the following inputs are valid
AssertUtil.assertTrue(
@@ -1068,10 +1130,10 @@ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeS
postRequest, loginInput.getAdditionalHttpHeadersForSnowsight());
postRequest.setHeader(
- SF_HEADER_AUTHORIZATION,
- SF_HEADER_SNOWFLAKE_AUTHTYPE
+ SFSession.SF_HEADER_AUTHORIZATION,
+ SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE
+ " "
- + SF_HEADER_TOKEN_TAG
+ + SFSession.SF_HEADER_TOKEN_TAG
+ "=\""
+ loginInput.getSessionToken()
+ "\"");
@@ -1089,15 +1151,15 @@ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeS
JsonNode rootNode;
- logger.debug("connection close response: {}", theString);
+ logger.debug("Connection close response: {}", theString);
rootNode = mapper.readTree(theString);
SnowflakeUtil.checkErrorAndThrowException(rootNode);
} catch (URISyntaxException ex) {
- throw new RuntimeException("unexpected URI syntax exception", ex);
+ throw new RuntimeException("Unexpected URI syntax exception", ex);
} catch (IOException ex) {
- logger.error("unexpected IO exception for: " + postRequest, ex);
+ logger.error("Unexpected IO exception for: " + postRequest, ex);
} catch (SnowflakeSQLException ex) {
// ignore exceptions for session expiration exceptions and for
// sessions that no longer exist
@@ -1154,6 +1216,16 @@ private static String federatedFlowStep4(
loginInput.getHttpClientSettingsKey());
// step 5
+ validateSAML(responseHtml, loginInput);
+ } catch (IOException | URISyntaxException ex) {
+ handleFederatedFlowError(loginInput, ex);
+ }
+ return responseHtml;
+ }
+
+ private static void validateSAML(String responseHtml, SFLoginInput loginInput)
+ throws SnowflakeSQLException, MalformedURLException {
+ if (!loginInput.getDisableSamlURLCheck()) {
String postBackUrl = getPostBackUrlFromHTML(responseHtml);
if (!isPrefixEqual(postBackUrl, loginInput.getServerUrl())) {
URL idpDestinationUrl = new URL(postBackUrl);
@@ -1167,18 +1239,13 @@ private static String federatedFlowStep4(
clientDestinationHostName,
idpDestinationHostName);
- // Session is in process of getting created, so exception constructor takes in null session
- // value
+ // Session is in process of getting created, so exception constructor takes in null session
throw new SnowflakeSQLLoggedException(
null,
ErrorCode.IDP_INCORRECT_DESTINATION.getMessageCode(),
- SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION
- /* session = */ );
+ SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION);
}
- } catch (IOException | URISyntaxException ex) {
- handleFederatedFlowError(loginInput, ex);
}
- return responseHtml;
}
/**
@@ -1229,7 +1296,7 @@ private static String federatedFlowStep3(SFLoginInput loginInput, String tokenUr
null,
loginInput.getHttpClientSettingsKey());
- logger.debug("user is authenticated against {}.", loginInput.getAuthenticator());
+ logger.debug("User is authenticated against {}.", loginInput.getAuthenticator());
// session token is in the data field of the returned json response
final JsonNode jsonNode = mapper.readTree(idpResponse);
@@ -1268,7 +1335,7 @@ private static void federatedFlowStep2(SFLoginInput loginInput, String tokenUrl,
null,
ErrorCode.IDP_CONNECTION_ERROR.getMessageCode(),
SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION
- /* session = */ );
+ /* session= */ );
}
} catch (MalformedURLException ex) {
handleFederatedFlowError(loginInput, ex);
@@ -1317,12 +1384,12 @@ private static JsonNode federatedFlowStep1(SFLoginInput loginInput) throws Snowf
loginInput.getSocketTimeoutInMillis(),
0,
loginInput.getHttpClientSettingsKey());
- logger.debug("authenticator-request response: {}", gsResponse);
+ logger.debug("Authenticator-request response: {}", gsResponse);
JsonNode jsonNode = mapper.readTree(gsResponse);
// check the success field first
if (!jsonNode.path("success").asBoolean()) {
- logger.debug("response = {}", gsResponse);
+ logger.debug("Response: {}", gsResponse);
int errorCode = jsonNode.path("code").asInt();
throw new SnowflakeSQLException(
@@ -1460,7 +1527,7 @@ public static Map<String, Object> getCommonParams(JsonNode paramsNode) {
// What type of value is it and what's the value?
if (!child.hasNonNull("value")) {
- logger.debug("No value found for Common Parameter {}", child.path("name").asText());
+ logger.debug("No value found for Common Parameter: {}", child.path("name").asText());
continue;
}
@@ -1495,7 +1562,7 @@ static void updateSfDriverParamValues(Map parameters, SFBaseSess
session.setCommonParameters(parameters);
}
for (Map.Entry<String, Object> entry : parameters.entrySet()) {
- logger.debug("processing parameter {}", entry.getKey());
+ logger.debug("Processing parameter {}", entry.getKey());
if ("CLIENT_DISABLE_INCIDENTS".equalsIgnoreCase(entry.getKey())) {
SnowflakeDriver.setDisableIncidents((Boolean) entry.getValue());
@@ -1592,11 +1659,9 @@ static void updateSfDriverParamValues(Map parameters, SFBaseSess
session.setClientPrefetchThreads((int) entry.getValue());
}
} else if (CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED.equalsIgnoreCase(entry.getKey())) {
- if ((boolean) entry.getValue()) {
- TelemetryService.enable();
- } else {
- TelemetryService.disable();
- }
+ // we ignore the parameter CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED
+ // OOB telemetry is always disabled
+ TelemetryService.disableOOBTelemetry();
} else if (CLIENT_VALIDATE_DEFAULT_PARAMETERS.equalsIgnoreCase(entry.getKey())) {
if (session != null) {
session.setValidateDefaultParameters(SFLoginInput.getBooleanValue(entry.getValue()));
@@ -1641,7 +1706,7 @@ enum TokenRequestType {
* @param serverUrl The Snowflake URL includes protocol such as "https://"
*/
public static void resetOCSPUrlIfNecessary(String serverUrl) throws IOException {
- if (serverUrl.indexOf(".privatelink.snowflakecomputing.com") > 0) {
+ if (PrivateLinkDetector.isPrivateLink(serverUrl)) {
// Privatelink uses special OCSP Cache server
URL url = new URL(serverUrl);
String host = url.getHost();
diff --git a/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java b/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java
index da7807b69..9db2f0589 100644
--- a/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java
+++ b/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java
@@ -46,7 +46,8 @@
* user can type IdP username and password. 4. Return token and proof key to the GS to gain access.
*/
public class SessionUtilExternalBrowser {
- static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilExternalBrowser.class);
+ private static final SFLogger logger =
+ SFLoggerFactory.getLogger(SessionUtilExternalBrowser.class);
public interface AuthExternalBrowserHandlers {
// build a HTTP post object
@@ -202,14 +203,14 @@ private String getSSOUrl(int port) throws SFException, SnowflakeSQLException {
0,
loginInput.getHttpClientSettingsKey());
- logger.debug("authenticator-request response: {}", theString);
+ logger.debug("Authenticator-request response: {}", theString);
// general method, same as with data binding
JsonNode jsonNode = mapper.readTree(theString);
// check the success field first
if (!jsonNode.path("success").asBoolean()) {
- logger.debug("response = {}", theString);
+ logger.debug("Response: {}", theString);
String errorCode = jsonNode.path("code").asText();
throw new SnowflakeSQLException(
SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION,
@@ -240,7 +241,7 @@ private String getConsoleLoginUrl(int port) throws SFException {
String consoleLoginUrl = consoleLoginUriBuilder.build().toURL().toString();
- logger.debug("console login url: {}", consoleLoginUrl);
+ logger.debug("Console login url: {}", consoleLoginUrl);
return consoleLoginUrl;
} catch (Exception ex) {
@@ -266,7 +267,7 @@ void authenticate() throws SFException, SnowflakeSQLException {
try {
// main procedure
int port = this.getLocalPort(ssocket);
- logger.debug("Listening localhost:{}", port);
+ logger.debug("Listening localhost: {}", port);
if (loginInput.getDisableConsoleLogin()) {
// Access GS to get SSO URL
diff --git a/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java b/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java
index 2bef91eda..ad63ea603 100644
--- a/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java
+++ b/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java
@@ -17,6 +17,7 @@
import java.io.IOException;
import java.io.StringReader;
import java.nio.file.Files;
+import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
@@ -52,7 +53,7 @@
/** Class used to compute jwt token for key pair authentication Created by hyu on 1/16/18. */
class SessionUtilKeyPair {
- static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilKeyPair.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilKeyPair.class);
// user name in upper case
private final String userName;
@@ -147,7 +148,6 @@ private SecretKeyFactory getSecretKeyFactory(String algorithm) throws NoSuchAlgo
private PrivateKey extractPrivateKeyFromFile(String privateKeyFile, String privateKeyFilePwd)
throws SFException {
-
if (isBouncyCastleProviderEnabled) {
try {
return extractPrivateKeyWithBouncyCastle(privateKeyFile, privateKeyFilePwd);
@@ -234,8 +234,11 @@ public static int getTimeout() {
private PrivateKey extractPrivateKeyWithBouncyCastle(
String privateKeyFile, String privateKeyFilePwd)
throws IOException, PKCSException, OperatorCreationException {
+ Path privKeyPath = Paths.get(privateKeyFile);
+ FileUtil.logFileUsage(
+ privKeyPath, "Extract private key from file using Bouncy Castle provider", true);
PrivateKeyInfo privateKeyInfo = null;
- PEMParser pemParser = new PEMParser(new FileReader(Paths.get(privateKeyFile).toFile()));
+ PEMParser pemParser = new PEMParser(new FileReader(privKeyPath.toFile()));
Object pemObject = pemParser.readObject();
if (pemObject instanceof PKCS8EncryptedPrivateKeyInfo) {
// Handle the case where the private key is encrypted.
@@ -263,7 +266,9 @@ private PrivateKey extractPrivateKeyWithBouncyCastle(
private PrivateKey extractPrivateKeyWithJdk(String privateKeyFile, String privateKeyFilePwd)
throws IOException, NoSuchAlgorithmException, InvalidKeySpecException, InvalidKeyException {
- String privateKeyContent = new String(Files.readAllBytes(Paths.get(privateKeyFile)));
+ Path privKeyPath = Paths.get(privateKeyFile);
+ FileUtil.logFileUsage(privKeyPath, "Extract private key from file using Jdk", true);
+ String privateKeyContent = new String(Files.readAllBytes(privKeyPath));
if (Strings.isNullOrEmpty(privateKeyFilePwd)) {
// unencrypted private key file
return generatePrivateKey(false, privateKeyContent, privateKeyFilePwd);
diff --git a/src/main/java/net/snowflake/client/core/SfSqlArray.java b/src/main/java/net/snowflake/client/core/SfSqlArray.java
index 83270796a..70682b4f4 100644
--- a/src/main/java/net/snowflake/client/core/SfSqlArray.java
+++ b/src/main/java/net/snowflake/client/core/SfSqlArray.java
@@ -1,11 +1,17 @@
package net.snowflake.client.core;
+import static net.snowflake.client.core.FieldSchemaCreator.buildBindingSchemaForType;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
import java.sql.Array;
import java.sql.JDBCType;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
+import java.util.Arrays;
import java.util.Map;
+import net.snowflake.client.jdbc.BindingParameterMetadata;
+import net.snowflake.client.jdbc.SnowflakeUtil;
@SnowflakeJdbcInternalApi
public class SfSqlArray implements Array {
@@ -74,4 +80,19 @@ public ResultSet getResultSet(long index, int count, Map<String, Class<?>> map)
@Override
public void free() throws SQLException {}
+
+ public String getJsonString() throws SQLException {
+ try {
+ return SnowflakeUtil.mapJson(elements);
+ } catch (JsonProcessingException e) {
+ throw new SQLException("There is exception during array to json string.", e);
+ }
+ }
+
+ public BindingParameterMetadata getSchema() throws SQLException {
+ return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata()
+ .withType("array")
+ .withFields(Arrays.asList(buildBindingSchemaForType(getBaseType(), false)))
+ .build();
+ }
}
diff --git a/src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java b/src/main/java/net/snowflake/client/core/SfTimestampUtil.java
similarity index 76%
rename from src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java
rename to src/main/java/net/snowflake/client/core/SfTimestampUtil.java
index b95c518c6..ed58f4481 100644
--- a/src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java
+++ b/src/main/java/net/snowflake/client/core/SfTimestampUtil.java
@@ -4,6 +4,7 @@
package net.snowflake.client.core;
+import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.TimeZone;
@@ -11,7 +12,9 @@
import net.snowflake.common.core.SnowflakeDateTimeFormat;
@SnowflakeJdbcInternalApi
-public class SqlInputTimestampUtil {
+public class SfTimestampUtil {
+
+ static final long MS_IN_DAY = 86400 * 1000;
public static Timestamp getTimestampFromType(
int columnSubType,
@@ -25,7 +28,7 @@ public static Timestamp getTimestampFromType(
} else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ
|| columnSubType == Types.TIMESTAMP) {
return getTimestampFromFormat(
- "TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session, sessionTimeZone, tz);
+ "TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session, sessionTimeZone, TimeZone.getDefault());
} else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) {
return getTimestampFromFormat(
"TIMESTAMP_TZ_OUTPUT_FORMAT", value, session, sessionTimeZone, tz);
@@ -46,4 +49,13 @@ private static Timestamp getTimestampFromFormat(
SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat);
return formatter.parse(value, tz, 0, false).getTimestamp();
}
+
+ public static long getTimeInNanoseconds(Time x) {
+ long msSinceEpoch = x.getTime();
+ // Use % + % instead of just % to get the nonnegative remainder.
+ // TODO(mkember): Change to use Math.floorMod when Client is on Java 8.
+ long msSinceMidnight = (msSinceEpoch % MS_IN_DAY + MS_IN_DAY) % MS_IN_DAY;
+ long nanosSinceMidnight = msSinceMidnight * 1000 * 1000;
+ return nanosSinceMidnight;
+ }
}
diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java
index a02fb4d7b..96fefe5dc 100644
--- a/src/main/java/net/snowflake/client/core/StmtUtil.java
+++ b/src/main/java/net/snowflake/client/core/StmtUtil.java
@@ -23,7 +23,6 @@
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.client.util.SecretDetector;
import net.snowflake.common.api.QueryInProgressResponse;
-import org.apache.http.HttpHeaders;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
@@ -43,25 +42,15 @@ public class StmtUtil {
private static final String SF_PATH_QUERY_RESULT = "/queries/%s/result";
- static final String SF_QUERY_REQUEST_ID = "requestId";
-
private static final String SF_QUERY_COMBINE_DESCRIBE_EXECUTE = "combinedDescribe";
- private static final String SF_QUERY_CONTEXT = "queryContext";
-
- private static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION;
-
- private static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake";
-
- private static final String SF_HEADER_TOKEN_TAG = "Token";
-
static final String SF_MEDIA_TYPE = "application/snowflake";
// we don't want to retry canceling forever so put a limit which is
// twice as much as our default socket timeout
static final int SF_CANCELING_RETRY_TIMEOUT_IN_MILLIS = 600000; // 10 min
- static final SFLogger logger = SFLoggerFactory.getLogger(StmtUtil.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(StmtUtil.class);
/** Input for executing a statement on server */
static class StmtInput {
@@ -310,12 +299,12 @@ public static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec
// don't need to execute the query again
if (stmtInput.retry && stmtInput.prevGetResultURL != null) {
logger.debug(
- "retrying statement execution with get result URL: {}", stmtInput.prevGetResultURL);
+ "Retrying statement execution with get result URL: {}", stmtInput.prevGetResultURL);
} else {
URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl);
uriBuilder.setPath(SF_PATH_QUERY_V1);
- uriBuilder.addParameter(SF_QUERY_REQUEST_ID, stmtInput.requestId);
+ uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, stmtInput.requestId);
if (stmtInput.combineDescribe) {
uriBuilder.addParameter(SF_QUERY_COMBINE_DESCRIBE_EXECUTE, Boolean.TRUE.toString());
@@ -376,10 +365,10 @@ public static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec
httpRequest.addHeader("accept", stmtInput.mediaType);
httpRequest.setHeader(
- SF_HEADER_AUTHORIZATION,
- SF_HEADER_SNOWFLAKE_AUTHTYPE
+ SFSession.SF_HEADER_AUTHORIZATION,
+ SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE
+ " "
- + SF_HEADER_TOKEN_TAG
+ + SFSession.SF_HEADER_TOKEN_TAG
+ "=\""
+ stmtInput.sessionToken
+ "\"");
@@ -522,11 +511,11 @@ else if (stmtInput.asyncExec
// simulate client pause before trying to fetch result so that
// we can test query behavior related to disconnected client
if (stmtInput.injectClientPause != 0) {
- logger.debug("inject client pause for {} seconds", stmtInput.injectClientPause);
+ logger.debug("Inject client pause for {} seconds", stmtInput.injectClientPause);
try {
Thread.sleep(stmtInput.injectClientPause * 1000);
} catch (InterruptedException ex) {
- logger.debug("exception encountered while injecting pause", false);
+ logger.debug("Exception encountered while injecting pause", false);
}
}
}
@@ -606,14 +595,14 @@ protected static String getQueryResult(
protected static String getQueryResult(String getResultPath, StmtInput stmtInput)
throws SFException, SnowflakeSQLException {
HttpGet httpRequest = null;
- logger.debug("get query result: {}", getResultPath);
+ logger.debug("Get query result: {}", getResultPath);
try {
URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl);
uriBuilder.setPath(getResultPath);
- uriBuilder.addParameter(SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString());
+ uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString());
httpRequest = new HttpGet(uriBuilder.build());
// Add custom headers before adding common headers
@@ -623,10 +612,10 @@ protected static String getQueryResult(String getResultPath, StmtInput stmtInput
httpRequest.addHeader("accept", stmtInput.mediaType);
httpRequest.setHeader(
- SF_HEADER_AUTHORIZATION,
- SF_HEADER_SNOWFLAKE_AUTHTYPE
+ SFSession.SF_HEADER_AUTHORIZATION,
+ SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE
+ " "
- + SF_HEADER_TOKEN_TAG
+ + SFSession.SF_HEADER_TOKEN_TAG
+ "=\""
+ stmtInput.sessionToken
+ "\"");
@@ -717,7 +706,7 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE
uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1);
- uriBuilder.addParameter(SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString());
+ uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString());
httpRequest = new HttpPost(uriBuilder.build());
// Add custom headers before adding common headers
@@ -742,10 +731,10 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE
httpRequest.addHeader("accept", stmtInput.mediaType);
httpRequest.setHeader(
- SF_HEADER_AUTHORIZATION,
- SF_HEADER_SNOWFLAKE_AUTHTYPE
+ SFSession.SF_HEADER_AUTHORIZATION,
+ SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE
+ " "
- + SF_HEADER_TOKEN_TAG
+ + SFSession.SF_HEADER_TOKEN_TAG
+ "=\""
+ stmtInput.sessionToken
+ "\"");
@@ -798,7 +787,7 @@ public static SFStatementType checkStageManageCommand(String sql) {
// skip commenting prefixed with //
while (trimmedSql.startsWith("//")) {
if (logger.isDebugEnabled()) {
- logger.debug("skipping // comments in: \n{}", trimmedSql);
+ logger.debug("Skipping // comments in: \n{}", trimmedSql);
}
if (trimmedSql.indexOf('\n') > 0) {
diff --git a/src/main/java/net/snowflake/client/core/URLUtil.java b/src/main/java/net/snowflake/client/core/URLUtil.java
index cd4129e4c..56fa0f266 100644
--- a/src/main/java/net/snowflake/client/core/URLUtil.java
+++ b/src/main/java/net/snowflake/client/core/URLUtil.java
@@ -3,8 +3,11 @@
*/
package net.snowflake.client.core;
+import static net.snowflake.client.core.SFSession.SF_QUERY_REQUEST_ID;
+
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
+import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLEncoder;
@@ -15,10 +18,12 @@
import javax.annotation.Nullable;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.utils.URLEncodedUtils;
public class URLUtil {
- static final SFLogger logger = SFLoggerFactory.getLogger(URLUtil.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(URLUtil.class);
static final String validURLPattern =
"^http(s?)\\:\\/\\/[0-9a-zA-Z]([-.\\w]*[0-9a-zA-Z@:])*(:(0-9)*)*(\\/?)([a-zA-Z0-9\\-\\.\\?\\,\\&\\(\\)\\/\\\\\\+&%\\$#_=@]*)?$";
static final Pattern pattern = Pattern.compile(validURLPattern);
@@ -53,4 +58,20 @@ public static String urlEncode(String target) throws UnsupportedEncodingExceptio
}
return encodedTarget;
}
+
+ @SnowflakeJdbcInternalApi
+ public static String getRequestId(URI uri) {
+ return URLEncodedUtils.parse(uri, StandardCharsets.UTF_8).stream()
+ .filter(p -> p.getName().equals(SF_QUERY_REQUEST_ID))
+ .findFirst()
+ .map(NameValuePair::getValue)
+ .orElse(null);
+ }
+
+ @SnowflakeJdbcInternalApi
+ public static String getRequestIdLogStr(URI uri) {
+ String requestId = getRequestId(uri);
+
+ return requestId == null ? "" : "[requestId=" + requestId + "] ";
+ }
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java
index 8eaaadc94..2ad5c3ef2 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java
@@ -11,6 +11,7 @@
import java.util.TimeZone;
import net.snowflake.client.core.ResultUtil;
import net.snowflake.client.core.SFException;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeTimestampWithTimezone;
import net.snowflake.client.log.ArgSupplier;
@@ -151,6 +152,19 @@ public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone ne
* @return
*/
public static Timestamp toJavaTimestamp(long epoch, int scale) {
+ return toJavaTimestamp(epoch, scale, TimeZone.getDefault(), false);
+ }
+
+ /**
+ * Generates a Java Timestamp object for the given epoch and scale, honoring the session timezone
+ *
+ * @param epoch the value since epoch time
+ * @param scale the scale of the value
+ * @return Java Timestamp object
+ */
+ @SnowflakeJdbcInternalApi
+ public static Timestamp toJavaTimestamp(
+ long epoch, int scale, TimeZone sessionTimezone, boolean useSessionTimezone) {
long seconds = epoch / powerOfTen(scale);
int fraction = (int) ((epoch % powerOfTen(scale)) * powerOfTen(9 - scale));
if (fraction < 0) {
@@ -158,7 +172,7 @@ public static Timestamp toJavaTimestamp(long epoch, int scale) {
seconds--;
fraction += 1000000000;
}
- return createTimestamp(seconds, fraction, TimeZone.getDefault(), false);
+ return createTimestamp(seconds, fraction, sessionTimezone, useSessionTimezone);
}
/**
diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java
index 236abe553..e2bba45ab 100644
--- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java
@@ -11,6 +11,7 @@
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.ResultUtil;
import net.snowflake.client.core.SFException;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeType;
import net.snowflake.client.jdbc.SnowflakeUtil;
@@ -65,7 +66,7 @@ public Timestamp toTimestamp(int index, TimeZone tz) throws SFException {
private Timestamp getTimestamp(int index, TimeZone tz) throws SFException {
long val = bigIntVector.getDataBuffer().getLong(index * BigIntVector.TYPE_WIDTH);
int scale = context.getScale(columnIndex);
- return getTimestamp(val, scale);
+ return getTimestamp(val, scale, sessionTimeZone, useSessionTimezone);
}
@Override
@@ -90,8 +91,25 @@ public boolean toBoolean(int index) throws SFException {
SnowflakeUtil.BOOLEAN_STR, val);
}
+ /**
+ * Use {@link #getTimestamp(long, int, TimeZone, boolean)}
+ *
+ * @param val epoch
+ * @param scale scale
+ * @return Timestamp value without timezone taken into account
+ * @throws SFException
+ */
+ @Deprecated
public static Timestamp getTimestamp(long val, int scale) throws SFException {
Timestamp ts = ArrowResultUtil.toJavaTimestamp(val, scale);
return ResultUtil.adjustTimestamp(ts);
}
+
+ @SnowflakeJdbcInternalApi
+ public static Timestamp getTimestamp(
+ long epoch, int scale, TimeZone sessionTimeZone, boolean useSessionTimezone)
+ throws SFException {
+ return ResultUtil.adjustTimestamp(
+ ArrowResultUtil.toJavaTimestamp(epoch, scale, sessionTimeZone, useSessionTimezone));
+ }
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java
index a07e583ea..14bea858a 100644
--- a/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java
@@ -92,7 +92,8 @@ private Timestamp convertTimestampLtz(Object obj, int scale) throws SFException
false);
}
} else if (obj instanceof Long) {
- return BigIntToTimestampLTZConverter.getTimestamp((long) obj, scale);
+ return BigIntToTimestampLTZConverter.getTimestamp(
+ (long) obj, scale, sessionTimeZone, useSessionTimezone);
}
throw new SFException(
ErrorCode.INVALID_VALUE_CONVERT,
diff --git a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java
index 2e9dbd82d..ae7a492a0 100644
--- a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java
@@ -1,5 +1,6 @@
package net.snowflake.client.core.arrow;
+import java.util.List;
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFException;
import net.snowflake.client.jdbc.SnowflakeType;
@@ -22,6 +23,10 @@ public Object toObject(int index) throws SFException {
@Override
public String toString(int index) throws SFException {
- return vector.getObject(index).toString();
+ List<?> object = vector.getObject(index);
+ if (object == null) {
+ return null;
+ }
+ return object.toString();
}
}
diff --git a/src/main/java/net/snowflake/client/core/bind/BindUploader.java b/src/main/java/net/snowflake/client/core/bind/BindUploader.java
index 2332f0150..6b901da44 100644
--- a/src/main/java/net/snowflake/client/core/bind/BindUploader.java
+++ b/src/main/java/net/snowflake/client/core/bind/BindUploader.java
@@ -159,6 +159,7 @@ private synchronized String synchronizedTimestampFormat(String o, String type) {
int nano = times.right;
Timestamp v1 = new Timestamp(sec * 1000);
+ ZoneOffset offsetId;
// For timestamp_ntz, use UTC timezone. For timestamp_ltz, use the local timezone to minimise
// the gap.
if ("TIMESTAMP_LTZ".equals(type)) {
@@ -166,10 +167,11 @@ private synchronized String synchronizedTimestampFormat(String o, String type) {
cal.setTimeZone(tz);
cal.clear();
timestampFormat.setCalendar(cal);
+ offsetId = ZoneId.systemDefault().getRules().getOffset(Instant.ofEpochMilli(v1.getTime()));
+ } else {
+ offsetId = ZoneOffset.UTC;
}
- ZoneOffset offsetId = ZoneId.systemDefault().getRules().getOffset(Instant.now());
-
return timestampFormat.format(v1) + String.format("%09d", nano) + " " + offsetId;
}
diff --git a/src/main/java/net/snowflake/client/core/json/Converters.java b/src/main/java/net/snowflake/client/core/json/Converters.java
index 584e0d12e..afe663f90 100644
--- a/src/main/java/net/snowflake/client/core/json/Converters.java
+++ b/src/main/java/net/snowflake/client/core/json/Converters.java
@@ -13,8 +13,8 @@
import java.util.TimeZone;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFException;
+import net.snowflake.client.core.SfTimestampUtil;
import net.snowflake.client.core.SnowflakeJdbcInternalApi;
-import net.snowflake.client.core.SqlInputTimestampUtil;
import net.snowflake.client.core.arrow.StructuredTypeDateTimeConverter;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1;
@@ -224,7 +224,7 @@ public Converter timestampFromStringConverter(
TimeZone sessionTimezone) {
return value -> {
Timestamp result =
- SqlInputTimestampUtil.getTimestampFromType(
+ SfTimestampUtil.getTimestampFromType(
columnSubType, (String) value, session, sessionTimezone, tz);
if (result != null) {
return result;
diff --git a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java
index 103f90555..3516966e6 100644
--- a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java
+++ b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java
@@ -212,7 +212,11 @@ private static List<ArrowVectorConverter> initConverters(
break;
case MAP:
- converters.add(new MapConverter((MapVector) vector, i, context));
+ if (vector instanceof MapVector) {
+ converters.add(new MapConverter((MapVector) vector, i, context));
+ } else {
+ converters.add(new VarCharConverter(vector, i, context));
+ }
break;
case VECTOR:
diff --git a/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java b/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java
new file mode 100644
index 000000000..db1c85e7b
--- /dev/null
+++ b/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
+ */
+package net.snowflake.client.jdbc;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import java.util.List;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public class BindingParameterMetadata {
+ private String type;
+ private String name;
+ private Integer length;
+ private Integer byteLength;
+ private Integer precision;
+ private Integer scale;
+
+ private boolean nullable = true;
+ private List<BindingParameterMetadata> fields;
+
+ public BindingParameterMetadata(String type) {
+ this.type = type;
+ }
+
+ public BindingParameterMetadata(String type, String name) {
+ this.type = type;
+ this.name = name;
+ }
+
+ public BindingParameterMetadata(
+ String type,
+ String name,
+ Integer length,
+ Integer byteLength,
+ Integer precision,
+ Integer scale,
+ Boolean nullable) {
+ this.type = type;
+ this.name = name;
+ this.length = length;
+ this.byteLength = byteLength;
+ this.precision = precision;
+ this.scale = scale;
+ this.nullable = nullable;
+ }
+
+ public BindingParameterMetadata() {}
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public Integer getLength() {
+ return length;
+ }
+
+ public void setLength(Integer length) {
+ this.length = length;
+ }
+
+ public Integer getByteLength() {
+ return byteLength;
+ }
+
+ public void setByteLength(Integer byteLength) {
+ this.byteLength = byteLength;
+ }
+
+ public Integer getPrecision() {
+ return precision;
+ }
+
+ public void setPrecision(Integer precision) {
+ this.precision = precision;
+ }
+
+ public Integer getScale() {
+ return scale;
+ }
+
+ public void setScale(Integer scale) {
+ this.scale = scale;
+ }
+
+ public Boolean isNullable() {
+ return nullable;
+ }
+
+ public void setNullable(Boolean nullable) {
+ this.nullable = nullable;
+ }
+
+ public List<BindingParameterMetadata> getFields() {
+ return fields;
+ }
+
+ public void setFields(List<BindingParameterMetadata> fields) {
+ this.fields = fields;
+ }
+
+ public static class BindingParameterMetadataBuilder {
+ private BindingParameterMetadata bindingParameterMetadata;
+
+ private BindingParameterMetadataBuilder() {
+ bindingParameterMetadata = new BindingParameterMetadata();
+ }
+
+ public BindingParameterMetadataBuilder withType(String type) {
+ bindingParameterMetadata.type = type;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withName(String name) {
+ bindingParameterMetadata.name = name;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withLength(Integer length) {
+ bindingParameterMetadata.length = length;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withByteLength(Integer byteLength) {
+ bindingParameterMetadata.byteLength = byteLength;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withPrecision(Integer precision) {
+ bindingParameterMetadata.precision = precision;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withScale(Integer scale) {
+ bindingParameterMetadata.scale = scale;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withNullable(Boolean nullable) {
+ bindingParameterMetadata.nullable = nullable;
+ return this;
+ }
+
+ public BindingParameterMetadataBuilder withFields(List<BindingParameterMetadata> fields) {
+ bindingParameterMetadata.fields = fields;
+ return this;
+ }
+
+ public static BindingParameterMetadataBuilder bindingParameterMetadata() {
+ return new BindingParameterMetadataBuilder();
+ }
+
+ public BindingParameterMetadata build() {
+ return bindingParameterMetadata;
+ }
+ }
+}
diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java
index 39f94235b..3ee556bb4 100644
--- a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java
+++ b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java
@@ -11,6 +11,8 @@
import net.snowflake.client.core.ExecTimeTelemetryData;
import net.snowflake.client.core.HttpUtil;
import net.snowflake.client.log.ArgSupplier;
+import net.snowflake.client.log.SFLogger;
+import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.client.util.SecretDetector;
import net.snowflake.common.core.SqlState;
import org.apache.http.Header;
@@ -21,6 +23,8 @@
import org.apache.http.impl.client.CloseableHttpClient;
public class DefaultResultStreamProvider implements ResultStreamProvider {
+ private static final SFLogger logger =
+ SFLoggerFactory.getLogger(DefaultResultStreamProvider.class);
// SSE-C algorithm header
private static final String SSE_C_ALGORITHM = "x-amz-server-side-encryption-customer-algorithm";
@@ -53,16 +57,15 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception
* means failure.
*/
if (response == null || response.getStatusLine().getStatusCode() != 200) {
- SnowflakeResultSetSerializableV1.logger.error(
- "Error fetching chunk from: {}", context.getResultChunk().getScrubbedUrl());
+ logger.error("Error fetching chunk from: {}", context.getResultChunk().getScrubbedUrl());
- SnowflakeUtil.logResponseDetails(response, SnowflakeResultSetSerializableV1.logger);
+ SnowflakeUtil.logResponseDetails(response, logger);
throw new SnowflakeSQLException(
SqlState.IO_ERROR,
ErrorCode.NETWORK_ERROR.getMessageCode(),
"Error encountered when downloading a result chunk: HTTP "
- + "status="
+ + "status: "
+ ((response != null) ? response.getStatusLine().getStatusCode() : "null response"));
}
@@ -72,7 +75,7 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception
// read the chunk data
inputStream = detectContentEncodingAndGetInputStream(response, entity.getContent());
} catch (Exception ex) {
- SnowflakeResultSetSerializableV1.logger.error("Failed to decompress data: {}", response);
+ logger.error("Failed to decompress data: {}", response);
throw new SnowflakeSQLLoggedException(
context.getSession(),
@@ -82,7 +85,7 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception
}
// trace the response if requested
- SnowflakeResultSetSerializableV1.logger.debug("Json response: {}", response);
+ logger.debug("Json response: {}", response);
return inputStream;
}
@@ -94,8 +97,7 @@ private HttpResponse getResultChunk(ChunkDownloadContext context) throws Excepti
if (context.getChunkHeadersMap() != null && context.getChunkHeadersMap().size() != 0) {
for (Map.Entry<String, String> entry : context.getChunkHeadersMap().entrySet()) {
- SnowflakeResultSetSerializableV1.logger.debug(
- "Adding header key={}, value={}", entry.getKey(), entry.getValue());
+ logger.debug("Adding header key: {}", entry.getKey());
httpRequest.addHeader(entry.getKey(), entry.getValue());
}
}
@@ -103,11 +105,11 @@ private HttpResponse getResultChunk(ChunkDownloadContext context) throws Excepti
else if (context.getQrmk() != null) {
httpRequest.addHeader(SSE_C_ALGORITHM, SSE_C_AES);
httpRequest.addHeader(SSE_C_KEY, context.getQrmk());
- SnowflakeResultSetSerializableV1.logger.debug("Adding SSE-C headers", false);
+ logger.debug("Adding SSE-C headers", false);
}
- SnowflakeResultSetSerializableV1.logger.debug(
- "Thread {} Fetching result #chunk{}: {}",
+ logger.debug(
+ "Thread {} Fetching result chunk#{}: {}",
Thread.currentThread().getId(),
context.getChunkIndex(),
context.getResultChunk().getScrubbedUrl());
@@ -133,8 +135,8 @@ else if (context.getQrmk() != null) {
true, // no retry on http request
new ExecTimeTelemetryData());
- SnowflakeResultSetSerializableV1.logger.debug(
- "Thread {} Call #chunk{} returned for URL: {}, response={}",
+ logger.debug(
+ "Thread {} Call chunk#{} returned for URL: {}, response: {}",
Thread.currentThread().getId(),
context.getChunkIndex(),
(ArgSupplier) () -> SecretDetector.maskSASToken(context.getResultChunk().getUrl()),
diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java
index 7ada3a803..6bb62c82f 100644
--- a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java
+++ b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java
@@ -8,15 +8,19 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLNonTransientConnectionException;
import java.sql.Statement;
import java.util.Map;
import java.util.Properties;
+import java.util.Set;
import java.util.logging.Level;
import net.snowflake.client.config.SFClientConfig;
import net.snowflake.client.config.SFClientConfigParser;
+import net.snowflake.client.core.Constants;
import net.snowflake.client.core.SFBaseResultSet;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFBaseStatement;
@@ -136,13 +140,16 @@ private void setClientConfig() throws SnowflakeSQLLoggedException {
String clientConfigFilePath =
(String) connectionPropertiesMap.getOrDefault(SFSessionProperty.CLIENT_CONFIG_FILE, null);
- SFClientConfig sfClientConfig;
- try {
- sfClientConfig = SFClientConfigParser.loadSFClientConfig(clientConfigFilePath);
- } catch (IOException e) {
- throw new SnowflakeSQLLoggedException(sfSession, ErrorCode.INTERNAL_ERROR, e.getMessage());
+ SFClientConfig sfClientConfig = sfSession.getSfClientConfig();
+ if (sfClientConfig == null) {
+ try {
+ sfClientConfig = SFClientConfigParser.loadSFClientConfig(clientConfigFilePath);
+ } catch (IOException e) {
+ throw new SnowflakeSQLLoggedException(
+ sfSession, ErrorCode.INTERNAL_ERROR, e.getMessage(), e.getCause());
+ }
+ sfSession.setSfClientConfig(sfClientConfig);
}
- sfSession.setSfClientConfig(sfClientConfig);
}
/**
@@ -181,6 +188,7 @@ && systemGetProperty("java.util.logging.config.file") == null) {
if (logLevel != null && logPattern != null) {
try {
+ logger.info("Setting logger with log level {} and log pattern {}", logLevel, logPattern);
JDK14Logger.instantiateLogger(logLevel, logPattern);
} catch (IOException ex) {
throw new SnowflakeSQLLoggedException(
@@ -188,13 +196,10 @@ && systemGetProperty("java.util.logging.config.file") == null) {
}
if (sfClientConfig != null) {
logger.debug(
- String.format(
- "SF Client config found at location: %s.", sfClientConfig.getConfigFilePath()));
+ "SF Client config found at location: {}.", sfClientConfig.getConfigFilePath());
}
logger.debug(
- String.format(
- "Instantiating JDK14Logger with level: %s , output path: %s",
- logLevel, logPattern));
+ "Instantiating JDK14Logger with level: {}, output path: {}", logLevel, logPattern);
}
}
}
@@ -206,25 +211,98 @@ private String constructLogPattern(String logPathFromConfig) throws SnowflakeSQL
String logPattern = "%t/snowflake_jdbc%u.log"; // java.tmpdir
+ Path logPath;
if (logPathFromConfig != null && !logPathFromConfig.isEmpty()) {
- Path path = Paths.get(logPathFromConfig, "jdbc");
- if (!Files.exists(path)) {
+ // Get log path from configuration
+ logPath = Paths.get(logPathFromConfig);
+ if (!Files.exists(logPath)) {
try {
- Files.createDirectories(path);
+ Files.createDirectories(logPath);
} catch (IOException ex) {
throw new SnowflakeSQLLoggedException(
sfSession,
ErrorCode.INTERNAL_ERROR,
String.format(
- "Un-able to create log path mentioned in configfile %s ,%s",
+ "Unable to create log path mentioned in configfile %s ,%s",
logPathFromConfig, ex.getMessage()));
}
}
- logPattern = Paths.get(path.toString(), "snowflake_jdbc%u.log").toString();
+ } else {
+ // Get log path from home directory
+ String homePath = systemGetProperty("user.home");
+ if (homePath == null || homePath.isEmpty()) {
+ throw new SnowflakeSQLLoggedException(
+ sfSession,
+ ErrorCode.INTERNAL_ERROR,
+ String.format(
+ "Log path not set in configfile %s and home directory not set.",
+ logPathFromConfig));
+ }
+ logPath = Paths.get(homePath);
}
+
+ Path path = createLogPathSubDirectory(logPath);
+
+ logPattern = Paths.get(path.toString(), "snowflake_jdbc%u.log").toString();
return logPattern;
}
+ private Path createLogPathSubDirectory(Path logPath) throws SnowflakeSQLLoggedException {
+ Path path = Paths.get(logPath.toString(), "jdbc");
+ if (!Files.exists(path)) {
+ createLogFolder(path);
+ } else {
+ checkLogFolderPermissions(path);
+ }
+ return path;
+ }
+
+ private void createLogFolder(Path path) throws SnowflakeSQLLoggedException {
+ try {
+ if (Constants.getOS() == Constants.OS.WINDOWS) {
+ Files.createDirectories(path);
+ } else {
+ Files.createDirectories(
+ path,
+ PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwx------")));
+ }
+ } catch (IOException ex) {
+ throw new SnowflakeSQLLoggedException(
+ sfSession,
+ ErrorCode.INTERNAL_ERROR,
+ String.format(
+ "Unable to create jdbc subfolder in configfile %s ,%s",
+ path.toString(), ex.getMessage(), ex.getCause()));
+ }
+ }
+
+ private void checkLogFolderPermissions(Path path) throws SnowflakeSQLLoggedException {
+ if (Constants.getOS() != Constants.OS.WINDOWS) {
+ try {
+ Set<PosixFilePermission> folderPermissions = Files.getPosixFilePermissions(path);
+ if (folderPermissions.contains(PosixFilePermission.GROUP_WRITE)
+ || folderPermissions.contains(PosixFilePermission.GROUP_READ)
+ || folderPermissions.contains(PosixFilePermission.GROUP_EXECUTE)
+ || folderPermissions.contains(PosixFilePermission.OTHERS_WRITE)
+ || folderPermissions.contains(PosixFilePermission.OTHERS_READ)
+ || folderPermissions.contains(PosixFilePermission.OTHERS_EXECUTE)) {
+ logger.warn(
+ "Access permission for the logs directory '{}' is currently {} and is potentially "
+ + "accessible to users other than the owner of the logs directory.",
+ path.toString(),
+ folderPermissions.toString());
+ }
+ } catch (IOException ex) {
+ throw new SnowflakeSQLLoggedException(
+ sfSession,
+ ErrorCode.INTERNAL_ERROR,
+ String.format(
+ "Unable to get permissions of log directory %s ,%s",
+ path.toString(), ex.getMessage(), ex.getCause()));
+ }
+ }
+ }
+
private void initSessionProperties(SnowflakeConnectString conStr, String appID, String appVersion)
throws SFException {
Map<String, Object> properties = mergeProperties(conStr);
diff --git a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java
index cf019d62e..d38011c0e 100644
--- a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java
@@ -3,7 +3,9 @@
*/
package net.snowflake.client.jdbc;
+import java.util.ArrayList;
import java.util.List;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
public class FieldMetadata {
@@ -43,6 +45,11 @@ public FieldMetadata(
this.fields = fields;
}
+ @SnowflakeJdbcInternalApi
+ public FieldMetadata() {
+ this.fields = new ArrayList<>();
+ }
+
public String getName() {
return name;
}
diff --git a/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java b/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java
index 2930188eb..14fb7dbdc 100644
--- a/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java
+++ b/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java
@@ -26,6 +26,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import net.snowflake.client.core.FileUtil;
/**
* An {@link OutputStream} that starts buffering to a byte array, but switches to file buffering
@@ -129,6 +130,7 @@ public ByteSource asByteSource() {
private synchronized InputStream openInputStream() throws IOException {
if (file != null) {
+ FileUtil.logFileUsage(file, "Data buffering stream", false);
return new FileInputStream(file);
} else {
return new ByteArrayInputStream(memory.getBuffer(), 0, memory.getCount());
diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java
index fa7826664..5be46c5de 100644
--- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java
+++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java
@@ -6,6 +6,7 @@
import java.io.PrintWriter;
import java.io.StringWriter;
+import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLKeyException;
@@ -17,6 +18,7 @@
import net.snowflake.client.core.HttpUtil;
import net.snowflake.client.core.SFOCSPException;
import net.snowflake.client.core.SessionUtil;
+import net.snowflake.client.core.URLUtil;
import net.snowflake.client.core.UUIDUtils;
import net.snowflake.client.jdbc.telemetryOOB.TelemetryService;
import net.snowflake.client.log.ArgSupplier;
@@ -24,6 +26,7 @@
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.client.util.DecorrelatedJitterBackoff;
import net.snowflake.client.util.SecretDetector;
+import net.snowflake.client.util.Stopwatch;
import net.snowflake.common.core.SqlState;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpRequestBase;
@@ -123,9 +126,32 @@ public static CloseableHttpResponse execute(
boolean noRetry,
ExecTimeTelemetryData execTimeData)
throws SnowflakeSQLException {
- CloseableHttpResponse response = null;
+ Stopwatch stopwatch = null;
+
+ if (logger.isDebugEnabled()) {
+ stopwatch = new Stopwatch();
+ stopwatch.start();
+ }
String requestInfoScrubbed = SecretDetector.maskSASToken(httpRequest.toString());
+ String requestIdStr = URLUtil.getRequestIdLogStr(httpRequest.getURI());
+ logger.debug(
+ "{}Executing rest request: {}, retry timeout: {}, socket timeout: {}, max retries: {},"
+ + " inject socket timeout: {}, canceling: {}, without cookies: {}, include retry parameters: {},"
+ + " include request guid: {}, retry http 403: {}, no retry: {}",
+ requestIdStr,
+ requestInfoScrubbed,
+ retryTimeout,
+ socketTimeout,
+ maxRetries,
+ injectSocketTimeout,
+ canceling,
+ withoutCookies,
+ includeRetryParameters,
+ includeRequestGuid,
+ retryHTTP403,
+ noRetry);
+ CloseableHttpResponse response = null;
// time the client started attempting to submit request
final long startTime = System.currentTimeMillis();
@@ -140,6 +166,10 @@ public static CloseableHttpResponse execute(
// Used to indicate that this is a login/auth request and will be using the new retry strategy.
boolean isLoginRequest = SessionUtil.isNewRetryStrategyRequest(httpRequest);
+ if (isLoginRequest) {
+ logger.debug("{}Request is a login/auth request. Using new retry strategy", requestIdStr);
+ }
+
// total elapsed time due to transient issues.
long elapsedMilliForTransientIssues = 0;
@@ -168,9 +198,14 @@ public static CloseableHttpResponse execute(
// try request till we get a good response or retry timeout
while (true) {
- logger.debug("Retry count: {}", retryCount);
- logger.debug("Attempting request: {}", requestInfoScrubbed);
-
+ logger.debug(
+ "{}Retry count: {}, max retries: {}, retry timeout: {} s, backoff: {} ms. Attempting request: {}",
+ requestIdStr,
+ retryCount,
+ maxRetries,
+ retryTimeout,
+ backoffInMilli,
+ requestInfoScrubbed);
try {
// update start time
startTimePerRequest = System.currentTimeMillis();
@@ -184,7 +219,8 @@ public static CloseableHttpResponse execute(
if (injectSocketTimeout != 0 && retryCount == 0) {
// test code path
logger.debug(
- "Injecting socket timeout by setting " + "socket timeout to {} millisecond ",
+ "{}Injecting socket timeout by setting socket timeout to {} ms",
+ requestIdStr,
injectSocketTimeout);
httpRequest.setConfig(
HttpUtil.getDefaultRequestConfigWithSocketTimeout(
@@ -203,6 +239,7 @@ public static CloseableHttpResponse execute(
// If HTAP
if ("true".equalsIgnoreCase(System.getenv("HTAP_SIMULATION"))
&& builder.getPathSegments().contains("query-request")) {
+ logger.debug("{}Setting htap simulation", requestIdStr);
builder.setParameter("target", "htap_simulation");
}
if (includeRetryParameters && retryCount > 0) {
@@ -215,14 +252,18 @@ public static CloseableHttpResponse execute(
// so that it can be renewed in time and pass it to the http request configuration.
if (authTimeout > 0) {
int requestSocketAndConnectTimeout = (int) authTimeout * 1000;
+ logger.debug(
+ "{}Setting auth timeout as the socket timeout: {} s", requestIdStr, authTimeout);
httpRequest.setConfig(
HttpUtil.getDefaultRequestConfigWithSocketAndConnectTimeout(
requestSocketAndConnectTimeout, withoutCookies));
}
if (includeRequestGuid) {
+ UUID guid = UUIDUtils.getUUID();
+ logger.debug("{}Request {} guid: {}", requestIdStr, requestInfoScrubbed, guid.toString());
// Add request_guid for better tracing
- builder.setParameter(SF_REQUEST_GUID, UUIDUtils.getUUID().toString());
+ builder.setParameter(SF_REQUEST_GUID, guid.toString());
}
httpRequest.setURI(builder.build());
@@ -233,7 +274,7 @@ public static CloseableHttpResponse execute(
// if exception is caused by illegal state, e.g shutdown of http client
// because of closing of connection, then fail immediately and stop retrying.
throw new SnowflakeSQLLoggedException(
- null, ErrorCode.INVALID_STATE, ex, /* session = */ ex.getMessage());
+ null, ErrorCode.INVALID_STATE, ex, /* session= */ ex.getMessage());
} catch (SSLHandshakeException
| SSLKeyException
@@ -247,17 +288,20 @@ public static CloseableHttpResponse execute(
} catch (Exception ex) {
savedEx = ex;
- // if the request took more than 5 min (socket timeout) log an error
- if ((System.currentTimeMillis() - startTimePerRequest)
- > HttpUtil.getSocketTimeout().toMillis()) {
+ // if the request took more than socket timeout log an error
+ long currentMillis = System.currentTimeMillis();
+ if ((currentMillis - startTimePerRequest) > HttpUtil.getSocketTimeout().toMillis()) {
logger.warn(
- "HTTP request took longer than 5 min: {} sec",
- (System.currentTimeMillis() - startTimePerRequest) / 1000);
+ "{}HTTP request took longer than socket timeout {} ms: {} ms",
+ requestIdStr,
+ HttpUtil.getSocketTimeout().toMillis(),
+ (currentMillis - startTimePerRequest));
}
StringWriter sw = new StringWriter();
savedEx.printStackTrace(new PrintWriter(sw));
logger.debug(
- "Exception encountered for: {}, {}, {}",
+ "{}Exception encountered for: {}, {}, {}",
+ requestIdStr,
requestInfoScrubbed,
ex.getLocalizedMessage(),
(ArgSupplier) sw::toString);
@@ -281,7 +325,11 @@ public static CloseableHttpResponse execute(
|| isNonRetryableHTTPCode(response, retryHTTP403)) {
String msg = "Unknown cause";
if (response != null) {
- logger.debug("HTTP response code: {}", response.getStatusLine().getStatusCode());
+ logger.debug(
+ "{}HTTP response code for request {}: {}",
+ requestIdStr,
+ requestInfoScrubbed,
+ response.getStatusLine().getStatusCode());
msg =
"StatusCode: "
+ response.getStatusLine().getStatusCode()
@@ -295,13 +343,16 @@ public static CloseableHttpResponse execute(
if (response == null || response.getStatusLine().getStatusCode() != 200) {
logger.debug(
- "Error response not retryable, " + msg + ", request: {}", requestInfoScrubbed);
+ "{}Error response not retryable, " + msg + ", request: {}",
+ requestIdStr,
+ requestInfoScrubbed);
EventUtil.triggerBasicEvent(
- Event.EventType.NETWORK_ERROR, msg + ", Request: " + httpRequest.toString(), false);
+ Event.EventType.NETWORK_ERROR, msg + ", Request: " + httpRequest, false);
}
breakRetryReason = "status code does not need retry";
if (noRetry) {
- logger.debug("HTTP retry disabled for this request. noRetry: {}", noRetry);
+ logger.debug(
+ "{}HTTP retry disabled for this request. noRetry: {}", requestIdStr, noRetry);
breakRetryReason = "retry is disabled";
}
@@ -311,16 +362,18 @@ public static CloseableHttpResponse execute(
} else {
if (response != null) {
logger.debug(
- "HTTP response not ok: status code: {}, request: {}",
+ "{}HTTP response not ok: status code: {}, request: {}",
+ requestIdStr,
response.getStatusLine().getStatusCode(),
requestInfoScrubbed);
} else if (savedEx != null) {
logger.debug(
- "Null response for cause: {}, request: {}",
+ "{}Null response for cause: {}, request: {}",
+ requestIdStr,
getRootCause(savedEx).getMessage(),
requestInfoScrubbed);
} else {
- logger.debug("Null response for request: {}", requestInfoScrubbed);
+ logger.debug("{}Null response for request: {}", requestIdStr, requestInfoScrubbed);
}
// get the elapsed time for the last request
@@ -331,7 +384,7 @@ public static CloseableHttpResponse execute(
// check canceling flag
if (canceling != null && canceling.get()) {
- logger.debug("Stop retrying since canceling is requested", false);
+ logger.debug("{}Stop retrying since canceling is requested", requestIdStr);
breakRetryReason = "canceling is requested";
break;
}
@@ -349,9 +402,10 @@ public static CloseableHttpResponse execute(
if (elapsedMilliForTransientIssues > retryTimeoutInMilliseconds
&& retryCount >= MIN_RETRY_COUNT) {
logger.error(
- "Stop retrying since elapsed time due to network "
+ "{}Stop retrying since elapsed time due to network "
+ "issues has reached timeout. "
- + "Elapsed: {}(ms), timeout: {}(ms)",
+ + "Elapsed: {} ms, timeout: {} ms",
+ requestIdStr,
elapsedMilliForTransientIssues,
retryTimeoutInMilliseconds);
@@ -362,7 +416,10 @@ public static CloseableHttpResponse execute(
if (maxRetries > 0 && retryCount > maxRetries) {
// check for max retries.
logger.error(
- "Stop retrying as max retries have been reached! max retry count: {}", maxRetries);
+ "{}Stop retrying as max retries have been reached for request: {}! Max retry count: {}",
+ requestIdStr,
+ requestInfoScrubbed,
+ maxRetries);
breakRetryReason = "max retries reached";
breakRetryEventName = "HttpRequestRetryLimitExceeded";
}
@@ -433,30 +490,24 @@ public static CloseableHttpResponse execute(
// sleep for backoff - elapsed amount of time
if (backoffInMilli > elapsedMilliForLastCall) {
try {
- logger.debug("sleeping in {}(ms)", backoffInMilli);
+ logger.debug(
+ "{}Retry request {}: sleeping for {} ms",
+ requestIdStr,
+ requestInfoScrubbed,
+ backoffInMilli);
Thread.sleep(backoffInMilli);
- elapsedMilliForTransientIssues += backoffInMilli;
- if (isLoginRequest) {
- long jitteredBackoffInMilli = backoff.getJitterForLogin(backoffInMilli);
- backoffInMilli =
- (long)
- backoff.chooseRandom(
- jitteredBackoffInMilli + backoffInMilli,
- Math.pow(2, retryCount) + jitteredBackoffInMilli);
- } else {
- backoffInMilli = backoff.nextSleepTime(backoffInMilli);
- }
- if (retryTimeoutInMilliseconds > 0
- && (elapsedMilliForTransientIssues + backoffInMilli) > retryTimeoutInMilliseconds) {
- // If the timeout will be reached before the next backoff, just use the remaining
- // time.
- backoffInMilli =
- Math.min(
- backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues);
- }
} catch (InterruptedException ex1) {
- logger.debug("Backoff sleep before retrying login got interrupted", false);
+ logger.debug("{}Backoff sleep before retrying login got interrupted", requestIdStr);
}
+ elapsedMilliForTransientIssues += backoffInMilli;
+ backoffInMilli =
+ getNewBackoffInMilli(
+ backoffInMilli,
+ isLoginRequest,
+ backoff,
+ retryCount,
+ retryTimeoutInMilliseconds,
+ elapsedMilliForTransientIssues);
}
retryCount++;
@@ -504,15 +555,18 @@ public static CloseableHttpResponse execute(
if (response == null) {
if (savedEx != null) {
logger.error(
- "Returning null response: cause: {}, request: {}",
+ "{}Returning null response. Cause: {}, request: {}",
+ requestIdStr,
getRootCause(savedEx),
requestInfoScrubbed);
} else {
- logger.error("Returning null response for request: {}", requestInfoScrubbed);
+ logger.error(
+ "{}Returning null response for request: {}", requestIdStr, requestInfoScrubbed);
}
} else if (response.getStatusLine().getStatusCode() != 200) {
logger.error(
- "Error response: HTTP Response code: {}, request: {}",
+ "{}Error response: HTTP Response code: {}, request: {}",
+ requestIdStr,
response.getStatusLine().getStatusCode(),
requestInfoScrubbed);
}
@@ -554,9 +608,58 @@ public static CloseableHttpResponse execute(
}
}
+ if (logger.isDebugEnabled() && stopwatch != null) {
+ stopwatch.stop();
+ }
+ logger.debug(
+ "{}Execution of request {} took {} ms with total of {} retries",
+ requestIdStr,
+ requestInfoScrubbed,
+ stopwatch == null ? "n/a" : stopwatch.elapsedMillis(),
+ retryCount);
return response;
}
+ static long getNewBackoffInMilli(
+ long previousBackoffInMilli,
+ boolean isLoginRequest,
+ DecorrelatedJitterBackoff decorrelatedJitterBackoff,
+ int retryCount,
+ long retryTimeoutInMilliseconds,
+ long elapsedMilliForTransientIssues) {
+ long backoffInMilli;
+ if (isLoginRequest) {
+ long jitteredBackoffInMilli =
+ decorrelatedJitterBackoff.getJitterForLogin(previousBackoffInMilli);
+ backoffInMilli =
+ (long)
+ decorrelatedJitterBackoff.chooseRandom(
+ jitteredBackoffInMilli + previousBackoffInMilli,
+ Math.pow(2, retryCount) + jitteredBackoffInMilli);
+ } else {
+ backoffInMilli = decorrelatedJitterBackoff.nextSleepTime(previousBackoffInMilli);
+ }
+
+ backoffInMilli = Math.min(maxBackoffInMilli, Math.max(previousBackoffInMilli, backoffInMilli));
+
+ if (retryTimeoutInMilliseconds > 0
+ && (elapsedMilliForTransientIssues + backoffInMilli) > retryTimeoutInMilliseconds) {
+      // If the timeout would be reached before the next backoff, just use the remaining
+      // time (but never negative) - this is the only place where the backoff may fall
+      // outside the min-max range.
+ backoffInMilli =
+ Math.max(
+ 0,
+ Math.min(
+ backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues));
+ logger.debug(
+ "We are approaching retry timeout {}ms, setting backoff to {}ms",
+ retryTimeoutInMilliseconds,
+ backoffInMilli);
+ }
+ return backoffInMilli;
+ }
+
static boolean isNonRetryableHTTPCode(CloseableHttpResponse response, boolean retryHTTP403) {
return response != null
&& (response.getStatusLine().getStatusCode() < 500
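
Note (not part of the patch): a minimal standalone sketch of the clamping rules that getNewBackoffInMilli applies, for readers following the retry changes. The cap value, the jitter draw, and the class/method names below are illustrative assumptions, not the driver's actual constants or API.

// BackoffClampSketch.java - hypothetical, self-contained illustration only.
import java.util.concurrent.ThreadLocalRandom;

public class BackoffClampSketch {
  // Assumed cap standing in for the driver's maxBackoffInMilli constant.
  private static final long MAX_BACKOFF_IN_MILLI = 16_000L;

  static long nextBackoff(long previousBackoffInMilli, long retryTimeoutInMilli, long elapsedInMilli) {
    // A simple jittered draw stands in for DecorrelatedJitterBackoff.nextSleepTime(...).
    long candidate =
        ThreadLocalRandom.current().nextLong(previousBackoffInMilli, 3 * previousBackoffInMilli + 1);

    // Rule 1: never shrink below the previous backoff and never exceed the cap.
    long backoff = Math.min(MAX_BACKOFF_IN_MILLI, Math.max(previousBackoffInMilli, candidate));

    // Rule 2: if the retry timeout would be crossed, sleep only for the remaining budget,
    // clamped at zero so an already-exhausted budget never produces a negative sleep.
    if (retryTimeoutInMilli > 0 && elapsedInMilli + backoff > retryTimeoutInMilli) {
      backoff = Math.max(0, Math.min(backoff, retryTimeoutInMilli - elapsedInMilli));
    }
    return backoff;
  }

  public static void main(String[] args) {
    System.out.println(nextBackoff(1_000, 300_000, 2_500)); // stays within [1000, 16000]
    System.out.println(nextBackoff(8_000, 10_000, 9_500));  // only 500 ms of budget left
    System.out.println(nextBackoff(4_000, 10_000, 12_000)); // budget exhausted -> 0
  }
}
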
diff --git a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java
index c50bf4900..0bafbf12d 100644
--- a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java
+++ b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java
@@ -21,11 +21,15 @@
import net.snowflake.client.core.SFBaseResultSet;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFSession;
+import net.snowflake.client.log.SFLogger;
+import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.common.core.SqlState;
/** SFAsyncResultSet implementation. Note: For Snowflake internal use */
public class SFAsyncResultSet extends SnowflakeBaseResultSet
implements SnowflakeResultSet, ResultSet {
+ private static final SFLogger logger = SFLoggerFactory.getLogger(SFAsyncResultSet.class);
+
private ResultSet resultSetForNext = new SnowflakeResultSetV1.EmptyResultSet();
private boolean resultSetForNextInitialized = false;
private String queryID;
@@ -367,7 +371,7 @@ public boolean isBeforeFirst() throws SQLException {
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
-    logger.debug("public boolean isWrapperFor(Class<?> iface)", false);
+    logger.trace("boolean isWrapperFor(Class<?> iface)", false);
return iface.isInstance(this);
}
@@ -375,7 +379,7 @@ public boolean isWrapperFor(Class<?> iface) throws SQLException {
@SuppressWarnings("unchecked")
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
-    logger.debug("public <T> T unwrap(Class<T> iface)", false);
+    logger.trace("<T> T unwrap(Class<T> iface)", false);
if (!iface.isInstance(this)) {
throw new SQLException(
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java
index 692c7e412..d191b646c 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java
@@ -6,6 +6,7 @@
import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException;
+import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -38,7 +39,6 @@
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
-import net.snowflake.client.core.ArrowSqlInput;
import net.snowflake.client.core.ColumnTypeHelper;
import net.snowflake.client.core.JsonSqlInput;
import net.snowflake.client.core.ObjectMapperFactory;
@@ -52,7 +52,7 @@
/** Base class for query result set and metadata result set */
public abstract class SnowflakeBaseResultSet implements ResultSet {
- static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBaseResultSet.class);
+ private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBaseResultSet.class);
private final int resultSetType;
private final int resultSetConcurrency;
private final int resultSetHoldability;
@@ -150,7 +150,7 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException {
@Override
public InputStream getAsciiStream(int columnIndex) throws SQLException {
- logger.debug("public InputStream getAsciiStream(int columnIndex)", false);
+ logger.trace("InputStream getAsciiStream(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@@ -160,33 +160,33 @@ public InputStream getAsciiStream(int columnIndex) throws SQLException {
@Deprecated
@Override
public InputStream getUnicodeStream(int columnIndex) throws SQLException {
- logger.debug("public InputStream getUnicodeStream(int columnIndex)", false);
+ logger.trace("InputStream getUnicodeStream(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public InputStream getBinaryStream(int columnIndex) throws SQLException {
- logger.debug("public InputStream getBinaryStream(int columnIndex)", false);
+ logger.trace("InputStream getBinaryStream(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public String getString(String columnLabel) throws SQLException {
- logger.debug("public String getString(String columnLabel)", false);
+ logger.trace("String getString(String columnLabel)", false);
return getString(findColumn(columnLabel));
}
@Override
public boolean getBoolean(String columnLabel) throws SQLException {
- logger.debug("public boolean getBoolean(String columnLabel)", false);
+ logger.trace("boolean getBoolean(String columnLabel)", false);
return getBoolean(findColumn(columnLabel));
}
@Override
public byte getByte(String columnLabel) throws SQLException {
- logger.debug("public byte getByte(String columnLabel)", false);
+ logger.trace("byte getByte(String columnLabel)", false);
raiseSQLExceptionIfResultSetIsClosed();
return getByte(findColumn(columnLabel));
@@ -194,35 +194,35 @@ public byte getByte(String columnLabel) throws SQLException {
@Override
public short getShort(String columnLabel) throws SQLException {
- logger.debug("public short getShort(String columnLabel)", false);
+ logger.trace("short getShort(String columnLabel)", false);
return getShort(findColumn(columnLabel));
}
@Override
public int getInt(String columnLabel) throws SQLException {
- logger.debug("public int getInt(String columnLabel)", false);
+ logger.trace("int getInt(String columnLabel)", false);
return getInt(findColumn(columnLabel));
}
@Override
public long getLong(String columnLabel) throws SQLException {
- logger.debug("public long getLong(String columnLabel)", false);
+ logger.trace("long getLong(String columnLabel)", false);
return getLong(findColumn(columnLabel));
}
@Override
public float getFloat(String columnLabel) throws SQLException {
- logger.debug("public float getFloat(String columnLabel)", false);
+ logger.trace("float getFloat(String columnLabel)", false);
return getFloat(findColumn(columnLabel));
}
@Override
public double getDouble(String columnLabel) throws SQLException {
- logger.debug("public double getDouble(String columnLabel)", false);
+ logger.trace("double getDouble(String columnLabel)", false);
return getDouble(findColumn(columnLabel));
}
@@ -233,42 +233,42 @@ public double getDouble(String columnLabel) throws SQLException {
@Deprecated
@Override
public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
- logger.debug("public BigDecimal getBigDecimal(String columnLabel, " + "int scale)", false);
+ logger.trace("BigDecimal getBigDecimal(String columnLabel, " + "int scale)", false);
return getBigDecimal(findColumn(columnLabel), scale);
}
@Override
public byte[] getBytes(String columnLabel) throws SQLException {
- logger.debug("public byte[] getBytes(String columnLabel)", false);
+ logger.trace("byte[] getBytes(String columnLabel)", false);
return getBytes(findColumn(columnLabel));
}
@Override
public Date getDate(String columnLabel) throws SQLException {
- logger.debug("public Date getDate(String columnLabel)", false);
+ logger.trace("Date getDate(String columnLabel)", false);
return getDate(findColumn(columnLabel));
}
@Override
public Time getTime(String columnLabel) throws SQLException {
- logger.debug("public Time getTime(String columnLabel)", false);
+ logger.trace("Time getTime(String columnLabel)", false);
return getTime(findColumn(columnLabel));
}
@Override
public Timestamp getTimestamp(String columnLabel) throws SQLException {
- logger.debug("public Timestamp getTimestamp(String columnLabel)", false);
+ logger.trace("Timestamp getTimestamp(String columnLabel)", false);
return getTimestamp(findColumn(columnLabel));
}
@Override
public InputStream getAsciiStream(String columnLabel) throws SQLException {
- logger.debug("public InputStream getAsciiStream(String columnLabel)", false);
+ logger.trace("InputStream getAsciiStream(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@@ -278,55 +278,55 @@ public InputStream getAsciiStream(String columnLabel) throws SQLException {
@Deprecated
@Override
public InputStream getUnicodeStream(String columnLabel) throws SQLException {
- logger.debug("public InputStream getUnicodeStream(String columnLabel)", false);
+ logger.trace("InputStream getUnicodeStream(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public InputStream getBinaryStream(String columnLabel) throws SQLException {
- logger.debug("public InputStream getBinaryStream(String columnLabel)", false);
+ logger.trace("InputStream getBinaryStream(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public SQLWarning getWarnings() throws SQLException {
- logger.debug("public SQLWarning getWarnings()", false);
+ logger.trace("SQLWarning getWarnings()", false);
raiseSQLExceptionIfResultSetIsClosed();
return null;
}
@Override
public void clearWarnings() throws SQLException {
- logger.debug("public void clearWarnings()", false);
+ logger.trace("void clearWarnings()", false);
raiseSQLExceptionIfResultSetIsClosed();
}
@Override
public String getCursorName() throws SQLException {
- logger.debug("public String getCursorName()", false);
+ logger.trace("String getCursorName()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public ResultSetMetaData getMetaData() throws SQLException {
- logger.debug("public ResultSetMetaData getMetaData()", false);
+ logger.trace("ResultSetMetaData getMetaData()", false);
raiseSQLExceptionIfResultSetIsClosed();
return resultSetMetaData;
}
@Override
public Object getObject(String columnLabel) throws SQLException {
- logger.debug("public Object getObject(String columnLabel)", false);
+ logger.trace("Object getObject(String columnLabel)", false);
return getObject(findColumn(columnLabel));
}
@Override
public int findColumn(String columnLabel) throws SQLException {
- logger.debug("public int findColumn(String columnLabel)", false);
+ logger.trace("int findColumn(String columnLabel)", false);
raiseSQLExceptionIfResultSetIsClosed();
int columnIndex = resultSetMetaData.getColumnIndex(columnLabel);
@@ -340,7 +340,7 @@ public int findColumn(String columnLabel) throws SQLException {
@Override
public Reader getCharacterStream(int columnIndex) throws SQLException {
- logger.debug("public Reader getCharacterStream(int columnIndex)", false);
+ logger.trace("Reader getCharacterStream(int columnIndex)", false);
raiseSQLExceptionIfResultSetIsClosed();
String streamData = getString(columnIndex);
return (streamData == null) ? null : new StringReader(streamData);
@@ -348,76 +348,76 @@ public Reader getCharacterStream(int columnIndex) throws SQLException {
@Override
public Reader getCharacterStream(String columnLabel) throws SQLException {
- logger.debug("public Reader getCharacterStream(String columnLabel)", false);
+ logger.trace("Reader getCharacterStream(String columnLabel)", false);
return getCharacterStream(findColumn(columnLabel));
}
@Override
public BigDecimal getBigDecimal(String columnLabel) throws SQLException {
- logger.debug("public BigDecimal getBigDecimal(String columnLabel)", false);
+ logger.trace("BigDecimal getBigDecimal(String columnLabel)", false);
return getBigDecimal(findColumn(columnLabel));
}
@Override
public void beforeFirst() throws SQLException {
- logger.debug("public void beforeFirst()", false);
+ logger.trace("void beforeFirst()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void afterLast() throws SQLException {
- logger.debug("public void afterLast()", false);
+ logger.trace("void afterLast()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean first() throws SQLException {
- logger.debug("public boolean first()", false);
+ logger.trace("boolean first()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean last() throws SQLException {
- logger.debug("public boolean last()", false);
+ logger.trace("boolean last()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean absolute(int row) throws SQLException {
- logger.debug("public boolean absolute(int row)", false);
+ logger.trace("boolean absolute(int row)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean relative(int rows) throws SQLException {
- logger.debug("public boolean relative(int rows)", false);
+ logger.trace("boolean relative(int rows)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean previous() throws SQLException {
- logger.debug("public boolean previous()", false);
+ logger.trace("boolean previous()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public int getFetchDirection() throws SQLException {
- logger.debug("public int getFetchDirection()", false);
+ logger.trace("int getFetchDirection()", false);
raiseSQLExceptionIfResultSetIsClosed();
return ResultSet.FETCH_FORWARD;
}
@Override
public void setFetchDirection(int direction) throws SQLException {
- logger.debug("public void setFetchDirection(int direction)", false);
+ logger.trace("void setFetchDirection(int direction)", false);
raiseSQLExceptionIfResultSetIsClosed();
if (direction != ResultSet.FETCH_FORWARD) {
@@ -427,14 +427,14 @@ public void setFetchDirection(int direction) throws SQLException {
@Override
public int getFetchSize() throws SQLException {
- logger.debug("public int getFetchSize()", false);
+ logger.trace("int getFetchSize()", false);
raiseSQLExceptionIfResultSetIsClosed();
return this.fetchSize;
}
@Override
public void setFetchSize(int rows) throws SQLException {
- logger.debug("public void setFetchSize(int rows)", false);
+ logger.trace("void setFetchSize(int rows)", false);
raiseSQLExceptionIfResultSetIsClosed();
this.fetchSize = rows;
@@ -442,140 +442,140 @@ public void setFetchSize(int rows) throws SQLException {
@Override
public int getType() throws SQLException {
- logger.debug("public int getType()", false);
+ logger.trace("int getType()", false);
raiseSQLExceptionIfResultSetIsClosed();
return resultSetType;
}
@Override
public int getConcurrency() throws SQLException {
- logger.debug("public int getConcurrency()", false);
+ logger.trace("int getConcurrency()", false);
raiseSQLExceptionIfResultSetIsClosed();
return resultSetConcurrency;
}
@Override
public boolean rowUpdated() throws SQLException {
- logger.debug("public boolean rowUpdated()", false);
+ logger.trace("boolean rowUpdated()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean rowInserted() throws SQLException {
- logger.debug("public boolean rowInserted()", false);
+ logger.trace("boolean rowInserted()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public boolean rowDeleted() throws SQLException {
- logger.debug("public boolean rowDeleted()", false);
+ logger.trace("boolean rowDeleted()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNull(int columnIndex) throws SQLException {
- logger.debug("public void updateNull(int columnIndex)", false);
+ logger.trace("void updateNull(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBoolean(int columnIndex, boolean x) throws SQLException {
- logger.debug("public void updateBoolean(int columnIndex, boolean x)", false);
+ logger.trace("void updateBoolean(int columnIndex, boolean x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateByte(int columnIndex, byte x) throws SQLException {
- logger.debug("public void updateByte(int columnIndex, byte x)", false);
+ logger.trace("void updateByte(int columnIndex, byte x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateShort(int columnIndex, short x) throws SQLException {
- logger.debug("public void updateShort(int columnIndex, short x)", false);
+ logger.trace("void updateShort(int columnIndex, short x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateInt(int columnIndex, int x) throws SQLException {
- logger.debug("public void updateInt(int columnIndex, int x)", false);
+ logger.trace("void updateInt(int columnIndex, int x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateLong(int columnIndex, long x) throws SQLException {
- logger.debug("public void updateLong(int columnIndex, long x)", false);
+ logger.trace("void updateLong(int columnIndex, long x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateFloat(int columnIndex, float x) throws SQLException {
- logger.debug("public void updateFloat(int columnIndex, float x)", false);
+ logger.trace("void updateFloat(int columnIndex, float x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateDouble(int columnIndex, double x) throws SQLException {
- logger.debug("public void updateDouble(int columnIndex, double x)", false);
+ logger.trace("void updateDouble(int columnIndex, double x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
- logger.debug("public void updateBigDecimal(int columnIndex, BigDecimal x)", false);
+ logger.trace("void updateBigDecimal(int columnIndex, BigDecimal x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateString(int columnIndex, String x) throws SQLException {
- logger.debug("public void updateString(int columnIndex, String x)", false);
+ logger.trace("void updateString(int columnIndex, String x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBytes(int columnIndex, byte[] x) throws SQLException {
- logger.debug("public void updateBytes(int columnIndex, byte[] x)", false);
+ logger.trace("void updateBytes(int columnIndex, byte[] x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateDate(int columnIndex, Date x) throws SQLException {
- logger.debug("public void updateDate(int columnIndex, Date x)", false);
+ logger.trace("void updateDate(int columnIndex, Date x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateTime(int columnIndex, Time x) throws SQLException {
- logger.debug("public void updateTime(int columnIndex, Time x)", false);
+ logger.trace("void updateTime(int columnIndex, Time x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
- logger.debug("public void updateTimestamp(int columnIndex, Timestamp x)", false);
+ logger.trace("void updateTimestamp(int columnIndex, Timestamp x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateAsciiStream(int columnIndex, " + "InputStream x, int length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -583,7 +583,7 @@ public void updateAsciiStream(int columnIndex, InputStream x, int length) throws
@Override
public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateBinaryStream(int columnIndex, " + "InputStream x, int length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -591,7 +591,7 @@ public void updateBinaryStream(int columnIndex, InputStream x, int length) throw
@Override
public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateCharacterStream(int columnIndex, " + "Reader x, int length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -599,7 +599,7 @@ public void updateCharacterStream(int columnIndex, Reader x, int length) throws
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateObject(int columnIndex, Object x, " + "int scaleOrLength)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -607,112 +607,112 @@ public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQ
@Override
public void updateObject(int columnIndex, Object x) throws SQLException {
- logger.debug("public void updateObject(int columnIndex, Object x)", false);
+ logger.trace("void updateObject(int columnIndex, Object x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNull(String columnLabel) throws SQLException {
- logger.debug("public void updateNull(String columnLabel)", false);
+ logger.trace("void updateNull(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBoolean(String columnLabel, boolean x) throws SQLException {
- logger.debug("public void updateBoolean(String columnLabel, boolean x)", false);
+ logger.trace("void updateBoolean(String columnLabel, boolean x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateByte(String columnLabel, byte x) throws SQLException {
- logger.debug("public void updateByte(String columnLabel, byte x)", false);
+ logger.trace("void updateByte(String columnLabel, byte x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateShort(String columnLabel, short x) throws SQLException {
- logger.debug("public void updateShort(String columnLabel, short x)", false);
+ logger.trace("void updateShort(String columnLabel, short x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateInt(String columnLabel, int x) throws SQLException {
- logger.debug("public void updateInt(String columnLabel, int x)", false);
+ logger.trace("void updateInt(String columnLabel, int x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateLong(String columnLabel, long x) throws SQLException {
- logger.debug("public void updateLong(String columnLabel, long x)", false);
+ logger.trace("void updateLong(String columnLabel, long x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateFloat(String columnLabel, float x) throws SQLException {
- logger.debug("public void updateFloat(String columnLabel, float x)", false);
+ logger.trace("void updateFloat(String columnLabel, float x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateDouble(String columnLabel, double x) throws SQLException {
- logger.debug("public void updateDouble(String columnLabel, double x)", false);
+ logger.trace("void updateDouble(String columnLabel, double x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException {
- logger.debug("public void updateBigDecimal(String columnLabel, " + "BigDecimal x)", false);
+ logger.trace("void updateBigDecimal(String columnLabel, " + "BigDecimal x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateString(String columnLabel, String x) throws SQLException {
- logger.debug("public void updateString(String columnLabel, String x)", false);
+ logger.trace("void updateString(String columnLabel, String x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBytes(String columnLabel, byte[] x) throws SQLException {
- logger.debug("public void updateBytes(String columnLabel, byte[] x)", false);
+ logger.trace("void updateBytes(String columnLabel, byte[] x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateDate(String columnLabel, Date x) throws SQLException {
- logger.debug("public void updateDate(String columnLabel, Date x)", false);
+ logger.trace("void updateDate(String columnLabel, Date x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateTime(String columnLabel, Time x) throws SQLException {
- logger.debug("public void updateTime(String columnLabel, Time x)", false);
+ logger.trace("void updateTime(String columnLabel, Time x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException {
- logger.debug("public void updateTimestamp(String columnLabel, Timestamp x)", false);
+ logger.trace("void updateTimestamp(String columnLabel, Timestamp x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateAsciiStream(String columnLabel, " + "InputStream x, int length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -721,7 +721,7 @@ public void updateAsciiStream(String columnLabel, InputStream x, int length) thr
@Override
public void updateBinaryStream(String columnLabel, InputStream x, int length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateBinaryStream(String columnLabel, " + "InputStream x, int length)",
false);
@@ -731,7 +731,7 @@ public void updateBinaryStream(String columnLabel, InputStream x, int length)
@Override
public void updateCharacterStream(String columnLabel, Reader reader, int length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateCharacterStream(String columnLabel, " + "Reader reader,int length)",
false);
@@ -740,7 +740,7 @@ public void updateCharacterStream(String columnLabel, Reader reader, int length)
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateObject(String columnLabel, Object x, " + "int scaleOrLength)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -748,91 +748,91 @@ public void updateObject(String columnLabel, Object x, int scaleOrLength) throws
@Override
public void updateObject(String columnLabel, Object x) throws SQLException {
- logger.debug("public void updateObject(String columnLabel, Object x)", false);
+ logger.trace("void updateObject(String columnLabel, Object x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void insertRow() throws SQLException {
- logger.debug("public void insertRow()", false);
+ logger.trace("void insertRow()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateRow() throws SQLException {
- logger.debug("public void updateRow()", false);
+ logger.trace("void updateRow()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void deleteRow() throws SQLException {
- logger.debug("public void deleteRow()", false);
+ logger.trace("void deleteRow()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void refreshRow() throws SQLException {
- logger.debug("public void refreshRow()", false);
+ logger.trace("void refreshRow()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void cancelRowUpdates() throws SQLException {
- logger.debug("public void cancelRowUpdates()", false);
+ logger.trace("void cancelRowUpdates()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void moveToInsertRow() throws SQLException {
- logger.debug("public void moveToInsertRow()", false);
+ logger.trace("void moveToInsertRow()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void moveToCurrentRow() throws SQLException {
- logger.debug("public void moveToCurrentRow()", false);
+ logger.trace("void moveToCurrentRow()", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Statement getStatement() throws SQLException {
- logger.debug("public Statement getStatement()", false);
+ logger.trace("Statement getStatement()", false);
raiseSQLExceptionIfResultSetIsClosed();
return statement;
}
@Override
public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException {
-    logger.debug("public Object getObject(int columnIndex, Map<String, Class<?>> map)", false);
+    logger.trace("Object getObject(int columnIndex, Map<String, Class<?>> map)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Ref getRef(int columnIndex) throws SQLException {
- logger.debug("public Ref getRef(int columnIndex)", false);
+ logger.trace("Ref getRef(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Blob getBlob(int columnIndex) throws SQLException {
- logger.debug("public Blob getBlob(int columnIndex)", false);
+ logger.trace("Blob getBlob(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Clob getClob(int columnIndex) throws SQLException {
- logger.debug("public Clob getClob(int columnIndex)", false);
+ logger.trace("Clob getClob(int columnIndex)", false);
String columnValue = getString(columnIndex);
return columnValue == null ? null : new SnowflakeClob(columnValue);
@@ -840,14 +840,14 @@ public Clob getClob(int columnIndex) throws SQLException {
@Override
public Array getArray(int columnIndex) throws SQLException {
- logger.debug("public Array getArray(int columnIndex)", false);
+ logger.trace("Array getArray(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException {
-    logger.debug(
+    logger.trace(
"public Object getObject(String columnLabel, " + "Map<String, Class<?>> map)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -855,21 +855,21 @@ public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQ
@Override
public Ref getRef(String columnLabel) throws SQLException {
- logger.debug("public Ref getRef(String columnLabel)", false);
+ logger.trace("Ref getRef(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Blob getBlob(String columnLabel) throws SQLException {
- logger.debug("public Blob getBlob(String columnLabel)", false);
+ logger.trace("Blob getBlob(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Clob getClob(String columnLabel) throws SQLException {
- logger.debug("public Clob getClob(String columnLabel)", false);
+ logger.trace("Clob getClob(String columnLabel)", false);
String columnValue = getString(columnLabel);
return columnValue == null ? null : new SnowflakeClob(columnValue);
@@ -877,258 +877,258 @@ public Clob getClob(String columnLabel) throws SQLException {
@Override
public Array getArray(String columnLabel) throws SQLException {
- logger.debug("public Array getArray(String columnLabel)", false);
+ logger.trace("Array getArray(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Date getDate(int columnIndex, Calendar cal) throws SQLException {
- logger.debug("public Date getDate(int columnIndex, Calendar cal)", false);
+ logger.trace("Date getDate(int columnIndex, Calendar cal)", false);
return getDate(columnIndex, cal.getTimeZone());
}
@Override
public Date getDate(String columnLabel, Calendar cal) throws SQLException {
- logger.debug("public Date getDate(String columnLabel, Calendar cal)", false);
+ logger.trace("Date getDate(String columnLabel, Calendar cal)", false);
return getDate(findColumn(columnLabel), cal.getTimeZone());
}
@Override
public Time getTime(int columnIndex, Calendar cal) throws SQLException {
- logger.debug("public Time getTime(int columnIndex, Calendar cal)", false);
+ logger.trace("Time getTime(int columnIndex, Calendar cal)", false);
return getTime(columnIndex);
}
@Override
public Time getTime(String columnLabel, Calendar cal) throws SQLException {
- logger.debug("public Time getTime(String columnLabel, Calendar cal)", false);
+ logger.trace("Time getTime(String columnLabel, Calendar cal)", false);
return getTime(columnLabel);
}
@Override
public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
- logger.debug("public Timestamp getTimestamp(int columnIndex, Calendar cal)", false);
+ logger.trace("Timestamp getTimestamp(int columnIndex, Calendar cal)", false);
return getTimestamp(columnIndex, cal.getTimeZone());
}
@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException {
- logger.debug("public Timestamp getTimestamp(String columnLabel, " + "Calendar cal)", false);
+ logger.trace("Timestamp getTimestamp(String columnLabel, " + "Calendar cal)", false);
return getTimestamp(findColumn(columnLabel), cal.getTimeZone());
}
@Override
public URL getURL(int columnIndex) throws SQLException {
- logger.debug("public URL getURL(int columnIndex)", false);
+ logger.trace("URL getURL(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public URL getURL(String columnLabel) throws SQLException {
- logger.debug("public URL getURL(String columnLabel)", false);
+ logger.trace("URL getURL(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateRef(int columnIndex, Ref x) throws SQLException {
- logger.debug("public void updateRef(int columnIndex, Ref x)", false);
+ logger.trace("void updateRef(int columnIndex, Ref x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateRef(String columnLabel, Ref x) throws SQLException {
- logger.debug("public void updateRef(String columnLabel, Ref x)", false);
+ logger.trace("void updateRef(String columnLabel, Ref x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBlob(int columnIndex, Blob x) throws SQLException {
- logger.debug("public void updateBlob(int columnIndex, Blob x)", false);
+ logger.trace("void updateBlob(int columnIndex, Blob x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBlob(String columnLabel, Blob x) throws SQLException {
- logger.debug("public void updateBlob(String columnLabel, Blob x)", false);
+ logger.trace("void updateBlob(String columnLabel, Blob x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateClob(int columnIndex, Clob x) throws SQLException {
- logger.debug("public void updateClob(int columnIndex, Clob x)", false);
+ logger.trace("void updateClob(int columnIndex, Clob x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateClob(String columnLabel, Clob x) throws SQLException {
- logger.debug("public void updateClob(String columnLabel, Clob x)", false);
+ logger.trace("void updateClob(String columnLabel, Clob x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateArray(int columnIndex, Array x) throws SQLException {
- logger.debug("public void updateArray(int columnIndex, Array x)", false);
+ logger.trace("void updateArray(int columnIndex, Array x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateArray(String columnLabel, Array x) throws SQLException {
- logger.debug("public void updateArray(String columnLabel, Array x)", false);
+ logger.trace("void updateArray(String columnLabel, Array x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public RowId getRowId(int columnIndex) throws SQLException {
- logger.debug("public RowId getRowId(int columnIndex)", false);
+ logger.trace("RowId getRowId(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public RowId getRowId(String columnLabel) throws SQLException {
- logger.debug("public RowId getRowId(String columnLabel)", false);
+ logger.trace("RowId getRowId(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateRowId(int columnIndex, RowId x) throws SQLException {
- logger.debug("public void updateRowId(int columnIndex, RowId x)", false);
+ logger.trace("void updateRowId(int columnIndex, RowId x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateRowId(String columnLabel, RowId x) throws SQLException {
- logger.debug("public void updateRowId(String columnLabel, RowId x)", false);
+ logger.trace("void updateRowId(String columnLabel, RowId x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public int getHoldability() throws SQLException {
- logger.debug("public int getHoldability()", false);
+ logger.trace("int getHoldability()", false);
raiseSQLExceptionIfResultSetIsClosed();
return resultSetHoldability;
}
@Override
public void updateNString(int columnIndex, String nString) throws SQLException {
- logger.debug("public void updateNString(int columnIndex, String nString)", false);
+ logger.trace("void updateNString(int columnIndex, String nString)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNString(String columnLabel, String nString) throws SQLException {
- logger.debug("public void updateNString(String columnLabel, String nString)", false);
+ logger.trace("void updateNString(String columnLabel, String nString)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
- logger.debug("public void updateNClob(int columnIndex, NClob nClob)", false);
+ logger.trace("void updateNClob(int columnIndex, NClob nClob)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
- logger.debug("public void updateNClob(String columnLabel, NClob nClob)", false);
+ logger.trace("void updateNClob(String columnLabel, NClob nClob)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public NClob getNClob(int columnIndex) throws SQLException {
- logger.debug("public NClob getNClob(int columnIndex)", false);
+ logger.trace("NClob getNClob(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public NClob getNClob(String columnLabel) throws SQLException {
- logger.debug("public NClob getNClob(String columnLabel)", false);
+ logger.trace("NClob getNClob(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public SQLXML getSQLXML(int columnIndex) throws SQLException {
- logger.debug("public SQLXML getSQLXML(int columnIndex)", false);
+ logger.trace("SQLXML getSQLXML(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public SQLXML getSQLXML(String columnLabel) throws SQLException {
- logger.debug("public SQLXML getSQLXML(String columnLabel)", false);
+ logger.trace("SQLXML getSQLXML(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
- logger.debug("public void updateSQLXML(int columnIndex, SQLXML xmlObject)", false);
+ logger.trace("void updateSQLXML(int columnIndex, SQLXML xmlObject)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
- logger.debug("public void updateSQLXML(String columnLabel, SQLXML xmlObject)", false);
+ logger.trace("void updateSQLXML(String columnLabel, SQLXML xmlObject)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public String getNString(int columnIndex) throws SQLException {
- logger.debug("public String getNString(int columnIndex)", false);
+ logger.trace("String getNString(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public String getNString(String columnLabel) throws SQLException {
- logger.debug("public String getNString(String columnLabel)", false);
+ logger.trace("String getNString(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Reader getNCharacterStream(int columnIndex) throws SQLException {
- logger.debug("public Reader getNCharacterStream(int columnIndex)", false);
+ logger.trace("Reader getNCharacterStream(int columnIndex)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public Reader getNCharacterStream(String columnLabel) throws SQLException {
- logger.debug("public Reader getNCharacterStream(String columnLabel)", false);
+ logger.trace("Reader getNCharacterStream(String columnLabel)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateNCharacterStream(int columnIndex, " + "Reader x, long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1137,7 +1137,7 @@ public void updateNCharacterStream(int columnIndex, Reader x, long length) throw
@Override
public void updateNCharacterStream(String columnLabel, Reader reader, long length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateNCharacterStream(String columnLabel, " + "Reader reader,long length)",
false);
@@ -1146,7 +1146,7 @@ public void updateNCharacterStream(String columnLabel, Reader reader, long lengt
@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateAsciiStream(int columnIndex, " + "InputStream x, long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1154,7 +1154,7 @@ public void updateAsciiStream(int columnIndex, InputStream x, long length) throw
@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateBinaryStream(int columnIndex, " + "InputStream x, long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1162,7 +1162,7 @@ public void updateBinaryStream(int columnIndex, InputStream x, long length) thro
@Override
public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateCharacterStream(int columnIndex, Reader x, " + "long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1171,7 +1171,7 @@ public void updateCharacterStream(int columnIndex, Reader x, long length) throws
@Override
public void updateAsciiStream(String columnLabel, InputStream x, long length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateAsciiStream(String columnLabel, " + "InputStream x, long length)",
false);
@@ -1181,7 +1181,7 @@ public void updateAsciiStream(String columnLabel, InputStream x, long length)
@Override
public void updateBinaryStream(String columnLabel, InputStream x, long length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateBinaryStream(String columnLabel, " + "InputStream x, long length)",
false);
@@ -1191,7 +1191,7 @@ public void updateBinaryStream(String columnLabel, InputStream x, long length)
@Override
public void updateCharacterStream(String columnLabel, Reader reader, long length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateCharacterStream(String columnLabel, " + "Reader reader,long length)",
false);
@@ -1201,7 +1201,7 @@ public void updateCharacterStream(String columnLabel, Reader reader, long length
@Override
public void updateBlob(int columnIndex, InputStream inputStream, long length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateBlob(int columnIndex, InputStream " + "inputStream, long length)",
false);
@@ -1211,7 +1211,7 @@ public void updateBlob(int columnIndex, InputStream inputStream, long length)
@Override
public void updateBlob(String columnLabel, InputStream inputStream, long length)
throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateBlob(String columnLabel, " + "InputStream inputStream,long length)",
false);
@@ -1220,14 +1220,14 @@ public void updateBlob(String columnLabel, InputStream inputStream, long length)
@Override
public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
- logger.debug("public void updateClob(int columnIndex, Reader reader, " + "long length)", false);
+ logger.trace("void updateClob(int columnIndex, Reader reader, " + "long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateClob(String columnLabel, Reader reader, " + "long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1235,7 +1235,7 @@ public void updateClob(String columnLabel, Reader reader, long length) throws SQ
@Override
public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateNClob(int columnIndex, Reader reader, " + "long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1243,7 +1243,7 @@ public void updateNClob(int columnIndex, Reader reader, long length) throws SQLE
@Override
public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateNClob(String columnLabel, Reader reader, " + "long length)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1251,14 +1251,14 @@ public void updateNClob(String columnLabel, Reader reader, long length) throws S
@Override
public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
- logger.debug("public void updateNCharacterStream(int columnIndex, Reader x)", false);
+ logger.trace("void updateNCharacterStream(int columnIndex, Reader x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateNCharacterStream(String columnLabel, " + "Reader reader)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1266,42 +1266,42 @@ public void updateNCharacterStream(String columnLabel, Reader reader) throws SQL
@Override
public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
- logger.debug("public void updateAsciiStream(int columnIndex, InputStream x)", false);
+ logger.trace("void updateAsciiStream(int columnIndex, InputStream x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
- logger.debug("public void updateBinaryStream(int columnIndex, InputStream x)", false);
+ logger.trace("void updateBinaryStream(int columnIndex, InputStream x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
- logger.debug("public void updateCharacterStream(int columnIndex, Reader x)", false);
+ logger.trace("void updateCharacterStream(int columnIndex, Reader x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
- logger.debug("public void updateAsciiStream(String columnLabel, InputStream x)", false);
+ logger.trace("void updateAsciiStream(String columnLabel, InputStream x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
- logger.debug("public void updateBinaryStream(String columnLabel, InputStream x)", false);
+ logger.trace("void updateBinaryStream(String columnLabel, InputStream x)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
- logger.debug(
+ logger.trace(
"public void updateCharacterStream(String columnLabel, " + "Reader reader)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -1309,52 +1309,54 @@ public void updateCharacterStream(String columnLabel, Reader reader) throws SQLE
@Override
public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
- logger.debug("public void updateBlob(int columnIndex, InputStream inputStream)", false);
+ logger.trace("void updateBlob(int columnIndex, InputStream inputStream)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
- logger.debug("public void updateBlob(String columnLabel, InputStream " + "inputStream)", false);
+ logger.trace("void updateBlob(String columnLabel, InputStream " + "inputStream)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateClob(int columnIndex, Reader reader) throws SQLException {
- logger.debug("public void updateClob(int columnIndex, Reader reader)", false);
+ logger.trace("void updateClob(int columnIndex, Reader reader)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateClob(String columnLabel, Reader reader) throws SQLException {
- logger.debug("public void updateClob(String columnLabel, Reader reader)", false);
+ logger.trace("void updateClob(String columnLabel, Reader reader)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNClob(int columnIndex, Reader reader) throws SQLException {
- logger.debug("public void updateNClob(int columnIndex, Reader reader)", false);
+ logger.trace("void updateNClob(int columnIndex, Reader reader)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public void updateNClob(String columnLabel, Reader reader) throws SQLException {
- logger.debug("public void updateNClob(String columnLabel, Reader reader)", false);
+ logger.trace("void updateNClob(String columnLabel, Reader reader)", false);
throw new SnowflakeLoggedFeatureNotSupportedException(session);
}
@Override
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
-    logger.debug("public <T> T getObject(int columnIndex,Class<T> type)", false);
+    logger.trace("<T> T getObject(int columnIndex,Class<T> type)", false);
if (resultSetMetaData.isStructuredTypeColumn(columnIndex)) {
if (SQLData.class.isAssignableFrom(type)) {
- SQLInput sqlInput = (SQLInput) getObject(columnIndex);
+ SQLInput sqlInput =
+ SnowflakeUtil.mapSFExceptionToSQLException(
+ () -> (SQLInput) sfBaseResultSet.getObject(columnIndex));
if (sqlInput == null) {
return null;
} else {
@@ -1366,12 +1368,17 @@ public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
Object object = getObject(columnIndex);
if (object == null) {
return null;
- } else if (object instanceof JsonSqlInput) {
- JsonNode jsonNode = ((JsonSqlInput) object).getInput();
- return (T)
- OBJECT_MAPPER.convertValue(jsonNode, new TypeReference