diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md
index 2c4c5bc91..dcbb8109f 100644
--- a/.github/ISSUE_TEMPLATE/BUG_REPORT.md
+++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md
@@ -37,4 +37,5 @@ In order to accurately debug the issue this information is required. Thanks!
https://community.snowflake.com/s/article/How-to-generate-log-file-on-Snowflake-connectors
-7. What is your Snowflake account identifier, if any? (Optional)
+ Before sharing any information, please be sure to review the log and remove any sensitive
+ information.
diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
index 83c2ada99..a3b4e6517 100644
--- a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
+++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
@@ -17,6 +17,4 @@ otherwise continue here.
## How would this improve `snowflake-jdbc`?
## References, Other Background
-
-## What is your Snowflake account identifier, if any?
diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index 190425de4..c93f081f0 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -29,7 +29,7 @@ jobs:
name: Build
runs-on: ubuntu-20.04
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v4
- name: Build
shell: bash
env:
@@ -38,13 +38,16 @@ jobs:
test-windows:
needs: build
- name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }}
+ name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }}
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}]
- category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips']
+ category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'},
+ {suites: 'OthersTestSuite', name: 'TestCategoryOthers'},
+ {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'},
+ {suites: 'FipsTestSuite', name: "TestCategoryFips"}]
additionalMavenProfile: ['']
steps:
- uses: actions/checkout@v4
@@ -53,7 +56,7 @@ jobs:
java-version: ${{ matrix.runConfig.javaVersion }}
distribution: 'temurin'
cache: maven
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.7'
architecture: 'x64'
@@ -62,19 +65,22 @@ jobs:
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }}
- JDBC_TEST_CATEGORY: ${{ matrix.category }}
+ JDBC_TEST_SUITES: ${{ matrix.category.suites }}
ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }}
run: ci\\test_windows.bat
test-mac:
needs: build
- name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }}
+ name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }}
runs-on: macos-13
strategy:
fail-fast: false
matrix:
runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}]
- category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips']
+ category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'},
+ {suites: 'OthersTestSuite', name: 'TestCategoryOthers'},
+ {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'},
+ {suites: 'FipsTestSuite', name: "TestCategoryFips"}]
additionalMavenProfile: ['']
steps:
- uses: actions/checkout@v4
@@ -83,7 +89,7 @@ jobs:
java-version: ${{ matrix.runConfig.javaVersion }}
distribution: 'temurin'
cache: maven
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.7'
- name: Install Homebrew Bash
@@ -94,51 +100,56 @@ jobs:
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }}
- JDBC_TEST_CATEGORY: ${{ matrix.category }}
+ JDBC_TEST_SUITES: ${{ matrix.category.suites }}
ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }}
run: /usr/local/bin/bash ./ci/test_mac.sh
test-linux:
needs: build
- name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }}
+ name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ]
cloud: [ 'AWS', 'AZURE', 'GCP' ]
- category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips']
+ category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'},
+ {suites: 'OthersTestSuite', name: 'TestCategoryOthers'},
+ {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'},
+ {suites: 'FipsTestSuite', name: "TestCategoryFips"}]
additionalMavenProfile: ['', '-Dthin-jar']
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v4
- name: Tests
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }}
- JDBC_TEST_CATEGORY: ${{ matrix.category }}
+ JDBC_TEST_SUITES: ${{ matrix.category.suites }}
ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }}
run: ./ci/test.sh
test-linux-old-driver:
- name: Old JDBC ${{ matrix.category }} on ${{ matrix.image }}
+ name: Old JDBC ${{ matrix.category.name }} on ${{ matrix.image }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
image: [ 'jdbc-centos7-openjdk8' ]
cloud: [ 'AWS' ]
- category: ['TestCategoryResultSet,TestCategoryOthers', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryCore,TestCategoryLoader']
+ category: [{suites: 'OthersOldDriverTestSuite', name: 'TestCategoryOthers'},
+ {suites: 'ConnectionOldDriverTestSuite,StatementOldDriverTestSuite', name: 'TestCategoryConnection,TestCategoryStatement'},
+ {suites: 'LoaderOldDriverTestSuite,ResultSetOldDriverTestSuite', name: 'TestCategoryLoader,TestCategoryResultSet'}]
is_old_driver: ['true']
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v4
- name: Tests
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }}
- JDBC_TEST_CATEGORY: ${{ matrix.category }}
+ JDBC_TEST_SUITES: ${{ matrix.category.suites }}
is_old_driver: ${{ matrix.is_old_driver }}
run: ./ci/test.sh
diff --git a/.github/workflows/check-style.yml b/.github/workflows/check-style.yml
index 221651298..d26f41865 100644
--- a/.github/workflows/check-style.yml
+++ b/.github/workflows/check-style.yml
@@ -9,7 +9,7 @@ jobs:
name: Check Style
runs-on: ubuntu-20.04
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v4
- name: Check Style
shell: bash
run: mvn clean validate --batch-mode --show-version -P check-style
diff --git a/.github/workflows/jira_close.yml b/.github/workflows/jira_close.yml
index dfcb8bc73..0dacf7fab 100644
--- a/.github/workflows/jira_close.yml
+++ b/.github/workflows/jira_close.yml
@@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
repository: snowflakedb/gh-actions
ref: jira_v1
diff --git a/.github/workflows/jira_issue.yml b/.github/workflows/jira_issue.yml
index 943ad70aa..92501da8f 100644
--- a/.github/workflows/jira_issue.yml
+++ b/.github/workflows/jira_issue.yml
@@ -14,7 +14,7 @@ jobs:
if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.pull_request.user.login != 'whitesource-for-github-com[bot]'))
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
repository: snowflakedb/gh-actions
ref: jira_v1
diff --git a/.github/workflows/snyk-issue.yml b/.github/workflows/snyk-issue.yml
index 7b58bb12a..1e36dae35 100644
--- a/.github/workflows/snyk-issue.yml
+++ b/.github/workflows/snyk-issue.yml
@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: checkout action
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
repository: snowflakedb/whitesource-actions
token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }}
diff --git a/.github/workflows/snyk-pr.yml b/.github/workflows/snyk-pr.yml
index 5fc21951b..0c101e391 100644
--- a/.github/workflows/snyk-pr.yml
+++ b/.github/workflows/snyk-pr.yml
@@ -15,13 +15,13 @@ jobs:
if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }}
steps:
- name: checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
fetch-depth: 0
- name: checkout action
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
repository: snowflakedb/whitesource-actions
token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }}
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index b83a77291..76e948b1d 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,3 +1,11 @@
+**JDBC Driver 3.20.0**
+
+- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc
+
+**JDBC Driver 3.19.1**
+
+- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc
+
**JDBC Driver 3.19.0**
- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc
diff --git a/FIPS/pom.xml b/FIPS/pom.xml
index 04fa6a5f5..78e83700d 100644
--- a/FIPS/pom.xml
+++ b/FIPS/pom.xml
@@ -5,29 +5,17 @@
net.snowflakesnowflake-jdbc-parent
- 3.19.0
+ 3.20.1-SNAPSHOT../parent-pom.xmlsnowflake-jdbc-fips
- 3.19.0
+ 3.20.1-SNAPSHOTjarsnowflake-jdbc-fipshttp://maven.apache.org
-
-
- Central
- Internal Central Repo2
- default
- https://nexus.int.snowflakecomputing.com/repository/maven-central/
-
- false
-
-
-
-
3.3.9
@@ -409,10 +397,59 @@
com.google${shadeBase}.google
+
+
+ google.api
+ ${shadeBase}.google.api
+
+
+ google.apps
+ ${shadeBase}.google.apps
+
+
+ google.cloud
+ ${shadeBase}.google.cloud
+ google.geo${shadeBase}.google.geo
+
+ google.iam
+ ${shadeBase}.google.iam
+
+
+ google.logging
+ ${shadeBase}.google.logging
+
+
+ google.longrunning
+ ${shadeBase}.google.longrunning
+
+
+ google.monitoring
+ ${shadeBase}.google.monitoring
+
+
+ google.protobuf
+ ${shadeBase}.google.protobuf
+
+
+ google.rpc
+ ${shadeBase}.google.rpc
+
+
+ google.shopping
+ ${shadeBase}.google.shopping
+
+
+ google.storage
+ ${shadeBase}.google.storage
+
+
+ google.type
+ ${shadeBase}.google.type
+ org.joda${shadeBase}.joda
@@ -457,53 +494,37 @@
com.carrotsearch${shadeBase}.com.carrotsearch
-
- google.type
- ${shadeBase}.google.type
-
-
- google.rpc
- ${shadeBase}.google.rpc
-
-
- google.iam
- ${shadeBase}.google.iam
- io.opencensus${shadeBase}.opencensus
- org.threeten
- ${shadeBase}.threeten
-
-
- google.protobuf
- ${shadeBase}.google.protobuf
+ io.opentelemetry
+ ${shadeBase}.opentelemetry
- google.api
- ${shadeBase}.google.api
+ org.threeten
+ ${shadeBase}.threeten
-
- google.storage
- ${shadeBase}.google.storage
- io.grpc${shadeBase}.grpc
- google.longrunning
- ${shadeBase}.google.longrunning
+ META-INF.native.io_grpc_netty_shaded_netty_tcnative
+ META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_tcnative
- google.cloud
- ${shadeBase}.google.cloud
+ META-INF.native.libio_grpc_netty_shaded_netty_tcnative
+ META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_tcnative
- google.logging
- ${shadeBase}.google.logging
+ META-INF.native.io_grpc_netty_shaded_netty_transport_native_epoll
+ META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll
+
+
+ META-INF.native.libio_grpc_netty_shaded_netty_transport_native_epoll
+ META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epollorg.checkerframework
@@ -521,6 +542,18 @@
org.conscrypt${shadeBase}.org.conscrypt
+
+ conscrypt_openjdk_jni
+ ${shadeNativeBase}_conscrypt_openjdk_jni
+
+
+ META-INF.native.conscrypt_openjdk_jni
+ META-INF.native.${shadeNativeBase}_conscrypt_openjdk_jni
+
+
+ META-INF.native.libconscrypt_openjdk_jni
+ META-INF.native.lib${shadeNativeBase}_conscrypt_openjdk_jni
+ opencensus${shadeBase}.opencensus
@@ -584,7 +617,9 @@
-
+
+
+ META-INF/io.netty.versions.properties
@@ -606,17 +641,24 @@
+
-
+
+
-
+
+
+
+
+
-
+
+
@@ -683,6 +725,13 @@
maven-failsafe-plugin
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ ${version.plugin.failsafe}
@@ -727,6 +776,13 @@
org.apache.maven.pluginsmaven-failsafe-plugin
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ ${version.plugin.failsafe}
diff --git a/FIPS/public_pom.xml b/FIPS/public_pom.xml
index d180e4a57..00bc9738c 100644
--- a/FIPS/public_pom.xml
+++ b/FIPS/public_pom.xml
@@ -32,8 +32,8 @@
- 1.0.2.4
- 1.0.5
+ 1.0.2.5
+ 1.0.75.13.0
diff --git a/FIPS/scripts/check_content.sh b/FIPS/scripts/check_content.sh
index 8b818b1b4..a30eacec6 100755
--- a/FIPS/scripts/check_content.sh
+++ b/FIPS/scripts/check_content.sh
@@ -1,12 +1,12 @@
#!/bin/bash -e
-# scripts used to check if all dependency is shaded into snowflake internal path
+# scripts used to check if all dependencies are shaded into snowflake internal path
set -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-if jar tvf $DIR/../target/snowflake-jdbc-fips.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types; then
+if jar tvf $DIR/../target/snowflake-jdbc-fips.jar | awk '{print $8}' | grep -v -E "/$" | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types | grep -v -E "^com/github/luben/zstd/" | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then
echo "[ERROR] JDBC jar includes class not under the snowflake namespace"
exit 1
fi
diff --git a/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java b/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java
index 05c389208..360a1fcbb 100644
--- a/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java
+++ b/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java
@@ -21,12 +21,10 @@
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.junit.Rule;
/** Base test class with common constants, data structures and methods */
public class AbstractDriverIT {
// This is required to use ConditionalIgnore annotation.
- @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule();
public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver";
public static final String DRIVER_CLASS_COM = "com.snowflake.client.jdbc.SnowflakeDriver";
diff --git a/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java b/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java
deleted file mode 100644
index fe20883db..000000000
--- a/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package net.snowflake.client;
-
-/*
- * Created by hyu on 1/22/18.
- */
-
-/*
-Copyright (c) 2013,2014 RĂ¼diger Herrmann
-All rights reserved. This program and the accompanying materials
-are made available under the terms of the Eclipse Public License v1.0
-which accompanies this distribution, and is available at
-http://www.eclipse.org/legal/epl-v10.html
-
-Contributors:
-RĂ¼diger Herrmann - initial API and implementation
-Matt Morrissette - allow to use non-static inner IgnoreConditions
-*/
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import java.lang.reflect.Modifier;
-import org.junit.Assume;
-import org.junit.rules.MethodRule;
-import org.junit.runners.model.FrameworkMethod;
-import org.junit.runners.model.Statement;
-
-public class ConditionalIgnoreRule implements MethodRule {
-
- public interface IgnoreCondition {
- boolean isSatisfied();
- }
-
- @Retention(RetentionPolicy.RUNTIME)
- @Target({ElementType.METHOD})
- public @interface ConditionalIgnore {
- Class extends IgnoreCondition> condition();
- }
-
- @Override
- public Statement apply(Statement base, FrameworkMethod method, Object target) {
- Statement result = base;
- if (hasConditionalIgnoreAnnotation(method)) {
- IgnoreCondition condition = getIgnoreCondition(target, method);
- if (condition.isSatisfied()) {
- result = new IgnoreStatement(condition);
- }
- }
- return result;
- }
-
- private static boolean hasConditionalIgnoreAnnotation(FrameworkMethod method) {
- return method.getAnnotation(ConditionalIgnore.class) != null;
- }
-
- private static IgnoreCondition getIgnoreCondition(Object target, FrameworkMethod method) {
- ConditionalIgnore annotation = method.getAnnotation(ConditionalIgnore.class);
- return new IgnoreConditionCreator(target, annotation).create();
- }
-
- private static class IgnoreConditionCreator {
- private final Object target;
- private final Class extends IgnoreCondition> conditionType;
-
- IgnoreConditionCreator(Object target, ConditionalIgnore annotation) {
- this.target = target;
- this.conditionType = annotation.condition();
- }
-
- IgnoreCondition create() {
- checkConditionType();
- try {
- return createCondition();
- } catch (RuntimeException re) {
- throw re;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private IgnoreCondition createCondition() throws Exception {
- IgnoreCondition result;
- if (isConditionTypeStandalone()) {
- result = conditionType.newInstance();
- } else {
- result = conditionType.getDeclaredConstructor(target.getClass()).newInstance(target);
- }
- return result;
- }
-
- private void checkConditionType() {
- if (!isConditionTypeStandalone() && !isConditionTypeDeclaredInTarget()) {
- String msg =
- "Conditional class '%s' is a member class "
- + "but was not declared inside the test case using it.\n"
- + "Either make this class a static class, "
- + "standalone class (by declaring it in it's own file) "
- + "or move it inside the test case using it";
- throw new IllegalArgumentException(String.format(msg, conditionType.getName()));
- }
- }
-
- private boolean isConditionTypeStandalone() {
- return !conditionType.isMemberClass() || Modifier.isStatic(conditionType.getModifiers());
- }
-
- private boolean isConditionTypeDeclaredInTarget() {
- return target.getClass().isAssignableFrom(conditionType.getDeclaringClass());
- }
- }
-
- private static class IgnoreStatement extends Statement {
- private final IgnoreCondition condition;
-
- IgnoreStatement(IgnoreCondition condition) {
- this.condition = condition;
- }
-
- @Override
- public void evaluate() {
- Assume.assumeTrue("Ignored by " + condition.getClass().getSimpleName(), false);
- }
- }
-}
diff --git a/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java b/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java
new file mode 100644
index 000000000..ccdf83206
--- /dev/null
+++ b/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)GCP(?-i)")
+public @interface DontRunOnGCP {}
\ No newline at end of file
diff --git a/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java b/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java
new file mode 100644
index 000000000..98232e097
--- /dev/null
+++ b/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*")
+public @interface DontRunOnGithubActions {}
diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java
deleted file mode 100644
index c902dc5f9..000000000
--- a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Run tests only on specified cloud provider or ignore */
-public class RunningOnGCP implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER");
- return cloudProvider != null && cloudProvider.equalsIgnoreCase("GCP");
- }
-}
diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java
deleted file mode 100644
index d717b65dc..000000000
--- a/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java
+++ /dev/null
@@ -1,11 +0,0 @@
-/*
- * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Run tests on CI */
-public class RunningOnGithubActions implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null;
- }
-}
diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java b/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java
deleted file mode 100644
index e69de29bb..000000000
diff --git a/FIPS/src/test/java/net/snowflake/client/TestUtil.java b/FIPS/src/test/java/net/snowflake/client/TestUtil.java
index 703d59953..8bec5498f 100644
--- a/FIPS/src/test/java/net/snowflake/client/TestUtil.java
+++ b/FIPS/src/test/java/net/snowflake/client/TestUtil.java
@@ -9,7 +9,7 @@
import net.snowflake.client.core.SFException;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
-import org.junit.Assert;
+import org.junit.jupiter.api.Assertions;
public class TestUtil {
private static final SFLogger logger = SFLoggerFactory.getLogger(TestUtil.class);
@@ -22,7 +22,7 @@ public class TestUtil {
public static void assertSFException(int errorCode, TestRunInterface testCode) {
try {
testCode.run();
- Assert.fail();
+ Assertions.fail();
} catch (SFException e) {
assertThat(e.getVendorCode(), is(errorCode));
}
diff --git a/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java b/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java
new file mode 100644
index 000000000..d61ce2a83
--- /dev/null
+++ b/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.category;
+
+import org.junit.platform.suite.api.IncludeTags;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.platform.suite.api.ExcludePackages;
+import org.junit.platform.suite.api.IncludeClassNamePatterns;
+import org.junit.platform.suite.api.SelectPackages;
+import org.junit.platform.suite.api.Suite;
+import org.junit.platform.suite.api.SuiteDisplayName;
+
+@Suite
+@SelectPackages("net.snowflake.client")
+@ExcludePackages("net.snowflake.client.suites")
+@IncludeClassNamePatterns(".+")
+public class FipsTestSuite {
+}
diff --git a/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java b/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java
deleted file mode 100644
index 06ae9faad..000000000
--- a/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryFips {}
diff --git a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java
index c1509a6a8..0204e9a5d 100644
--- a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java
+++ b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java
@@ -3,7 +3,7 @@
*/
package net.snowflake.client.jdbc;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
import java.net.URL;
import java.nio.file.Files;
@@ -20,21 +20,20 @@
import java.util.Properties;
import javax.net.ssl.HttpsURLConnection;
import net.snowflake.client.AbstractDriverIT;
-import net.snowflake.client.ConditionalIgnoreRule;
-import net.snowflake.client.RunningOnGCP;
-import net.snowflake.client.RunningOnGithubActions;
-import net.snowflake.client.category.TestCategoryFips;
+import net.snowflake.client.DontRunOnGCP;
+import net.snowflake.client.DontRunOnGithubActions;
import net.snowflake.client.core.SecurityUtil;
import org.apache.commons.codec.binary.Base64;
import org.bouncycastle.crypto.CryptoServicesRegistrar;
import org.bouncycastle.crypto.fips.FipsStatus;
import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category(TestCategoryFips.class)
+
+@Tag("fips")
public class ConnectionFipsIT extends AbstractDriverIT {
private static final String JCE_PROVIDER_BOUNCY_CASTLE_FIPS = "BCFIPS";
private static final String JCE_PROVIDER_SUN_JCE = "SunJCE";
@@ -106,7 +105,7 @@ public class ConnectionFipsIT extends AbstractDriverIT {
private static int JCE_PROVIDER_SUN_JCE_PROVIDER_POSITION;
private static int JCE_PROVIDER_SUN_RSA_SIGN_PROVIDER_POSITION;
- @BeforeClass
+ @BeforeAll
public static void setup() throws Exception {
System.setProperty("javax.net.debug", "ssl");
// get keystore types for BouncyCastle libraries
@@ -166,7 +165,7 @@ public static void setup() throws Exception {
// connectToGoogle();
}
- @AfterClass
+ @AfterAll
public static void teardown() throws Exception {
// Remove BouncyCastle FIPS Provider
Security.removeProvider(JCE_PROVIDER_BOUNCY_CASTLE_FIPS);
@@ -227,7 +226,7 @@ public void connectWithFips() throws SQLException {
}
@Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class)
+ @DontRunOnGithubActions
public void connectWithFipsKeyPair() throws Exception {
Map parameters = getConnectionParameters();
String testUser = parameters.get("user");
@@ -256,7 +255,7 @@ public void connectWithFipsKeyPair() throws Exception {
}
@Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class)
+ @DontRunOnGithubActions
public void testConnectUsingKeyPair() throws Exception {
Map parameters = getConnectionParameters();
String testUser = parameters.get("user");
@@ -295,7 +294,7 @@ public void testConnectUsingKeyPair() throws Exception {
* Currently ignored execution on GCP due to exception thrown "SSlException Could not generate XDH keypair"
*/
@Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGCP.class)
+ @DontRunOnGCP
public void connectWithFipsAndQuery() throws SQLException {
try (Connection con = getConnection()) {
Statement statement = con.createStatement();
@@ -329,7 +328,7 @@ public void connectWithFipsAndPut() throws Exception {
/** Added in > 3.15.1 */
@Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class)
+ @DontRunOnGithubActions
public void connectWithFipsKeyPairWithBouncyCastle() throws Exception {
System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true");
connectWithFipsKeyPair();
@@ -337,7 +336,7 @@ public void connectWithFipsKeyPairWithBouncyCastle() throws Exception {
/** Added in > 3.15.1 */
@Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class)
+ @DontRunOnGithubActions
public void testConnectUsingKeyPairWithBouncyCastle() throws Exception {
System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true");
testConnectUsingKeyPair();
diff --git a/Jenkinsfile b/Jenkinsfile
index 8e5925b8c..261a2968b 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -87,6 +87,8 @@ timestamps {
}.collectEntries { jobDefinition ->
return [(jobDefinition.runName): { build job: jobDefinition.jobToRun, parameters: jobDefinition.params }]
}
+
+ jobDefinitions.put('JDBC-AIX-Unit', { build job: 'JDBC-AIX-UnitTests', parameters: [ string(name: 'BRANCH', value: scmInfo.GIT_BRANCH ) ] } )
stage('Test') {
parallel (jobDefinitions)
}
diff --git a/README.rst b/README.rst
index c2e296e95..a9d3cacb2 100644
--- a/README.rst
+++ b/README.rst
@@ -214,3 +214,8 @@ Support
Feel free to file an issue or submit a PR here for general cases. For official support, contact Snowflake support at:
https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge
+
+Note
+----------
+
+This driver currently does not support GCP regional endpoints. Please ensure that any workloads running through this driver do not require support for regional endpoints on GCP. If you have questions about this, please contact Snowflake Support.
diff --git a/TestOnly/pom.xml b/TestOnly/pom.xml
index 109b03c74..509cb8925 100644
--- a/TestOnly/pom.xml
+++ b/TestOnly/pom.xml
@@ -4,7 +4,7 @@
net.snowflakesnowflake-jdbc-test
- 3.9.2
+ 3.13.21snowflake-jdbc-testhttp://maven.apache.org
@@ -18,33 +18,82 @@
0.8.4true5.13.0
+ 5.11.1
+ 3.5.13.5.6net.snowflake.client.jdbc.internal
- net.snowflake.client.category.AllTestCategoryio.nettynetty-common
- 4.1.111.Final
+ 4.1.115.Finalio.nettynetty-buffer
- 4.1.111.Final
+ 4.1.115.Finalorg.apache.maven.pluginsmaven-failsafe-plugin3.0.0-M1
+
test
- junit
- junit
- 4.13.1
- jar
+ org.junit.jupiter
+ junit-jupiter-api
+ ${junit.version}
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-params
+ ${junit.version}
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+ ${junit.version}
+ test
+
+
+ org.junit.platform
+ junit-platform-suite
+ 1.11.1
+ test
+
+
+ org.junit.platform
+ junit-platform-engine
+ 1.11.1
+ test
+
+
+ org.junit.platform
+ junit-platform-runner
+ 1.11.1
+ test
+
+
+ org.junit.platform
+ junit-platform-suite-api
+ 1.11.1
+ test
+
+
+ org.junit.platform
+ junit-platform-suite-engine
+ 1.11.1
+ test
+
+
+ org.junit.platform
+ junit-platform-launcher
+ 1.11.1test
@@ -371,7 +420,26 @@
org.apache.maven.pluginsmaven-surefire-plugin
- 3.0.0-M5
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${surefire.version}
+
+
+ ${surefire.version}
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${surefire.version}
+
+
+ ${surefire.version}
@@ -387,35 +455,40 @@
org.apache.maven.plugins
- maven-failsafe-plugin
+ maven-surefire-plugin
- ${testCategory}
+ false
+
+
+ test
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+
+
+
+ verify
+
+ DefaultITintegration-test
-
- **/DellBoomiCloudIT.java
-
-
- net.snowflake.client.log.JDK14Logger
-
-
- ${basedir}/../src/test/resources/logging.properties
-
+ net.snowflake.client.log.JDK14Logger
+ ${basedir}/src/test/resources/logging.properties
+ ${integrationTestSuites}
-
-
- verify
-
-
diff --git a/ci/container/test_component.sh b/ci/container/test_component.sh
index da245a627..65efed88d 100755
--- a/ci/container/test_component.sh
+++ b/ci/container/test_component.sh
@@ -68,9 +68,6 @@ echo "[INFO] Running Hang Web Server"
kill -9 $(ps -ewf | grep hang_webserver | grep -v grep | awk '{print $2}') || true
python3 $THIS_DIR/hang_webserver.py 12345&
-IFS=','
-read -ra CATEGORY <<< "$JDBC_TEST_CATEGORY"
-
# Avoid connection timeouts
export MAVEN_OPTS="$MAVEN_OPTS -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120"
@@ -79,41 +76,39 @@ cd $SOURCE_ROOT
# Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched
$MVNW_EXE --batch-mode --show-version dependency:go-offline
-for c in "${CATEGORY[@]}"; do
- c=$(echo $c | sed 's/ *$//g')
- if [[ "$is_old_driver" == "true" ]]; then
- pushd TestOnly >& /dev/null
- JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]")
- echo "[INFO] Run JDBC $JDBC_VERSION tests"
- $MVNW_EXE -DjenkinsIT \
- -Djava.io.tmpdir=$WORKSPACE \
- -Djacoco.skip.instrument=false \
- -DtestCategory=net.snowflake.client.category.$c \
- -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
- verify \
- --batch-mode --show-version
- popd >& /dev/null
- elif [[ "$c" == "TestCategoryFips" ]]; then
- pushd FIPS >& /dev/null
- echo "[INFO] Run Fips tests"
- $MVNW_EXE -DjenkinsIT \
- -Djava.io.tmpdir=$WORKSPACE \
- -Djacoco.skip.instrument=false \
- -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
- -Dnot-self-contained-jar \
- verify \
- --batch-mode --show-version
- popd >& /dev/null
- else
- echo "[INFO] Run $c tests"
+if [[ "$is_old_driver" == "true" ]]; then
+ pushd TestOnly >& /dev/null
+ JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]")
+ echo "[INFO] Run JDBC $JDBC_VERSION tests"
$MVNW_EXE -DjenkinsIT \
-Djava.io.tmpdir=$WORKSPACE \
-Djacoco.skip.instrument=false \
- -DtestCategory=net.snowflake.client.category.$c \
+ -DintegrationTestSuites="$JDBC_TEST_SUITES" \
-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
- -Dnot-self-contained-jar $ADDITIONAL_MAVEN_PROFILE \
verify \
--batch-mode --show-version
- fi
-done
+ popd >& /dev/null
+elif [[ "$JDBC_TEST_SUITES" == "FipsTestSuite" ]]; then
+ pushd FIPS >& /dev/null
+ echo "[INFO] Run Fips tests"
+ $MVNW_EXE -DjenkinsIT \
+ -Djava.io.tmpdir=$WORKSPACE \
+ -Djacoco.skip.instrument=false \
+ -DintegrationTestSuites=FipsTestSuite \
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
+ -Dnot-self-contained-jar \
+ verify \
+ --batch-mode --show-version
+ popd >& /dev/null
+else
+ echo "[INFO] Run $JDBC_TEST_SUITES tests"
+ $MVNW_EXE -DjenkinsIT \
+ -Djava.io.tmpdir=$WORKSPACE \
+ -Djacoco.skip.instrument=false \
+ -DintegrationTestSuites="$JDBC_TEST_SUITES" \
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
+ -Dnot-self-contained-jar $ADDITIONAL_MAVEN_PROFILE \
+ verify \
+ --batch-mode --show-version
+fi
IFS=' '
diff --git a/ci/log_analyze_setup.sh b/ci/log_analyze_setup.sh
index fd573d194..63303964e 100755
--- a/ci/log_analyze_setup.sh
+++ b/ci/log_analyze_setup.sh
@@ -36,7 +36,7 @@ LOG_PROPERTY_FILE=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resour
export CLIENT_DRIVER_NAME=JDBC
function setup_log_env() {
- if ["$WORKSPACE" == "/mnt/workspace"]; then
+ if [[ "$WORKSPACE" == "/mnt/workspace" ]]; then
CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH_DOCKER
CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH_DOCKER
CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER
@@ -53,7 +53,7 @@ function setup_log_env() {
sed -i'' -e "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH|" ${LOG_PROPERTY_FILE}
if [[ ! -d ${CLIENT_LOG_DIR_PATH} ]]; then
- echo "[INFO] create clien log directory $CLIENT_LOG_DIR_PATH"
+ echo "[INFO] create client log directory $CLIENT_LOG_DIR_PATH"
mkdir -p ${CLIENT_LOG_DIR_PATH}
fi
diff --git a/ci/scripts/check_content.sh b/ci/scripts/check_content.sh
index a9c0768b6..1af33e56a 100755
--- a/ci/scripts/check_content.sh
+++ b/ci/scripts/check_content.sh
@@ -8,12 +8,12 @@ set -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types; then
+if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -v -E "/$" | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types | grep -v -E "^com/github/luben/zstd/" | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then
echo "[ERROR] JDBC jar includes class not under the snowflake namespace"
exit 1
fi
-if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -E "^META-INF/versions/.*.class" | grep -v -E "^META-INF/versions/.*/(net|com)/snowflake"; then
- echo "[ERROR] JDBC jar includes multi release classes not under the snowflake namespace"
+if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -E "^META-INF/versions/.*.class" | grep -v -E "^META-INF/versions/.*/(net|com)/snowflake"; then
+ echo "[ERROR] JDBC jar includes multi-release classes not under the snowflake namespace"
exit 1
fi
diff --git a/ci/test.sh b/ci/test.sh
index 03c66c502..125e91d1f 100755
--- a/ci/test.sh
+++ b/ci/test.sh
@@ -30,8 +30,8 @@ else
exit 2
fi
-if [[ -z "$JDBC_TEST_CATEGORY" ]]; then
- echo "[ERROR] Set JDBC_TEST_CATEGORY to the JDBC test category."
+if [[ -z "$JDBC_TEST_SUITES" ]]; then
+ echo "[ERROR] Set JDBC_TEST_SUITES to the JDBC test suites."
find $THIS_DIR/../src/test/java -type f -exec grep -E "^import net.snowflake.client.category" {} \; | sort | uniq | awk -F. '{print $NF}' | awk -F\; '{print $1}'
exit 2
fi
@@ -56,7 +56,7 @@ for name in "${!TARGET_TEST_IMAGES[@]}"; do
-e RUNNER_TRACKING_ID \
-e JOB_NAME \
-e BUILD_NUMBER \
- -e JDBC_TEST_CATEGORY \
+ -e JDBC_TEST_SUITES \
-e ADDITIONAL_MAVEN_PROFILE \
-e CLOUD_PROVIDER \
-e is_old_driver \
diff --git a/ci/test_windows.bat b/ci/test_windows.bat
index 4a5a8ebe3..0234b105c 100644
--- a/ci/test_windows.bat
+++ b/ci/test_windows.bat
@@ -111,47 +111,45 @@ echo "MAVEN OPTIONS %MAVEN_OPTS%"
REM Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched
cmd /c %MVNW_EXE% --batch-mode --show-version dependency:go-offline
-echo list = "%JDBC_TEST_CATEGORY%"
-for %%a in ("%JDBC_TEST_CATEGORY:,=" "%") do (
- echo "Current category to execute" %%a
- if /i %%a=="TestCategoryFips" (
- pushd FIPS
- echo "[INFO] Run Fips tests"
- cmd /c %MVNW_EXE% -B -DjenkinsIT ^
- -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^
- -Djacoco.skip.instrument=false ^
- -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^
- -Dnot-self-contained-jar ^
- verify ^
- --batch-mode --show-version > log.txt & type log.txt
- echo "[INFO] Check for test execution status"
- find /i /c "BUILD FAILURE" log.txt > NUL
- set isfound=!errorlevel!
- if !isfound! equ 0 (
- echo [ERROR] Failed run %%a test
- exit /b 1
- ) else (
- echo [INFO] Success run %%a test
- )
- popd ) else (
- echo "[INFO] Run %%a tests"
- cmd /c %MVNW_EXE% -B -DjenkinsIT ^
- -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^
- -Djacoco.skip.instrument=false ^
- -DtestCategory=net.snowflake.client.category.%%a ^
- -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^
- -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^
- verify ^
- --batch-mode --show-version > log.txt & type log.txt
- echo "[INFO] Check for test execution status"
- find /i /c "BUILD FAILURE" log.txt > NUL
- set isfound=!errorlevel!
- if !isfound! equ 0 (
- echo [ERROR] Failed run %%a test
- exit /b 1
- ) else (
- echo [INFO] Success run %%a test
- )
+if "%JDBC_TEST_SUITES%"=="FipsTestSuite" (
+ pushd FIPS
+ echo "[INFO] Run Fips tests"
+ cmd /c %MVNW_EXE% -B -DjenkinsIT ^
+ -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^
+ -Djacoco.skip.instrument=false ^
+ -DintegrationTestSuites=FipsTestSuite ^
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^
+ -Dnot-self-contained-jar ^
+ verify ^
+ --batch-mode --show-version > log.txt & type log.txt
+ echo "[INFO] Check for test execution status"
+ find /i /c "BUILD FAILURE" log.txt > NUL
+ set isfound=!errorlevel!
+ if !isfound! equ 0 (
+ echo [ERROR] Failed run FipsTestSuite test
+ exit /b 1
+ ) else (
+ echo [INFO] Success run FipsTestSuite test
+ )
+ popd
+) else (
+ echo "[INFO] Run %JDBC_TEST_SUITES% tests"
+ cmd /c %MVNW_EXE% -B -DjenkinsIT ^
+ -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^
+ -Djacoco.skip.instrument=false ^
+ -DintegrationTestSuites="%JDBC_TEST_SUITES%" ^
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^
+ -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^
+ verify ^
+ --batch-mode --show-version > log.txt & type log.txt
+ echo "[INFO] Check for test execution status"
+ find /i /c "BUILD FAILURE" log.txt > NUL
+ set isfound=!errorlevel!
+ if !isfound! equ 0 (
+ echo [ERROR] Failed run %JDBC_TEST_SUITES% test
+ exit /b 1
+ ) else (
+ echo [INFO] Success run %JDBC_TEST_SUITES% test
)
)
diff --git a/dependencies/Readme.md b/dependencies/Readme.md
index 7b4a4c73c..28afe4031 100644
--- a/dependencies/Readme.md
+++ b/dependencies/Readme.md
@@ -1,2 +1,2 @@
-Arrow dependencies are built from internal branch `upgradeto17.0.0`. This build was applied the AIX fix.
+Arrow dependencies are built from internal branch `upgradeTo17.0.0-v3`. This build includes the AIX fix and uses the custom logger instead of the slf4j logger.
diff --git a/dependencies/arrow-format-17.0.0.jar b/dependencies/arrow-format-17.0.0.jar
index 349272113..103c9c00d 100644
Binary files a/dependencies/arrow-format-17.0.0.jar and b/dependencies/arrow-format-17.0.0.jar differ
diff --git a/dependencies/arrow-memory-core-17.0.0.jar b/dependencies/arrow-memory-core-17.0.0.jar
index a218df5db..916e38238 100644
Binary files a/dependencies/arrow-memory-core-17.0.0.jar and b/dependencies/arrow-memory-core-17.0.0.jar differ
diff --git a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar
index 2004a461e..63f032a2b 100644
Binary files a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar and b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar differ
diff --git a/dependencies/arrow-memory-unsafe-17.0.0.jar b/dependencies/arrow-memory-unsafe-17.0.0.jar
index b9897fe47..174af274e 100644
Binary files a/dependencies/arrow-memory-unsafe-17.0.0.jar and b/dependencies/arrow-memory-unsafe-17.0.0.jar differ
diff --git a/dependencies/arrow-vector-17.0.0.jar b/dependencies/arrow-vector-17.0.0.jar
index 69ccfaf0d..73061da73 100644
Binary files a/dependencies/arrow-vector-17.0.0.jar and b/dependencies/arrow-vector-17.0.0.jar differ
diff --git a/linkage-checker-exclusion-rules.xml b/linkage-checker-exclusion-rules.xml
index 8bad89714..64b5860c2 100644
--- a/linkage-checker-exclusion-rules.xml
+++ b/linkage-checker-exclusion-rules.xml
@@ -19,11 +19,6 @@
Optional
-
-
-
- Optional
-
@@ -44,6 +39,16 @@
?
+
+
+
+ ?
+
+
+
+
+ ?
+
+ org.bouncycastle
+ bcutil-jdk18on
+ ${bouncycastle.version}
+
org.bouncycastle
@@ -462,11 +542,6 @@
${bouncycastle.bcpkixfips.version}provided
-
- org.threeten
- threetenbp
- ${threeten.version}
- org.tukaanixz
@@ -496,18 +571,6 @@
${awaitility.version}test
-
- org.apache.maven.surefire
- surefire-junit4
- ${version.plugin.surefire}
- test
-
-
- org.apache.maven.surefire
- common-junit48
- ${version.plugin.surefire}
- test
- org.wiremockwiremock-standalone
@@ -644,6 +707,10 @@
org.apache.httpcomponentshttpcore
+
+ com.github.luben
+ zstd-jni
+ org.apache.tikatika-core
@@ -723,6 +790,46 @@
junitjunit
+
+ org.junit.jupiter
+ junit-jupiter
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+
+
+ org.junit.jupiter
+ junit-jupiter-params
+
+
+ org.junit.platform
+ junit-platform-suite
+
+
+ org.junit.platform
+ junit-platform-engine
+
+
+ org.junit.platform
+ junit-platform-runner
+
+
+ org.junit.platform
+ junit-platform-suite-api
+
+
+ org.junit.platform
+ junit-platform-suite-engine
+
+
+ org.junit.platform
+ junit-platform-launcher
+ org.apache.avroavro
@@ -755,15 +862,6 @@
org.awaitilityawaitility
-
-
- org.apache.maven.surefire
- surefire-junit4
-
-
- org.apache.maven.surefire
- common-junit48
- org.wiremockwiremock-standalone
diff --git a/pom.xml b/pom.xml
index 096641174..2cfb0425e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,13 +6,13 @@
net.snowflakesnowflake-jdbc-parent
- 3.19.0
+ 3.20.1-SNAPSHOT./parent-pom.xml${artifactId}
- 3.19.0
+ 3.20.1-SNAPSHOTjar${artifactId}
@@ -36,6 +36,10 @@
org.bouncycastlebcprov-jdk18on
+
+ org.bouncycastle
+ bcutil-jdk18on
+
@@ -102,6 +106,13 @@
org.apache.maven.pluginsmaven-failsafe-plugin${version.plugin.failsafe}
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ org.apache.maven.plugins
@@ -142,6 +153,13 @@
org.apache.maven.pluginsmaven-surefire-plugin${version.plugin.surefire}
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ org.codehaus.mojo
@@ -720,6 +738,9 @@
+
+ META-INF/io.netty.versions.properties
+
@@ -819,14 +840,59 @@
com.google${shadeBase}.google
+
+
+ google.api
+ ${shadeBase}.google.api
+
+
+ google.apps
+ ${shadeBase}.google.apps
+
+
+ google.cloud
+ ${shadeBase}.google.cloud
+ google.geo${shadeBase}.google.geo
+
+ google.iam
+ ${shadeBase}.google.iam
+
+
+ google.logging
+ ${shadeBase}.google.logging
+
+
+ google.longrunning
+ ${shadeBase}.google.longrunning
+
+
+ google.monitoring
+ ${shadeBase}.google.monitoring
+
+
+ google.protobuf
+ ${shadeBase}.google.protobuf
+
+
+ google.rpc
+ ${shadeBase}.google.rpc
+
+
+ google.shopping
+ ${shadeBase}.google.shopping
+ google.storage${shadeBase}.google.storage
+
+ google.type
+ ${shadeBase}.google.type
+ org.joda${shadeBase}.joda
@@ -875,49 +941,37 @@
com.carrotsearch${shadeBase}.com.carrotsearch
-
- google.type
- ${shadeBase}.google.type
-
-
- google.rpc
- ${shadeBase}.google.rpc
-
-
- google.iam
- ${shadeBase}.google.iam
- io.opencensus${shadeBase}.opencensus
- org.threeten
- ${shadeBase}.threeten
+ io.opentelemetry
+ ${shadeBase}.opentelemetry
- google.protobuf
- ${shadeBase}.google.protobuf
-
-
- google.api
- ${shadeBase}.google.api
+ org.threeten
+ ${shadeBase}.threetenio.grpc${shadeBase}.grpc
- google.longrunning
- ${shadeBase}.google.longrunning
+ META-INF.native.io_grpc_netty_shaded_netty_tcnative
+ META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_tcnative
- google.cloud
- ${shadeBase}.google.cloud
+ META-INF.native.libio_grpc_netty_shaded_netty_tcnative
+ META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_tcnative
- google.logging
- ${shadeBase}.google.logging
+ META-INF.native.io_grpc_netty_shaded_netty_transport_native_epoll
+ META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll
+
+
+ META-INF.native.libio_grpc_netty_shaded_netty_transport_native_epoll
+ META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epollorg.checkerframework
@@ -935,6 +989,18 @@
org.conscrypt${shadeBase}.org.conscrypt
+
+ conscrypt_openjdk_jni
+ ${shadeNativeBase}_conscrypt_openjdk_jni
+
+
+ META-INF.native.conscrypt_openjdk_jni
+ META-INF.native.${shadeNativeBase}_conscrypt_openjdk_jni
+
+
+ META-INF.native.libconscrypt_openjdk_jni
+ META-INF.native.lib${shadeNativeBase}_conscrypt_openjdk_jni
+ opencensus${shadeBase}.opencensus
@@ -1002,6 +1068,9 @@
+
+ META-INF/io.netty.versions.properties
+
@@ -1027,20 +1096,26 @@
+
+
+
+
+
+
@@ -1105,7 +1180,7 @@
@@ -1136,10 +1211,28 @@
org.apache.maven.plugins
- maven-failsafe-plugin
+ maven-surefire-plugin
- ${testCategory}
+ UnitTestSuite
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+
+
+
+
+ test
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
@@ -1152,13 +1245,11 @@
integration-test
-
- **/DellBoomiCloudIT.java
- net.snowflake.client.log.JDK14Logger${basedir}/src/test/resources/logging.properties
+ ${integrationTestSuites}
@@ -1277,27 +1368,24 @@
org.apache.maven.pluginsmaven-failsafe-plugin
+
+
+ **/*IT.java
+
+
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ verify
-
- ClientTelemetryIT
-
- integration-test
-
-
-
- **/ConnectionIT.java
- **/SFTrustManagerIT.java
-
-
- ${basedir}/src/test/resources/logback-test.xml
-
-
-
@@ -1315,21 +1403,24 @@
org.apache.maven.pluginsmaven-failsafe-plugin
+
+
+ **/*IT.java
+
+
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ verify
-
- DellBoomiIT
-
- integration-test
-
-
- DellBoomiCloudIT.java
-
-
@@ -1347,27 +1438,24 @@
org.apache.maven.pluginsmaven-failsafe-plugin
+
+
+ **/*IT.java
+
+
+
+
+ org.apache.maven.surefire
+ surefire-junit-platform
+ ${version.plugin.surefire}
+
+ verify
-
- ClientTelemetryIT
-
- integration-test
-
-
-
- **/ConnectionIT.java
- **/SFTrustManagerIT.java
-
-
- ${basedir}/src/test/resources/logback-test.xml
-
-
-
diff --git a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java
index a0ca0fa11..45b38dbfa 100644
--- a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java
+++ b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java
@@ -33,6 +33,7 @@ public class SFClientConfigParser {
* @param configFilePath SF_CLIENT_CONFIG_FILE parameter read from connection URL or connection
* properties
* @return SFClientConfig
+ * @throws IOException if exception encountered when reading config file.
*/
public static SFClientConfig loadSFClientConfig(String configFilePath) throws IOException {
if (configFilePath != null) {
diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java
index 35698c557..1da9f766a 100644
--- a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java
+++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java
@@ -1,5 +1,6 @@
package net.snowflake.client.config;
+import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemGetEnvToBooleanValue;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetEnv;
import com.fasterxml.jackson.dataformat.toml.TomlMapper;
@@ -34,6 +35,53 @@ public class SFConnectionConfigParser {
"SNOWFLAKE_DEFAULT_CONNECTION_NAME";
public static final String DEFAULT = "default";
public static final String SNOWFLAKE_TOKEN_FILE_PATH = "/snowflake/session/token";
+ public static final String SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION =
+ "SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION";
+
+ public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException {
+ String defaultConnectionName =
+ Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT);
+ Map fileConnectionConfiguration =
+ loadDefaultConnectionConfiguration(defaultConnectionName);
+
+ if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) {
+ Properties connectionProperties = new Properties();
+ connectionProperties.putAll(fileConnectionConfiguration);
+
+ String url = createUrl(fileConnectionConfiguration);
+ logger.debug("Url created using parameters from connection configuration file: {}", url);
+
+ if ("oauth".equals(fileConnectionConfiguration.get("authenticator"))
+ && fileConnectionConfiguration.get("token") == null) {
+ Path path =
+ Paths.get(
+ Optional.ofNullable(fileConnectionConfiguration.get("token_file_path"))
+ .orElse(SNOWFLAKE_TOKEN_FILE_PATH));
+ logger.debug("Token used in connect is read from file: {}", path);
+ try {
+ boolean shouldSkipTokenFilePermissionsVerification =
+ convertSystemGetEnvToBooleanValue(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, false);
+ if (!shouldSkipTokenFilePermissionsVerification) {
+ verifyFilePermissionSecure(path);
+ } else {
+ logger.debug("Skip token file permissions verification");
+ }
+ String token = new String(Files.readAllBytes(path), Charset.defaultCharset());
+ if (!token.isEmpty()) {
+ putPropertyIfNotNull(connectionProperties, "token", token.trim());
+ } else {
+ throw new SnowflakeSQLException(
+ "Non-empty token must be set when the authenticator type is OAUTH");
+ }
+ } catch (Exception ex) {
+ throw new SnowflakeSQLException(ex, "There is a problem during reading token from file");
+ }
+ }
+ return new ConnectionParameters(url, connectionProperties);
+ } else {
+ return null;
+ }
+ }
private static Map loadDefaultConnectionConfiguration(
String defaultConnectionName) throws SnowflakeSQLException {
@@ -88,44 +136,6 @@ private static void verifyFilePermissionSecure(Path configFilePath)
}
}
- public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException {
- String defaultConnectionName =
- Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT);
- Map fileConnectionConfiguration =
- loadDefaultConnectionConfiguration(defaultConnectionName);
-
- if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) {
- Properties conectionProperties = new Properties();
- conectionProperties.putAll(fileConnectionConfiguration);
-
- String url = createUrl(fileConnectionConfiguration);
- logger.debug("Url created using parameters from connection configuration file: {}", url);
-
- if ("oauth".equals(fileConnectionConfiguration.get("authenticator"))
- && fileConnectionConfiguration.get("token") == null) {
- Path path =
- Paths.get(
- Optional.ofNullable(fileConnectionConfiguration.get("token_file_path"))
- .orElse(SNOWFLAKE_TOKEN_FILE_PATH));
- logger.debug("Token used in connect is read from file: {}", path);
- try {
- verifyFilePermissionSecure(path);
- String token = new String(Files.readAllBytes(path), Charset.defaultCharset());
- if (!token.isEmpty()) {
- putPropertyIfNotNull(conectionProperties, "token", token.trim());
- } else {
- logger.warn("The token has empty value");
- }
- } catch (Exception ex) {
- throw new SnowflakeSQLException(ex, "There is a problem during reading token from file");
- }
- }
- return new ConnectionParameters(url, conectionProperties);
- } else {
- return null;
- }
- }
-
private static String createUrl(Map fileConnectionConfiguration)
throws SnowflakeSQLException {
Optional maybeAccount = Optional.ofNullable(fileConnectionConfiguration.get("account"));
diff --git a/src/main/java/net/snowflake/client/core/CancellationReason.java b/src/main/java/net/snowflake/client/core/CancellationReason.java
new file mode 100644
index 000000000..e3ae4e308
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/CancellationReason.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.core;
+
+@SnowflakeJdbcInternalApi
+public enum CancellationReason {
+ UNKNOWN,
+ CLIENT_REQUESTED,
+ TIMEOUT
+}
diff --git a/src/main/java/net/snowflake/client/core/ChunkDownloader.java b/src/main/java/net/snowflake/client/core/ChunkDownloader.java
index 8818c9c17..e881eb9a1 100644
--- a/src/main/java/net/snowflake/client/core/ChunkDownloader.java
+++ b/src/main/java/net/snowflake/client/core/ChunkDownloader.java
@@ -14,6 +14,8 @@ public interface ChunkDownloader {
* be blocked if the chunk is not ready to be consumed (a.k.a not loaded into memory yet)
*
* @return result chunk with data loaded
+ * @throws InterruptedException if downloading thread was interrupted
+ * @throws SnowflakeSQLException if downloader encountered an error
*/
SnowflakeResultChunk getNextChunkToConsume() throws InterruptedException, SnowflakeSQLException;
@@ -21,6 +23,7 @@ public interface ChunkDownloader {
* Terminate the chunk downloader, release all resources allocated
*
* @return metrics measuring downloader performance
+ * @throws InterruptedException if error encountered
*/
DownloaderMetrics terminate() throws InterruptedException;
}
diff --git a/src/main/java/net/snowflake/client/core/CredentialManager.java b/src/main/java/net/snowflake/client/core/CredentialManager.java
index a5b919d3d..08e9e6b9a 100644
--- a/src/main/java/net/snowflake/client/core/CredentialManager.java
+++ b/src/main/java/net/snowflake/client/core/CredentialManager.java
@@ -47,7 +47,7 @@ void resetSecureStorageManager() {
/**
* Testing purpose. Inject a mock manager.
*
- * @param manager
+ * @param manager SecureStorageManager
*/
void injectSecureStorageManager(SecureStorageManager manager) {
logger.debug("Injecting secure storage manager");
diff --git a/src/main/java/net/snowflake/client/core/DataConversionContext.java b/src/main/java/net/snowflake/client/core/DataConversionContext.java
index 86bba8208..d0f80e021 100644
--- a/src/main/java/net/snowflake/client/core/DataConversionContext.java
+++ b/src/main/java/net/snowflake/client/core/DataConversionContext.java
@@ -12,25 +12,42 @@
* to a single result set. a.k.a each result set object should have its own formatter info
*/
public interface DataConversionContext {
- /** timestamp_ltz formatter */
+ /**
+ * @return timestamp_ltz formatter
+ */
SnowflakeDateTimeFormat getTimestampLTZFormatter();
- /** timestamp_ntz formatter */
+ /**
+ * @return timestamp_ntz formatter
+ */
SnowflakeDateTimeFormat getTimestampNTZFormatter();
- /** timestamp_tz formatter */
+ /**
+ * @return timestamp_tz formatter
+ */
SnowflakeDateTimeFormat getTimestampTZFormatter();
- /** date formatter */
+ /**
+ * @return date formatter
+ */
SnowflakeDateTimeFormat getDateFormatter();
- /** time formatter */
+ /**
+ * @return time formatter
+ */
SnowflakeDateTimeFormat getTimeFormatter();
- /** binary formatter */
+ /**
+ * @return binary formatter
+ */
SFBinaryFormat getBinaryFormatter();
- /** get scale from Snowflake metadata */
+ /**
+ * get scale from Snowflake metadata
+ *
+ * @param columnIndex column index
+ * @return scale value
+ */
int getScale(int columnIndex);
/**
diff --git a/src/main/java/net/snowflake/client/core/EventUtil.java b/src/main/java/net/snowflake/client/core/EventUtil.java
index d45cd0676..ed25c5988 100644
--- a/src/main/java/net/snowflake/client/core/EventUtil.java
+++ b/src/main/java/net/snowflake/client/core/EventUtil.java
@@ -36,7 +36,7 @@ public class EventUtil {
/**
* Junit is not recognizing the system properties for EventTest, so overriding the value here
*
- * @param value
+ * @param value string value
*/
public static void setDumpPathPrefixForTesting(String value) {
DUMP_PATH_PREFIX = value;
diff --git a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java
index 25ba5f946..6942a9e5a 100644
--- a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java
+++ b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java
@@ -67,6 +67,7 @@ private HeartbeatBackground() {}
* @param session the session will be added
* @param masterTokenValidityInSecs time interval for which client need to check validity of
* master token with server
+ * @param heartbeatFrequencyInSecs heartbeat frequency in seconds
*/
protected synchronized void addSession(
SFSession session, long masterTokenValidityInSecs, int heartbeatFrequencyInSecs) {
diff --git a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java
index f65b9e29d..d3a356e5a 100644
--- a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java
+++ b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java
@@ -122,7 +122,11 @@ public String getUserAgentSuffix() {
return this.userAgentSuffix;
}
- /** Be careful of using this! Should only be called when password is later masked. */
+ /**
+ * Be careful of using this! Should only be called when password is later masked.
+ *
+ * @return proxy password
+ */
@SnowflakeJdbcInternalApi
public String getProxyPassword() {
return this.proxyPassword;
diff --git a/src/main/java/net/snowflake/client/core/HttpUtil.java b/src/main/java/net/snowflake/client/core/HttpUtil.java
index 166bd7e0a..23b83df09 100644
--- a/src/main/java/net/snowflake/client/core/HttpUtil.java
+++ b/src/main/java/net/snowflake/client/core/HttpUtil.java
@@ -65,6 +65,7 @@
import org.apache.http.ssl.SSLInitializationException;
import org.apache.http.util.EntityUtils;
+/** HttpUtil class */
public class HttpUtil {
private static final SFLogger logger = SFLoggerFactory.getLogger(HttpUtil.class);
@@ -168,7 +169,7 @@ public static void setProxyForS3(HttpClientSettingsKey key, ClientConfiguration
*
* @param proxyProperties proxy properties
* @param clientConfig the configuration needed by S3 to set the proxy
- * @throws SnowflakeSQLException
+ * @throws SnowflakeSQLException when exception encountered
* @deprecated Use {@link S3HttpUtil#setSessionlessProxyForS3(Properties, ClientConfiguration)}
* instead
*/
@@ -184,7 +185,7 @@ public static void setSessionlessProxyForS3(
*
* @param proxyProperties proxy properties
* @param opContext the configuration needed by Azure to set the proxy
- * @throws SnowflakeSQLException
+ * @throws SnowflakeSQLException when invalid proxy properties encountered
*/
public static void setSessionlessProxyForAzure(
Properties proxyProperties, OperationContext opContext) throws SnowflakeSQLException {
@@ -723,6 +724,7 @@ public static String executeGeneralRequest(
* @param includeRetryParameters whether to include retry parameters in retried requests
* @param retryOnHTTP403 whether to retry on HTTP 403 or not
* @param ocspAndProxyKey OCSP mode and proxy settings for httpclient
+ * @param execTimeData query execution time telemetry data object
* @return response
* @throws SnowflakeSQLException if Snowflake error occurs
* @throws IOException raises if a general IO error occurs
diff --git a/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java
index 8d4a01742..e60f6859d 100644
--- a/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java
+++ b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java
@@ -6,6 +6,9 @@ public class PrivateLinkDetector {
* We can only tell if private link is enabled for certain hosts when the hostname contains the
* word 'privatelink' but we don't have a good way of telling if a private link connection is
* expected for internal stages for example.
+ *
+ * @param host host
+ * @return true if host is considered as privatelink environment
*/
public static boolean isPrivateLink(String host) {
return host.toLowerCase().contains(".privatelink.snowflakecomputing.");
diff --git a/src/main/java/net/snowflake/client/core/QueryContextCache.java b/src/main/java/net/snowflake/client/core/QueryContextCache.java
index 85fde42ac..60cd8501a 100644
--- a/src/main/java/net/snowflake/client/core/QueryContextCache.java
+++ b/src/main/java/net/snowflake/client/core/QueryContextCache.java
@@ -274,6 +274,8 @@ private static QueryContextElement deserializeQueryContextElement(JsonNode node)
* Deserialize the QueryContext cache from a QueryContextDTO object. This function currently is
* only used in QueryContextCacheTest.java where we check that after serialization and
* deserialization, the cache is the same as before.
+ *
+ * @param queryContextDTO QueryContextDTO to deserialize.
*/
public void deserializeQueryContextDTO(QueryContextDTO queryContextDTO) {
synchronized (this) {
@@ -335,6 +337,8 @@ private static QueryContextElement deserializeQueryContextElementDTO(
/**
* Serialize the QueryContext cache to a QueryContextDTO object, which can be serialized to JSON
* automatically later.
+ *
+ * @return {@link QueryContextDTO}
*/
public QueryContextDTO serializeQueryContextDTO() {
synchronized (this) {
diff --git a/src/main/java/net/snowflake/client/core/QueryStatus.java b/src/main/java/net/snowflake/client/core/QueryStatus.java
index bc16abf62..792f4b538 100644
--- a/src/main/java/net/snowflake/client/core/QueryStatus.java
+++ b/src/main/java/net/snowflake/client/core/QueryStatus.java
@@ -39,6 +39,7 @@ public String getDescription() {
/**
* @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead
+ * @return error message
*/
@Deprecated
public String getErrorMessage() {
@@ -47,6 +48,7 @@ public String getErrorMessage() {
/**
* @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead
+ * @return error code
*/
@Deprecated
public int getErrorCode() {
@@ -55,6 +57,7 @@ public int getErrorCode() {
/**
* @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead
+ * @param message the error message
*/
@Deprecated
public void setErrorMessage(String message) {
@@ -63,12 +66,19 @@ public void setErrorMessage(String message) {
/**
* @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead
+ * @param errorCode the error code
*/
@Deprecated
public void setErrorCode(int errorCode) {
this.errorCode = errorCode;
}
+ /**
+ * Check if query is still running.
+ *
+ * @param status QueryStatus
+ * @return true if query is still running
+ */
public static boolean isStillRunning(QueryStatus status) {
switch (status.getValue()) {
case 0: // "RUNNING"
@@ -83,6 +93,12 @@ public static boolean isStillRunning(QueryStatus status) {
}
}
+ /**
+ * Check if query status is an error
+ *
+ * @param status QueryStatus
+ * @return true if query status is an error status
+ */
public static boolean isAnError(QueryStatus status) {
switch (status.getValue()) {
case 1: // Aborting
@@ -97,6 +113,12 @@ public static boolean isAnError(QueryStatus status) {
}
}
+ /**
+ * Get the query status from a string description
+ *
+ * @param description the status description
+ * @return QueryStatus
+ */
public static QueryStatus getStatusFromString(String description) {
if (description != null) {
for (QueryStatus st : QueryStatus.values()) {
diff --git a/src/main/java/net/snowflake/client/core/ResultUtil.java b/src/main/java/net/snowflake/client/core/ResultUtil.java
index b894f4259..20acee866 100644
--- a/src/main/java/net/snowflake/client/core/ResultUtil.java
+++ b/src/main/java/net/snowflake/client/core/ResultUtil.java
@@ -83,6 +83,12 @@ public static Object effectiveParamValue(Map parameters, String
/**
* Helper function building a formatter for a specialized timestamp type. Note that it will be
* based on either the 'param' value if set, or the default format provided.
+ *
+ * @param parameters keyed in parameter name and valued in parameter value
+ * @param id id
+ * @param param timestamp output format param
+ * @param defaultFormat default format
+ * @return {@link SnowflakeDateTimeFormat}
*/
public static SnowflakeDateTimeFormat specializedFormatter(
Map parameters, String id, String param, String defaultFormat) {
diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java
index f14e74e5d..dcea1d575 100644
--- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java
+++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java
@@ -189,7 +189,8 @@ public SFArrowResultSet(
*
* @param resultSetSerializable data returned in query response
* @param telemetryClient telemetryClient
- * @throws SQLException
+ * @param sortResult set if results should be sorted
+ * @throws SQLException if exception encountered
*/
public SFArrowResultSet(
SnowflakeResultSetSerializableV1 resultSetSerializable,
diff --git a/src/main/java/net/snowflake/client/core/SFBaseSession.java b/src/main/java/net/snowflake/client/core/SFBaseSession.java
index 382dcb877..9222b4a57 100644
--- a/src/main/java/net/snowflake/client/core/SFBaseSession.java
+++ b/src/main/java/net/snowflake/client/core/SFBaseSession.java
@@ -162,6 +162,8 @@ public long getMemoryLimitForTesting() {
* Part of the JDBC API, where client applications may fetch a Map of Properties to set various
* attributes. This is not used internally by any driver component, but should be maintained by
* the Session object.
+ *
+ * @return client info as Properties
*/
public Properties getClientInfo() {
// defensive copy to avoid client from changing the properties
@@ -171,10 +173,20 @@ public Properties getClientInfo() {
return copy;
}
+ /**
+ * Set common parameters
+ *
+ * @param parameters the parameters to set
+ */
public void setCommonParameters(Map parameters) {
this.commonParameters = parameters;
}
+ /**
+ * Get common parameters
+ *
+ * @return Map of common parameters
+ */
public Map getCommonParameters() {
return this.commonParameters;
}
@@ -183,12 +195,17 @@ public Map getCommonParameters() {
* Gets the Property associated with the key 'name' in the ClientInfo map.
*
* @param name The key from which to fetch the Property.
+ * @return The ClientInfo entry property.
*/
public String getClientInfo(String name) {
return this.clientInfo.getProperty(name);
}
- /** Returns a unique id for this session. */
+ /**
+ * Returns a unique id for this session.
+ *
+ * @return unique id for session
+ */
public String getSessionId() {
return sessionId;
}
@@ -202,86 +219,200 @@ public void setSessionId(String sessionId) {
this.sessionId = sessionId;
}
+ /**
+ * @return true if session is in SQLMode
+ */
public boolean isSfSQLMode() {
return sfSQLMode;
}
+ /**
+ * Set sfSQLMode
+ *
+ * @param sfSQLMode boolean
+ */
public void setSfSQLMode(boolean sfSQLMode) {
this.sfSQLMode = sfSQLMode;
}
+ /**
+ * Get the database version
+ *
+ * @return database version
+ */
public String getDatabaseVersion() {
return databaseVersion;
}
+ /**
+ * Set database version
+ *
+ * @param databaseVersion the version to set
+ */
public void setDatabaseVersion(String databaseVersion) {
this.databaseVersion = databaseVersion;
}
+ /**
+ * Get database major version
+ *
+ * @return the database major version
+ */
public int getDatabaseMajorVersion() {
return databaseMajorVersion;
}
+ /**
+ * Set database major version
+ *
+ * @param databaseMajorVersion the database major version
+ */
public void setDatabaseMajorVersion(int databaseMajorVersion) {
this.databaseMajorVersion = databaseMajorVersion;
}
+ /**
+ * Get the database minor version
+ *
+ * @return database minor version
+ */
public int getDatabaseMinorVersion() {
return databaseMinorVersion;
}
+ /**
+ * Set the database minor version
+ *
+ * @param databaseMinorVersion the minor version
+ */
public void setDatabaseMinorVersion(int databaseMinorVersion) {
this.databaseMinorVersion = databaseMinorVersion;
}
+ /**
+ * Gets the value of CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS if one has been set. False by
+ * default.
+ *
+ * @see CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS
+ * @return true if enabled
+ */
public boolean getPreparedStatementLogging() {
return this.preparedStatementLogging;
}
+ /**
+ * Set prepared statement logging
+ *
+ * @see SFBaseSession#getPreparedStatementLogging()
+ * @param value boolean
+ */
public void setPreparedStatementLogging(boolean value) {
this.preparedStatementLogging = value;
}
+ /**
+ * Get inject file upload failure. Note: Should only be used in internal tests!
+ *
+ * @return file to fail
+ */
public String getInjectFileUploadFailure() {
return this.injectFileUploadFailure;
}
+ /**
+ * Set inject file upload failure. Note: Should only be used in internal tests!
+ *
+ * @param fileToFail the file to fail
+ */
public void setInjectFileUploadFailure(String fileToFail) {
this.injectFileUploadFailure = fileToFail;
}
+ /**
+ * Get timestamp mapped type
+ *
+ * @see CLIENT_TIMESTAMP_TYPE_MAPPING
+ * @return {@link SnowflakeType}
+ */
public SnowflakeType getTimestampMappedType() {
return timestampMappedType;
}
+ /**
+ * Set the timestamp mapped type
+ *
+ * @see SFBaseSession#getTimestampMappedType()
+ * @param timestampMappedType SnowflakeType
+ */
public void setTimestampMappedType(SnowflakeType timestampMappedType) {
this.timestampMappedType = timestampMappedType;
}
+ /**
+ * Get if result column is case-insensitive
+ *
+ * @see SFBaseSession#setResultColumnCaseInsensitive(boolean)
+ * @return true if result column is case-insensitive
+ */
public boolean isResultColumnCaseInsensitive() {
return isResultColumnCaseInsensitive;
}
+ /**
+ * Set if result column is case-insensitive
+ *
+ * @see CLIENT_RESULT_COLUMN_CASE_INSENSITIVE
+ * @param resultColumnCaseInsensitive boolean
+ */
public void setResultColumnCaseInsensitive(boolean resultColumnCaseInsensitive) {
isResultColumnCaseInsensitive = resultColumnCaseInsensitive;
}
+ /**
+ * Check if we want to treat decimal as int JDBC types
+ *
+ * @see JDBC_TREAT_DECIMAL_AS_INT
+ * @return true if decimal is treated as int
+ */
public boolean isJdbcTreatDecimalAsInt() {
return isJdbcTreatDecimalAsInt;
}
+ /**
+ * Set if decimal should be treated as int type
+ *
+ * @see SFBaseSession#isJdbcTreatDecimalAsInt()
+ * @param jdbcTreatDecimalAsInt boolean
+ */
public void setJdbcTreatDecimalAsInt(boolean jdbcTreatDecimalAsInt) {
isJdbcTreatDecimalAsInt = jdbcTreatDecimalAsInt;
}
+ /**
+ * @return true if decimal should be treated as int for arrow types
+ */
public boolean isJdbcArrowTreatDecimalAsInt() {
return isJdbcArrowTreatDecimalAsInt;
}
+ /**
+ * Set if decimal should be treated as int for arrow types
+ *
+ * @param jdbcArrowTreatDecimalAsInt boolean
+ */
public void setJdbcArrowTreatDecimalAsInt(boolean jdbcArrowTreatDecimalAsInt) {
isJdbcArrowTreatDecimalAsInt = jdbcArrowTreatDecimalAsInt;
}
+ /**
+ * Get the server url
+ *
+ * @return the server url or null if it is not set
+ */
public String getServerUrl() {
if (connectionPropertiesMap.containsKey(SFSessionProperty.SERVER_URL)) {
return (String) connectionPropertiesMap.get(SFSessionProperty.SERVER_URL);
@@ -289,6 +420,11 @@ public String getServerUrl() {
return null;
}
+ /**
+ * Get whether columns strings are quoted.
+ *
+ * @return value of 'stringsQuotedForColumnDef' connection property or false if not set.
+ */
public boolean isStringQuoted() {
if (connectionPropertiesMap.containsKey(SFSessionProperty.STRINGS_QUOTED)) {
return (Boolean) connectionPropertiesMap.get(SFSessionProperty.STRINGS_QUOTED);
@@ -346,10 +482,21 @@ public void addProperty(String propertyName, Object propertyValue) throws SFExce
}
}
+ /**
+ * Get the connection properties map
+ *
+ * @return the connection properties map
+ */
public Map getConnectionPropertiesMap() {
return connectionPropertiesMap;
}
+ /**
+ * Get the http client key
+ *
+ * @return HttpClientSettingsKey
+ * @throws SnowflakeSQLException if exception encountered
+ */
public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException {
// if key is already created, return it without making a new one
if (ocspAndProxyAndGzipKey != null) {
@@ -547,6 +694,7 @@ private void logHttpClientInitInfo(HttpClientSettingsKey key) {
}
}
+ /** Unset invalid proxy host and port values. */
public void unsetInvalidProxyHostAndPort() {
// If proxyHost and proxyPort are used without http or https unset them, so they are not used
// later by the ProxySelector.
@@ -558,6 +706,11 @@ public void unsetInvalidProxyHostAndPort() {
}
}
+ /**
+ * Get OCSP mode
+ *
+ * @return {@link OCSPMode}
+ */
public OCSPMode getOCSPMode() {
OCSPMode ret;
@@ -576,18 +729,38 @@ public OCSPMode getOCSPMode() {
return ret;
}
+ /**
+ * Get the query timeout
+ *
+ * @return the query timeout value
+ */
public Integer getQueryTimeout() {
return (Integer) this.connectionPropertiesMap.get(SFSessionProperty.QUERY_TIMEOUT);
}
+ /**
+ * Get the user name
+ *
+ * @return user name
+ */
public String getUser() {
return (String) this.connectionPropertiesMap.get(SFSessionProperty.USER);
}
+ /**
+ * Get the server URL
+ *
+ * @return the server URL
+ */
public String getUrl() {
return (String) this.connectionPropertiesMap.get(SFSessionProperty.SERVER_URL);
}
+ /**
+ * Get inject wait input
+ *
+ * @return the value of 'inject_wait_in_put' or 0 if not set
+ */
public int getInjectWaitInPut() {
Object retVal = this.connectionPropertiesMap.get(SFSessionProperty.INJECT_WAIT_IN_PUT);
if (retVal != null) {
@@ -600,42 +773,92 @@ public int getInjectWaitInPut() {
return 0;
}
+ /**
+ * Get whether the metadata request should use the session database.
+ *
+ * @return true if it should use the session database
+ */
public boolean getMetadataRequestUseSessionDatabase() {
return metadataRequestUseSessionDatabase;
}
+ /**
+ * Set to true if the metadata request should use the session database.
+ *
+ * @param enabled boolean
+ */
public void setMetadataRequestUseSessionDatabase(boolean enabled) {
this.metadataRequestUseSessionDatabase = enabled;
}
+ /**
+ * Get if metadata request should use the connection ctx
+ *
+ * @return true if it should use the connection ctx
+ */
public boolean getMetadataRequestUseConnectionCtx() {
return this.metadataRequestUseConnectionCtx;
}
+ /**
+ * Set to true if metadata request should use connection ctx
+ *
+ * @param enabled boolean
+ */
public void setMetadataRequestUseConnectionCtx(boolean enabled) {
this.metadataRequestUseConnectionCtx = enabled;
}
+ /**
+ * Get injected delay
+ *
+ * @return {@link AtomicInteger}
+ */
AtomicInteger getInjectedDelay() {
return _injectedDelay;
}
+ /**
+ * Set the injected delay
+ *
+ * @param injectedDelay injectedDelay value
+ */
public void setInjectedDelay(int injectedDelay) {
this._injectedDelay.set(injectedDelay);
}
+ /**
+ * Get if NTZ should be treated as UTC
+ *
+ * @return true if NTZ should be treated as UTC
+ */
public boolean getTreatNTZAsUTC() {
return treatNTZAsUTC;
}
+ /**
+ * Set whether NTZ should be treated as UTC
+ *
+ * @param treatNTZAsUTC boolean
+ */
public void setTreatNTZAsUTC(boolean treatNTZAsUTC) {
this.treatNTZAsUTC = treatNTZAsUTC;
}
+ /**
+ * Get if heartbeat is enabled
+ *
+ * @return true if enabled
+ */
public boolean getEnableHeartbeat() {
return enableHeartbeat;
}
+ /**
+ * Set if heartbeat is enabled
+ *
+ * @param enableHeartbeat boolean
+ */
public void setEnableHeartbeat(boolean enableHeartbeat) {
this.enableHeartbeat = enableHeartbeat;
}
@@ -656,39 +879,88 @@ public void setHeartbeatFrequency(int frequency) {
}
}
- /** Retrieve session heartbeat frequency in seconds */
+ /**
+ * Retrieve session heartbeat frequency in seconds
+ *
+ * @return the heartbeat frequency in seconds
+ */
public int getHeartbeatFrequency() {
return this.heartbeatFrequency;
}
+ /**
+ * autoCommit field specifies whether autocommit is enabled for the session. Autocommit determines
+ * whether a DML statement, when executed without an active transaction, is automatically
+ * committed after the statement successfully completes. default: true
+ *
+ * @see Transactions/Autocommit
+ * @return a boolean value of autocommit field
+ */
public boolean getAutoCommit() {
return autoCommit.get();
}
+ /**
+ * Sets value of autoCommit field
+ *
+ * @see SFBaseSession#getAutoCommit()
+ * @param autoCommit boolean
+ */
public void setAutoCommit(boolean autoCommit) {
this.autoCommit.set(autoCommit);
}
+ /**
+ * Get if date should be formatted with timezone
+ *
+ * @return true if date should be formatted with timezone
+ */
public boolean getFormatDateWithTimezone() {
return formatDateWithTimezone;
}
+ /**
+ * Set if date should be formatted with timezone
+ *
+ * @param formatDateWithTimezone boolean
+ */
public void setFormatDateWithTimezone(boolean formatDateWithTimezone) {
this.formatDateWithTimezone = formatDateWithTimezone;
}
+ /**
+ * Get if session timezone should be used.
+ *
+ * @return true if using session timezone
+ */
public boolean getUseSessionTimezone() {
return useSessionTimezone;
}
+ /**
+ * Get if using default date format with timezone.
+ *
+ * @return true if using default date format with timezone.
+ */
public boolean getDefaultFormatDateWithTimezone() {
return defaultFormatDateWithTimezone;
}
+ /**
+ * Set if session timezone should be used.
+ *
+ * @param useSessionTimezone boolean
+ */
public void setUseSessionTimezone(boolean useSessionTimezone) {
this.useSessionTimezone = useSessionTimezone;
}
+ /**
+ * Set if default date format with timezone should be used
+ *
+ * @param defaultFormatDateWithTimezone boolean
+ */
public void setDefaultFormatDateWithTimezone(boolean defaultFormatDateWithTimezone) {
this.defaultFormatDateWithTimezone = defaultFormatDateWithTimezone;
}
@@ -906,6 +1178,7 @@ public void setSessionPropertyByKey(String propertyName, Object propertyValue) {
* Fetch the value for a custom session property.
*
* @param propertyName The key of the session property to fetch.
+ * @return session property value
*/
public Object getSessionPropertyByKey(String propertyName) {
return this.customSessionProperties.get(propertyName);
@@ -914,6 +1187,8 @@ public Object getSessionPropertyByKey(String propertyName) {
/**
* Function that checks if the active session can be closed when the connection is closed. Called
* by SnowflakeConnectionV1.
+ *
+ * @return true if the active session is safe to close.
*/
public abstract boolean isSafeToClose();
@@ -921,7 +1196,7 @@ public Object getSessionPropertyByKey(String propertyName) {
* @param queryID query ID of the query whose status is being investigated
* @return enum of type QueryStatus indicating the query's status
* @deprecated Use {@link #getQueryStatusV2(String)}
- * @throws SQLException
+ * @throws SQLException if error encountered
*/
@Deprecated
public abstract QueryStatus getQueryStatus(String queryID) throws SQLException;
@@ -929,13 +1204,15 @@ public Object getSessionPropertyByKey(String propertyName) {
/**
* @param queryID query ID of the query whose status is being investigated
* @return QueryStatusV2 indicating the query's status
- * @throws SQLException
+ * @throws SQLException if error encountered
*/
public abstract QueryStatusV2 getQueryStatusV2(String queryID) throws SQLException;
/**
* Validates the connection properties used by this session, and returns a list of missing
* properties.
+ *
+ * @return List of DriverPropertyInfo
*/
public abstract List checkProperties();
@@ -948,17 +1225,25 @@ public Object getSessionPropertyByKey(String propertyName) {
public abstract void close() throws SFException, SnowflakeSQLException;
/**
- * Returns the telemetry client, if supported, by this session. If not, should return a
- * NoOpTelemetryClient.
+ * @return the telemetry client, if supported, by this session. If not, should return a
+ * NoOpTelemetryClient.
*/
public abstract Telemetry getTelemetryClient();
- /** Makes a heartbeat call to check for session validity. */
+ /**
+ * Makes a heartbeat call to check for session validity.
+ *
+ * @param timeout timeout value
+ * @throws Exception if exception occurs
+ * @throws SFException if exception occurs
+ */
public abstract void callHeartBeat(int timeout) throws Exception, SFException;
/**
* JDBC API. Returns a list of warnings generated since starting this session, or the last time it
* was cleared.
+ *
+ * @return List of SFExceptions
*/
public List getSqlWarnings() {
return sqlWarnings;
@@ -972,29 +1257,59 @@ public void clearSqlWarnings() {
sqlWarnings.clear();
}
+ /**
+ * Get the SFConnectionHandler
+ *
+ * @return {@link SFConnectionHandler}
+ */
public SFConnectionHandler getSfConnectionHandler() {
return sfConnectionHandler;
}
+ /**
+ * Get network timeout in milliseconds
+ *
+ * @return network timeout in milliseconds
+ */
public abstract int getNetworkTimeoutInMilli();
+ /**
+ * @return auth timeout in seconds
+ */
public abstract int getAuthTimeout();
+ /**
+ * @return max http retries
+ */
public abstract int getMaxHttpRetries();
+ /**
+ * @return {@link SnowflakeConnectString}
+ */
public abstract SnowflakeConnectString getSnowflakeConnectionString();
+ /**
+ * @return true if this is an async session
+ */
public abstract boolean isAsyncSession();
+ /**
+ * @return QueryContextDTO containing opaque information shared with the cloud service.
+ */
public abstract QueryContextDTO getQueryContextDTO();
+ /**
+ * Set query context
+ *
+ * @param queryContext the query context string
+ */
public abstract void setQueryContext(String queryContext);
/**
- * If true, JDBC will enable returning TIMESTAMP_WITH_TIMEZONE as column type, otherwise it will
- * not. This function will always return true for JDBC client, so that the client JDBC will not
- * have any behavior change. Stored proc JDBC will override this function to return the value of
- * SP_JDBC_ENABLE_TIMESTAMP_WITH_TIMEZONE from server for backward compatibility.
+ * @return If true, JDBC will enable returning TIMESTAMP_WITH_TIMEZONE as column type, otherwise
+ * it will not. This function will always return true for JDBC client, so that the client JDBC
+ * will not have any behavior change. Stored proc JDBC will override this function to return
+ * the value of SP_JDBC_ENABLE_TIMESTAMP_WITH_TIMEZONE from server for backward compatibility.
*/
public boolean getEnableReturnTimestampWithTimeZone() {
return enableReturnTimestampWithTimeZone;
diff --git a/src/main/java/net/snowflake/client/core/SFBaseStatement.java b/src/main/java/net/snowflake/client/core/SFBaseStatement.java
index 17b2fd1b6..104d49387 100644
--- a/src/main/java/net/snowflake/client/core/SFBaseStatement.java
+++ b/src/main/java/net/snowflake/client/core/SFBaseStatement.java
@@ -93,6 +93,7 @@ public abstract SFBaseResultSet execute(
* @param sql sql statement.
* @param parametersBinding parameters to bind
* @param caller the JDBC interface method that called this method, if any
+ * @param execTimeData ExecTimeTelemetryData
* @return whether there is result set or not
* @throws SQLException if failed to execute sql
* @throws SFException exception raised from Snowflake components
@@ -116,9 +117,23 @@ public abstract SFBaseResultSet asyncExecute(
*
* @throws SFException if the statement is already closed.
* @throws SQLException if there are server-side errors from trying to abort.
+ * @deprecated use {@link #cancel(CancellationReason)} instead
*/
+ @Deprecated
public abstract void cancel() throws SFException, SQLException;
+ /**
+ * Aborts the statement.
+ *
+ * @param cancellationReason reason for the cancellation
+ * @throws SFException if the statement is already closed.
+ * @throws SQLException if there are server-side errors from trying to abort.
+ */
+ @SnowflakeJdbcInternalApi
+ public void cancel(CancellationReason cancellationReason) throws SFException, SQLException {
+ cancel(); // default cancel is called to keep interface backward compatibility
+ }
+
/**
* Sets a property within session properties, i.e., if the sql is using set-sf-property
*
@@ -150,8 +165,6 @@ public void executeSetProperty(final String sql) {
* A method to check if a sql is file upload statement with consideration for potential comments
* in front of put keyword.
*
- *
- *
* @param sql sql statement
* @return true if the command is upload statement
*/
@@ -160,15 +173,25 @@ public static boolean isFileTransfer(String sql) {
return statementType == SFStatementType.PUT || statementType == SFStatementType.GET;
}
- /** If this is a multi-statement, i.e., has child results. */
+ /**
+ * If this is a multi-statement, i.e., has child results.
+ *
+ * @return true if has child results
+ */
public abstract boolean hasChildren();
- /** Returns the SFBaseSession associated with this SFBaseStatement. */
+ /**
+ * Get the SFBaseSession associated with this SFBaseStatement.
+ *
+ * @return The SFBaseSession associated with this SFBaseStatement.
+ */
public abstract SFBaseSession getSFBaseSession();
/**
* Retrieves the current result as a ResultSet, if any. This is invoked by SnowflakeStatement and
* should return an SFBaseResultSet, which is then wrapped in a SnowflakeResultSet.
+ *
+ * @return {@link SFBaseResultSet}
*/
public abstract SFBaseResultSet getResultSet();
@@ -195,7 +218,9 @@ public enum CallingMethod {
public abstract int getConservativePrefetchThreads();
/**
+ * @param queryID the queryID
* @return the child query IDs for the multiple statements query.
+ * @throws SQLException if an error occurs while getting child query IDs
*/
public abstract String[] getChildQueryIds(String queryID) throws SQLException;
}
diff --git a/src/main/java/net/snowflake/client/core/SFException.java b/src/main/java/net/snowflake/client/core/SFException.java
index 77c2b1355..a2ea0c551 100644
--- a/src/main/java/net/snowflake/client/core/SFException.java
+++ b/src/main/java/net/snowflake/client/core/SFException.java
@@ -24,24 +24,47 @@ public class SFException extends Throwable {
private int vendorCode;
private Object[] params;
- /** use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} */
+ /**
+ * Use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)}
+ *
+ * @param errorCode the error code
+ * @param params additional params
+ */
@Deprecated
public SFException(ErrorCode errorCode, Object... params) {
this(null, null, errorCode, params);
}
- /** use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} */
+ /**
+ * use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)}
+ *
+ * @param queryID the query id
+ * @param errorCode the error code
+ * @param params additional params
+ */
@Deprecated
public SFException(String queryID, ErrorCode errorCode, Object... params) {
this(queryID, null, errorCode, params);
}
- /** use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} */
+ /**
+ * use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)}
+ *
+ * @param cause throwable
+ * @param errorCode error code
+ * @param params additional params
+ */
@Deprecated
public SFException(Throwable cause, ErrorCode errorCode, Object... params) {
this(null, cause, errorCode, params);
}
+ /**
+ * @param queryId query ID
+ * @param cause throwable
+ * @param errorCode error code
+ * @param params additional params
+ */
public SFException(String queryId, Throwable cause, ErrorCode errorCode, Object... params) {
super(
errorResourceBundleManager.getLocalizedMessage(
@@ -55,22 +78,47 @@ public SFException(String queryId, Throwable cause, ErrorCode errorCode, Object.
this.params = params;
}
+ /**
+ * Get the error cause
+ *
+ * @return Throwable
+ */
public Throwable getCause() {
return cause;
}
+ /**
+ * Get the query ID
+ *
+ * @return query ID string
+ */
public String getQueryId() {
return queryId;
}
+ /**
+ * Get the SQL state
+ *
+ * @return SQL state string
+ */
public String getSqlState() {
return sqlState;
}
+ /**
+ * Get the vendor code
+ *
+ * @return vendor code
+ */
public int getVendorCode() {
return vendorCode;
}
+ /**
+ * Get additional parameters
+ *
+ * @return parameter array
+ */
public Object[] getParams() {
return params;
}
diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java
index 1011870df..c32a16424 100644
--- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java
+++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java
@@ -108,7 +108,7 @@ public Object getObject(int columnIndex) throws SFException {
*
* @param columnIndex the column index
* @return an object of type long or BigDecimal depending on number size
- * @throws SFException
+ * @throws SFException if an error occurs
*/
private Object getBigInt(int columnIndex, Object obj) throws SFException {
return converters.getNumberConverter().getBigInt(obj, columnIndex);
diff --git a/src/main/java/net/snowflake/client/core/SFLoginOutput.java b/src/main/java/net/snowflake/client/core/SFLoginOutput.java
index 8daf81f10..3470076b9 100644
--- a/src/main/java/net/snowflake/client/core/SFLoginOutput.java
+++ b/src/main/java/net/snowflake/client/core/SFLoginOutput.java
@@ -18,6 +18,7 @@ public class SFLoginOutput {
private int databaseMajorVersion;
private int databaseMinorVersion;
private Duration httpClientSocketTimeout;
+ private Duration httpClientConnectionTimeout;
private String sessionDatabase;
private String sessionSchema;
private String sessionRole;
@@ -53,6 +54,7 @@ public class SFLoginOutput {
this.databaseMajorVersion = databaseMajorVersion;
this.databaseMinorVersion = databaseMinorVersion;
this.httpClientSocketTimeout = Duration.ofMillis(httpClientSocketTimeout);
+ this.httpClientConnectionTimeout = Duration.ofMillis(httpClientConnectionTimeout);
this.sessionDatabase = sessionDatabase;
this.sessionSchema = sessionSchema;
this.sessionRole = sessionRole;
@@ -113,7 +115,7 @@ Duration getHttpClientSocketTimeout() {
}
Duration getHttpClientConnectionTimeout() {
- return httpClientSocketTimeout;
+ return httpClientConnectionTimeout;
}
Map getCommonParams() {
diff --git a/src/main/java/net/snowflake/client/core/SFResultSet.java b/src/main/java/net/snowflake/client/core/SFResultSet.java
index fee90a3ee..7c66e1f5a 100644
--- a/src/main/java/net/snowflake/client/core/SFResultSet.java
+++ b/src/main/java/net/snowflake/client/core/SFResultSet.java
@@ -30,8 +30,6 @@
/**
* Snowflake ResultSet implementation
*
- *
- *
* @author jhuang
*/
public class SFResultSet extends SFJsonResultSet {
@@ -129,7 +127,7 @@ public SFResultSet(
* @param resultSetSerializable data returned in query response
* @param telemetryClient telemetryClient
* @param sortResult should sorting take place
- * @throws SQLException
+ * @throws SQLException if exception is encountered
*/
public SFResultSet(
SnowflakeResultSetSerializableV1 resultSetSerializable,
@@ -147,7 +145,7 @@ public SFResultSet(
* @param session snowflake session
* @param telemetryClient telemetryClient
* @param sortResult should sorting take place
- * @throws SQLException
+ * @throws SQLException if an exception is encountered.
*/
public SFResultSet(
SnowflakeResultSetSerializableV1 resultSetSerializable,
diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java
index 8e2e834a0..c3708ea7e 100644
--- a/src/main/java/net/snowflake/client/core/SFSession.java
+++ b/src/main/java/net/snowflake/client/core/SFSession.java
@@ -290,7 +290,7 @@ else if (ex instanceof SFException) {
/**
* @param queryID query ID of the query whose status is being investigated
* @return enum of type QueryStatus indicating the query's status
- * @throws SQLException
+ * @throws SQLException if an error is encountered
* @deprecated the returned enum is error-prone, use {@link #getQueryStatusV2} instead
*/
@Deprecated
@@ -337,7 +337,7 @@ else if (isAnError(result)) {
/**
* @param queryID query ID of the query whose status is being investigated
* @return a QueryStatusV2 instance indicating the query's status
- * @throws SQLException
+ * @throws SQLException if an error is encountered
*/
public QueryStatusV2 getQueryStatusV2(String queryID) throws SQLException {
JsonNode queryNode = getQueryMetadata(queryID);
diff --git a/src/main/java/net/snowflake/client/core/SFSqlInput.java b/src/main/java/net/snowflake/client/core/SFSqlInput.java
index 2b3d6ba95..6ca9988d9 100644
--- a/src/main/java/net/snowflake/client/core/SFSqlInput.java
+++ b/src/main/java/net/snowflake/client/core/SFSqlInput.java
@@ -37,6 +37,7 @@ static SFSqlInput unwrap(SQLInput sqlInput) {
*
* @param the type of the class modeled by this Class object
* @param type Class representing the Java data type to convert the attribute to.
+ * @param tz timezone to consider.
* @return the attribute at the head of the stream as an {@code Object} in the Java programming
* language;{@code null} if the attribute is SQL {@code NULL}
* @exception SQLException if a database access error occurs
diff --git a/src/main/java/net/snowflake/client/core/SFStatement.java b/src/main/java/net/snowflake/client/core/SFStatement.java
index 6142b8eb9..173ecf21f 100644
--- a/src/main/java/net/snowflake/client/core/SFStatement.java
+++ b/src/main/java/net/snowflake/client/core/SFStatement.java
@@ -298,7 +298,7 @@ private TimeBombTask(SFStatement statement) {
@Override
public Void call() throws SQLException {
try {
- statement.cancel();
+ statement.cancel(CancellationReason.TIMEOUT);
} catch (SFException ex) {
throw new SnowflakeSQLLoggedException(
session, ex.getSqlState(), ex.getVendorCode(), ex, ex.getParams());
@@ -318,6 +318,8 @@ public Void call() throws SQLException {
* @param bindValues map of binding values
* @param describeOnly whether only show the result set metadata
* @param internal run internal query not showing up in history
+ * @param asyncExec is async execute
+ * @param execTimeData ExecTimeTelemetryData
* @return raw json response
* @throws SFException if query is canceled
* @throws SnowflakeSQLException if query is already running
@@ -711,10 +713,11 @@ private void reauthenticate() throws SFException, SnowflakeSQLException {
*
* @param sql sql statement
* @param mediaType media type
+ * @param cancellationReason reason for the cancellation
* @throws SnowflakeSQLException if failed to cancel the statement
* @throws SFException if statement is already closed
*/
- private void cancelHelper(String sql, String mediaType)
+ private void cancelHelper(String sql, String mediaType, CancellationReason cancellationReason)
throws SnowflakeSQLException, SFException {
synchronized (this) {
if (isClosed) {
@@ -734,7 +737,7 @@ private void cancelHelper(String sql, String mediaType)
.setMaxRetries(session.getMaxHttpRetries())
.setHttpClientSettingsKey(session.getHttpClientKey());
- StmtUtil.cancel(stmtInput);
+ StmtUtil.cancel(stmtInput, cancellationReason);
synchronized (this) {
/*
@@ -751,8 +754,10 @@ private void cancelHelper(String sql, String mediaType)
* Execute sql
*
* @param sql sql statement.
+ * @param asyncExec is async exec
* @param parametersBinding parameters to bind
* @param caller the JDBC interface method that called this method, if any
+ * @param execTimeData ExecTimeTelemetryData
* @return whether there is result set or not
* @throws SQLException if failed to execute sql
* @throws SFException exception raised from Snowflake components
@@ -842,6 +847,12 @@ public void close() {
@Override
public void cancel() throws SFException, SQLException {
logger.trace("void cancel()", false);
+ cancel(CancellationReason.UNKNOWN);
+ }
+
+ @Override
+ public void cancel(CancellationReason cancellationReason) throws SFException, SQLException {
+ logger.trace("void cancel(CancellationReason)", false);
if (canceling.get()) {
logger.debug("Query is already cancelled", false);
@@ -866,7 +877,7 @@ public void cancel() throws SFException, SQLException {
}
// cancel the query on the server side if it has been issued
- cancelHelper(this.sqlText, StmtUtil.SF_MEDIA_TYPE);
+ cancelHelper(this.sqlText, StmtUtil.SF_MEDIA_TYPE, cancellationReason);
}
}
diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java
index a12f20ce3..de0eb3a87 100644
--- a/src/main/java/net/snowflake/client/core/SessionUtil.java
+++ b/src/main/java/net/snowflake/client/core/SessionUtil.java
@@ -953,11 +953,22 @@ private static String nullStringAsEmptyString(String value) {
return value;
}
- /** Delete the id token cache */
+ /**
+ * Delete the id token cache
+ *
+ * @param host The host string
+ * @param user The user
+ */
public static void deleteIdTokenCache(String host, String user) {
CredentialManager.getInstance().deleteIdTokenCache(host, user);
}
+ /**
+ * Delete the mfa token cache
+ *
+ * @param host The host string
+ * @param user The user
+ */
public static void deleteMfaTokenCache(String host, String user) {
CredentialManager.getInstance().deleteMfaTokenCache(host, user);
}
@@ -1710,6 +1721,7 @@ enum TokenRequestType {
* private link, do nothing.
*
* @param serverUrl The Snowflake URL includes protocol such as "https://"
+ * @throws IOException if an exception is encountered
*/
public static void resetOCSPUrlIfNecessary(String serverUrl) throws IOException {
if (PrivateLinkDetector.isPrivateLink(serverUrl)) {
diff --git a/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java b/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java
index 3b371536d..31e6af391 100644
--- a/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java
+++ b/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java
@@ -29,6 +29,10 @@ public class SnowflakeMutableProxyRoutePlanner implements HttpRoutePlanner, Seri
/**
* @deprecated Use {@link #SnowflakeMutableProxyRoutePlanner(String, int, HttpProtocol, String)}
* instead
+ * @param host host
+ * @param proxyPort proxy port
+ * @param proxyProtocol proxy protocol
+ * @param nonProxyHosts non-proxy hosts
*/
@Deprecated
public SnowflakeMutableProxyRoutePlanner(
@@ -36,6 +40,12 @@ public SnowflakeMutableProxyRoutePlanner(
this(host, proxyPort, toSnowflakeProtocol(proxyProtocol), nonProxyHosts);
}
+ /**
+ * @param host host
+ * @param proxyPort proxy port
+ * @param proxyProtocol proxy protocol
+ * @param nonProxyHosts non-proxy hosts
+ */
public SnowflakeMutableProxyRoutePlanner(
String host, int proxyPort, HttpProtocol proxyProtocol, String nonProxyHosts) {
proxyRoutePlanner =
@@ -46,12 +56,20 @@ public SnowflakeMutableProxyRoutePlanner(
this.protocol = proxyProtocol;
}
+ /**
+ * Set non-proxy hosts
+ *
+ * @param nonProxyHosts non-proxy hosts
+ */
public void setNonProxyHosts(String nonProxyHosts) {
this.nonProxyHosts = nonProxyHosts;
proxyRoutePlanner =
new SdkProxyRoutePlanner(host, proxyPort, toAwsProtocol(protocol), nonProxyHosts);
}
+ /**
+ * @return non-proxy hosts string
+ */
public String getNonProxyHosts() {
return nonProxyHosts;
}
diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java
index 96fefe5dc..18b7ae7f7 100644
--- a/src/main/java/net/snowflake/client/core/StmtUtil.java
+++ b/src/main/java/net/snowflake/client/core/StmtUtil.java
@@ -270,6 +270,7 @@ public JsonNode getResult() {
* submission, but continue the ping pong process.
*
* @param stmtInput input statement
+ * @param execTimeData ExecTimeTelemetryData
* @return StmtOutput output statement
* @throws SFException exception raised from Snowflake components
* @throws SnowflakeSQLException exception raised from Snowflake components
@@ -584,8 +585,6 @@ protected static String getQueryResult(
/**
* Issue get-result call to get query result given an in-progress response.
*
- *
- *
* @param getResultPath path to results
* @param stmtInput object with context information
* @return results in string form
@@ -645,8 +644,6 @@ protected static String getQueryResult(String getResultPath, StmtInput stmtInput
/**
* Issue get-result call to get query result given an in progress response.
*
- *
- *
* @param queryId id of query to get results for
* @param session the current session
* @return results in JSON
@@ -681,8 +678,23 @@ protected static JsonNode getQueryResultJSON(String queryId, SFSession session)
* @param stmtInput input statement
* @throws SFException if there is an internal exception
* @throws SnowflakeSQLException if failed to cancel the statement
+ * @deprecated use {@link #cancel(StmtInput, CancellationReason)} instead
*/
+ @Deprecated
public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLException {
+ cancel(stmtInput, CancellationReason.UNKNOWN);
+ }
+
+ /**
+ * Cancel a statement identifiable by a request id
+ *
+ * @param stmtInput input statement
+ * @param cancellationReason reason for the cancellation
+ * @throws SFException if there is an internal exception
+ * @throws SnowflakeSQLException if failed to cancel the statement
+ */
+ public static void cancel(StmtInput stmtInput, CancellationReason cancellationReason)
+ throws SFException, SnowflakeSQLException {
HttpPost httpRequest = null;
AssertUtil.assertTrue(
@@ -701,7 +713,7 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE
try {
URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl);
-
+ logger.warn("Cancelling query {} with reason {}", stmtInput.requestId, cancellationReason);
logger.debug("Aborting query: {}", stmtInput.sql);
uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1);
diff --git a/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java b/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java
index c7054442c..855f128b2 100644
--- a/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java
@@ -43,9 +43,18 @@ abstract class AbstractArrowVectorConverter implements ArrowVectorConverter {
/** Field names of the struct vectors used by timestamp */
public static final String FIELD_NAME_EPOCH = "epoch"; // seconds since epoch
+ /** Timezone index */
public static final String FIELD_NAME_TIME_ZONE_INDEX = "timezone"; // time zone index
+
+ /** Fraction in nanoseconds */
public static final String FIELD_NAME_FRACTION = "fraction"; // fraction in nanoseconds
+ /**
+ * @param logicalTypeStr snowflake logical type of the target arrow vector.
+ * @param valueVector value vector
+ * @param vectorIndex value index
+ * @param context DataConversionContext
+ */
AbstractArrowVectorConverter(
String logicalTypeStr,
ValueVector valueVector,
@@ -153,6 +162,11 @@ public BigDecimal toBigDecimal(int index) throws SFException {
ErrorCode.INVALID_VALUE_CONVERT, logicalTypeStr, SnowflakeUtil.BIG_DECIMAL_STR, "");
}
+ /**
+ * True if should treat decimal as int type.
+ *
+ * @return true if the decimal should be treated as an int type, false otherwise.
+ */
boolean shouldTreatDecimalAsInt() {
return shouldTreatDecimalAsInt;
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java b/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java
index 08ce23eec..ad9926eac 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java
@@ -2,13 +2,22 @@
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFException;
+import net.snowflake.client.core.arrow.tostringhelpers.ArrowArrayStringRepresentationBuilder;
+import net.snowflake.client.jdbc.SnowflakeSQLException;
import net.snowflake.client.jdbc.SnowflakeType;
+import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.ListVector;
+/** Array type converter. */
public class ArrayConverter extends AbstractArrowVectorConverter {
private final ListVector vector;
+ /**
+ * @param valueVector ListVector
+ * @param vectorIndex vector index
+ * @param context DataConversionContext
+ */
public ArrayConverter(ListVector valueVector, int vectorIndex, DataConversionContext context) {
super(SnowflakeType.ARRAY.name(), valueVector, vectorIndex, context);
this.vector = valueVector;
@@ -21,6 +30,25 @@ public Object toObject(int index) throws SFException {
@Override
public String toString(int index) throws SFException {
- return vector.getObject(index).toString();
+ FieldVector vectorUnpacked = vector.getChildrenFromFields().get(0);
+ SnowflakeType logicalType =
+ ArrowVectorConverterUtil.getSnowflakeTypeFromFieldMetadata(vectorUnpacked.getField());
+
+ ArrowArrayStringRepresentationBuilder builder =
+ new ArrowArrayStringRepresentationBuilder(logicalType);
+
+ final ArrowVectorConverter converter;
+
+ try {
+ converter = ArrowVectorConverterUtil.initConverter(vectorUnpacked, context, columnIndex);
+ } catch (SnowflakeSQLException e) {
+ return vector.getObject(index).toString();
+ }
+
+ for (int i = vector.getElementStartIndex(index); i < vector.getElementEndIndex(index); i++) {
+ builder.appendValue(converter.toString(i));
+ }
+
+ return builder.toString();
}
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java
index 0478b2996..5966447e8 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java
@@ -41,7 +41,7 @@ private void initIndices() {
* This method is only used when sf-property sort is on
*
* @return sorted indices
- * @throws SFException
+ * @throws SFException when an exception is encountered
*/
public IntVector sort() throws SFException {
quickSort(0, resultChunk.get(0).getValueCount() - 1);
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java
index 2ad5c3ef2..03d5c03e8 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java
@@ -46,7 +46,7 @@ public static String getStringFormat(int scale) {
/**
* new method to get Date from integer
*
- * @param day
+ * @param day The day to convert.
* @return Date
*/
public static Date getDate(int day) {
@@ -57,11 +57,11 @@ public static Date getDate(int day) {
/**
* Method to get Date from integer using timezone offsets
*
- * @param day
- * @param oldTz
- * @param newTz
- * @return
- * @throws SFException
+ * @param day The day to convert.
+ * @param oldTz The old timezone.
+ * @param newTz The new timezone.
+ * @return Date
+ * @throws SFException if date value is invalid
*/
public static Date getDate(int day, TimeZone oldTz, TimeZone newTz) throws SFException {
try {
@@ -90,10 +90,10 @@ public static Date getDate(int day, TimeZone oldTz, TimeZone newTz) throws SFExc
/**
* simplified moveToTimeZone method
*
- * @param milliSecsSinceEpoch
- * @param oldTZ
- * @param newTZ
- * @return offset
+ * @param milliSecsSinceEpoch milliseconds since Epoch
+ * @param oldTZ old timezone
+ * @param newTZ new timezone
+ * @return offset value
*/
private static long moveToTimeZoneOffset(
long milliSecsSinceEpoch, TimeZone oldTZ, TimeZone newTZ) {
@@ -128,9 +128,9 @@ private static long moveToTimeZoneOffset(
/**
* move the input timestamp form oldTZ to newTZ
*
- * @param ts
- * @param oldTZ
- * @param newTZ
+ * @param ts Timestamp
+ * @param oldTZ Old timezone
+ * @param newTZ New timezone
* @return timestamp in newTZ
*/
public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone newTZ) {
@@ -149,7 +149,7 @@ public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone ne
*
* @param epoch the value since epoch time
* @param scale the scale of the value
- * @return
+ * @return Timestamp
*/
public static Timestamp toJavaTimestamp(long epoch, int scale) {
return toJavaTimestamp(epoch, scale, TimeZone.getDefault(), false);
@@ -160,7 +160,9 @@ public static Timestamp toJavaTimestamp(long epoch, int scale) {
*
* @param epoch the value since epoch time
* @param scale the scale of the value
- * @return
+ * @param sessionTimezone the session timezone
+ * @param useSessionTimezone should the session timezone be used
+ * @return Timestamp
*/
@SnowflakeJdbcInternalApi
public static Timestamp toJavaTimestamp(
@@ -178,8 +180,8 @@ public static Timestamp toJavaTimestamp(
/**
* check whether the input seconds out of the scope of Java timestamp
*
- * @param seconds
- * @return
+ * @param seconds long value to check
+ * @return true if value is out of the scope of Java timestamp.
*/
public static boolean isTimestampOverflow(long seconds) {
return seconds < Long.MIN_VALUE / powerOfTen(3) || seconds > Long.MAX_VALUE / powerOfTen(3);
@@ -191,10 +193,10 @@ public static boolean isTimestampOverflow(long seconds) {
* represents as epoch = -1233 and fraction = 766,000,000 For example, -0.13 represents as epoch =
* -1 and fraction = 870,000,000
*
- * @param seconds
- * @param fraction
- * @param timezone - The timezone being used for the toString() formatting
- * @param timezone -
+ * @param seconds seconds value
+ * @param fraction fraction
+ * @param timezone The timezone being used for the toString() formatting
+ * @param useSessionTz whether to use the session timezone
* @return java timestamp object
*/
public static Timestamp createTimestamp(
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java
index f61e9954d..1a1cff542 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java
@@ -16,7 +16,7 @@ public interface ArrowVectorConverter {
/**
* Set to true when time value should be displayed in wallclock time (no timezone offset)
*
- * @param useSessionTimezone
+ * @param useSessionTimezone true when time values should be displayed in wallclock time (no timezone offset)
*/
void setUseSessionTimezone(boolean useSessionTimezone);
@@ -160,6 +160,8 @@ public interface ArrowVectorConverter {
Object toObject(int index) throws SFException;
/**
+ * Set to true if NTZ timestamp should be set to UTC
+ *
* @param isUTC true or false value of whether NTZ timestamp should be set to UTC
*/
void setTreatNTZAsUTC(boolean isUTC);
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java
index 45185072f..a6799b223 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java
@@ -9,12 +9,14 @@
import net.snowflake.client.jdbc.SnowflakeSQLLoggedException;
import net.snowflake.client.jdbc.SnowflakeType;
import net.snowflake.common.core.SqlState;
+import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.complex.FixedSizeListVector;
import org.apache.arrow.vector.complex.ListVector;
import org.apache.arrow.vector.complex.MapVector;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.arrow.vector.types.Types;
+import org.apache.arrow.vector.types.pojo.Field;
@SnowflakeJdbcInternalApi
public final class ArrowVectorConverterUtil {
@@ -34,13 +36,20 @@ public static int getScale(ValueVector vector, SFBaseSession session)
}
}
+ public static SnowflakeType getSnowflakeTypeFromFieldMetadata(Field field) {
+ Map customMeta = field.getMetadata();
+ if (customMeta != null && customMeta.containsKey("logicalType")) {
+ return SnowflakeType.valueOf(customMeta.get("logicalType"));
+ }
+
+ return null;
+ }
+
/**
* Given an arrow vector (a single column in a single record batch), return an arrow vector
* converter. Note, converter is built on top of arrow vector, so that arrow data can be converted
* back to java data
*
- *
- *
*
Arrow converter mappings for Snowflake fixed-point numbers
* ----------------------------------------------------------------------------------------- Max
* position and scale Converter
@@ -57,6 +66,7 @@ public static int getScale(ValueVector vector, SFBaseSession session)
* @param session SFBaseSession for purposes of logging
* @param idx the index of the vector in its batch
* @return A converter on top og the vector
+ * @throws SnowflakeSQLException if an error is encountered
*/
public static ArrowVectorConverter initConverter(
ValueVector vector, DataConversionContext context, SFBaseSession session, int idx)
@@ -65,12 +75,11 @@ public static ArrowVectorConverter initConverter(
Types.MinorType type = Types.getMinorTypeForArrowType(vector.getField().getType());
// each column's metadata
- Map customMeta = vector.getField().getMetadata();
+ SnowflakeType st = getSnowflakeTypeFromFieldMetadata(vector.getField());
if (type == Types.MinorType.DECIMAL) {
// Note: Decimal vector is different from others
return new DecimalToScaledFixedConverter(vector, idx, context);
- } else if (!customMeta.isEmpty()) {
- SnowflakeType st = SnowflakeType.valueOf(customMeta.get("logicalType"));
+ } else if (st != null) {
switch (st) {
case ANY:
case CHAR:
@@ -229,4 +238,10 @@ public static ArrowVectorConverter initConverter(
"Unexpected Arrow Field for ",
type.toString());
}
+
+ public static ArrowVectorConverter initConverter(
+ FieldVector vector, DataConversionContext context, int columnIndex)
+ throws SnowflakeSQLException {
+ return initConverter(vector, context, context.getSession(), columnIndex);
+ }
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java
index 71bd123a0..13a026e4f 100644
--- a/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java
@@ -23,6 +23,11 @@ public class BigIntToFixedConverter extends AbstractArrowVectorConverter {
protected ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public BigIntToFixedConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(
diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java
index 74d01f98a..87b3d43d1 100644
--- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java
@@ -18,10 +18,16 @@
import org.apache.arrow.vector.BigIntVector;
import org.apache.arrow.vector.ValueVector;
+/** BigInt to Time type converter. */
public class BigIntToTimeConverter extends AbstractArrowVectorConverter {
private BigIntVector bigIntVector;
protected ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public BigIntToTimeConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIME.name(), fieldVector, columnIndex, context);
@@ -49,6 +55,15 @@ public Time toTime(int index) throws SFException {
}
}
+ /**
+ * Return the long value as a Time object.
+ *
+ * @param value long value to represent as Time
+ * @param scale the scale
+ * @param useSessionTimezone boolean indicating use of session timezone
+ * @return Time object representing the value
+ * @throws SFException invalid data conversion
+ */
public static Time getTime(long value, int scale, boolean useSessionTimezone) throws SFException {
SFTime sfTime = SFTime.fromFractionalSeconds(value, scale);
Time ts =
diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java
index e2bba45ab..774a0cd74 100644
--- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java
@@ -23,6 +23,11 @@ public class BigIntToTimestampLTZConverter extends AbstractArrowVectorConverter
private BigIntVector bigIntVector;
private ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public BigIntToTimestampLTZConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIMESTAMP_LTZ.name(), fieldVector, columnIndex, context);
@@ -97,7 +102,7 @@ public boolean toBoolean(int index) throws SFException {
* @param val epoch
* @param scale scale
* @return Timestamp value without timezone take into account
- * @throws SFException
+ * @throws SFException if an exception is encountered
*/
@Deprecated
public static Timestamp getTimestamp(long val, int scale) throws SFException {
diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java
index cec64d59e..82d107209 100644
--- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java
@@ -24,6 +24,11 @@ public class BigIntToTimestampNTZConverter extends AbstractArrowVectorConverter
private static final TimeZone NTZ = TimeZone.getTimeZone("UTC");
private ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public BigIntToTimestampNTZConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIMESTAMP_NTZ.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java b/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java
index 2f5a8cf83..cddc7b3b4 100644
--- a/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java
@@ -14,6 +14,11 @@
public class BitToBooleanConverter extends AbstractArrowVectorConverter {
private BitVector bitVector;
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public BitToBooleanConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.BOOLEAN.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/DateConverter.java b/src/main/java/net/snowflake/client/core/arrow/DateConverter.java
index a6f50e388..7d18417e2 100644
--- a/src/main/java/net/snowflake/client/core/arrow/DateConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/DateConverter.java
@@ -31,6 +31,12 @@ public DateConverter(ValueVector fieldVector, int columnIndex, DataConversionCon
this.useDateFormat = false;
}
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ * @param useDateFormat boolean indicating whether to use the session timezone
+ */
public DateConverter(
ValueVector fieldVector,
int columnIndex,
diff --git a/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java
index 259913d95..b6d9b7a0b 100644
--- a/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java
@@ -17,6 +17,11 @@
public class DecimalToScaledFixedConverter extends AbstractArrowVectorConverter {
protected DecimalVector decimalVector;
+ /**
+ * @param fieldVector ValueVector
+ * @param vectorIndex vector index
+ * @param context DataConversionContext
+ */
public DecimalToScaledFixedConverter(
ValueVector fieldVector, int vectorIndex, DataConversionContext context) {
super(
diff --git a/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java b/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java
index d2f925867..731407861 100644
--- a/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java
@@ -13,10 +13,16 @@
import org.apache.arrow.vector.Float8Vector;
import org.apache.arrow.vector.ValueVector;
+/** Convert from Arrow Float8Vector to Real. */
public class DoubleToRealConverter extends AbstractArrowVectorConverter {
private Float8Vector float8Vector;
private ByteBuffer byteBuf = ByteBuffer.allocate(Float8Vector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public DoubleToRealConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.REAL.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java
index 8055081ef..8cca3c930 100644
--- a/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java
@@ -18,6 +18,11 @@ public class IntToFixedConverter extends AbstractArrowVectorConverter {
protected int sfScale;
protected ByteBuffer byteBuf = ByteBuffer.allocate(IntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public IntToFixedConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(
diff --git a/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java
index d704e31bd..27ca0b4ad 100644
--- a/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java
@@ -18,10 +18,16 @@
import org.apache.arrow.vector.IntVector;
import org.apache.arrow.vector.ValueVector;
+/** Convert from Arrow IntVector to Time. */
public class IntToTimeConverter extends AbstractArrowVectorConverter {
private IntVector intVector;
private ByteBuffer byteBuf = ByteBuffer.allocate(IntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public IntToTimeConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIME.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/MapConverter.java b/src/main/java/net/snowflake/client/core/arrow/MapConverter.java
index 433792294..4099cd5fb 100644
--- a/src/main/java/net/snowflake/client/core/arrow/MapConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/MapConverter.java
@@ -4,14 +4,23 @@
import java.util.stream.Collectors;
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFException;
+import net.snowflake.client.core.arrow.tostringhelpers.ArrowObjectStringRepresentationBuilder;
+import net.snowflake.client.jdbc.SnowflakeSQLException;
import net.snowflake.client.jdbc.SnowflakeType;
+import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.MapVector;
import org.apache.arrow.vector.util.JsonStringHashMap;
+/** Arrow MapVector converter. */
public class MapConverter extends AbstractArrowVectorConverter {
private final MapVector vector;
+ /**
+ * @param valueVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public MapConverter(MapVector valueVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.MAP.name(), valueVector, columnIndex, context);
this.vector = valueVector;
@@ -28,6 +37,30 @@ public Object toObject(int index) throws SFException {
@Override
public String toString(int index) throws SFException {
- return vector.getObject(index).toString();
+ ArrowObjectStringRepresentationBuilder builder = new ArrowObjectStringRepresentationBuilder();
+
+ FieldVector vectorUnpacked = vector.getChildrenFromFields().get(0);
+
+ FieldVector keys = vectorUnpacked.getChildrenFromFields().get(0);
+ FieldVector values = vectorUnpacked.getChildrenFromFields().get(1);
+ final ArrowVectorConverter keyConverter;
+ final ArrowVectorConverter valueConverter;
+
+ SnowflakeType valueLogicalType =
+ ArrowVectorConverterUtil.getSnowflakeTypeFromFieldMetadata(values.getField());
+
+ try {
+ keyConverter = ArrowVectorConverterUtil.initConverter(keys, context, columnIndex);
+ valueConverter = ArrowVectorConverterUtil.initConverter(values, context, columnIndex);
+ } catch (SnowflakeSQLException e) {
+ return vector.getObject(index).toString();
+ }
+
+ for (int i = vector.getElementStartIndex(index); i < vector.getElementEndIndex(index); i++) {
+ builder.appendKeyValue(
+ keyConverter.toString(i), valueConverter.toString(i), valueLogicalType);
+ }
+
+ return builder.toString();
}
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java
index bfa398d88..13aa87db5 100644
--- a/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java
@@ -18,6 +18,11 @@ public class SmallIntToFixedConverter extends AbstractArrowVectorConverter {
protected SmallIntVector smallIntVector;
ByteBuffer byteBuf = ByteBuffer.allocate(SmallIntVector.TYPE_WIDTH);
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public SmallIntToFixedConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(
diff --git a/src/main/java/net/snowflake/client/core/arrow/StructConverter.java b/src/main/java/net/snowflake/client/core/arrow/StructConverter.java
index 84ccd7c0f..4c0516c51 100644
--- a/src/main/java/net/snowflake/client/core/arrow/StructConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/StructConverter.java
@@ -3,7 +3,10 @@
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFException;
import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+import net.snowflake.client.core.arrow.tostringhelpers.ArrowObjectStringRepresentationBuilder;
+import net.snowflake.client.jdbc.SnowflakeSQLException;
import net.snowflake.client.jdbc.SnowflakeType;
+import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.StructVector;
@SnowflakeJdbcInternalApi
@@ -23,6 +26,19 @@ public Object toObject(int index) throws SFException {
@Override
public String toString(int index) throws SFException {
- return structVector.getObject(index).toString();
+ ArrowObjectStringRepresentationBuilder builder = new ArrowObjectStringRepresentationBuilder();
+ for (String childName : structVector.getChildFieldNames()) {
+ FieldVector fieldVector = structVector.getChild(childName);
+ SnowflakeType logicalType =
+ ArrowVectorConverterUtil.getSnowflakeTypeFromFieldMetadata(fieldVector.getField());
+ try {
+ ArrowVectorConverter converter =
+ ArrowVectorConverterUtil.initConverter(fieldVector, context, columnIndex);
+ builder.appendKeyValue(childName, converter.toString(index), logicalType);
+ } catch (SnowflakeSQLException e) {
+ return structVector.getObject(index).toString();
+ }
+ }
+ return builder.toString();
}
}
diff --git a/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java
index 88d3e53ba..929045dd1 100644
--- a/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java
@@ -29,6 +29,11 @@ public class ThreeFieldStructToTimestampTZConverter extends AbstractArrowVectorC
private IntVector timeZoneIndices;
private TimeZone timeZone = TimeZone.getTimeZone("UTC");
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public ThreeFieldStructToTimestampTZConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIMESTAMP_LTZ.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java
index 26c90c228..ace873f7f 100644
--- a/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java
@@ -17,6 +17,11 @@ public class TinyIntToFixedConverter extends AbstractArrowVectorConverter {
protected TinyIntVector tinyIntVector;
protected int sfScale = 0;
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public TinyIntToFixedConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(
diff --git a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java
index 86eeb93b8..6e3904751 100644
--- a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java
@@ -26,6 +26,11 @@ public class TwoFieldStructToTimestampLTZConverter extends AbstractArrowVectorCo
private BigIntVector epochs;
private IntVector fractions;
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public TwoFieldStructToTimestampLTZConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIMESTAMP_LTZ.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java
index f4d0d9417..30467169e 100644
--- a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java
@@ -27,6 +27,11 @@ public class TwoFieldStructToTimestampNTZConverter extends AbstractArrowVectorCo
private static final TimeZone NTZ = TimeZone.getTimeZone("UTC");
+ /**
+ * @param fieldVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public TwoFieldStructToTimestampNTZConverter(
ValueVector fieldVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TIMESTAMP_NTZ.name(), fieldVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java b/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java
index f45e561f4..2c4774fb0 100644
--- a/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java
@@ -11,9 +11,15 @@
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.VarBinaryVector;
+/** Converter from Arrow VarBinaryVector to Binary. */
public class VarBinaryToBinaryConverter extends AbstractArrowVectorConverter {
private VarBinaryVector varBinaryVector;
+ /**
+ * @param valueVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public VarBinaryToBinaryConverter(
ValueVector valueVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.BINARY.name(), valueVector, columnIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java b/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java
index 8a6ce64e5..b53595d42 100644
--- a/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java
@@ -22,6 +22,11 @@
public class VarCharConverter extends AbstractArrowVectorConverter {
private VarCharVector varCharVector;
+ /**
+ * @param valueVector ValueVector
+ * @param columnIndex column index
+ * @param context DataConversionContext
+ */
public VarCharConverter(ValueVector valueVector, int columnIndex, DataConversionContext context) {
super(SnowflakeType.TEXT.name(), valueVector, columnIndex, context);
this.varCharVector = (VarCharVector) valueVector;
diff --git a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java
index ae7a492a0..8d1ae2942 100644
--- a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java
+++ b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java
@@ -6,10 +6,16 @@
import net.snowflake.client.jdbc.SnowflakeType;
import org.apache.arrow.vector.complex.FixedSizeListVector;
+/** Arrow FixedSizeListVector converter. */
public class VectorTypeConverter extends AbstractArrowVectorConverter {
private final FixedSizeListVector vector;
+ /**
+ * @param valueVector ValueVector
+ * @param vectorIndex vector index
+ * @param context DataConversionContext
+ */
public VectorTypeConverter(
FixedSizeListVector valueVector, int vectorIndex, DataConversionContext context) {
super(SnowflakeType.ARRAY.name(), valueVector, vectorIndex, context);
diff --git a/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowArrayStringRepresentationBuilder.java b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowArrayStringRepresentationBuilder.java
new file mode 100644
index 000000000..7ee6a07aa
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowArrayStringRepresentationBuilder.java
@@ -0,0 +1,19 @@
+package net.snowflake.client.core.arrow.tostringhelpers;
+
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+import net.snowflake.client.jdbc.SnowflakeType;
+
+@SnowflakeJdbcInternalApi
+public class ArrowArrayStringRepresentationBuilder extends ArrowStringRepresentationBuilderBase {
+
+ private final SnowflakeType valueType;
+
+ public ArrowArrayStringRepresentationBuilder(SnowflakeType valueType) {
+ super(",", "[", "]");
+ this.valueType = valueType;
+ }
+
+ public ArrowStringRepresentationBuilderBase appendValue(String value) {
+ return add(quoteIfNeeded(value, valueType));
+ }
+}
diff --git a/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowObjectStringRepresentationBuilder.java b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowObjectStringRepresentationBuilder.java
new file mode 100644
index 000000000..53513836b
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowObjectStringRepresentationBuilder.java
@@ -0,0 +1,21 @@
+package net.snowflake.client.core.arrow.tostringhelpers;
+
+import java.util.StringJoiner;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+import net.snowflake.client.jdbc.SnowflakeType;
+
+@SnowflakeJdbcInternalApi
+public class ArrowObjectStringRepresentationBuilder extends ArrowStringRepresentationBuilderBase {
+
+ public ArrowObjectStringRepresentationBuilder() {
+ super(",", "{", "}");
+ }
+
+ public ArrowStringRepresentationBuilderBase appendKeyValue(
+ String key, String value, SnowflakeType valueType) {
+ StringJoiner joiner = new StringJoiner(": ");
+ joiner.add('"' + key + '"');
+ joiner.add(quoteIfNeeded(value, valueType));
+ return add(joiner.toString());
+ }
+}
diff --git a/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowStringRepresentationBuilderBase.java b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowStringRepresentationBuilderBase.java
new file mode 100644
index 000000000..cc25bb7e0
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowStringRepresentationBuilderBase.java
@@ -0,0 +1,65 @@
+package net.snowflake.client.core.arrow.tostringhelpers;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.StringJoiner;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+import net.snowflake.client.jdbc.SnowflakeType;
+
+/**
+ * StringBuilder like class to aggregate the string representation of snowflake Native ARROW
+ * structured types as JSON one-liners. Provides some additional snowflake-specific logic in order
+ * to determine whether the value should be quoted or case should be changed.
+ */
+@SnowflakeJdbcInternalApi
+public abstract class ArrowStringRepresentationBuilderBase {
+ private final StringJoiner joiner;
+ private static final Set quotableTypes;
+
+ static {
+ quotableTypes = new HashSet<>();
+ quotableTypes.add(SnowflakeType.ANY);
+ quotableTypes.add(SnowflakeType.CHAR);
+ quotableTypes.add(SnowflakeType.TEXT);
+ quotableTypes.add(SnowflakeType.VARIANT);
+ quotableTypes.add(SnowflakeType.BINARY);
+ quotableTypes.add(SnowflakeType.DATE);
+ quotableTypes.add(SnowflakeType.TIME);
+ quotableTypes.add(SnowflakeType.TIMESTAMP_LTZ);
+ quotableTypes.add(SnowflakeType.TIMESTAMP_NTZ);
+ quotableTypes.add(SnowflakeType.TIMESTAMP_TZ);
+ }
+
+ public ArrowStringRepresentationBuilderBase(String delimiter, String prefix, String suffix) {
+ joiner = new StringJoiner(delimiter, prefix, suffix);
+ }
+
+ protected ArrowStringRepresentationBuilderBase add(String string) {
+ joiner.add(string);
+ return this;
+ }
+
+ private boolean shouldQuoteValue(SnowflakeType type) {
+ return quotableTypes.contains(type);
+ }
+
+ protected String quoteIfNeeded(String string, SnowflakeType type) {
+ // Turn Boolean string representations lowercase to make the output JSON-compatible
+ // this should be changed on the converter level, but it would be a breaking change thus
+ // for now only structured types will be valid JSONs while in NATIVE ARROW mode
+ if (type == SnowflakeType.BOOLEAN) {
+ string = string.toLowerCase();
+ }
+
+ if (shouldQuoteValue(type)) {
+ return '"' + string + '"';
+ }
+
+ return string;
+ }
+
+ @Override
+ public String toString() {
+ return joiner.toString();
+ }
+}
diff --git a/src/main/java/net/snowflake/client/core/bind/BindUploader.java b/src/main/java/net/snowflake/client/core/bind/BindUploader.java
index 6b901da44..ed1f11249 100644
--- a/src/main/java/net/snowflake/client/core/bind/BindUploader.java
+++ b/src/main/java/net/snowflake/client/core/bind/BindUploader.java
@@ -187,7 +187,13 @@ public static synchronized BindUploader newInstance(SFBaseSession session, Strin
return new BindUploader(session, stageDir);
}
- /** Wrapper around upload() with default compression to true. */
+ /**
+ * Wrapper around upload() with default compression to true.
+ *
+ * @param bindValues the bind map to upload
+ * @throws BindException if there is an error when uploading bind values
+ * @throws SQLException if any error occurs
+ */
public void upload(Map bindValues)
throws BindException, SQLException {
upload(bindValues, true);
@@ -199,8 +205,8 @@ public void upload(Map bindValues)
*
* @param bindValues the bind map to upload
* @param compressData whether or not to compress data
- * @throws BindException
- * @throws SQLException
+ * @throws BindException if there is an error when uploading bind values
+ * @throws SQLException if any error occurs
*/
public void upload(Map bindValues, boolean compressData)
throws BindException, SQLException {
@@ -254,6 +260,7 @@ public void upload(Map bindValues, boolean compress
* @param destFileName destination file name to use
* @param compressData whether compression is requested fore uploading data
* @throws SQLException raises if any error occurs
+ * @throws BindException if there is an error when uploading bind values
*/
private void uploadStreamInternal(
InputStream inputStream, String destFileName, boolean compressData)
diff --git a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java
index c080a2f36..896437def 100644
--- a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java
+++ b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java
@@ -147,12 +147,16 @@ public void freeData() {
}
/**
+ * @param dataConversionContext DataConversionContext
* @return an iterator to iterate over current chunk
*/
public ArrowChunkIterator getIterator(DataConversionContext dataConversionContext) {
return new ArrowChunkIterator(dataConversionContext);
}
+ /**
+ * @return an empty iterator to iterate over current chunk
+ */
public static ArrowChunkIterator getEmptyChunkIterator() {
return new EmptyArrowResultChunk().new ArrowChunkIterator(null);
}
@@ -209,7 +213,12 @@ private List initConverters(List vectors)
return converters;
}
- /** advance to next row */
+ /**
+ * Advance to next row.
+ *
+ * @return true if there is a next row
+ * @throws SnowflakeSQLException if an error is encountered.
+ */
public boolean next() throws SnowflakeSQLException {
currentRowInRecordBatch++;
if (currentRowInRecordBatch < rowCountInCurrentRecordBatch) {
@@ -279,6 +288,8 @@ public int getCurrentRowInRecordBatch() {
/**
* merge arrow result chunk with more than one batches into one record batch (Only used for the
* first chunk when client side sorting is required)
+ *
+ * @throws SnowflakeSQLException if failed to merge first result chunk
*/
public void mergeBatchesIntoOne() throws SnowflakeSQLException {
try {
diff --git a/src/main/java/net/snowflake/client/jdbc/CompressedStreamFactory.java b/src/main/java/net/snowflake/client/jdbc/CompressedStreamFactory.java
new file mode 100644
index 000000000..ebb376db9
--- /dev/null
+++ b/src/main/java/net/snowflake/client/jdbc/CompressedStreamFactory.java
@@ -0,0 +1,38 @@
+package net.snowflake.client.jdbc;
+
+import static net.snowflake.client.core.Constants.MB;
+import static net.snowflake.common.core.FileCompressionType.GZIP;
+import static net.snowflake.common.core.FileCompressionType.ZSTD;
+
+import com.github.luben.zstd.ZstdInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.GZIPInputStream;
+import net.snowflake.common.core.SqlState;
+import org.apache.http.Header;
+
+class CompressedStreamFactory {
+
+ private static final int STREAM_BUFFER_SIZE = MB;
+
+ /**
+   * Determine the format of the response; if it is not plain text, gzip, or zstd, raise an error.
+ */
+ public InputStream createBasedOnEncodingHeader(InputStream is, Header encoding)
+ throws IOException, SnowflakeSQLException {
+ if (encoding != null) {
+ if (GZIP.name().equalsIgnoreCase(encoding.getValue())) {
+ return new GZIPInputStream(is, STREAM_BUFFER_SIZE);
+ } else if (ZSTD.name().equalsIgnoreCase(encoding.getValue())) {
+ return new ZstdInputStream(is);
+ } else {
+ throw new SnowflakeSQLException(
+ SqlState.INTERNAL_ERROR,
+ ErrorCode.INTERNAL_ERROR.getMessageCode(),
+ "Exception: unexpected compression got " + encoding.getValue());
+ }
+ } else {
+ return DefaultResultStreamProvider.detectGzipAndGetStream(is);
+ }
+ }
+}
diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java
index 3ee556bb4..e7a1e8a0c 100644
--- a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java
+++ b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java
@@ -1,7 +1,5 @@
package net.snowflake.client.jdbc;
-import static net.snowflake.client.core.Constants.MB;
-
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
@@ -34,7 +32,11 @@ public class DefaultResultStreamProvider implements ResultStreamProvider {
// SSE-C algorithm value
private static final String SSE_C_AES = "AES256";
- private static final int STREAM_BUFFER_SIZE = MB;
+ private CompressedStreamFactory compressedStreamFactory;
+
+ public DefaultResultStreamProvider() {
+ this.compressedStreamFactory = new CompressedStreamFactory();
+ }
@Override
public InputStream getInputStream(ChunkDownloadContext context) throws Exception {
@@ -71,9 +73,11 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception
InputStream inputStream;
final HttpEntity entity = response.getEntity();
+ Header encoding = response.getFirstHeader("Content-Encoding");
try {
- // read the chunk data
- inputStream = detectContentEncodingAndGetInputStream(response, entity.getContent());
+ // create stream based on compression type
+ inputStream =
+ compressedStreamFactory.createBasedOnEncodingHeader(entity.getContent(), encoding);
} catch (Exception ex) {
logger.error("Failed to decompress data: {}", response);
@@ -144,28 +148,6 @@ else if (context.getQrmk() != null) {
return response;
}
- private InputStream detectContentEncodingAndGetInputStream(HttpResponse response, InputStream is)
- throws IOException, SnowflakeSQLException {
- InputStream inputStream = is; // Determine the format of the response, if it is not
- // either plain text or gzip, raise an error.
- Header encoding = response.getFirstHeader("Content-Encoding");
- if (encoding != null) {
- if ("gzip".equalsIgnoreCase(encoding.getValue())) {
- /* specify buffer size for GZIPInputStream */
- inputStream = new GZIPInputStream(is, STREAM_BUFFER_SIZE);
- } else {
- throw new SnowflakeSQLException(
- SqlState.INTERNAL_ERROR,
- ErrorCode.INTERNAL_ERROR.getMessageCode(),
- "Exception: unexpected compression got " + encoding.getValue());
- }
- } else {
- inputStream = detectGzipAndGetStream(is);
- }
-
- return inputStream;
- }
-
public static InputStream detectGzipAndGetStream(InputStream is) throws IOException {
PushbackInputStream pb = new PushbackInputStream(is, 2);
byte[] signature = new byte[2];
diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java
index 6bb62c82f..67151bea8 100644
--- a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java
+++ b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java
@@ -81,6 +81,7 @@ public DefaultSFConnectionHandler(SnowflakeConnectString conStr, boolean skipOpe
* schemaName from the URL if it is specified there.
*
* @param conStr Connection string object
+ * @return a map containing accountName, databaseName and schemaName if specified
*/
public static Map mergeProperties(SnowflakeConnectString conStr) {
conStr.getParameters().remove("SSL");
diff --git a/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java b/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java
index 743447ef0..a40976461 100644
--- a/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java
+++ b/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java
@@ -127,7 +127,11 @@ public String getWarehouseServerType() {
return warehouseServerType;
}
- /** To preserve compatibility with {@link QueryStatus} */
+ /**
+ * To preserve compatibility with {@link QueryStatus}
+ *
+ * @return name
+ */
public String getDescription() {
return name;
}
diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java
index 5be46c5de..35e61efd9 100644
--- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java
+++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java
@@ -106,6 +106,7 @@ public static CloseableHttpResponse execute(
* @param includeRequestGuid whether to include request_guid parameter
* @param retryHTTP403 whether to retry on HTTP 403 or not
* @param noRetry should we disable retry on non-successful http resp code
+ * @param execTimeData ExecTimeTelemetryData
* @return HttpResponse Object get from server
* @throws net.snowflake.client.jdbc.SnowflakeSQLException Request timeout Exception or Illegal
* State Exception i.e. connection is already shutdown etc
@@ -283,7 +284,14 @@ public static CloseableHttpResponse execute(
// if an SSL issue occurs like an SSLHandshakeException then fail
// immediately and stop retrying the requests
- throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, ex.getMessage());
+ String formattedMsg =
+ ex.getMessage()
+ + "\n"
+ + "Verify that the hostnames and portnumbers in SYSTEM$ALLOWLIST are added to your firewall's allowed list.\n"
+ + "To troubleshoot your connection further, you can refer to this article:\n"
+ + "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview";
+
+ throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, formattedMsg);
} catch (Exception ex) {
diff --git a/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java b/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java
index 795cf94ff..4cc748876 100644
--- a/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java
+++ b/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java
@@ -59,6 +59,10 @@ public void startParsing(JsonResultChunk resultChunk, SFBaseSession session)
/**
* Check if the chunk has been parsed correctly. After calling this it is safe to acquire the
* output data
+ *
+ * @param in byte buffer
+ * @param session SFBaseSession
+ * @throws SnowflakeSQLException if parsing fails
*/
public void endParsing(ByteBuffer in, SFBaseSession session) throws SnowflakeSQLException {
continueParsingInternal(in, true, session);
@@ -79,6 +83,9 @@ public void endParsing(ByteBuffer in, SFBaseSession session) throws SnowflakeSQL
*
* @param in readOnly byteBuffer backed by an array (the data to be reed is from position to
* limit)
+ * @param session SFBaseSession
+ * @return int remaining number of elements in byteBuffer
+ * @throws SnowflakeSQLException if an error is encountered during parsing
*/
public int continueParsing(ByteBuffer in, SFBaseSession session) throws SnowflakeSQLException {
if (state == State.UNINITIALIZED) {
@@ -95,6 +102,7 @@ public int continueParsing(ByteBuffer in, SFBaseSession session) throws Snowflak
/**
* @param in readOnly byteBuffer backed by an array (the data is from position to limit)
* @param lastData If true, this signifies this is the last data in parsing
+ * @param session SFBaseSession
* @throws SnowflakeSQLException Will be thrown if parsing the chunk data fails
*/
private void continueParsingInternal(ByteBuffer in, boolean lastData, SFBaseSession session)
diff --git a/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java
index 64297ff57..959754fd9 100644
--- a/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java
+++ b/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java
@@ -17,25 +17,47 @@
public interface SFConnectionHandler {
/**
- * Whether this Connection supports asynchronous queries. If yes, createAsyncResultSet may be
- * called.
+ * @return Whether this Connection supports asynchronous queries. If yes, createAsyncResultSet may
+ * be called.
*/
boolean supportsAsyncQuery();
- /** Initializes the SnowflakeConnection */
+ /**
+ * Initializes the SnowflakeConnection
+ *
+ * @param url url string
+ * @param info connection parameters
+ * @throws SQLException if any error is encountered
+ */
void initializeConnection(String url, Properties info) throws SQLException;
- /** Gets the SFBaseSession implementation for this connection implementation */
+ /**
+   * @return the SFBaseSession implementation for this connection implementation
+ */
SFBaseSession getSFSession();
- /** Returns the SFStatementInterface implementation for this connection implementation */
+ /**
+   * @return the SFStatementInterface implementation for this connection implementation
+ * @throws SQLException if any error occurs
+ */
SFBaseStatement getSFStatement() throws SQLException;
- /** Creates a result set from a query id. */
+ /**
+ * Creates a result set from a query id.
+ *
+ * @param queryID the query ID
+ * @param statement Statement object
+ * @return ResultSet
+ * @throws SQLException if any error occurs
+ */
ResultSet createResultSet(String queryID, Statement statement) throws SQLException;
/**
- * Creates a SnowflakeResultSet from a base SFBaseResultSet for this connection implementation.
+ * @param resultSet SFBaseResultSet
+ * @param statement Statement
+   * @return a SnowflakeResultSet created from the given base SFBaseResultSet for this connection
+   *     implementation.
+ * @throws SQLException if an error occurs
*/
SnowflakeBaseResultSet createResultSet(SFBaseResultSet resultSet, Statement statement)
throws SQLException;
@@ -43,6 +65,11 @@ SnowflakeBaseResultSet createResultSet(SFBaseResultSet resultSet, Statement stat
/**
* Creates an asynchronous result set from a base SFBaseResultSet for this connection
* implementation.
+ *
+ * @param resultSet SFBaseResultSet
+ * @param statement Statement
+ * @return An asynchronous result set from SFBaseResultSet
+ * @throws SQLException if an error occurs
*/
SnowflakeBaseResultSet createAsyncResultSet(SFBaseResultSet resultSet, Statement statement)
throws SQLException;
@@ -50,6 +77,9 @@ SnowflakeBaseResultSet createAsyncResultSet(SFBaseResultSet resultSet, Statement
/**
* @param command The command to parse for this file transfer (e.g., PUT/GET)
* @param statement The statement to use for this file transfer
+ * @return SFBaseFileTransferAgent
+ * @throws SQLNonTransientConnectionException if a connection error occurs
+ * @throws SnowflakeSQLException if any other exception occurs
*/
SFBaseFileTransferAgent getFileTransferAgent(String command, SFBaseStatement statement)
throws SQLNonTransientConnectionException, SnowflakeSQLException;
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java
index d9149412e..ced00e325 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java
@@ -93,6 +93,7 @@ private static SFBaseSession maybeGetSession(Statement statement) {
*
* @param resultSetSerializable The result set serializable object which includes all metadata to
* create the result set
+ * @throws SQLException if an error occurs
*/
public SnowflakeBaseResultSet(SnowflakeResultSetSerializableV1 resultSetSerializable)
throws SQLException {
@@ -108,7 +109,7 @@ public SnowflakeBaseResultSet(SnowflakeResultSetSerializableV1 resultSetSerializ
/**
* This should never be used. Simply needed this for SFAsynchronousResult subclass
*
- * @throws SQLException
+ * @throws SQLException if an error occurs
*/
protected SnowflakeBaseResultSet() throws SQLException {
this.resultSetType = 0;
@@ -139,6 +140,14 @@ protected void raiseSQLExceptionIfResultSetIsClosed() throws SQLException {
@Override
public abstract byte[] getBytes(int columnIndex) throws SQLException;
+ /**
+ * Get Date value
+ *
+ * @param columnIndex column index
+ * @param tz timezone
+ * @return Date value at column index
+ * @throws SQLException if data at column index is incompatible with Date type
+ */
public abstract Date getDate(int columnIndex, TimeZone tz) throws SQLException;
private boolean getGetDateUseNullTimezone() {
@@ -168,6 +177,14 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException {
return getTimestamp(columnIndex, (TimeZone) null);
}
+ /**
+ * Get timestamp value
+ *
+ * @param columnIndex column index
+ * @param tz timezone
+ * @return timestamp value at column index
+ * @throws SQLException if data at column index is incompatible with timestamp
+ */
public abstract Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SQLException;
@Override
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java
index 6db9aede0..5163f3299 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java
@@ -212,6 +212,7 @@ public void uncaughtException(Thread t, Throwable e) {
*
* @param resultSetSerializable the result set serializable object which includes required
* metadata to start chunk downloader
+ * @throws SnowflakeSQLException if an error is encountered
*/
public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerializable)
throws SnowflakeSQLException {
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java
index 10f06dafa..13bad3195 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java
@@ -13,21 +13,21 @@
/**
* (Optional) The name for a column in database,
*
- * <p>The default value is empty string. Provided name can override SqlData field name
+ * @return The default value is empty string. Provided name can override SqlData field name.
*/
String name() default "";
/**
* (Optional) The snowflake type for a column
*
- * <p>The default value is empty string Provided type can override default type
+ * @return The default value is empty string Provided type can override default type.
*/
String type() default "";
/**
* (Optional) The snowflake nullable flag for a column
*
- * <p>The default value is true Provided value can override default nullable value
+ * @return The default value is true Provided value can override default nullable value.
*/
boolean nullable() default true;
@@ -37,7 +37,8 @@
*
* <p>Applies only to columns of exact varchar and binary type.
*
- * <p>The default value {@code -1} indicates that a provider-determined length should be inferred.
+ * @return The default value {@code -1} indicates that a provider-determined length should be
+ * inferred.
*/
int length() default -1;
/**
@@ -46,8 +47,8 @@
*
* <p>Applies only to columns of exact varchar and binary type.
*
- * <p>The default value {@code -1} indicates that a provider-determined byteLength should be
- * inferred.
+ * @return The default value {@code -1} indicates that a provider-determined byteLength should be
+ * inferred.
*/
int byteLength() default -1;
@@ -57,8 +58,8 @@
*
* <p>Applies only to columns of exact numeric type.
*
- * <p>The default value {@code -1} indicates that a provider-determined precision should be
- * inferred.
+ * @return The default value {@code -1} indicates that a provider-determined precision should be
+ * inferred.
*/
int precision() default -1;
@@ -68,7 +69,8 @@
*
* <p>Applies only to columns of exact numeric type.
*
- * <p>The default value {@code 0} indicates that a provider-determined scale should be inferred.
+ * @return The default value {@code -1} indicates that a provider-determined scale should be
+ * inferred.
*/
int scale() default -1;
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java
index 4525c2efb..69f467b90 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java
@@ -76,6 +76,19 @@ public SnowflakeColumnMetadata(
* @deprecated Use {@link SnowflakeColumnMetadata#SnowflakeColumnMetadata(String, int, boolean,
* int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean, int)}
* instead
+ * @param name name
+ * @param type type
+ * @param nullable is nullable
+ * @param length length
+ * @param precision precision
+ * @param scale scale
+ * @param typeName type name
+ * @param fixed is fixed
+ * @param base SnowflakeType
+ * @param columnSrcDatabase column source database
+ * @param columnSrcSchema column source schema
+ * @param columnSrcTable column source table
+ * @param isAutoIncrement is auto-increment
*/
@Deprecated
public SnowflakeColumnMetadata(
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java
index e997b053e..6edc510f8 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java
@@ -47,7 +47,7 @@ InputStream downloadStream(String stageName, String sourceFileName, boolean deco
* Return unique session ID from current session generated by making connection
*
* @return a unique alphanumeric value representing current session ID
- * @throws SQLException
+ * @throws SQLException if an error occurs
*/
String getSessionID() throws SQLException;
@@ -56,12 +56,16 @@ InputStream downloadStream(String stageName, String sourceFileName, boolean deco
* of corresponding query. Used when original ResultSet object is no longer available, such as
* when original connection has been closed.
*
- * @param queryID
- * @return
- * @throws SQLException
+ * @param queryID the query ID
+ * @return ResultSet based off the query ID
+ * @throws SQLException if an error occurs
*/
ResultSet createResultSet(String queryID) throws SQLException;
- /** Returns the SnowflakeConnectionImpl from the connection object. */
+ /**
+ * Returns the SnowflakeConnectionImpl from the connection object.
+ *
+ * @return SFConnectionHandler
+ */
SFConnectionHandler getHandler();
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java
index 498e6393b..1f55c83f4 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java
@@ -91,6 +91,7 @@ public class SnowflakeConnectionV1 implements Connection, SnowflakeConnection {
* Instantiates a SnowflakeConnectionV1 with the passed-in SnowflakeConnectionImpl.
*
* @param sfConnectionHandler The SnowflakeConnectionImpl.
+ * @throws SQLException if failed to instantiate a SnowflakeConnectionV1.
*/
public SnowflakeConnectionV1(SFConnectionHandler sfConnectionHandler) throws SQLException {
initConnectionWithImpl(sfConnectionHandler, null, null);
@@ -100,6 +101,9 @@ public SnowflakeConnectionV1(SFConnectionHandler sfConnectionHandler) throws SQL
* Instantiates a SnowflakeConnectionV1 with the passed-in SnowflakeConnectionImpl.
*
* @param sfConnectionHandler The SnowflakeConnectionImpl.
+ * @param url The URL string.
+ * @param info Connection properties.
+ * @throws SQLException if failed to instantiate connection.
*/
public SnowflakeConnectionV1(SFConnectionHandler sfConnectionHandler, String url, Properties info)
throws SQLException {
@@ -195,9 +199,9 @@ public Statement createStatement() throws SQLException {
/**
* Get an instance of a ResultSet object
*
- * @param queryID
- * @return
- * @throws SQLException
+ * @param queryID the query ID
+ * @return ResultSet
+ * @throws SQLException if connection is closed
*/
public ResultSet createResultSet(String queryID) throws SQLException {
raiseSQLExceptionIfConnectionIsClosed();
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java
index 05566da82..298b64ee7 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java
@@ -37,7 +37,7 @@ public class SnowflakeDriver implements Driver {
static SnowflakeDriver INSTANCE;
public static final Properties EMPTY_PROPERTIES = new Properties();
- public static String implementVersion = "3.19.0";
+ public static String implementVersion = "3.20.1";
static int majorVersion = 0;
static int minorVersion = 0;
@@ -167,7 +167,7 @@ public static String getDisableArrowResultFormatMessage() {
/**
* Utility method to verify if the standard or fips snowflake-jdbc driver is being used.
*
- * @return
+ * @return the title of the implementation, null is returned if it is not known.
*/
public static String getImplementationTitle() {
Package pkg = Package.getPackage("net.snowflake.client.jdbc");
@@ -177,7 +177,7 @@ public static String getImplementationTitle() {
/**
* Utility method to get the complete jar name with version.
*
- * @return
+ * @return the jar name with version
*/
public static String getJdbcJarname() {
return String.format("%s-%s", getImplementationTitle(), implementVersion);
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java
index bd5a3945e..4213b33b0 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java
@@ -791,6 +791,7 @@ public static Callable getDownloadFileCallable(
* @param encMat remote store encryption material
* @param parallel number of parallel threads for downloading
* @param presignedUrl Presigned URL for file download
+ * @param queryId the query ID
* @return a callable responsible for downloading files
*/
public static Callable getDownloadFileCallable(
@@ -925,11 +926,13 @@ private void parseCommand() throws SnowflakeSQLException {
// get source file locations as array (apply to both upload and download)
JsonNode locationsNode = jsonNode.path("data").path("src_locations");
+ if (!locationsNode.isArray()) {
+ throw new SnowflakeSQLException(
+ queryID, ErrorCode.INTERNAL_ERROR, "src_locations must be an array");
+ }
queryID = jsonNode.path("data").path("queryId").asText();
- assert locationsNode.isArray();
-
String[] src_locations;
try {
@@ -1108,8 +1111,16 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
// specifically
// for FIPS or VPCE S3 endpoint. SNOW-652696
String endPoint = null;
- if ("AZURE".equalsIgnoreCase(stageLocationType) || "S3".equalsIgnoreCase(stageLocationType)) {
+ if ("AZURE".equalsIgnoreCase(stageLocationType)
+ || "S3".equalsIgnoreCase(stageLocationType)
+ || "GCS".equalsIgnoreCase(stageLocationType)) {
endPoint = jsonNode.path("data").path("stageInfo").findValue("endPoint").asText();
+ if ("GCS".equalsIgnoreCase(stageLocationType)
+ && endPoint != null
+ && (endPoint.trim().isEmpty() || "null".equals(endPoint))) {
+ // setting to null to preserve previous behaviour for GCS
+ endPoint = null;
+ }
}
String stgAcct = null;
@@ -1176,6 +1187,8 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
}
}
+ setupUseRegionalUrl(jsonNode, stageInfo);
+
if (stageInfo.getStageType() == StageInfo.StageType.S3) {
if (session == null) {
// This node's value is set if PUT is used without Session. (For Snowpipe Streaming, we rely
@@ -1197,6 +1210,18 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
return stageInfo;
}
+ private static void setupUseRegionalUrl(JsonNode jsonNode, StageInfo stageInfo) {
+ if (stageInfo.getStageType() != StageInfo.StageType.GCS
+ && stageInfo.getStageType() != StageInfo.StageType.S3) {
+ return;
+ }
+ JsonNode useRegionalURLNode = jsonNode.path("data").path("stageInfo").path("useRegionalUrl");
+ if (!useRegionalURLNode.isMissingNode()) {
+ boolean useRegionalURL = useRegionalURLNode.asBoolean(false);
+ stageInfo.setUseRegionalUrl(useRegionalURL);
+ }
+ }
+
/**
* A helper method to verify if the local file path from GS matches what's parsed locally. This is
* for security purpose as documented in SNOW-15153.
@@ -1459,7 +1484,13 @@ public static List getFileTransferMetadatas(
}
// For UPLOAD we expect encryptionMaterial to have length 1
- assert encryptionMaterial.size() == 1;
+ if (encryptionMaterial.size() != 1) {
+ throw new SnowflakeSQLException(
+ queryId,
+ ErrorCode.INTERNAL_ERROR,
+ "Encryption material for UPLOAD should have size 1 but have "
+ + encryptionMaterial.size());
+ }
final Set sourceFiles = expandFileNames(srcLocations, queryId);
@@ -1649,6 +1680,7 @@ private void uploadStream() throws SnowflakeSQLException {
/** Download a file from remote, and return an input stream */
@Override
public InputStream downloadStream(String fileName) throws SnowflakeSQLException {
+ logger.debug("Downloading file as stream: {}", fileName);
if (stageInfo.getStageType() == StageInfo.StageType.LOCAL_FS) {
logger.error("downloadStream function doesn't support local file system", false);
@@ -1662,14 +1694,32 @@ public InputStream downloadStream(String fileName) throws SnowflakeSQLException
remoteLocation remoteLocation = extractLocationAndPath(stageInfo.getLocation());
- String stageFilePath = fileName;
+ // when downloading files as stream there should be only one file in source files
+ String sourceLocation =
+ sourceFiles.stream()
+ .findFirst()
+ .orElseThrow(
+ () ->
+ new SnowflakeSQLException(
+ queryID,
+ SqlState.NO_DATA,
+ ErrorCode.FILE_NOT_FOUND.getMessageCode(),
+ session,
+ "File not found: " + fileName));
+
+ if (!fileName.equals(sourceLocation)) {
+ // filename may be different from source location e.g. in git repositories
+ logger.debug("Changing file to download location from {} to {}", fileName, sourceLocation);
+ }
+ String stageFilePath = sourceLocation;
if (!remoteLocation.path.isEmpty()) {
- stageFilePath = SnowflakeUtil.concatFilePathNames(remoteLocation.path, fileName, "/");
+ stageFilePath = SnowflakeUtil.concatFilePathNames(remoteLocation.path, sourceLocation, "/");
}
+ logger.debug("Stage file path for {} is {}", sourceLocation, stageFilePath);
- RemoteStoreFileEncryptionMaterial encMat = srcFileToEncMat.get(fileName);
- String presignedUrl = srcFileToPresignedUrl.get(fileName);
+ RemoteStoreFileEncryptionMaterial encMat = srcFileToEncMat.get(sourceLocation);
+ String presignedUrl = srcFileToPresignedUrl.get(sourceLocation);
return storageFactory
.createClient(stageInfo, parallel, encMat, session)
@@ -3346,7 +3396,7 @@ public static void throwJCEMissingError(String operation, Exception ex, String q
* @param session the current session
* @param operation the operation i.e. GET
* @param ex the exception caught
- * @throws SnowflakeSQLLoggedException
+ * @throws SnowflakeSQLLoggedException if not enough space left on device to download file.
*/
@Deprecated
public static void throwNoSpaceLeftError(SFSession session, String operation, Exception ex)
@@ -3361,7 +3411,8 @@ public static void throwNoSpaceLeftError(SFSession session, String operation, Ex
* @param session the current session
* @param operation the operation i.e. GET
* @param ex the exception caught
- * @throws SnowflakeSQLLoggedException
+ * @param queryId the query ID
+ * @throws SnowflakeSQLLoggedException if not enough space left on device to download file.
*/
public static void throwNoSpaceLeftError(
SFSession session, String operation, Exception ex, String queryId)
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java
index 60ca632ad..438abb4b2 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java
@@ -190,7 +190,12 @@ public Builder setUseS3RegionalUrl(boolean useS3RegUrl) {
return this;
}
- /** Streaming ingest client name, used to calculate streaming ingest billing per client */
+ /**
+ * Streaming ingest client name, used to calculate streaming ingest billing per client
+ *
+ * @param streamingIngestClientName streaming ingest client name
+ * @return Builder
+ */
public Builder setStreamingIngestClientName(String streamingIngestClientName) {
this.streamingIngestClientName = streamingIngestClientName;
return this;
@@ -199,6 +204,9 @@ public Builder setStreamingIngestClientName(String streamingIngestClientName) {
/**
* Streaming ingest client key provided by Snowflake, used to calculate streaming ingest billing
* per client
+ *
+ * @param streamingIngestClientKey streaming ingest client key
+ * @return Builder
*/
public Builder setStreamingIngestClientKey(String streamingIngestClientKey) {
this.streamingIngestClientKey = streamingIngestClientKey;
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java
index ee3dc3ec8..2f7ec66f4 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java
@@ -8,6 +8,7 @@
public interface SnowflakePreparedStatement {
/**
* @return the Snowflake query ID of the latest executed query
+ * @throws SQLException if an error occurs
*/
String getQueryID() throws SQLException;
@@ -15,25 +16,27 @@ public interface SnowflakePreparedStatement {
* Execute a query asynchronously
*
* @return ResultSet containing results
- * @throws SQLException
+ * @throws SQLException if an error occurs
*/
ResultSet executeAsyncQuery() throws SQLException;
/**
* Sets the designated parameter to the given BigInteger value.
*
- * @param parameterIndex
- * @param x
- * @throws SQLException
+ * @param parameterIndex the parameter index
+ * @param x the BigInteger value
+ * @throws SQLException if an error occurs
*/
void setBigInteger(int parameterIndex, BigInteger x) throws SQLException;
/**
* Sets the designated parameter to the given Map instance.
*
- * @param parameterIndex
- * @param map
- * @throws SQLException
+ * @param parameterIndex the parameter index
+ * @param map the map instance
+ * @param type the type
+ * @param <T> generic type
+ * @throws SQLException if an error occurs
*/
<T> void setMap(int parameterIndex, Map<String, T> map, int type) throws SQLException;
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java
index 000d4634d..cb293690d 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java
@@ -58,14 +58,19 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1
implements PreparedStatement, SnowflakePreparedStatement {
private static final SFLogger logger =
SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class);
+
/** Error code returned when describing a statement that is binding table name */
private static final Integer ERROR_CODE_TABLE_BIND_VARIABLE_NOT_SET = 2128;
+
/** Error code when preparing statement with binding object names */
private static final Integer ERROR_CODE_OBJECT_BIND_NOT_SET = 2129;
+
/** Error code returned when describing a ddl command */
private static final Integer ERROR_CODE_STATEMENT_CANNOT_BE_PREPARED = 7;
+
/** snow-44393 Workaround for compiler cannot prepare to_timestamp(?, 3) */
private static final Integer ERROR_CODE_FORMAT_ARGUMENT_NOT_STRING = 1026;
+
/** A hash set that contains the error code that will not lead to exception in describe mode */
private static final Set<Integer> errorCodesIgnoredInDescribeMode =
new HashSet<>(
@@ -88,10 +93,12 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1
* <p>Currently, bind name is just value index
*/
private Map<String, ParameterBindingDTO> parameterBindings = new HashMap<>();
+
/** map of bind values for batch query executions */
private Map<String, ParameterBindingDTO> batchParameterBindings = new HashMap<>();
private Map<String, Boolean> wasPrevValueNull = new HashMap<>();
+
/** Counter for batch size if we are executing a statement with array bind supported */
private int batchSize = 0;
@@ -133,6 +140,12 @@ private void describeSqlIfNotTried() throws SQLException {
if (!alreadyDescribed) {
try {
this.preparedStatementMetaData = sfBaseStatement.describe(sql);
+ if (preparedStatementMetaData != null
+ && !preparedStatementMetaData.isArrayBindSupported()) {
+ logger.debug(
+ "Array bind is not supported - each batch entry will be executed as a single request for query: {}",
+ sql);
+ }
} catch (SFException e) {
throw new SnowflakeSQLLoggedException(connection.getSFBaseSession(), e);
} catch (SnowflakeSQLException e) {
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java
index 03171a599..5b6304bdf 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java
@@ -12,6 +12,7 @@
public interface SnowflakeResultSet {
/**
* @return the Snowflake query ID of the query which generated this result set
+ * @throws SQLException if an error is encountered
*/
String getQueryID() throws SQLException;
@@ -23,7 +24,7 @@ public interface SnowflakeResultSet {
* query statuses. QueryStatus = SUCCESS means results can be retrieved.
*
* @return QueryStatus enum showing status of query
- * @throws SQLException
+ * @throws SQLException if an error is encountered
*/
QueryStatus getStatus() throws SQLException;
@@ -33,7 +34,7 @@ public interface SnowflakeResultSet {
* returned.
*
* @return String value of query's error message
- * @throws SQLException
+ * @throws SQLException if an error is encountered
*/
String getQueryErrorMessage() throws SQLException;
@@ -45,7 +46,7 @@ public interface SnowflakeResultSet {
* <p>status.isSuccess() means that results can be retrieved.
*
* @return an instance containing query metadata
- * @throws SQLException
+ * @throws SQLException if an error is encountered
*/
QueryStatusV2 getStatusV2() throws SQLException;
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java
index f5a9aa97c..2a1ba82a1 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java
@@ -71,6 +71,7 @@ public Builder setSfFullURL(String sfFullURL) {
* sc:2.8.2/jdbc:3.12.12 since Sept 2020. It is safe to remove it after Sept 2022.
*
* @return a ResultSet which represents for the data wrapped in the object
+ * @throws SQLException if an error occurs
* @deprecated Use {@link #getResultSet(ResultSetRetrieveConfig)} instead
*/
@Deprecated
@@ -84,6 +85,7 @@ public Builder setSfFullURL(String sfFullURL) {
*
* @param info The proxy server information if proxy is necessary.
* @return a ResultSet which represents for the data wrapped in the object
+ * @throws SQLException if an error occurs
* @deprecated Use {@link #getResultSet(ResultSetRetrieveConfig)} instead
*/
@Deprecated
@@ -94,6 +96,7 @@ public Builder setSfFullURL(String sfFullURL) {
*
* @param resultSetRetrieveConfig The extra info to retrieve the result set.
* @return a ResultSet which represents for the data wrapped in the object
+ * @throws SQLException if an error occurs
*/
ResultSet getResultSet(ResultSetRetrieveConfig resultSetRetrieveConfig) throws SQLException;
@@ -101,6 +104,7 @@ public Builder setSfFullURL(String sfFullURL) {
* Retrieve total row count included in the ResultSet Serializable object.
*
* @return the total row count from metadata
+ * @throws SQLException if an error occurs
*/
long getRowCount() throws SQLException;
@@ -108,6 +112,7 @@ public Builder setSfFullURL(String sfFullURL) {
* Retrieve compressed data size included in the ResultSet Serializable object.
*
* @return the total compressed data size in bytes from metadata
+ * @throws SQLException if an error occurs
*/
long getCompressedDataSizeInBytes() throws SQLException;
@@ -115,6 +120,7 @@ public Builder setSfFullURL(String sfFullURL) {
* Retrieve uncompressed data size included in the ResultSet Serializable object.
*
* @return the total uncompressed data size in bytes from metadata
+ * @throws SQLException if an error occurs
*/
long getUncompressedDataSizeInBytes() throws SQLException;
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java
index 082dc2e30..2baf8027a 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java
@@ -282,6 +282,7 @@ private SnowflakeResultSetSerializableV1(SnowflakeResultSetSerializableV1 toCopy
* @param sfStatement the Snowflake statement
* @param resultStreamProvider a ResultStreamProvider for computing a custom data source for
* result-file streams
+ * @param disableChunksPrefetch is prefetch disabled
* @throws SnowflakeSQLException if failed to parse the result JSON node
*/
protected SnowflakeResultSetSerializableV1(
@@ -754,6 +755,12 @@ public static SnowflakeResultSetSerializableV1 create(
/**
* A factory function for internal usage only. It creates SnowflakeResultSetSerializableV1 with
* NoOpChunksDownloader which disables chunks prefetch.
+ *
+ * @param rootNode JSON root node
+ * @param sfSession SFBaseSession
+ * @param sfStatement SFBaseStatement
+ * @return SnowflakeResultSetSerializableV1 with NoOpChunksDownloader
+ * @throws SnowflakeSQLException if an error occurs
*/
@SnowflakeJdbcInternalApi
public static SnowflakeResultSetSerializableV1 createWithChunksPrefetchDisabled(
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java
index 085c8bcc3..e290db3f2 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java
@@ -64,7 +64,7 @@ public SnowflakeResultSetV1(SFBaseResultSet sfBaseResultSet, Statement statement
* This function is not supported for synchronous queries
*
* @return no return value; exception is always thrown
- * @throws SQLFeatureNotSupportedException
+ * @throws SQLFeatureNotSupportedException always thrown because feature is not supported
*/
public QueryStatus getStatus() throws SQLException {
throw new SnowflakeLoggedFeatureNotSupportedException(session);
@@ -74,7 +74,7 @@ public QueryStatus getStatus() throws SQLException {
* This function is not supported for synchronous queries
*
* @return no return value; exception is always thrown
- * @throws SQLFeatureNotSupportedException
+ * @throws SQLFeatureNotSupportedException always thrown because feature is not supported
*/
@Override
public QueryStatusV2 getStatusV2() throws SQLException {
@@ -86,7 +86,7 @@ public QueryStatusV2 getStatusV2() throws SQLException {
* This function is not supported for synchronous queries
*
* @return no return value; exception is always thrown
- * @throws SQLFeatureNotSupportedException
+ * @throws SQLFeatureNotSupportedException always thrown because feature is not supported
*/
@Override
public String getQueryErrorMessage() throws SQLException {
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java
index 194748317..084e62ba8 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java
@@ -38,6 +38,12 @@ public class SnowflakeRichResultSetSerializableV1 extends SnowflakeResultSetSeri
/**
* A factory function for internal usage only. It creates SnowflakeRichResultSetSerializableV1
* with NoOpChunksDownloader which disables chunks prefetch.
+ *
+ * @param rootNode JSON root node
+ * @param sfSession SFBaseSession
+ * @param sfStatement SFBaseStatement
+ * @return SnowflakeRichResultSetSerializableV1 with NoOpChunksDownloader
+ * @throws SnowflakeSQLException if an error occurs
*/
public static SnowflakeRichResultSetSerializableV1 createWithChunksPrefetchDisabled(
JsonNode rootNode, SFBaseSession sfSession, SFBaseStatement sfStatement)
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java
index ebe84c13c..48faec24c 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java
@@ -51,12 +51,22 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState, int
queryId);
}
- /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, String)} */
+ /**
+ * use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, String)}
+ *
+ * @param reason exception reason
+ * @param sqlState the SQL state
+ */
@Deprecated
public SnowflakeSQLException(String reason, String sqlState) {
this((String) null, reason, sqlState);
}
+ /**
+ * @param queryId the queryID
+ * @param reason exception reason
+ * @param sqlState the SQL state
+ */
public SnowflakeSQLException(String queryId, String reason, String sqlState) {
super(reason, sqlState);
this.queryId = queryId;
@@ -64,12 +74,22 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState) {
logger.debug("Snowflake exception: {}, sqlState:{}", reason, sqlState);
}
- /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int)} */
+ /**
+ * use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int)}
+ *
+ * @param sqlState the SQL state
+ * @param vendorCode the vendor code
+ */
@Deprecated
public SnowflakeSQLException(String sqlState, int vendorCode) {
this((String) null, sqlState, vendorCode);
}
+ /**
+ * @param queryId query ID
+ * @param sqlState SQL state
+ * @param vendorCode vendor code
+ */
public SnowflakeSQLException(String queryId, String sqlState, int vendorCode) {
super(
errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode)),
@@ -83,12 +103,24 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode) {
vendorCode);
}
- /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int, Object...)} */
+ /**
+ * use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int, Object...)}
+ *
+ * @param sqlState the SQL state
+ * @param vendorCode the vendor code
+ * @param params additional parameters
+ */
@Deprecated
public SnowflakeSQLException(String sqlState, int vendorCode, Object... params) {
this((String) null, sqlState, vendorCode, params);
}
+ /**
+ * @param queryId query ID
+ * @param sqlState the SQL state
+ * @param vendorCode the vendor code
+ * @param params additional parameters
+ */
public SnowflakeSQLException(String queryId, String sqlState, int vendorCode, Object... params) {
super(
errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode), params),
@@ -102,6 +134,11 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode, Ob
vendorCode);
}
+ /**
+ * @param ex Throwable exception
+ * @param sqlState the SQL state
+ * @param vendorCode the vendor code
+ */
public SnowflakeSQLException(Throwable ex, String sqlState, int vendorCode) {
super(
errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode)),
@@ -115,6 +152,11 @@ public SnowflakeSQLException(Throwable ex, String sqlState, int vendorCode) {
ex);
}
+ /**
+ * @param ex Throwable exception
+ * @param errorCode the error code
+ * @param params additional parameters
+ */
public SnowflakeSQLException(Throwable ex, ErrorCode errorCode, Object... params) {
this(ex, errorCode.getSqlState(), errorCode.getMessageCode(), params);
}
@@ -122,12 +164,23 @@ public SnowflakeSQLException(Throwable ex, ErrorCode errorCode, Object... params
/**
* @deprecated use {@link SnowflakeSQLException#SnowflakeSQLException(String, Throwable, String,
* int, Object...)}
+ * @param ex Throwable exception
+ * @param sqlState the SQL state
+ * @param vendorCode the vendor code
+ * @param params additional parameters
*/
@Deprecated
public SnowflakeSQLException(Throwable ex, String sqlState, int vendorCode, Object... params) {
this(null, ex, sqlState, vendorCode, params);
}
+ /**
+ * @param queryId query ID
+ * @param ex Throwable exception
+ * @param sqlState the SQL state
+ * @param vendorCode the vendor code
+ * @param params additional parameters
+ */
public SnowflakeSQLException(
String queryId, Throwable ex, String sqlState, int vendorCode, Object... params) {
super(
@@ -143,6 +196,10 @@ public SnowflakeSQLException(
ex);
}
+ /**
+ * @param errorCode the error code
+ * @param params additional parameters
+ */
public SnowflakeSQLException(ErrorCode errorCode, Object... params) {
super(
errorResourceBundleManager.getLocalizedMessage(
@@ -151,6 +208,11 @@ public SnowflakeSQLException(ErrorCode errorCode, Object... params) {
errorCode.getMessageCode());
}
+ /**
+ * @param queryId query ID
+ * @param errorCode error code
+ * @param params additional parameters
+ */
public SnowflakeSQLException(String queryId, ErrorCode errorCode, Object... params) {
super(
errorResourceBundleManager.getLocalizedMessage(
@@ -160,6 +222,12 @@ public SnowflakeSQLException(String queryId, ErrorCode errorCode, Object... para
this.queryId = queryId;
}
+ /**
+ * @param errorCode error code
+ * @param retryCount retry count
 + * @param issocketTimeoutNoBackoff whether the socket timeout occurred with no backoff applied
+ * @param elapsedSeconds time elapsed in seconds
+ */
public SnowflakeSQLException(
ErrorCode errorCode, int retryCount, boolean issocketTimeoutNoBackoff, long elapsedSeconds) {
super(
@@ -171,6 +239,9 @@ public SnowflakeSQLException(
this.elapsedSeconds = elapsedSeconds;
}
+ /**
+ * @param e the SFException
+ */
public SnowflakeSQLException(SFException e) {
this(e.getQueryId(), e.getMessage(), e.getSqlState(), e.getVendorCode());
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java
index 9f8f2d7a9..78d4fb971 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java
@@ -100,7 +100,7 @@ private static Future sendInBandTelemetryMessage(
* Helper function to remove sensitive data (error message, reason) from the stacktrace.
*
* @param stackTrace original stacktrace
- * @return
+ * @return stack trace with sensitive data removed
*/
static String maskStacktrace(String stackTrace) {
Pattern STACKTRACE_BEGINNING =
@@ -118,9 +118,9 @@ static String maskStacktrace(String stackTrace) {
/**
* Helper function to create JSONObject node for OOB telemetry log
*
- * @param queryId
- * @param SQLState
- * @param vendorCode
+ * @param queryId query ID
+ * @param SQLState the SQL state
+ * @param vendorCode the vendor code
* @return JSONObject with data about SQLException
*/
static JSONObject createOOBValue(String queryId, String SQLState, int vendorCode) {
@@ -143,10 +143,10 @@ static JSONObject createOOBValue(String queryId, String SQLState, int vendorCode
/**
* Helper function to create ObjectNode for IB telemetry log
*
- * @param queryId
- * @param SQLState
- * @param vendorCode
- * @return
+ * @param queryId query ID
+ * @param SQLState the SQL state
+ * @param vendorCode the vendor code
+ * @return ObjectNode for IB telemetry log
*/
static ObjectNode createIBValue(String queryId, String SQLState, int vendorCode) {
ObjectNode ibValue = mapper.createObjectNode();
@@ -224,17 +224,35 @@ public static void sendTelemetryData(
}
}
+ /**
+ * @param session SFBaseSession
+ * @param reason exception reason
+ * @param SQLState the SQL state
+ * @param vendorCode the vendor code
+ * @param queryId the query ID
+ */
public SnowflakeSQLLoggedException(
SFBaseSession session, String reason, String SQLState, int vendorCode, String queryId) {
super(queryId, reason, SQLState, vendorCode);
sendTelemetryData(queryId, SQLState, vendorCode, session, this);
}
+ /**
+ * @param session SFBaseSession
+ * @param vendorCode the vendor code
+ * @param SQLState the SQL state
+ */
public SnowflakeSQLLoggedException(SFBaseSession session, int vendorCode, String SQLState) {
super(SQLState, vendorCode);
sendTelemetryData(null, SQLState, vendorCode, session, this);
}
+ /**
+ * @param queryId the query ID
+ * @param session SFBaseSession
+ * @param vendorCode the vendor code
+ * @param SQLState the SQL state
+ */
public SnowflakeSQLLoggedException(
String queryId, SFBaseSession session, int vendorCode, String SQLState) {
super(queryId, SQLState, vendorCode);
@@ -244,41 +262,85 @@ public SnowflakeSQLLoggedException(
/**
* use {@link SnowflakeSQLLoggedException#SnowflakeSQLLoggedException(String, SFBaseSession,
* String, String)}
+ *
+ * @param session SFBaseSession
+ * @param SQLState the SQL state
+ * @param reason exception reason
*/
@Deprecated
public SnowflakeSQLLoggedException(SFBaseSession session, String SQLState, String reason) {
this(null, session, SQLState, reason);
}
+ /**
+ * @param queryId the query ID
+ * @param session SFBaseSession
+ * @param SQLState the SQL state
+ * @param reason the exception reason
+ */
public SnowflakeSQLLoggedException(
String queryId, SFBaseSession session, String SQLState, String reason) {
super(reason, SQLState);
sendTelemetryData(queryId, SQLState, -1, session, this);
}
+ /**
+ * @param session SFBaseSession
+ * @param vendorCode the vendor code
+ * @param SQLState the SQL state
+ * @param params additional parameters
+ */
public SnowflakeSQLLoggedException(
SFBaseSession session, int vendorCode, String SQLState, Object... params) {
this(null, session, vendorCode, SQLState, params);
}
+ /**
+ * @param queryId the query ID
+ * @param session SFBaseSession
+ * @param vendorCode the vendor code
+ * @param SQLState the SQL state
+ * @param params additional parameters
+ */
public SnowflakeSQLLoggedException(
String queryId, SFBaseSession session, int vendorCode, String SQLState, Object... params) {
super(queryId, SQLState, vendorCode, params);
sendTelemetryData(queryId, SQLState, vendorCode, session, this);
}
+ /**
+ * @param session SFBaseSession
+ * @param errorCode the error code
+ * @param ex Throwable exception
+ * @param params additional parameters
+ */
public SnowflakeSQLLoggedException(
SFBaseSession session, ErrorCode errorCode, Throwable ex, Object... params) {
super(ex, errorCode, params);
sendTelemetryData(null, errorCode.getSqlState(), errorCode.getMessageCode(), session, this);
}
+ /**
+ * @param session SFBaseSession
+ * @param SQLState the SQL state
+ * @param vendorCode the vendor code
+ * @param ex Throwable exception
+ * @param params additional parameters
+ */
public SnowflakeSQLLoggedException(
SFBaseSession session, String SQLState, int vendorCode, Throwable ex, Object... params) {
super(ex, SQLState, vendorCode, params);
sendTelemetryData(null, SQLState, vendorCode, session, this);
}
+ /**
+ * @param queryId the query ID
+ * @param session SFBaseSession
+ * @param SQLState the SQL state
+ * @param vendorCode the vendor code
+ * @param ex Throwable exception
+ * @param params additional parameters
+ */
public SnowflakeSQLLoggedException(
String queryId,
SFBaseSession session,
@@ -293,18 +355,32 @@ public SnowflakeSQLLoggedException(
/**
* use {@link SnowflakeSQLLoggedException#SnowflakeSQLLoggedException(String, SFBaseSession,
* ErrorCode, Object...)}
+ *
+ * @param session SFBaseSession
+ * @param errorCode the error code
+ * @param params additional parameters
*/
@Deprecated
public SnowflakeSQLLoggedException(SFBaseSession session, ErrorCode errorCode, Object... params) {
this(null, session, errorCode, params);
}
+ /**
+ * @param queryId the query ID
+ * @param session SFBaseSession
+ * @param errorCode the error code
+ * @param params additional parameters
+ */
public SnowflakeSQLLoggedException(
String queryId, SFBaseSession session, ErrorCode errorCode, Object... params) {
super(queryId, errorCode, params);
sendTelemetryData(queryId, null, -1, session, this);
}
+ /**
+ * @param session SFBaseSession
 + * @param e the SFException being wrapped
+ */
public SnowflakeSQLLoggedException(SFBaseSession session, SFException e) {
super(e);
sendTelemetryData(null, null, -1, session, this);
@@ -313,12 +389,20 @@ public SnowflakeSQLLoggedException(SFBaseSession session, SFException e) {
/**
* use {@link SnowflakeSQLLoggedException#SnowflakeSQLLoggedException(String, SFBaseSession,
* String)}
+ *
+ * @param session SFBaseSession
+ * @param reason exception reason
*/
@Deprecated
public SnowflakeSQLLoggedException(SFBaseSession session, String reason) {
this(null, session, reason);
}
+ /**
+ * @param queryId the query ID
+ * @param session SFBaseSession
+ * @param reason exception reason
+ */
public SnowflakeSQLLoggedException(String queryId, SFBaseSession session, String reason) {
super(queryId, reason, null);
sendTelemetryData(queryId, null, -1, session, this);
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java
index f1f41d4d0..d684c3d27 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java
@@ -14,11 +14,13 @@ public interface SnowflakeStatement {
/**
* @return the Snowflake query ID of the latest executed query (even failed one) or null when the
* last query ID is not available
+ * @throws SQLException if an error is encountered
*/
String getQueryID() throws SQLException;
/**
* @return the Snowflake query IDs of the latest executed batch queries
+ * @throws SQLException if an error is encountered
*/
List getBatchQueryIDs() throws SQLException;
@@ -27,9 +29,15 @@ public interface SnowflakeStatement {
*
* @param name parameter name
* @param value parameter value
+ * @throws SQLException if an error is encountered
*/
void setParameter(String name, Object value) throws SQLException;
+ /**
+ * Set batch ID
+ *
+ * @param batchID the batch ID
+ */
void setBatchID(String batchID);
/**
@@ -46,8 +54,8 @@ public interface SnowflakeStatement {
* required as SnowflakeStatementV1 doesn't directly expose ResultSet to the sub-classes making it
* challenging to get additional information from the previously executed query.
*
- * @param resultSet
- * @throws SQLException
+ * @param resultSet SFBaseResultSet
+ * @throws SQLException if an error is encountered
*/
void resultSetMetadataHandler(SFBaseResultSet resultSet) throws SQLException;
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java
index 5016c175b..08cb3fac7 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java
@@ -20,6 +20,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import net.snowflake.client.core.CancellationReason;
import net.snowflake.client.core.ExecTimeTelemetryData;
import net.snowflake.client.core.ParameterBindingDTO;
import net.snowflake.client.core.ResultUtil;
@@ -952,7 +953,7 @@ public void cancel() throws SQLException {
raiseSQLExceptionIfStatementIsClosed();
try {
- sfBaseStatement.cancel();
+ sfBaseStatement.cancel(CancellationReason.CLIENT_REQUESTED);
} catch (SFException ex) {
throw new SnowflakeSQLException(ex, ex.getSqlState(), ex.getVendorCode(), ex.getParams());
}
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java
index ea958c551..5e59dcbc4 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java
@@ -102,7 +102,12 @@ public static JavaDataType getJavaType(SnowflakeType type, boolean isStructuredT
}
}
- /** Converts text of data type (returned from SQL query) into Types type, represented by an int */
+ /**
+ * Converts text of data type (returned from SQL query) into Types type, represented by an int
+ *
+ * @param typeName type name
+ * @return int representation of type
+ */
public static int convertStringToType(String typeName) {
int retval = Types.NULL;
if (typeName == null || typeName.trim().isEmpty()) {
diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
index 1485249b3..8e9a683a0 100644
--- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
+++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java
@@ -4,6 +4,7 @@
package net.snowflake.client.jdbc;
+import static java.util.Arrays.stream;
import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -32,10 +33,12 @@
import java.util.Optional;
import java.util.Properties;
import java.util.Random;
+import java.util.TreeMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
import net.snowflake.client.core.Constants;
import net.snowflake.client.core.HttpClientSettingsKey;
import net.snowflake.client.core.OCSPMode;
@@ -53,6 +56,7 @@
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
+import org.apache.http.NameValuePair;
/**
* @author jhuang
@@ -172,7 +176,15 @@ private static void checkErrorAndThrowExceptionSub(
throw new SnowflakeSQLException(queryId, errorMessage, sqlState, errorCode);
}
- /** This method should only be used internally */
+ /**
+ * This method should only be used internally
+ *
+ * @param colNode JsonNode
+ * @param jdbcTreatDecimalAsInt true if should treat Decimal as Int
+ * @param session SFBaseSession
+ * @return SnowflakeColumnMetadata
+ * @throws SnowflakeSQLException if an error occurs
+ */
@Deprecated
public static SnowflakeColumnMetadata extractColumnMetadata(
JsonNode colNode, boolean jdbcTreatDecimalAsInt, SFBaseSession session)
@@ -661,7 +673,12 @@ public static String systemGetEnv(String env) {
return null;
}
- /** System.setEnv function. Can be used for unit tests. */
+ /**
+ * System.setEnv function. Can be used for unit tests.
+ *
+ * @param key key
+ * @param value value
+ */
public static void systemSetEnv(String key, String value) {
try {
Map env = System.getenv();
@@ -692,7 +709,7 @@ public static void systemSetEnv(String key, String value) {
/**
* System.unsetEnv function to remove a system environment parameter in the map
*
- * @param key
+ * @param key key value
*/
public static void systemUnsetEnv(String key) {
try {
@@ -714,6 +731,8 @@ public static void systemUnsetEnv(String key) {
*
* @param mode OCSP mode
* @param info proxy server properties.
+ * @return HttpClientSettingsKey
+ * @throws SnowflakeSQLException if an error occurs
*/
public static HttpClientSettingsKey convertProxyPropertiesToHttpClientKey(
OCSPMode mode, Properties info) throws SnowflakeSQLException {
@@ -769,8 +788,8 @@ public static HttpClientSettingsKey convertProxyPropertiesToHttpClientKey(
* SimpleDateFormatter. Negative values have to be rounded to the next negative value, while
* positive values should be cut off with no rounding.
*
- * @param millis
- * @return
+ * @param millis milliseconds
+ * @return seconds as long value
*/
public static long getSecondsFromMillis(long millis) {
long returnVal;
@@ -818,6 +837,22 @@ public static boolean convertSystemPropertyToBooleanValue(
}
return defaultValue;
}
+ /**
+ * Helper function to convert environment variable to boolean
+ *
+ * @param envVariableKey property name of the environment variable
+ * @param defaultValue default value used
+ * @return the value of the environment variable as boolean, else the default value
+ */
+ @SnowflakeJdbcInternalApi
+ public static boolean convertSystemGetEnvToBooleanValue(
+ String envVariableKey, boolean defaultValue) {
+ String environmentVariableValue = systemGetEnv(envVariableKey);
+ if (environmentVariableValue != null) {
+ return Boolean.parseBoolean(environmentVariableValue);
+ }
+ return defaultValue;
+ }
@SnowflakeJdbcInternalApi
public static T mapSFExceptionToSQLException(ThrowingCallable action)
@@ -835,4 +870,37 @@ public static String getJsonNodeStringValue(JsonNode node) throws SFException {
}
return node.isValueNode() ? node.asText() : node.toString();
}
+
+ /**
+ * Method introduced to avoid inconsistencies in custom headers handling, since these are defined
+ * on drivers side e.g. some drivers might internally convert headers to canonical form.
+ *
+ * @param input map input
+ * @return case insensitive map
+ */
+ @SnowflakeJdbcInternalApi
+ public static Map createCaseInsensitiveMap(Map input) {
+ Map caseInsensitiveMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ if (input != null) {
+ caseInsensitiveMap.putAll(input);
+ }
+ return caseInsensitiveMap;
+ }
+
+ /**
 + * createCaseInsensitiveMap overload adjusted to the Header[] argument type
+ *
+ * @param headers array of headers
+ * @return case insensitive map
+ */
+ @SnowflakeJdbcInternalApi
+ public static Map createCaseInsensitiveMap(Header[] headers) {
+ if (headers != null) {
+ return createCaseInsensitiveMap(
+ stream(headers)
+ .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue)));
+ } else {
+ return new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ }
+ }
}
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
index 93646e104..c3602fcf7 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java
@@ -3,8 +3,9 @@
*/
package net.snowflake.client.jdbc.cloud.storage;
-import java.util.HashMap;
import java.util.Map;
+import java.util.TreeMap;
+import net.snowflake.client.jdbc.SnowflakeUtil;
/**
* Implements platform-independent interface Azure BLOB and GCS object metadata
@@ -16,11 +17,11 @@
*/
public class CommonObjectMetadata implements StorageObjectMetadata {
private long contentLength;
- private Map userDefinedMetadata;
+ private final Map userDefinedMetadata;
private String contentEncoding;
CommonObjectMetadata() {
- userDefinedMetadata = new HashMap<>();
+ userDefinedMetadata = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
}
/*
@@ -31,7 +32,7 @@ public class CommonObjectMetadata implements StorageObjectMetadata {
long contentLength, String contentEncoding, Map userDefinedMetadata) {
this.contentEncoding = contentEncoding;
this.contentLength = contentLength;
- this.userDefinedMetadata = userDefinedMetadata;
+ this.userDefinedMetadata = SnowflakeUtil.createCaseInsensitiveMap(userDefinedMetadata);
}
/**
@@ -41,7 +42,6 @@ public class CommonObjectMetadata implements StorageObjectMetadata {
public Map getUserMetadata() {
return userDefinedMetadata;
}
- ;
/**
* @return returns the size of object in bytes
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java
index d9999457d..7acb2fc4a 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java
@@ -43,7 +43,22 @@ public class EncryptionProvider {
private static final int BUFFER_SIZE = 2 * 1024 * 1024; // 2 MB
private static SecureRandom secRnd;
- /** Decrypt a InputStream */
+ /**
+ * Decrypt a InputStream
+ *
+ * @param inputStream input stream
+ * @param keyBase64 keyBase64
+ * @param ivBase64 ivBase64
+ * @param encMat RemoteStoreFileEncryptionMaterial
+ * @return InputStream
+ * @throws NoSuchPaddingException when padding mechanism is not available for this environment
+ * @throws NoSuchAlgorithmException when the requested algorithm is not available for this
+ * environment
+ * @throws InvalidKeyException when there is an issue with the key value
+ * @throws BadPaddingException when the data is not padded as expected
+ * @throws IllegalBlockSizeException when the length of data is incorrect
 + * @throws InvalidAlgorithmParameterException when the algorithm parameters (e.g. the IV) are invalid
+ */
public static InputStream decryptStream(
InputStream inputStream,
String keyBase64,
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java
index fa31a6a0c..c3f53c0ea 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java
@@ -30,7 +30,8 @@
import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial;
class GcmEncryptionProvider {
- private static final int TAG_LENGTH = 128;
+ private static final int TAG_LENGTH_IN_BITS = 128;
+ private static final int IV_LENGTH_IN_BYTES = 12;
private static final String AES = "AES";
private static final String FILE_CIPHER = "AES/GCM/NoPadding";
private static final String KEY_CIPHER = "AES/GCM/NoPadding";
@@ -64,8 +65,8 @@ static InputStream encrypt(
byte[] kek = base64Decoder.decode(encMat.getQueryStageMasterKey());
int keySize = kek.length;
byte[] keyBytes = new byte[keySize];
- byte[] dataIvBytes = new byte[blockSize];
- byte[] keyIvBytes = new byte[blockSize];
+ byte[] dataIvBytes = new byte[IV_LENGTH_IN_BYTES];
+ byte[] keyIvBytes = new byte[IV_LENGTH_IN_BYTES];
initRandomIvsAndFileKey(dataIvBytes, keyIvBytes, keyBytes);
byte[] encryptedKey = encryptKey(kek, keyBytes, keyIvBytes, keyAad);
CipherInputStream cis = encryptContent(src, keyBytes, dataIvBytes, dataAad);
@@ -94,7 +95,7 @@ private static byte[] encryptKey(byte[] kekBytes, byte[] keyBytes, byte[] keyIvD
throws InvalidKeyException, InvalidAlgorithmParameterException, IllegalBlockSizeException,
BadPaddingException, NoSuchPaddingException, NoSuchAlgorithmException {
SecretKey kek = new SecretKeySpec(kekBytes, 0, kekBytes.length, AES);
- GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, keyIvData);
+ GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, keyIvData);
Cipher keyCipher = Cipher.getInstance(KEY_CIPHER);
keyCipher.init(Cipher.ENCRYPT_MODE, kek, gcmParameterSpec);
if (aad != null) {
@@ -108,7 +109,7 @@ private static CipherInputStream encryptContent(
throws InvalidKeyException, InvalidAlgorithmParameterException, NoSuchPaddingException,
NoSuchAlgorithmException {
SecretKey fileKey = new SecretKeySpec(keyBytes, 0, keyBytes.length, AES);
- GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, dataIvBytes);
+ GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, dataIvBytes);
Cipher fileCipher = Cipher.getInstance(FILE_CIPHER);
fileCipher.init(Cipher.ENCRYPT_MODE, fileKey, gcmParameterSpec);
if (aad != null) {
@@ -180,7 +181,7 @@ private static CipherInputStream decryptContentFromStream(
InputStream inputStream, byte[] ivBytes, byte[] fileKeyBytes, byte[] aad)
throws InvalidKeyException, InvalidAlgorithmParameterException, NoSuchPaddingException,
NoSuchAlgorithmException {
- GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, ivBytes);
+ GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, ivBytes);
SecretKey fileKey = new SecretKeySpec(fileKeyBytes, AES);
Cipher fileCipher = Cipher.getInstance(FILE_CIPHER);
fileCipher.init(Cipher.DECRYPT_MODE, fileKey, gcmParameterSpec);
@@ -195,7 +196,7 @@ private static void decryptContentFromFile(
throws InvalidKeyException, InvalidAlgorithmParameterException, IOException,
NoSuchPaddingException, NoSuchAlgorithmException {
SecretKey fileKey = new SecretKeySpec(fileKeyBytes, AES);
- GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, cekIvBytes);
+ GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, cekIvBytes);
byte[] buffer = new byte[BUFFER_SIZE];
Cipher fileCipher = Cipher.getInstance(FILE_CIPHER);
fileCipher.init(Cipher.DECRYPT_MODE, fileKey, gcmParameterSpec);
@@ -224,7 +225,7 @@ private static byte[] decryptKey(byte[] kekBytes, byte[] ivBytes, byte[] keyByte
throws InvalidKeyException, InvalidAlgorithmParameterException, IllegalBlockSizeException,
BadPaddingException, NoSuchPaddingException, NoSuchAlgorithmException {
SecretKey kek = new SecretKeySpec(kekBytes, 0, kekBytes.length, AES);
- GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, ivBytes);
+ GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, ivBytes);
Cipher keyCipher = Cipher.getInstance(KEY_CIPHER);
keyCipher.init(Cipher.DECRYPT_MODE, kek, gcmParameterSpec);
if (aad != null) {
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java
index 49b3542fd..565db0210 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java
@@ -67,7 +67,7 @@ public static void setProxyForS3(HttpClientSettingsKey key, ClientConfiguration
*
* @param proxyProperties proxy properties
* @param clientConfig the configuration needed by S3 to set the proxy
- * @throws SnowflakeSQLException
+ * @throws SnowflakeSQLException when an error is encountered
*/
public static void setSessionlessProxyForS3(
Properties proxyProperties, ClientConfiguration clientConfig) throws SnowflakeSQLException {
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
index ec54508f9..38f20cf65 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java
@@ -5,6 +5,7 @@
import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.Map;
+import net.snowflake.client.jdbc.SnowflakeUtil;
/**
* s3 implementation of platform independent StorageObjectMetadata interface, wraps an S3
@@ -28,7 +29,7 @@ public class S3ObjectMetadata implements StorageObjectMetadata {
@Override
public Map getUserMetadata() {
- return objectMetadata.getUserMetadata();
+ return SnowflakeUtil.createCaseInsensitiveMap(objectMetadata.getUserMetadata());
}
@Override
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
index 3bb209c48..853d461b5 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java
@@ -5,6 +5,7 @@
import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.Map;
+import net.snowflake.client.jdbc.SnowflakeUtil;
/**
* Implementation of StorageObjectMetadata for S3 for remote storage object metadata.
@@ -26,7 +27,7 @@ public S3StorageObjectMetadata(ObjectMetadata s3Metadata) {
*/
@Override
public Map getUserMetadata() {
- return this.s3Metadata.getUserMetadata();
+ return SnowflakeUtil.createCaseInsensitiveMap(this.s3Metadata.getUserMetadata());
}
/**
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
index cdf303bbd..0f8014ef6 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java
@@ -4,6 +4,8 @@
package net.snowflake.client.jdbc.cloud.storage;
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
+import static net.snowflake.client.core.HttpUtil.setProxyForAzure;
+import static net.snowflake.client.core.HttpUtil.setSessionlessProxyForAzure;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import com.fasterxml.jackson.core.JsonFactory;
@@ -41,7 +43,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import net.snowflake.client.core.HttpUtil;
import net.snowflake.client.core.ObjectMapperFactory;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFSession;
@@ -154,9 +155,9 @@ private void setupAzureClient(
this.azStorageClient = new CloudBlobClient(storageEndpoint, azCreds);
opContext = new OperationContext();
if (session != null) {
- HttpUtil.setProxyForAzure(session.getHttpClientKey(), opContext);
+ setProxyForAzure(session.getHttpClientKey(), opContext);
} else {
- HttpUtil.setSessionlessProxyForAzure(stage.getProxyProperties(), opContext);
+ setSessionlessProxyForAzure(stage.getProxyProperties(), opContext);
}
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("invalid_azure_credentials");
@@ -273,7 +274,8 @@ public StorageObjectMetadata getObjectMetadata(String remoteStorageLocation, Str
blob.downloadAttributes(null, null, opContext);
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
// Get the BLOB system properties we care about
BlobProperties properties = blob.getProperties();
@@ -348,7 +350,8 @@ public void download(
blob.downloadAttributes(null, transferOptions, opContext);
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);
@@ -447,13 +450,11 @@ public InputStream downloadToStream(
InputStream stream = blob.openInputStream(null, null, opContext);
stopwatch.stop();
long downloadMillis = stopwatch.elapsedMillis();
- Map userDefinedMetadata = blob.getMetadata();
-
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);
-
String key = encryptionData.getKey();
-
String iv = encryptionData.getValue();
if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
@@ -574,7 +575,7 @@ public void upload(
CloudBlockBlob blob = container.getBlockBlobReference(destFileName);
// Set the user-defined/Snowflake metadata and upload the BLOB
- blob.setMetadata((HashMap) meta.getUserMetadata());
+ blob.setMetadata(new HashMap<>(meta.getUserMetadata()));
BlobRequestOptions transferOptions = new BlobRequestOptions();
transferOptions.setConcurrentRequestCount(parallelism);
@@ -694,7 +695,7 @@ private SFPair createUploadStream(
final InputStream stream;
FileInputStream srcFileStream = null;
try {
- if (isEncrypting() && getEncryptionKeySize() < 256) {
+ if (isEncrypting() && getEncryptionKeySize() <= 256) {
try {
final InputStream uploadStream =
uploadFromStream
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
index d907973ac..d6bf6ba84 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java
@@ -4,6 +4,10 @@
package net.snowflake.client.jdbc.cloud.storage;
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
+import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemPropertyToBooleanValue;
+import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap;
+import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause;
+import static net.snowflake.client.jdbc.SnowflakeUtil.isBlank;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import com.fasterxml.jackson.core.JsonFactory;
@@ -14,9 +18,11 @@
import com.google.api.gax.rpc.FixedHeaderProvider;
import com.google.auth.oauth2.AccessToken;
import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.NoCredentials;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
+import com.google.cloud.storage.HttpStorageOptions;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BlobListOption;
import com.google.cloud.storage.StorageException;
@@ -62,7 +68,6 @@
import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial;
import net.snowflake.common.core.SqlState;
import org.apache.commons.io.IOUtils;
-import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.HttpGet;
@@ -183,8 +188,8 @@ public void shutdown() {
*
* @param remoteStorageLocation bucket name
* @param prefix Path
- * @return
- * @throws StorageProviderException
+ * @return a collection of storage summary objects
+ * @throws StorageProviderException cloud storage provider error
*/
@Override
public StorageObjectSummaryCollection listObjects(String remoteStorageLocation, String prefix)
@@ -310,18 +315,14 @@ public void download(
outStream.close();
bodyStream.close();
if (isEncrypting()) {
- for (Header header : response.getAllHeaders()) {
- if (header
- .getName()
- .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) {
- AbstractMap.SimpleEntry encryptionData =
- parseEncryptionData(header.getValue(), queryId);
-
- key = encryptionData.getKey();
- iv = encryptionData.getValue();
- break;
- }
- }
+ Map userDefinedHeaders =
+ createCaseInsensitiveMap(response.getAllHeaders());
+ AbstractMap.SimpleEntry encryptionData =
+ parseEncryptionData(
+ userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP),
+ queryId);
+ key = encryptionData.getKey();
+ iv = encryptionData.getValue();
}
stopwatch.stop();
downloadMillis = stopwatch.elapsedMillis();
@@ -355,9 +356,10 @@ public void download(
logger.debug("Download successful", false);
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
if (isEncrypting()) {
- if (userDefinedMetadata != null) {
+ if (!userDefinedMetadata.isEmpty()) {
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId);
@@ -499,18 +501,14 @@ public InputStream downloadToStream(
inputStream = response.getEntity().getContent();
if (isEncrypting()) {
- for (Header header : response.getAllHeaders()) {
- if (header
- .getName()
- .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) {
- AbstractMap.SimpleEntry encryptionData =
- parseEncryptionData(header.getValue(), queryId);
-
- key = encryptionData.getKey();
- iv = encryptionData.getValue();
- break;
- }
- }
+ Map userDefinedHeaders =
+ createCaseInsensitiveMap(response.getAllHeaders());
+ AbstractMap.SimpleEntry encryptionData =
+ parseEncryptionData(
+ userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP),
+ queryId);
+ key = encryptionData.getKey();
+ iv = encryptionData.getValue();
}
stopwatch.stop();
downloadMillis = stopwatch.elapsedMillis();
@@ -538,7 +536,8 @@ public InputStream downloadToStream(
inputStream = Channels.newInputStream(blob.reader());
if (isEncrypting()) {
// Get the user-defined BLOB metadata
- Map userDefinedMetadata = blob.getMetadata();
+ Map userDefinedMetadata =
+ SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry encryptionData =
parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId);
@@ -1039,7 +1038,7 @@ private SFPair createUploadStream(
final InputStream stream;
FileInputStream srcFileStream = null;
try {
- if (isEncrypting() && getEncryptionKeySize() < 256) {
+ if (isEncrypting() && getEncryptionKeySize() <= 256) {
try {
final InputStream uploadStream =
uploadFromStream
@@ -1121,7 +1120,7 @@ public void handleStorageException(
// If there is no space left in the download location, java.io.IOException is thrown.
// Don't retry.
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) {
+ if (getRootCause(ex) instanceof IOException) {
SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId);
}
@@ -1181,7 +1180,7 @@ public void handleStorageException(
}
}
} else if (ex instanceof InterruptedException
- || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) {
+ || getRootCause(ex) instanceof SocketTimeoutException) {
if (retryCount > getMaxRetries()) {
throw new SnowflakeSQLLoggedException(
queryId,
@@ -1278,7 +1277,7 @@ private AbstractMap.SimpleEntry parseEncryptionData(
/** Adds digest metadata to the StorageObjectMetadata object */
@Override
public void addDigestMetadata(StorageObjectMetadata meta, String digest) {
- if (!SnowflakeUtil.isBlank(digest)) {
+ if (!isBlank(digest)) {
meta.addUserMetadata("sfc-digest", digest);
}
}
@@ -1315,6 +1314,8 @@ private void setupGCSClient(
if (accessToken != null) {
// We are authenticated with an oauth access token.
StorageOptions.Builder builder = StorageOptions.newBuilder();
+ stage.gcsCustomEndpoint().ifPresent(builder::setHost);
+
if (areDisabledGcsDefaultCredentials(session)) {
logger.debug(
"Adding explicit credentials to avoid default credential lookup by the GCS client");
@@ -1332,7 +1333,10 @@ private void setupGCSClient(
.getService();
} else {
// Use anonymous authentication.
- this.gcsClient = StorageOptions.getUnauthenticatedInstance().getService();
+ HttpStorageOptions.Builder builder =
+ HttpStorageOptions.newBuilder().setCredentials(NoCredentials.getInstance());
+ stage.gcsCustomEndpoint().ifPresent(builder::setHost);
+ this.gcsClient = builder.build().getService();
}
if (encMat != null) {
@@ -1355,7 +1359,7 @@ private void setupGCSClient(
private static boolean areDisabledGcsDefaultCredentials(SFSession session) {
return session != null && session.getDisableGcsDefaultCredentials()
- || SnowflakeUtil.convertSystemPropertyToBooleanValue(
+ || convertSystemPropertyToBooleanValue(
DISABLE_GCS_DEFAULT_CREDENTIALS_PROPERTY_NAME, false);
}
@@ -1374,13 +1378,11 @@ public void addStreamingIngestMetadata(
meta.addUserMetadata(GCS_STREAMING_INGEST_CLIENT_KEY, clientKey);
}
- /** Gets streaming ingest client name to the StorageObjectMetadata object */
@Override
public String getStreamingIngestClientName(StorageObjectMetadata meta) {
return meta.getUserMetadata().get(GCS_STREAMING_INGEST_CLIENT_NAME);
}
- /** Gets streaming ingest client key to the StorageObjectMetadata object */
@Override
public String getStreamingIngestClientKey(StorageObjectMetadata meta) {
return meta.getUserMetadata().get(GCS_STREAMING_INGEST_CLIENT_KEY);
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java
index 3b33b60f0..f1a2392bb 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java
@@ -5,6 +5,8 @@
package net.snowflake.client.jdbc.cloud.storage;
import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED;
+import static net.snowflake.client.jdbc.SnowflakeUtil.createDefaultExecutorService;
+import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import com.amazonaws.AmazonClientException;
@@ -368,7 +370,7 @@ public void download(
new ExecutorFactory() {
@Override
public ExecutorService newExecutor() {
- return SnowflakeUtil.createDefaultExecutorService(
+ return createDefaultExecutorService(
"s3-transfer-manager-downloader-", parallelism);
}
})
@@ -379,7 +381,8 @@ public ExecutorService newExecutor() {
// Pull object metadata from S3
ObjectMetadata meta = amazonClient.getObjectMetadata(remoteStorageLocation, stageFilePath);
- Map metaMap = meta.getUserMetadata();
+ Map metaMap =
+ SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata());
String key = metaMap.get(AMZ_KEY);
String iv = metaMap.get(AMZ_IV);
@@ -481,7 +484,8 @@ public InputStream downloadToStream(
InputStream stream = file.getObjectContent();
stopwatch.stop();
long downloadMillis = stopwatch.elapsedMillis();
- Map metaMap = meta.getUserMetadata();
+ Map metaMap =
+ SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata());
String key = metaMap.get(AMZ_KEY);
String iv = metaMap.get(AMZ_IV);
@@ -611,7 +615,7 @@ public void upload(
new ExecutorFactory() {
@Override
public ExecutorService newExecutor() {
- return SnowflakeUtil.createDefaultExecutorService(
+ return createDefaultExecutorService(
"s3-transfer-manager-uploader-", parallelism);
}
})
@@ -821,7 +825,7 @@ private static void handleS3Exception(
// If there is no space left in the download location, java.io.IOException is thrown.
// Don't retry.
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) {
+ if (getRootCause(ex) instanceof IOException) {
SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId);
}
@@ -912,7 +916,7 @@ private static void handleS3Exception(
}
} else {
if (ex instanceof InterruptedException
- || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) {
+ || getRootCause(ex) instanceof SocketTimeoutException) {
if (retryCount > s3Client.getMaxRetries()) {
throw new SnowflakeSQLLoggedException(
queryId,
@@ -940,7 +944,12 @@ private static void handleS3Exception(
}
}
- /** Checks the status code of the exception to see if it's a 400 or 404 */
+ /**
+ * Checks the status code of the exception to see if it's a 400 or 404
+ *
+ * @param ex exception
+ * @return true if it's a 400 or 404 status code
+ */
public boolean isClientException400Or404(Exception ex) {
if (ex instanceof AmazonServiceException) {
AmazonServiceException asEx = (AmazonServiceException) (ex);
@@ -950,13 +959,13 @@ public boolean isClientException400Or404(Exception ex) {
return false;
}
- /** Returns the material descriptor key */
+ /* Returns the material descriptor key */
@Override
public String getMatdescKey() {
return "x-amz-matdesc";
}
- /** Adds encryption metadata to the StorageObjectMetadata object */
+ /* Adds encryption metadata to the StorageObjectMetadata object */
@Override
public void addEncryptionMetadata(
StorageObjectMetadata meta,
@@ -970,13 +979,13 @@ public void addEncryptionMetadata(
meta.setContentLength(contentLength);
}
- /** Adds digest metadata to the StorageObjectMetadata object */
+ /* Adds digest metadata to the StorageObjectMetadata object */
@Override
public void addDigestMetadata(StorageObjectMetadata meta, String digest) {
meta.addUserMetadata("sfc-digest", digest);
}
- /** Gets digest metadata to the StorageObjectMetadata object */
+ /* Gets digest metadata to the StorageObjectMetadata object */
@Override
public String getDigestMetadata(StorageObjectMetadata meta) {
return meta.getUserMetadata().get("sfc-digest");
@@ -1001,7 +1010,7 @@ private static SSLConnectionSocketFactory getSSLConnectionSocketFactory() {
return s3ConnectionSocketFactory;
}
- /**
+ /*
* Adds streaming ingest metadata to the StorageObjectMetadata object, used for streaming ingest
* per client billing calculation
*/
@@ -1012,13 +1021,11 @@ public void addStreamingIngestMetadata(
meta.addUserMetadata(S3_STREAMING_INGEST_CLIENT_KEY, clientKey);
}
- /** Gets streaming ingest client name to the StorageObjectMetadata object */
@Override
public String getStreamingIngestClientName(StorageObjectMetadata meta) {
return meta.getUserMetadata().get(S3_STREAMING_INGEST_CLIENT_NAME);
}
- /** Gets streaming ingest client key to the StorageObjectMetadata object */
@Override
public String getStreamingIngestClientKey(StorageObjectMetadata meta) {
return meta.getUserMetadata().get(S3_STREAMING_INGEST_CLIENT_KEY);
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java
index 4be936763..ba74ac7d2 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java
@@ -523,9 +523,19 @@ default void addEncryptionMetadataForGcm(
*/
void addStreamingIngestMetadata(StorageObjectMetadata meta, String clientName, String clientKey);
- /** Gets streaming ingest client name to the StorageObjectMetadata object */
+ /**
+ * Gets streaming ingest client name to the StorageObjectMetadata object
+ *
+ * @param meta StorageObjectMetadata
+ * @return Client name
+ */
String getStreamingIngestClientName(StorageObjectMetadata meta);
- /** Gets streaming ingest client key to the StorageObjectMetadata object */
+ /**
+ * Gets streaming ingest client key to the StorageObjectMetadata object
+ *
+ * @param meta StorageObjectMetadata
+ * @return Client key
+ */
String getStreamingIngestClientKey(StorageObjectMetadata meta);
}
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java
index 7a8bf4d36..3a14b8fa0 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java
@@ -2,10 +2,17 @@
import java.io.Serializable;
import java.util.Map;
+import java.util.Optional;
import java.util.Properties;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
-/** Encapsulates all the required stage properties used by GET/PUT for Azure and S3 stages */
+/** Encapsulates all the required stage properties used by GET/PUT for Azure, GCS and S3 stages */
public class StageInfo implements Serializable {
+
+ // me-central2 GCS region always use regional urls
+ // TODO SNOW-1818804: the value is hardcoded now, but it should be server driven
+ private static final String GCS_REGION_ME_CENTRAL_2 = "me-central2";
+
public enum StageType {
S3,
AZURE,
@@ -17,12 +24,18 @@ public enum StageType {
private StageType stageType; // The stage type
private String location; // The container or bucket
private Map, ?> credentials; // the credentials required for the stage
- private String region; // AWS/S3/GCS region (S3/GCS only)
- private String endPoint; // The Azure Storage endpoint (Azure only)
+ private String region; // S3/GCS region
+ // An endpoint (Azure, AWS FIPS and GCS custom endpoint override)
+ private String endPoint;
private String storageAccount; // The Azure Storage account (Azure only)
private String presignedUrl; // GCS gives us back a presigned URL instead of a cred
private boolean isClientSideEncrypted; // whether to encrypt/decrypt files on the stage
- private boolean useS3RegionalUrl; // whether to use s3 regional URL (AWS Only)
+ // whether to use s3 regional URL (AWS Only)
+ // TODO SNOW-1818804: this field will be deprecated when the server returns {@link
+ // #useRegionalUrl}
+ private boolean useS3RegionalUrl;
+ // whether to use regional URL (AWS and GCS only)
+ private boolean useRegionalUrl;
private Properties proxyProperties;
/*
@@ -166,6 +179,16 @@ public boolean getUseS3RegionalUrl() {
return useS3RegionalUrl;
}
+ @SnowflakeJdbcInternalApi
+ public void setUseRegionalUrl(boolean useRegionalUrl) {
+ this.useRegionalUrl = useRegionalUrl;
+ }
+
+ @SnowflakeJdbcInternalApi
+ public boolean getUseRegionalUrl() {
+ return useRegionalUrl;
+ }
+
private static boolean isSpecified(String arg) {
return !(arg == null || arg.equalsIgnoreCase(""));
}
@@ -173,9 +196,22 @@ private static boolean isSpecified(String arg) {
public void setProxyProperties(Properties proxyProperties) {
this.proxyProperties = proxyProperties;
}
- ;
public Properties getProxyProperties() {
return proxyProperties;
}
+
+ @SnowflakeJdbcInternalApi
+ public Optional gcsCustomEndpoint() {
+ if (stageType != StageType.GCS) {
+ return Optional.empty();
+ }
+ if (endPoint != null && !endPoint.trim().isEmpty() && !"null".equals(endPoint)) {
+ return Optional.of(endPoint);
+ }
+ if (GCS_REGION_ME_CENTRAL_2.equalsIgnoreCase(region) || useRegionalUrl) {
+ return Optional.of(String.format("storage.%s.rep.googleapis.com", region.toLowerCase()));
+ }
+ return Optional.empty();
+ }
}
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java
index ac7de73a6..69d56e195 100644
--- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java
+++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java
@@ -47,6 +47,7 @@ public static StorageClientFactory getFactory() {
* @param stage the stage properties
* @param parallel the degree of parallelism to be used by the client
* @param encMat encryption material for the client
+ * @param session SFSession
* @return a SnowflakeStorageClient interface to the instance created
* @throws SnowflakeSQLException if any error occurs
*/
@@ -58,8 +59,9 @@ public SnowflakeStorageClient createClient(
switch (stage.getStageType()) {
case S3:
boolean useS3RegionalUrl =
- (stage.getUseS3RegionalUrl()
- || (session != null && session.getUseRegionalS3EndpointsForPresignedURL()));
+ stage.getUseS3RegionalUrl()
+ || stage.getUseRegionalUrl()
+ || session != null && session.getUseRegionalS3EndpointsForPresignedURL();
return createS3Client(
stage.getCredentials(),
parallel,
diff --git a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java
index ed360789e..5e163c8bf 100644
--- a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java
+++ b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java
@@ -158,7 +158,11 @@ public JSONObject getContext() {
return context;
}
- /** Note: Only used for IT */
+ /**
+ * Note: Only used for IT
+ *
+ * @param params parameter map
+ */
public void updateContextForIT(Map params) {
Properties info = new Properties();
for (String key : params.keySet()) {
@@ -247,7 +251,11 @@ private void configureDeployment(SnowflakeConnectString conStr) {
this.setDeployment(deployment);
}
- /** whether the telemetry service is enabled for current deployment */
+ /**
+ * whether the telemetry service is enabled for current deployment
+ *
+ * @return true if the telemetry service is enabled for current deployment
+ */
public boolean isDeploymentEnabled() {
return ENABLED_DEPLOYMENT.contains(this.serverDeployment.name);
}
@@ -372,7 +380,11 @@ public void count() {
eventCnt.incrementAndGet();
}
- /** Report the event to the telemetry server in a new thread */
+ /**
+ * Report the event to the telemetry server in a new thread
+ *
+ * @param event TelemetryEvent
+ */
public void report(TelemetryEvent event) {
reportChooseEvent(event, /* isHTAP */ false);
}
@@ -389,7 +401,12 @@ public void reportChooseEvent(TelemetryEvent event, boolean isHTAP) {
TelemetryThreadPool.getInstance().execute(runUpload);
}
- /** Convert an event to a payload in string */
+ /**
+ * Convert an event to a payload in string
+ *
+ * @param event TelemetryEvent
+ * @return the string payload
+ */
public String exportQueueToString(TelemetryEvent event) {
JSONArray logs = new JSONArray();
logs.add(event);
@@ -509,7 +526,13 @@ private void uploadPayload() {
}
}
- /** log OCSP exception to telemetry */
+ /**
+ * log OCSP exception to telemetry
+ *
+ * @param eventType event type
+ * @param telemetryData JSON telemetry data
+ * @param ex CertificateException
+ */
public void logOCSPExceptionTelemetryEvent(
String eventType, JSONObject telemetryData, CertificateException ex) {
if (enabled) {
@@ -533,7 +556,24 @@ public void logOCSPExceptionTelemetryEvent(
}
}
- /** log error http response to telemetry */
+ /**
+ * log error http response to telemetry
+ *
+ * @param eventName the event name
+ * @param request the HttpRequestBase
+ * @param injectSocketTimeout the socket timeout
+ * @param canceling cancelling
+ * @param withoutCookies without cookies
+ * @param includeRetryParameters include retry parameters
+ * @param includeRequestGuid include request GUID
+ * @param response the CloseableHttpResponse
+ * @param savedEx the saved exception
+ * @param breakRetryReason the break retry reason
+ * @param retryTimeout the retry timeout
+ * @param retryCount retry count
+ * @param sqlState the SQL state
+ * @param errorCode the error code
+ */
public void logHttpRequestTelemetryEvent(
String eventName,
HttpRequestBase request,
@@ -593,7 +633,12 @@ public void logHttpRequestTelemetryEvent(
}
}
- /** log execution times from various processing slices */
+ /**
+ * log execution times from various processing slices
+ *
+ * @param telemetryData JSON telemetry data
+ * @param eventName the event name
+ */
public void logExecutionTimeTelemetryEvent(JSONObject telemetryData, String eventName) {
if (htapEnabled) {
TelemetryEvent.LogBuilder logBuilder = new TelemetryEvent.LogBuilder();
diff --git a/src/main/java/net/snowflake/client/log/ArgSupplier.java b/src/main/java/net/snowflake/client/log/ArgSupplier.java
index f7fef53a6..adead308d 100644
--- a/src/main/java/net/snowflake/client/log/ArgSupplier.java
+++ b/src/main/java/net/snowflake/client/log/ArgSupplier.java
@@ -11,5 +11,10 @@
*/
@FunctionalInterface
public interface ArgSupplier {
+ /**
+ * Get value
+ *
+ * @return Object value.
+ */
Object get();
}
diff --git a/src/main/java/net/snowflake/client/log/JDK14Logger.java b/src/main/java/net/snowflake/client/log/JDK14Logger.java
index d70009e16..e9ae25696 100644
--- a/src/main/java/net/snowflake/client/log/JDK14Logger.java
+++ b/src/main/java/net/snowflake/client/log/JDK14Logger.java
@@ -185,7 +185,9 @@ public static Level getLevel() {
/**
* This is way to enable logging in JDBC through TRACING parameter or sf client config file.
*
- * @param level
+ * @param level log level
+ * @param logPath log path
+ * @throws IOException if there is an error writing to the log
*/
public static synchronized void instantiateLogger(Level level, String logPath)
throws IOException {
@@ -212,6 +214,9 @@ public static synchronized void instantiateLogger(Level level, String logPath)
* places.
*
*
This method will convert string in ex.1 to ex.2
+ *
+ * @param original original string
+ * @return refactored string
*/
private String refactorString(String original) {
StringBuilder sb = new StringBuilder();
diff --git a/src/main/java/net/snowflake/client/log/SFLogLevel.java b/src/main/java/net/snowflake/client/log/SFLogLevel.java
index 18aeeb2a6..94e530af2 100644
--- a/src/main/java/net/snowflake/client/log/SFLogLevel.java
+++ b/src/main/java/net/snowflake/client/log/SFLogLevel.java
@@ -23,8 +23,8 @@ public enum SFLogLevel {
* Method to parse the input loglevel string and returns corresponding loglevel. This method uses
* case in-sensitive matching.
*
- * @param levelStr
- * @return
+ * @param levelStr log level string
+ * @return SFLogLevel
*/
public static SFLogLevel getLogLevel(String levelStr) {
for (SFLogLevel level : SFLogLevel.values()) {
diff --git a/src/main/java/net/snowflake/client/util/SecretDetector.java b/src/main/java/net/snowflake/client/util/SecretDetector.java
index 3ae48defa..3c0727de7 100644
--- a/src/main/java/net/snowflake/client/util/SecretDetector.java
+++ b/src/main/java/net/snowflake/client/util/SecretDetector.java
@@ -95,7 +95,8 @@ public class SecretDetector {
/**
* Check whether the name is sensitive
*
- * @param name
+ * @param name the name
+ * @return true if the name is sensitive.
*/
public static boolean isSensitive(String name) {
return SENSITIVE_NAME_SET.contains(name.toLowerCase());
diff --git a/src/main/java/net/snowflake/client/util/TimeMeasurement.java b/src/main/java/net/snowflake/client/util/TimeMeasurement.java
index 390294236..797f454c1 100644
--- a/src/main/java/net/snowflake/client/util/TimeMeasurement.java
+++ b/src/main/java/net/snowflake/client/util/TimeMeasurement.java
@@ -12,7 +12,11 @@ public class TimeMeasurement {
private long start;
private long end;
- /** Get the start time as epoch time in microseconds. */
+ /**
+ * Get the start time as epoch time in microseconds.
+ *
+ * @return the start time as epoch time in microseconds.
+ */
public long getStart() {
return start;
}
@@ -22,7 +26,11 @@ public void setStart() {
this.start = SnowflakeUtil.getEpochTimeInMicroSeconds();
}
- /** Get the stop time as epoch time in microseconds. */
+ /**
+ * Get the stop time as epoch time in microseconds.
+ *
+ * @return the stop time as epoch time in microseconds.
+ */
public long getEnd() {
return end;
}
diff --git a/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java
index 8cf16c6bd..f4f226fa9 100644
--- a/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java
+++ b/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java
@@ -1,12 +1,15 @@
package com.snowflake.client.jdbc;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.sql.Connection;
import java.sql.SQLException;
import net.snowflake.client.AbstractDriverIT;
-import org.junit.Test;
+import net.snowflake.client.category.TestTags;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+@Tag(TestTags.CONNECTION)
public class SnowflakeDriverIT extends AbstractDriverIT {
@Test
diff --git a/src/test/java/net/snowflake/client/AbstractDriverIT.java b/src/test/java/net/snowflake/client/AbstractDriverIT.java
index 4a3acea23..3104ce7e9 100644
--- a/src/test/java/net/snowflake/client/AbstractDriverIT.java
+++ b/src/test/java/net/snowflake/client/AbstractDriverIT.java
@@ -24,12 +24,9 @@
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
-import org.junit.Rule;
/** Base test class with common constants, data structures and methods */
public class AbstractDriverIT {
- // This is required to use ConditionalIgnore annotation.
- @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule();
public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver";
public static final String DRIVER_CLASS_COM = "com.snowflake.client.jdbc.SnowflakeDriver";
diff --git a/src/test/java/net/snowflake/client/AssumptionUtils.java b/src/test/java/net/snowflake/client/AssumptionUtils.java
new file mode 100644
index 000000000..73ae13fbb
--- /dev/null
+++ b/src/test/java/net/snowflake/client/AssumptionUtils.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client;
+
+import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
+import net.snowflake.client.core.Constants;
+
+public class AssumptionUtils {
+ public static void assumeNotRunningOnGithubActionsMac() {
+ assumeFalse(isRunningOnGithubActions() && Constants.getOS() == Constants.OS.MAC);
+ }
+
+ public static void assumeNotRunningOnJava8() {
+ assumeFalse(systemGetProperty("java.version").startsWith("1.8.0"));
+ }
+
+ public static void assumeNotRunningOnJava21() {
+ assumeFalse(systemGetProperty("java.version").startsWith("21."));
+ }
+
+ public static void assumeRunningOnGithubActions() {
+ assumeTrue(isRunningOnGithubActions());
+ }
+
+ public static boolean isRunningOnGithubActions() {
+ return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null;
+ }
+
+ public static void assumeRunningOnLinuxMac() {
+ assumeTrue(Constants.getOS() == Constants.OS.LINUX || Constants.getOS() == Constants.OS.MAC);
+ }
+}
diff --git a/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java b/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java
deleted file mode 100644
index fe20883db..000000000
--- a/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package net.snowflake.client;
-
-/*
- * Created by hyu on 1/22/18.
- */
-
-/*
-Copyright (c) 2013,2014 RĂ¼diger Herrmann
-All rights reserved. This program and the accompanying materials
-are made available under the terms of the Eclipse Public License v1.0
-which accompanies this distribution, and is available at
-http://www.eclipse.org/legal/epl-v10.html
-
-Contributors:
-RĂ¼diger Herrmann - initial API and implementation
-Matt Morrissette - allow to use non-static inner IgnoreConditions
-*/
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import java.lang.reflect.Modifier;
-import org.junit.Assume;
-import org.junit.rules.MethodRule;
-import org.junit.runners.model.FrameworkMethod;
-import org.junit.runners.model.Statement;
-
-public class ConditionalIgnoreRule implements MethodRule {
-
- public interface IgnoreCondition {
- boolean isSatisfied();
- }
-
- @Retention(RetentionPolicy.RUNTIME)
- @Target({ElementType.METHOD})
- public @interface ConditionalIgnore {
- Class extends IgnoreCondition> condition();
- }
-
- @Override
- public Statement apply(Statement base, FrameworkMethod method, Object target) {
- Statement result = base;
- if (hasConditionalIgnoreAnnotation(method)) {
- IgnoreCondition condition = getIgnoreCondition(target, method);
- if (condition.isSatisfied()) {
- result = new IgnoreStatement(condition);
- }
- }
- return result;
- }
-
- private static boolean hasConditionalIgnoreAnnotation(FrameworkMethod method) {
- return method.getAnnotation(ConditionalIgnore.class) != null;
- }
-
- private static IgnoreCondition getIgnoreCondition(Object target, FrameworkMethod method) {
- ConditionalIgnore annotation = method.getAnnotation(ConditionalIgnore.class);
- return new IgnoreConditionCreator(target, annotation).create();
- }
-
- private static class IgnoreConditionCreator {
- private final Object target;
- private final Class extends IgnoreCondition> conditionType;
-
- IgnoreConditionCreator(Object target, ConditionalIgnore annotation) {
- this.target = target;
- this.conditionType = annotation.condition();
- }
-
- IgnoreCondition create() {
- checkConditionType();
- try {
- return createCondition();
- } catch (RuntimeException re) {
- throw re;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private IgnoreCondition createCondition() throws Exception {
- IgnoreCondition result;
- if (isConditionTypeStandalone()) {
- result = conditionType.newInstance();
- } else {
- result = conditionType.getDeclaredConstructor(target.getClass()).newInstance(target);
- }
- return result;
- }
-
- private void checkConditionType() {
- if (!isConditionTypeStandalone() && !isConditionTypeDeclaredInTarget()) {
- String msg =
- "Conditional class '%s' is a member class "
- + "but was not declared inside the test case using it.\n"
- + "Either make this class a static class, "
- + "standalone class (by declaring it in it's own file) "
- + "or move it inside the test case using it";
- throw new IllegalArgumentException(String.format(msg, conditionType.getName()));
- }
- }
-
- private boolean isConditionTypeStandalone() {
- return !conditionType.isMemberClass() || Modifier.isStatic(conditionType.getModifiers());
- }
-
- private boolean isConditionTypeDeclaredInTarget() {
- return target.getClass().isAssignableFrom(conditionType.getDeclaringClass());
- }
- }
-
- private static class IgnoreStatement extends Statement {
- private final IgnoreCondition condition;
-
- IgnoreStatement(IgnoreCondition condition) {
- this.condition = condition;
- }
-
- @Override
- public void evaluate() {
- Assume.assumeTrue("Ignored by " + condition.getClass().getSimpleName(), false);
- }
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnAWS.java b/src/test/java/net/snowflake/client/RunningNotOnAWS.java
deleted file mode 100644
index 70f54ab8f..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnAWS.java
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Run tests only on specified cloud provider or ignore */
-public class RunningNotOnAWS implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER");
- return cloudProvider != null && !cloudProvider.equalsIgnoreCase("AWS");
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnAzure.java b/src/test/java/net/snowflake/client/RunningNotOnAzure.java
deleted file mode 100644
index e2a00966c..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnAzure.java
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Run tests only on specified cloud provider or ignore */
-public class RunningNotOnAzure implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER");
- return cloudProvider != null && !cloudProvider.equalsIgnoreCase("Azure");
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnGCP.java b/src/test/java/net/snowflake/client/RunningNotOnGCP.java
deleted file mode 100644
index 7a5c7aafb..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnGCP.java
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Run tests only on specified cloud provider or ignore */
-public class RunningNotOnGCP implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER");
- return cloudProvider != null && !cloudProvider.equalsIgnoreCase("GCP");
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java b/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java
deleted file mode 100644
index 9b872fc8b..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-import net.snowflake.client.core.Constants;
-
-public class RunningNotOnGithubActionsMac implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return isRunningOnGithubActionsMac();
- }
-
- public static boolean isRunningOnGithubActionsMac() {
- return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null && Constants.getOS() == Constants.OS.MAC;
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnJava21.java b/src/test/java/net/snowflake/client/RunningNotOnJava21.java
deleted file mode 100644
index 4e2e3e03c..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnJava21.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package net.snowflake.client;
-
-import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
-
-public class RunningNotOnJava21 implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return isRunningOnJava21();
- }
-
- public static boolean isRunningOnJava21() {
- return systemGetProperty("java.version").startsWith("21.");
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnJava8.java b/src/test/java/net/snowflake/client/RunningNotOnJava8.java
deleted file mode 100644
index 8ee4b3e40..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnJava8.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package net.snowflake.client;
-
-import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
-
-public class RunningNotOnJava8 implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return isRunningOnJava8();
- }
-
- public static boolean isRunningOnJava8() {
- return systemGetProperty("java.version").startsWith("1.8.0");
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinux.java b/src/test/java/net/snowflake/client/RunningNotOnLinux.java
deleted file mode 100644
index 3cbaf1339..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnLinux.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package net.snowflake.client;
-
-import net.snowflake.client.core.Constants;
-
-public class RunningNotOnLinux implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return Constants.getOS() != Constants.OS.LINUX;
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java
deleted file mode 100644
index a99eaa3b7..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package net.snowflake.client;
-
-import net.snowflake.client.core.Constants;
-
-public class RunningNotOnLinuxMac implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC;
- }
-
- public static boolean isNotRunningOnLinuxMac() {
- return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC;
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java b/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java
deleted file mode 100644
index 596f5ca55..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package net.snowflake.client;
-
-import static net.snowflake.client.RunningOnGithubAction.isRunningOnGithubAction;
-
-public class RunningNotOnTestaccount implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return (!("testaccount".equals(TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT")))
- || isRunningOnGithubAction());
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnWin.java b/src/test/java/net/snowflake/client/RunningNotOnWin.java
deleted file mode 100644
index ce5cdf7d1..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnWin.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package net.snowflake.client;
-
-import net.snowflake.client.core.Constants;
-
-public class RunningNotOnWin implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return Constants.getOS() != Constants.OS.WINDOWS;
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningNotOnWinMac.java b/src/test/java/net/snowflake/client/RunningNotOnWinMac.java
deleted file mode 100644
index 9d1c32bdc..000000000
--- a/src/test/java/net/snowflake/client/RunningNotOnWinMac.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package net.snowflake.client;
-
-import net.snowflake.client.core.Constants;
-
-public class RunningNotOnWinMac implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return Constants.getOS() != Constants.OS.MAC && Constants.getOS() != Constants.OS.WINDOWS;
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningOnGithubAction.java b/src/test/java/net/snowflake/client/RunningOnGithubAction.java
deleted file mode 100644
index 0326c4fca..000000000
--- a/src/test/java/net/snowflake/client/RunningOnGithubAction.java
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Run tests on CI */
-public class RunningOnGithubAction implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null;
- }
-
- public static boolean isRunningOnGithubAction() {
- return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null;
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningOnTestaccount.java b/src/test/java/net/snowflake/client/RunningOnTestaccount.java
deleted file mode 100644
index 186496977..000000000
--- a/src/test/java/net/snowflake/client/RunningOnTestaccount.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package net.snowflake.client;
-
-public class RunningOnTestaccount implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT").contains("testaccount");
- }
-}
diff --git a/src/test/java/net/snowflake/client/RunningOnWin.java b/src/test/java/net/snowflake/client/RunningOnWin.java
deleted file mode 100644
index 025ab1e04..000000000
--- a/src/test/java/net/snowflake/client/RunningOnWin.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package net.snowflake.client;
-
-import net.snowflake.client.core.Constants;
-
-public class RunningOnWin implements ConditionalIgnoreRule.IgnoreCondition {
- public boolean isSatisfied() {
- return Constants.getOS() == Constants.OS.WINDOWS;
- }
-}
diff --git a/src/test/java/net/snowflake/client/SkipOnThinJar.java b/src/test/java/net/snowflake/client/SkipOnThinJar.java
deleted file mode 100644
index d02d104dd..000000000
--- a/src/test/java/net/snowflake/client/SkipOnThinJar.java
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
- */
-package net.snowflake.client;
-
-/** Skip tests on CI when thin jar is tested */
-public class SkipOnThinJar implements ConditionalIgnoreRule.IgnoreCondition {
- @Override
- public boolean isSatisfied() {
- return "-Dthin-jar".equals(TestUtil.systemGetEnv("ADDITIONAL_MAVEN_PROFILE"));
- }
-}
diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java
index 76487bcb4..7f4b8d90a 100644
--- a/src/test/java/net/snowflake/client/TestUtil.java
+++ b/src/test/java/net/snowflake/client/TestUtil.java
@@ -5,10 +5,10 @@
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.hamcrest.Matchers.matchesPattern;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.fail;
import java.sql.SQLException;
import java.sql.Statement;
@@ -19,7 +19,7 @@
import net.snowflake.client.jdbc.SnowflakeUtil;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
-import org.junit.Assert;
+import org.hamcrest.MatcherAssert;
public class TestUtil {
private static final SFLogger logger = SFLoggerFactory.getLogger(TestUtil.class);
@@ -53,7 +53,7 @@ public static boolean isSchemaGeneratedInTests(String schema) {
public static void assertSFException(int errorCode, TestRunInterface testCode) {
try {
testCode.run();
- Assert.fail();
+ fail();
} catch (SFException e) {
assertThat(e.getVendorCode(), is(errorCode));
}
@@ -91,8 +91,8 @@ public static String systemGetEnv(String env) {
public static void assertValidQueryId(String queryId) {
assertNotNull(queryId);
- assertTrue(
- "Expecting " + queryId + " is a valid UUID", QUERY_ID_REGEX.matcher(queryId).matches());
+ MatcherAssert.assertThat(
+ "Expecting " + queryId + " is a valid UUID", queryId, matchesPattern(QUERY_ID_REGEX));
}
/**
@@ -144,4 +144,14 @@ public static void expectSnowflakeLoggedFeatureNotSupportedException(MethodRaise
assertEquals(ex.getClass().getSimpleName(), "SnowflakeLoggedFeatureNotSupportedException");
}
}
+
+ /**
+ * Compares two string values for equality; both values are stripped of all whitespace first.
+ *
+ * @param expected expected value
+ * @param actual actual value
+ */
+ public static void assertEqualsIgnoringWhitespace(String expected, String actual) {
+ assertEquals(expected.replaceAll("\\s+", ""), actual.replaceAll("\\s+", ""));
+ }
}
diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java b/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java
new file mode 100644
index 000000000..993d9d6ad
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*")
+public @interface DontRunOnGithubActions {}
diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java b/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java
new file mode 100644
index 000000000..29374b837
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledOnJre;
+import org.junit.jupiter.api.condition.JRE;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledOnJre(JRE.JAVA_21)
+public @interface DontRunOnJava21 {}
diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java b/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java
new file mode 100644
index 000000000..81a3a0c03
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledOnJre;
+import org.junit.jupiter.api.condition.JRE;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledOnJre(JRE.JAVA_8)
+public @interface DontRunOnJava8 {}
diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java b/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java
new file mode 100644
index 000000000..5c9fff944
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledIfEnvironmentVariable(named = "SNOWFLAKE_TEST_ACCOUNT", matches = "testaccount")
+public @interface DontRunOnTestaccount {}
diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java b/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java
new file mode 100644
index 000000000..bb254a2c4
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledIfEnvironmentVariable(named = "ADDITIONAL_MAVEN_PROFILE", matches = "-Dthin-jar")
+public @interface DontRunOnThinJar {}
diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java b/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java
new file mode 100644
index 000000000..140f0d752
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledOnOs;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@DisabledOnOs(OS.WINDOWS)
+public @interface DontRunOnWindows {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnAWS.java b/src/test/java/net/snowflake/client/annotations/RunOnAWS.java
new file mode 100644
index 000000000..fd3acc546
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnAWS.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)AWS(?-i)")
+public @interface RunOnAWS {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnAzure.java b/src/test/java/net/snowflake/client/annotations/RunOnAzure.java
new file mode 100644
index 000000000..13c8379b3
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnAzure.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)Azure(?-i)")
+public @interface RunOnAzure {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnGCP.java b/src/test/java/net/snowflake/client/annotations/RunOnGCP.java
new file mode 100644
index 000000000..e361aa808
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnGCP.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)GCP(?-i)")
+public @interface RunOnGCP {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java b/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java
new file mode 100644
index 000000000..f133022e3
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledOnOs;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*")
+@DisabledOnOs(OS.MAC)
+public @interface RunOnGithubActionsNotMac {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnLinux.java b/src/test/java/net/snowflake/client/annotations/RunOnLinux.java
new file mode 100644
index 000000000..33231effe
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnLinux.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledOnOs;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledOnOs({OS.LINUX, OS.AIX})
+public @interface RunOnLinux {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java b/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java
new file mode 100644
index 000000000..6c6013154
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledOnOs;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledOnOs({OS.MAC, OS.LINUX, OS.AIX})
+public @interface RunOnLinuxOrMac {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnMac.java b/src/test/java/net/snowflake/client/annotations/RunOnMac.java
new file mode 100644
index 000000000..a5f18a345
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnMac.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledOnOs;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledOnOs(OS.MAC)
+public @interface RunOnMac {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java b/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java
new file mode 100644
index 000000000..6dacdb993
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledIfEnvironmentVariable(named = "SNOWFLAKE_TEST_ACCOUNT", matches = "testaccount")
+@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*")
+public @interface RunOnTestaccountNotOnGithubActions {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnWindows.java b/src/test/java/net/snowflake/client/annotations/RunOnWindows.java
new file mode 100644
index 000000000..69a2ee7ff
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnWindows.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledOnOs;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledOnOs(OS.WINDOWS)
+public @interface RunOnWindows {}
diff --git a/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java b/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java
new file mode 100644
index 000000000..77d50109c
--- /dev/null
+++ b/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.jupiter.api.condition.EnabledOnOs;
+import org.junit.jupiter.api.condition.OS;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+@EnabledOnOs({OS.WINDOWS, OS.MAC})
+public @interface RunOnWindowsOrMac {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryArrow.java b/src/test/java/net/snowflake/client/category/TestCategoryArrow.java
deleted file mode 100644
index 59a8396cd..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryArrow.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryArrow {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryConnection.java b/src/test/java/net/snowflake/client/category/TestCategoryConnection.java
deleted file mode 100644
index cfa5bfd30..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryConnection.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryConnection {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryCore.java b/src/test/java/net/snowflake/client/category/TestCategoryCore.java
deleted file mode 100644
index 7c97c58ef..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryCore.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryCore {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java
deleted file mode 100644
index ecb5c0509..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryDiagnostic {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryLoader.java b/src/test/java/net/snowflake/client/category/TestCategoryLoader.java
deleted file mode 100644
index eac9e7bef..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryLoader.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package net.snowflake.client.category;
-
-/** Test category Loader */
-public interface TestCategoryLoader {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryOthers.java b/src/test/java/net/snowflake/client/category/TestCategoryOthers.java
deleted file mode 100644
index 7f11baaa9..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryOthers.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryOthers {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java b/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java
deleted file mode 100644
index 7d9824823..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryResultSet {}
diff --git a/src/test/java/net/snowflake/client/category/TestCategoryStatement.java b/src/test/java/net/snowflake/client/category/TestCategoryStatement.java
deleted file mode 100644
index 5381cbb00..000000000
--- a/src/test/java/net/snowflake/client/category/TestCategoryStatement.java
+++ /dev/null
@@ -1,3 +0,0 @@
-package net.snowflake.client.category;
-
-public interface TestCategoryStatement {}
diff --git a/src/test/java/net/snowflake/client/category/TestTags.java b/src/test/java/net/snowflake/client/category/TestTags.java
new file mode 100644
index 000000000..92cd7ce3b
--- /dev/null
+++ b/src/test/java/net/snowflake/client/category/TestTags.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.category;
+
+public class TestTags {
+ private TestTags() {}
+
+ public static final String ARROW = "arrow";
+ public static final String CONNECTION = "connection";
+ public static final String CORE = "core";
+ public static final String DIAGNOSTIC = "diagnostic";
+ public static final String LOADER = "loader";
+ public static final String OTHERS = "others";
+ public static final String RESULT_SET = "resultSet";
+ public static final String STATEMENT = "statement";
+}
diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java
index a00784f68..f570cfb7f 100644
--- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java
+++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java
@@ -7,10 +7,10 @@
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemSetEnv;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemUnsetEnv;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mockStatic;
import java.io.IOException;
@@ -18,8 +18,8 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import net.snowflake.client.jdbc.SnowflakeUtil;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
import org.mockito.MockedStatic;
public class SFClientConfigParserTest {
@@ -30,7 +30,7 @@ public class SFClientConfigParserTest {
private Path configFilePath;
- @After
+ @AfterEach
public void cleanup() throws IOException {
if (configFilePath != null) {
Files.deleteIfExists(configFilePath);
diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java
index 01da714e5..50dd75ff2 100644
--- a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java
+++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java
@@ -1,12 +1,13 @@
package net.snowflake.client.config;
+import static net.snowflake.client.AssumptionUtils.assumeRunningOnLinuxMac;
+import static net.snowflake.client.config.SFConnectionConfigParser.SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION;
import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY;
import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assume.assumeFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
import com.fasterxml.jackson.dataformat.toml.TomlMapper;
import java.io.File;
@@ -17,35 +18,52 @@
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.Set;
-import net.snowflake.client.RunningNotOnLinuxMac;
import net.snowflake.client.core.Constants;
import net.snowflake.client.jdbc.SnowflakeSQLException;
import net.snowflake.client.jdbc.SnowflakeUtil;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
public class SFConnectionConfigParserTest {
+ private static final List<String> ENV_VARIABLES_KEYS =
+ new ArrayList<>(
+ Arrays.asList(
+ SNOWFLAKE_HOME_KEY,
+ SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY,
+ SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION));
private Path tempPath = null;
private TomlMapper tomlMapper = new TomlMapper();
+ private Map<String, String> envVariables = new HashMap<>();
- @Before
+ @BeforeEach
public void setUp() throws IOException {
tempPath = Files.createTempDirectory(".snowflake");
+ ENV_VARIABLES_KEYS.stream()
+ .forEach(
+ key -> {
+ if (SnowflakeUtil.systemGetEnv(key) != null) {
+ envVariables.put(key, SnowflakeUtil.systemGetEnv(key));
+ }
+ });
}
- @After
+ @AfterEach
public void close() throws IOException {
SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY);
SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY);
+ SnowflakeUtil.systemUnsetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION);
Files.walk(tempPath).map(Path::toFile).forEach(File::delete);
Files.delete(tempPath);
+ envVariables.forEach((key, value) -> SnowflakeUtil.systemSetEnv(key, value));
}
@Test
@@ -87,7 +105,7 @@ public void testThrowErrorWhenWrongPermissionsForConnectionConfigurationFile()
File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri());
prepareConnectionConfigurationTomlFile(
Collections.singletonMap("token_file_path", tokenFile.toString()), false, false);
- assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac());
+ assumeRunningOnLinuxMac();
assertThrows(
SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters());
}
@@ -98,11 +116,26 @@ public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException
File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri());
prepareConnectionConfigurationTomlFile(
Collections.singletonMap("token_file_path", tokenFile.toString()), true, false);
- assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac());
+ assumeRunningOnLinuxMac();
assertThrows(
SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters());
}
+ @Test
+ public void testNoThrowErrorWhenWrongPermissionsForTokenFileButSkippingFlagIsEnabled()
+ throws SnowflakeSQLException, IOException {
+ SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString());
+ SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default");
+ SnowflakeUtil.systemSetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, "true");
+ File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri());
+ prepareConnectionConfigurationTomlFile(
+ Collections.singletonMap("token_file_path", tokenFile.toString()), true, false);
+
+ ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters();
+ assertNotNull(data);
+ assertEquals(tokenFile.toString(), data.getParams().get("token_file_path"));
+ }
+
@Test
public void testLoadSFConnectionConfigWithHostConfigured()
throws SnowflakeSQLException, IOException {
@@ -129,7 +162,20 @@ public void shouldThrowExceptionIfNoneOfHostAndAccountIsSet() throws IOException
extraparams.put("host", null);
extraparams.put("account", null);
prepareConnectionConfigurationTomlFile(extraparams);
- Assert.assertThrows(
+ assertThrows(
+ SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters());
+ }
+
+ @Test
+ public void shouldThrowExceptionIfTokenIsNotSetForOauth() throws IOException {
+ SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString());
+ SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default");
+ SnowflakeUtil.systemSetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, "true");
+ File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri());
+ prepareConnectionConfigurationTomlFile(
+ Collections.singletonMap("token_file_path", tokenFile.toString()), true, false, "");
+
+ assertThrows(
SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters());
}
@@ -144,6 +190,16 @@ private void prepareConnectionConfigurationTomlFile(Map moreParameters) throws I
private void prepareConnectionConfigurationTomlFile(
Map<String, String> moreParameters, boolean onlyUserPermissionConnection, boolean onlyUserPermissionToken
throws IOException {
+ prepareConnectionConfigurationTomlFile(
+ moreParameters, onlyUserPermissionConnection, onlyUserPermissionToken, "token_from_file");
+ }
+
+ private void prepareConnectionConfigurationTomlFile(
+ Map<String, String> moreParameters,
+ boolean onlyUserPermissionConnection,
+ boolean onlyUserPermissionToken,
+ String token)
+ throws IOException {
Path path = Paths.get(tempPath.toString(), "connections.toml");
Path filePath = createFilePathWithPermission(path, onlyUserPermissionConnection);
File file = filePath.toFile();
@@ -166,7 +222,16 @@ private void prepareConnectionConfigurationTomlFile(
createFilePathWithPermission(
Paths.get(configurationParams.get("token_file_path").toString()),
onlyUserPermissionToken);
- Files.write(tokenFilePath, "token_from_file".getBytes());
+ Files.write(tokenFilePath, token.getBytes());
+ Path emptyTokenFilePath =
+ createFilePathWithPermission(
+ Paths.get(
+ configurationParams
+ .get("token_file_path")
+ .toString()
+ .replaceAll("token", "emptytoken")),
+ onlyUserPermissionToken);
+ Files.write(emptyTokenFilePath, "".getBytes());
}
}
diff --git a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java
index 92ec8a624..f5e41e260 100644
--- a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java
+++ b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java
@@ -1,79 +1,55 @@
package net.snowflake.client.config;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermissions;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import net.snowflake.client.ConditionalIgnoreRule;
-import net.snowflake.client.RunningOnWin;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import net.snowflake.client.annotations.DontRunOnWindows;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
-@RunWith(Parameterized.class)
public class SFPermissionsTest {
- @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule();
-
- @Parameterized.Parameters(name = "permission={0}")
- public static Set<Map.Entry<String, Boolean>> data() {
- Map<String, Boolean> testConfigFilePermissions =
- new HashMap<String, Boolean>() {
- {
- put("rwx------", false);
- put("rw-------", false);
- put("r-x------", false);
- put("r--------", false);
- put("rwxrwx---", true);
- put("rwxrw----", true);
- put("rwxr-x---", false);
- put("rwxr-----", false);
- put("rwx-wx---", true);
- put("rwx-w----", true);
- put("rwx--x---", false);
- put("rwx---rwx", true);
- put("rwx---rw-", true);
- put("rwx---r-x", false);
- put("rwx---r--", false);
- put("rwx----wx", true);
- put("rwx----w-", true);
- put("rwx-----x", false);
- }
- };
- return testConfigFilePermissions.entrySet();
- }
-
Path configFilePath = Paths.get("config.json");
String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}";
- String permission;
- Boolean isSucceed;
-
- public SFPermissionsTest(Map.Entry<String, Boolean> permission) {
- this.permission = permission.getKey();
- this.isSucceed = permission.getValue();
- }
- @Before
+ @BeforeEach
public void createConfigFile() throws IOException {
Files.write(configFilePath, configJson.getBytes());
}
- @After
+ @AfterEach
public void cleanupConfigFile() throws IOException {
Files.deleteIfExists(configFilePath);
}
- @Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnWin.class)
- public void testLogDirectoryPermissions() throws IOException {
+ @ParameterizedTest
+ @CsvSource({
+ "rwx------,false",
+ "rw-------,false",
+ "r-x------,false",
+ "r--------,false",
+ "rwxrwx---,true",
+ "rwxrw----,true",
+ "rwxr-x---,false",
+ "rwxr-----,false",
+ "rwx-wx---,true",
+ "rwx-w----,true",
+ "rwx--x---,false",
+ "rwx---rwx,true",
+ "rwx---rw-,true",
+ "rwx---r-x,false",
+ "rwx---r--,false",
+ "rwx----wx,true",
+ "rwx----w-,true",
+ "rwx-----x,false"
+ })
+ @DontRunOnWindows
+ public void testLogDirectoryPermissions(String permission, boolean isSucceed) throws IOException {
// TODO: SNOW-1503722 Change to check for thrown exceptions
// Don't run on Windows
Files.setPosixFilePermissions(configFilePath, PosixFilePermissions.fromString(permission));
diff --git a/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java b/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java
index f11614c8b..beb0ad292 100644
--- a/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java
+++ b/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java
@@ -4,10 +4,10 @@
package net.snowflake.client.core;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
@@ -16,13 +16,12 @@
import java.net.Proxy;
import java.util.HashMap;
import java.util.Properties;
-import net.snowflake.client.ConditionalIgnoreRule;
-import net.snowflake.client.RunningOnGithubAction;
+import net.snowflake.client.annotations.DontRunOnGithubActions;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeSQLException;
import net.snowflake.client.jdbc.SnowflakeUtil;
import net.snowflake.client.jdbc.cloud.storage.S3HttpUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class CoreUtilsMiscellaneousTest {
@@ -41,7 +40,7 @@ public void testSnowflakeAssertTrue() {
/** Test that Constants.getOS function is working as expected */
@Test
- @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
+ @DontRunOnGithubActions
public void testgetOS() {
Constants.clearOSForTesting();
String originalOS = systemGetProperty("os.name");
diff --git a/src/test/java/net/snowflake/client/core/EventHandlerTest.java b/src/test/java/net/snowflake/client/core/EventHandlerTest.java
index eb930f7c6..56b48b987 100644
--- a/src/test/java/net/snowflake/client/core/EventHandlerTest.java
+++ b/src/test/java/net/snowflake/client/core/EventHandlerTest.java
@@ -3,8 +3,8 @@
*/
package net.snowflake.client.core;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -14,18 +14,17 @@
import java.util.logging.LogRecord;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.IOUtils;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
public class EventHandlerTest {
- @Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
+ @TempDir private File tmpFolder;
- @Before
+ @BeforeEach
public void setUp() throws IOException {
- tmpFolder.newFolder("snowflake_dumps");
- System.setProperty("snowflake.dump_path", tmpFolder.getRoot().getCanonicalPath());
+ new File(tmpFolder, "snowflake_dumps").mkdirs();
+ System.setProperty("snowflake.dump_path", tmpFolder.getCanonicalPath());
}
@Test
diff --git a/src/test/java/net/snowflake/client/core/EventTest.java b/src/test/java/net/snowflake/client/core/EventTest.java
index e9ee978e5..7ca041744 100644
--- a/src/test/java/net/snowflake/client/core/EventTest.java
+++ b/src/test/java/net/snowflake/client/core/EventTest.java
@@ -5,8 +5,8 @@
package net.snowflake.client.core;
import static net.snowflake.client.core.EventUtil.DUMP_PATH_PROP;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -14,24 +14,25 @@
import java.nio.file.Files;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
public class EventTest {
- @Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
+ @TempDir private File tmpFolder;
private File homeDirectory;
private File dmpDirectory;
- @Before
+ @BeforeEach
public void setUp() throws IOException {
- homeDirectory = tmpFolder.newFolder("homedir");
- dmpDirectory = tmpFolder.newFolder("homedir", "snowflake_dumps");
+ homeDirectory = new File(tmpFolder, "homedir");
+ homeDirectory.mkdirs();
+ dmpDirectory = new File(homeDirectory, "snowflake_dumps");
+ dmpDirectory.mkdirs();
}
- @After
+ @AfterEach
public void tearDown() {
dmpDirectory.delete();
}
@@ -58,7 +59,7 @@ public void testWriteEventDumpLine() throws IOException {
// created
String dmpPath1 = EventUtil.getDumpPathPrefix();
String dmpPath2 = dmpDirectory.getCanonicalPath();
- assertEquals("dump path is: " + EventUtil.getDumpPathPrefix(), dmpPath2, dmpPath1);
+ assertEquals(dmpPath2, dmpPath1, "dump path is: " + EventUtil.getDumpPathPrefix());
File dumpFile =
new File(
EventUtil.getDumpPathPrefix()
diff --git a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java
index f7ad06b46..04cec29fb 100644
--- a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java
+++ b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java
@@ -1,14 +1,14 @@
package net.snowflake.client.core;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
import net.minidev.json.JSONObject;
import net.minidev.json.parser.JSONParser;
import net.minidev.json.parser.ParseException;
import net.snowflake.client.jdbc.telemetryOOB.TelemetryService;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class ExecTimeTelemetryDataTest {
diff --git a/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java b/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java
index 34892843c..00c318227 100644
--- a/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java
+++ b/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java
@@ -3,21 +3,22 @@
*/
package net.snowflake.client.core;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.time.Duration;
-import net.snowflake.client.category.TestCategoryCore;
+import net.snowflake.client.category.TestTags;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
-@Category(TestCategoryCore.class)
+@Tag(TestTags.CORE)
public class HttpUtilLatestIT {
private static final String HANG_WEBSERVER_ADDRESS = "http://localhost:12345/hang";
@@ -30,7 +31,8 @@ public void shouldGetDefaultConnectionAndSocketTimeouts() {
}
/** Added in > 3.14.5 */
- @Test(timeout = 1000L)
+ @Test
+ @Timeout(1)
public void shouldOverrideConnectionAndSocketTimeouts() {
// it's hard to test connection timeout so there is only a test for socket timeout
HttpUtil.setConnectionTimeout(100);
diff --git a/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java b/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java
index cd2e89806..5ffe7c5d3 100644
--- a/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java
+++ b/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java
@@ -6,23 +6,23 @@
import static net.snowflake.client.core.IncidentUtil.INC_DUMP_FILE_EXT;
import static net.snowflake.client.core.IncidentUtil.INC_DUMP_FILE_NAME;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.util.zip.GZIPInputStream;
-import net.snowflake.client.category.TestCategoryCore;
+import net.snowflake.client.category.TestTags;
import net.snowflake.client.jdbc.BaseJDBCTest;
import org.apache.commons.io.IOUtils;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
-@Category(TestCategoryCore.class)
+@Tag(TestTags.CORE)
public class IncidentUtilLatestIT extends BaseJDBCTest {
- @Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
+ @TempDir private File tmpFolder;
private static final String FILE_NAME = "sf_incident_123456.dmp.gz";
@Test
@@ -34,7 +34,9 @@ public void testOneLinerDescription() {
/** Tests dumping JVM metrics for the current process */
@Test
public void testDumpVmMetrics() throws IOException {
- String dumpPath = tmpFolder.newFolder().getCanonicalPath();
+ File dumpDir = new File(tmpFolder, "dump");
+ dumpDir.mkdirs();
+ String dumpPath = dumpDir.getCanonicalPath();
System.setProperty("snowflake.dump_path", dumpPath);
String incidentId = "123456";
@@ -47,13 +49,15 @@ public void testDumpVmMetrics() throws IOException {
EventUtil.getDumpPathPrefix() + "/" + INC_DUMP_FILE_NAME + incidentId + INC_DUMP_FILE_EXT;
// Read back the file contents
- GZIPInputStream gzip = new GZIPInputStream(new FileInputStream(targetVMFileLocation));
- StringWriter sWriter = new StringWriter();
- IOUtils.copy(gzip, sWriter, "UTF-8");
- String output = sWriter.toString();
- assertEquals(
- "\n\n\n--------------------------- METRICS " + "---------------------------\n\n",
- output.substring(0, 69));
- sWriter.close();
+ try (FileInputStream fis = new FileInputStream(targetVMFileLocation);
+ GZIPInputStream gzip = new GZIPInputStream(fis)) {
+ StringWriter sWriter = new StringWriter();
+ IOUtils.copy(gzip, sWriter, "UTF-8");
+ String output = sWriter.toString();
+ assertEquals(
+ "\n\n\n--------------------------- METRICS " + "---------------------------\n\n",
+ output.substring(0, 69));
+ sWriter.close();
+ }
}
}
diff --git a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java
index 9a5af03b2..37bfea5c6 100644
--- a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java
+++ b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java
@@ -1,93 +1,76 @@
package net.snowflake.client.core;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import java.util.stream.Stream;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.ArgumentsProvider;
+import org.junit.jupiter.params.provider.ArgumentsSource;
-@RunWith(Parameterized.class)
public class OCSPCacheServerTest {
- @Parameterized.Parameters(
- name = "For host {0} cache server fetch url should be {1} and retry url {2}")
- public static Object[][] data() {
- return new Object[][] {
- {
- "bla-12345.global.snowflakecomputing.com",
- "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch",
- "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry"
- },
- {
- "bla-12345.global.snowflakecomputing.cn",
- "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch",
- "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry"
- },
- {
- "bla-12345.global.snowflakecomputing.xyz",
- "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch",
- "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry"
- },
- {
- "bla-12345.GLOBAL.snowflakecomputing.xyz",
- "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch",
- "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry"
- },
- {
- "bla-12345.snowflakecomputing.com",
- "https://ocspssd.snowflakecomputing.com/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.com/ocsp/retry"
- },
- {
- "bla-12345.snowflakecomputing.cn",
- "https://ocspssd.snowflakecomputing.cn/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.cn/ocsp/retry"
- },
- {
- "bla-12345.snowflakecomputing.xyz",
- "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.xyz/ocsp/retry"
- },
- {
- "bla-12345.SNOWFLAKEcomputing.xyz",
- "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch",
- "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry"
- },
- {
- "s3.amazoncomaws.com",
- "https://ocspssd.snowflakecomputing.com/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.com/ocsp/retry"
- },
- {
- "s3.amazoncomaws.COM",
- "https://ocspssd.snowflakecomputing.COM/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.COM/ocsp/retry"
- },
- {
- "s3.amazoncomaws.com.cn",
- "https://ocspssd.snowflakecomputing.cn/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.cn/ocsp/retry"
- },
- {
- "S3.AMAZONCOMAWS.COM.CN",
- "https://ocspssd.snowflakecomputing.CN/ocsp/fetch",
- "https://ocspssd.snowflakecomputing.CN/ocsp/retry"
- },
- };
- }
-
- private final String host;
- private final String expectedFetchUrl;
- private final String expectedRetryUrl;
+ static class URLProvider implements ArgumentsProvider {
- public OCSPCacheServerTest(String host, String expectedFetchUrl, String expectedRetryUrl) {
- this.host = host;
- this.expectedFetchUrl = expectedFetchUrl;
- this.expectedRetryUrl = expectedRetryUrl;
+ @Override
+ public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {
+ return Stream.of(
+ Arguments.of(
+ "bla-12345.global.snowflakecomputing.com",
+ "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch",
+ "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.global.snowflakecomputing.cn",
+ "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch",
+ "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.global.snowflakecomputing.xyz",
+ "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch",
+ "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.GLOBAL.snowflakecomputing.xyz",
+ "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch",
+ "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.snowflakecomputing.com",
+ "https://ocspssd.snowflakecomputing.com/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.com/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.snowflakecomputing.cn",
+ "https://ocspssd.snowflakecomputing.cn/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.cn/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.snowflakecomputing.xyz",
+ "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.xyz/ocsp/retry"),
+ Arguments.of(
+ "bla-12345.SNOWFLAKEcomputing.xyz",
+ "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch",
+ "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry"),
+ Arguments.of(
+ "s3.amazoncomaws.com",
+ "https://ocspssd.snowflakecomputing.com/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.com/ocsp/retry"),
+ Arguments.of(
+ "s3.amazoncomaws.COM",
+ "https://ocspssd.snowflakecomputing.COM/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.COM/ocsp/retry"),
+ Arguments.of(
+ "s3.amazoncomaws.com.cn",
+ "https://ocspssd.snowflakecomputing.cn/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.cn/ocsp/retry"),
+ Arguments.of(
+ "S3.AMAZONCOMAWS.COM.CN",
+ "https://ocspssd.snowflakecomputing.CN/ocsp/fetch",
+ "https://ocspssd.snowflakecomputing.CN/ocsp/retry"));
+ }
}
- @Test
- public void shouldChooseOcspCacheServerUrls() {
+ @ParameterizedTest(name = "For host {0} cache server fetch url should be {1} and retry url {2}")
+ @ArgumentsSource(URLProvider.class)
+ public void shouldChooseOcspCacheServerUrls(
+ String host, String expectedFetchUrl, String expectedRetryUrl) {
SFTrustManager.OCSPCacheServer ocspCacheServer = new SFTrustManager.OCSPCacheServer();
ocspCacheServer.resetOCSPResponseCacheServer(host);
diff --git a/src/test/java/net/snowflake/client/core/ObjectMapperTest.java b/src/test/java/net/snowflake/client/core/ObjectMapperTest.java
index 6868d186e..e0a9e11ab 100644
--- a/src/test/java/net/snowflake/client/core/ObjectMapperTest.java
+++ b/src/test/java/net/snowflake/client/core/ObjectMapperTest.java
@@ -4,52 +4,58 @@
package net.snowflake.client.core;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
-import java.util.ArrayList;
import java.util.Base64;
-import java.util.Collection;
-import java.util.List;
+import java.util.stream.Stream;
import net.snowflake.client.jdbc.SnowflakeUtil;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.ArgumentsProvider;
+import org.junit.jupiter.params.provider.ArgumentsSource;
-@RunWith(Parameterized.class)
public class ObjectMapperTest {
private static final int jacksonDefaultMaxStringLength = 20_000_000;
+ static String originalLogger;
- @Parameterized.Parameters(name = "lobSizeInMB={0}, maxJsonStringLength={1}")
- public static Collection