diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md index 2c4c5bc91..dcbb8109f 100644 --- a/.github/ISSUE_TEMPLATE/BUG_REPORT.md +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md @@ -37,4 +37,5 @@ In order to accurately debug the issue this information is required. Thanks! https://community.snowflake.com/s/article/How-to-generate-log-file-on-Snowflake-connectors -7. What is your Snowflake account identifier, if any? (Optional) + Before sharing any information, please be sure to review the log and remove any sensitive + information. diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md index 83c2ada99..a3b4e6517 100644 --- a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md +++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md @@ -17,6 +17,4 @@ otherwise continue here. ## How would this improve `snowflake-jdbc`? ## References, Other Background - -## What is your Snowflake account identifier, if any? diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 190425de4..c93f081f0 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -29,7 +29,7 @@ jobs: name: Build runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Build shell: bash env: @@ -38,13 +38,16 @@ jobs: test-windows: needs: build - name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }} runs-on: windows-latest strategy: fail-fast: false matrix: runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] - category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 
'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'}, + {suites: 'OthersTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'}, + {suites: 'FipsTestSuite', name: "TestCategoryFips"}] additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 @@ -53,7 +56,7 @@ jobs: java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.7' architecture: 'x64' @@ -62,19 +65,22 @@ jobs: env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: ci\\test_windows.bat test-mac: needs: build - name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }} runs-on: macos-13 strategy: fail-fast: false matrix: runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] - category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 
'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'}, + {suites: 'OthersTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'}, + {suites: 'FipsTestSuite', name: "TestCategoryFips"}] additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 @@ -83,7 +89,7 @@ jobs: java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.7' - name: Install Homebrew Bash @@ -94,51 +100,56 @@ jobs: env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: /usr/local/bin/bash ./ci/test_mac.sh test-linux: needs: build - name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ] cloud: [ 'AWS', 'AZURE', 'GCP' ] - category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'}, + {suites: 'OthersTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 
'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'}, + {suites: 'FipsTestSuite', name: "TestCategoryFips"}] additionalMavenProfile: ['', '-Dthin-jar'] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Tests shell: bash env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.cloud }} TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: ./ci/test.sh test-linux-old-driver: - name: Old JDBC ${{ matrix.category }} on ${{ matrix.image }} + name: Old JDBC ${{ matrix.category.name }} on ${{ matrix.image }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: image: [ 'jdbc-centos7-openjdk8' ] cloud: [ 'AWS' ] - category: ['TestCategoryResultSet,TestCategoryOthers', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryCore,TestCategoryLoader'] + category: [{suites: 'OthersOldDriverTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ConnectionOldDriverTestSuite,StatementOldDriverTestSuite', name: 'TestCategoryConnection,TestCategoryStatement'}, + {suites: 'LoaderOldDriverTestSuite,ResultSetOldDriverTestSuite', name: 'TestCategoryLoader,TestCategoryResultSet'}] is_old_driver: ['true'] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Tests shell: bash env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.cloud }} TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} is_old_driver: ${{ matrix.is_old_driver }} run: ./ci/test.sh diff --git a/.github/workflows/check-style.yml b/.github/workflows/check-style.yml index 221651298..d26f41865 100644 --- a/.github/workflows/check-style.yml +++ b/.github/workflows/check-style.yml @@ -9,7 +9,7 @@ jobs: name: Check Style runs-on: ubuntu-20.04 steps: 
- - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Check Style shell: bash run: mvn clean validate --batch-mode --show-version -P check-style diff --git a/.github/workflows/jira_close.yml b/.github/workflows/jira_close.yml index dfcb8bc73..0dacf7fab 100644 --- a/.github/workflows/jira_close.yml +++ b/.github/workflows/jira_close.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: repository: snowflakedb/gh-actions ref: jira_v1 diff --git a/.github/workflows/jira_issue.yml b/.github/workflows/jira_issue.yml index 943ad70aa..92501da8f 100644 --- a/.github/workflows/jira_issue.yml +++ b/.github/workflows/jira_issue.yml @@ -14,7 +14,7 @@ jobs: if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.pull_request.user.login != 'whitesource-for-github-com[bot]')) steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: repository: snowflakedb/gh-actions ref: jira_v1 diff --git a/.github/workflows/snyk-issue.yml b/.github/workflows/snyk-issue.yml index 7b58bb12a..1e36dae35 100644 --- a/.github/workflows/snyk-issue.yml +++ b/.github/workflows/snyk-issue.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: checkout action - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: snowflakedb/whitesource-actions token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }} diff --git a/.github/workflows/snyk-pr.yml b/.github/workflows/snyk-pr.yml index 5fc21951b..0c101e391 100644 --- a/.github/workflows/snyk-pr.yml +++ b/.github/workflows/snyk-pr.yml @@ -15,13 +15,13 @@ jobs: if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }} steps: - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.ref }} fetch-depth: 0 - 
name: checkout action - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: snowflakedb/whitesource-actions token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }} diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b83a77291..76e948b1d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,11 @@ +**JDBC Driver 3.20.0** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + +**JDBC Driver 3.19.1** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + **JDBC Driver 3.19.0** - \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 04fa6a5f5..78e83700d 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,29 +5,17 @@ net.snowflake snowflake-jdbc-parent - 3.19.0 + 3.20.1-SNAPSHOT ../parent-pom.xml snowflake-jdbc-fips - 3.19.0 + 3.20.1-SNAPSHOT jar snowflake-jdbc-fips http://maven.apache.org - - - Central - Internal Central Repo2 - default - https://nexus.int.snowflakecomputing.com/repository/maven-central/ - - false - - - - 3.3.9 @@ -409,10 +397,59 @@ com.google ${shadeBase}.google + + + google.api + ${shadeBase}.google.api + + + google.apps + ${shadeBase}.google.apps + + + google.cloud + ${shadeBase}.google.cloud + google.geo ${shadeBase}.google.geo + + google.iam + ${shadeBase}.google.iam + + + google.logging + ${shadeBase}.google.logging + + + google.longrunning + ${shadeBase}.google.longrunning + + + google.monitoring + ${shadeBase}.google.monitoring + + + google.protobuf + ${shadeBase}.google.protobuf + + + google.rpc + ${shadeBase}.google.rpc + + + google.shopping + ${shadeBase}.google.shopping + + + google.storage + ${shadeBase}.google.storage + + + google.type + ${shadeBase}.google.type + org.joda ${shadeBase}.joda @@ -457,53 +494,37 @@ com.carrotsearch ${shadeBase}.com.carrotsearch - - google.type - ${shadeBase}.google.type - - - google.rpc - 
${shadeBase}.google.rpc - - - google.iam - ${shadeBase}.google.iam - io.opencensus ${shadeBase}.opencensus - org.threeten - ${shadeBase}.threeten - - - google.protobuf - ${shadeBase}.google.protobuf + io.opentelemetry + ${shadeBase}.opentelemetry - google.api - ${shadeBase}.google.api + org.threeten + ${shadeBase}.threeten - - google.storage - ${shadeBase}.google.storage - io.grpc ${shadeBase}.grpc - google.longrunning - ${shadeBase}.google.longrunning + META-INF.native.io_grpc_netty_shaded_netty_tcnative + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_tcnative - google.cloud - ${shadeBase}.google.cloud + META-INF.native.libio_grpc_netty_shaded_netty_tcnative + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_tcnative - google.logging - ${shadeBase}.google.logging + META-INF.native.io_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll + + + META-INF.native.libio_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll org.checkerframework @@ -521,6 +542,18 @@ org.conscrypt ${shadeBase}.org.conscrypt + + conscrypt_openjdk_jni + ${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.conscrypt_openjdk_jni + META-INF.native.${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.libconscrypt_openjdk_jni + META-INF.native.lib${shadeNativeBase}_conscrypt_openjdk_jni + opencensus ${shadeBase}.opencensus @@ -584,7 +617,9 @@ - + + + META-INF/io.netty.versions.properties @@ -606,17 +641,24 @@ + - + + - + + + + + - + + @@ -683,6 +725,13 @@ maven-failsafe-plugin + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + ${version.plugin.failsafe} @@ -727,6 +776,13 @@ org.apache.maven.plugins maven-failsafe-plugin + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + ${version.plugin.failsafe} diff --git 
a/FIPS/public_pom.xml b/FIPS/public_pom.xml index d180e4a57..00bc9738c 100644 --- a/FIPS/public_pom.xml +++ b/FIPS/public_pom.xml @@ -32,8 +32,8 @@ - 1.0.2.4 - 1.0.5 + 1.0.2.5 + 1.0.7 5.13.0 diff --git a/FIPS/scripts/check_content.sh b/FIPS/scripts/check_content.sh index 8b818b1b4..a30eacec6 100755 --- a/FIPS/scripts/check_content.sh +++ b/FIPS/scripts/check_content.sh @@ -1,12 +1,12 @@ #!/bin/bash -e -# scripts used to check if all dependency is shaded into snowflake internal path +# scripts used to check if all dependencies are shaded into snowflake internal path set -o pipefail DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" -if jar tvf $DIR/../target/snowflake-jdbc-fips.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types; then +if jar tvf $DIR/../target/snowflake-jdbc-fips.jar | awk '{print $8}' | grep -v -E "/$" | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types | grep -v -E "^com/github/luben/zstd/" | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then echo "[ERROR] JDBC jar includes class not under the snowflake namespace" exit 1 fi diff --git a/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java b/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java index 05c389208..360a1fcbb 100644 --- a/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java +++ b/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java @@ -21,12 +21,10 @@ import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; -import org.junit.Rule; /** Base test class with common constants, data structures and methods */ public class AbstractDriverIT { // This is required to 
use ConditionalIgnore annotation. - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; public static final String DRIVER_CLASS_COM = "com.snowflake.client.jdbc.SnowflakeDriver"; diff --git a/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java b/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java deleted file mode 100644 index fe20883db..000000000 --- a/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java +++ /dev/null @@ -1,125 +0,0 @@ -package net.snowflake.client; - -/* - * Created by hyu on 1/22/18. - */ - -/* -Copyright (c) 2013,2014 RĂ¼diger Herrmann -All rights reserved. This program and the accompanying materials -are made available under the terms of the Eclipse Public License v1.0 -which accompanies this distribution, and is available at -http://www.eclipse.org/legal/epl-v10.html - -Contributors: -RĂ¼diger Herrmann - initial API and implementation -Matt Morrissette - allow to use non-static inner IgnoreConditions -*/ - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.lang.reflect.Modifier; -import org.junit.Assume; -import org.junit.rules.MethodRule; -import org.junit.runners.model.FrameworkMethod; -import org.junit.runners.model.Statement; - -public class ConditionalIgnoreRule implements MethodRule { - - public interface IgnoreCondition { - boolean isSatisfied(); - } - - @Retention(RetentionPolicy.RUNTIME) - @Target({ElementType.METHOD}) - public @interface ConditionalIgnore { - Class condition(); - } - - @Override - public Statement apply(Statement base, FrameworkMethod method, Object target) { - Statement result = base; - if (hasConditionalIgnoreAnnotation(method)) { - IgnoreCondition condition = getIgnoreCondition(target, method); - if (condition.isSatisfied()) { - result = new 
IgnoreStatement(condition); - } - } - return result; - } - - private static boolean hasConditionalIgnoreAnnotation(FrameworkMethod method) { - return method.getAnnotation(ConditionalIgnore.class) != null; - } - - private static IgnoreCondition getIgnoreCondition(Object target, FrameworkMethod method) { - ConditionalIgnore annotation = method.getAnnotation(ConditionalIgnore.class); - return new IgnoreConditionCreator(target, annotation).create(); - } - - private static class IgnoreConditionCreator { - private final Object target; - private final Class conditionType; - - IgnoreConditionCreator(Object target, ConditionalIgnore annotation) { - this.target = target; - this.conditionType = annotation.condition(); - } - - IgnoreCondition create() { - checkConditionType(); - try { - return createCondition(); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private IgnoreCondition createCondition() throws Exception { - IgnoreCondition result; - if (isConditionTypeStandalone()) { - result = conditionType.newInstance(); - } else { - result = conditionType.getDeclaredConstructor(target.getClass()).newInstance(target); - } - return result; - } - - private void checkConditionType() { - if (!isConditionTypeStandalone() && !isConditionTypeDeclaredInTarget()) { - String msg = - "Conditional class '%s' is a member class " - + "but was not declared inside the test case using it.\n" - + "Either make this class a static class, " - + "standalone class (by declaring it in it's own file) " - + "or move it inside the test case using it"; - throw new IllegalArgumentException(String.format(msg, conditionType.getName())); - } - } - - private boolean isConditionTypeStandalone() { - return !conditionType.isMemberClass() || Modifier.isStatic(conditionType.getModifiers()); - } - - private boolean isConditionTypeDeclaredInTarget() { - return target.getClass().isAssignableFrom(conditionType.getDeclaringClass()); - } - } - - private 
static class IgnoreStatement extends Statement { - private final IgnoreCondition condition; - - IgnoreStatement(IgnoreCondition condition) { - this.condition = condition; - } - - @Override - public void evaluate() { - Assume.assumeTrue("Ignored by " + condition.getClass().getSimpleName(), false); - } - } -} diff --git a/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java b/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java new file mode 100644 index 000000000..ccdf83206 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)GCP(?-i)") +public @interface DontRunOnGCP {} \ No newline at end of file diff --git a/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java b/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java new file mode 100644 index 000000000..98232e097 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +public @interface DontRunOnGithubActions {} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java deleted file mode 100644 index c902dc5f9..000000000 --- a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningOnGCP implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && cloudProvider.equalsIgnoreCase("GCP"); - } -} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java deleted file mode 100644 index d717b65dc..000000000 --- a/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -/** Run tests on CI */ -public class RunningOnGithubActions implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; - } -} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java b/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java deleted file mode 100644 index e69de29bb..000000000 diff --git a/FIPS/src/test/java/net/snowflake/client/TestUtil.java b/FIPS/src/test/java/net/snowflake/client/TestUtil.java index 703d59953..8bec5498f 100644 --- a/FIPS/src/test/java/net/snowflake/client/TestUtil.java +++ b/FIPS/src/test/java/net/snowflake/client/TestUtil.java @@ -9,7 +9,7 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; public class TestUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(TestUtil.class); @@ -22,7 +22,7 @@ public class TestUtil { public static void assertSFException(int errorCode, TestRunInterface testCode) { try { testCode.run(); - Assert.fail(); + Assertions.fail(); } catch (SFException e) { assertThat(e.getVendorCode(), is(errorCode)); } diff --git a/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java b/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java new file mode 100644 index 000000000..d61ce2a83 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.category; + +import org.junit.platform.suite.api.IncludeTags; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.platform.suite.api.ExcludePackages; +import org.junit.platform.suite.api.IncludeClassNamePatterns; +import org.junit.platform.suite.api.SelectPackages; +import org.junit.platform.suite.api.Suite; +import org.junit.platform.suite.api.SuiteDisplayName; + +@Suite +@SelectPackages("net.snowflake.client") +@ExcludePackages("net.snowflake.client.suites") +@IncludeClassNamePatterns(".+") +public class FipsTestSuite { +} diff --git a/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java b/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java deleted file mode 100644 index 06ae9faad..000000000 --- a/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryFips {} diff --git a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java index c1509a6a8..0204e9a5d 100644 --- a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java +++ b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import java.net.URL; import java.nio.file.Files; @@ -20,21 +20,20 @@ import java.util.Properties; import javax.net.ssl.HttpsURLConnection; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGCP; -import net.snowflake.client.RunningOnGithubActions; -import net.snowflake.client.category.TestCategoryFips; +import net.snowflake.client.DontRunOnGCP; 
+import net.snowflake.client.DontRunOnGithubActions; import net.snowflake.client.core.SecurityUtil; import org.apache.commons.codec.binary.Base64; import org.bouncycastle.crypto.CryptoServicesRegistrar; import org.bouncycastle.crypto.fips.FipsStatus; import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryFips.class) + +@Tag("fips") public class ConnectionFipsIT extends AbstractDriverIT { private static final String JCE_PROVIDER_BOUNCY_CASTLE_FIPS = "BCFIPS"; private static final String JCE_PROVIDER_SUN_JCE = "SunJCE"; @@ -106,7 +105,7 @@ public class ConnectionFipsIT extends AbstractDriverIT { private static int JCE_PROVIDER_SUN_JCE_PROVIDER_POSITION; private static int JCE_PROVIDER_SUN_RSA_SIGN_PROVIDER_POSITION; - @BeforeClass + @BeforeAll public static void setup() throws Exception { System.setProperty("javax.net.debug", "ssl"); // get keystore types for BouncyCastle libraries @@ -166,7 +165,7 @@ public static void setup() throws Exception { // connectToGoogle(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { // Remove BouncyCastle FIPS Provider Security.removeProvider(JCE_PROVIDER_BOUNCY_CASTLE_FIPS); @@ -227,7 +226,7 @@ public void connectWithFips() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void connectWithFipsKeyPair() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -256,7 +255,7 @@ public void connectWithFipsKeyPair() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + 
@DontRunOnGithubActions public void testConnectUsingKeyPair() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -295,7 +294,7 @@ public void testConnectUsingKeyPair() throws Exception { * Currently ignored execution on GCP due to exception thrown "SSlException Could not generate XDH keypair" */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGCP.class) + @DontRunOnGCP public void connectWithFipsAndQuery() throws SQLException { try (Connection con = getConnection()) { Statement statement = con.createStatement(); @@ -329,7 +328,7 @@ public void connectWithFipsAndPut() throws Exception { /** Added in > 3.15.1 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void connectWithFipsKeyPairWithBouncyCastle() throws Exception { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); connectWithFipsKeyPair(); @@ -337,7 +336,7 @@ public void connectWithFipsKeyPairWithBouncyCastle() throws Exception { /** Added in > 3.15.1 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void testConnectUsingKeyPairWithBouncyCastle() throws Exception { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); testConnectUsingKeyPair(); diff --git a/Jenkinsfile b/Jenkinsfile index 8e5925b8c..261a2968b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -87,6 +87,8 @@ timestamps { }.collectEntries { jobDefinition -> return [(jobDefinition.runName): { build job: jobDefinition.jobToRun, parameters: jobDefinition.params }] } + + jobDefinitions.put('JDBC-AIX-Unit', { build job: 'JDBC-AIX-UnitTests', parameters: [ string(name: 'BRANCH', value: scmInfo.GIT_BRANCH ) ] } ) stage('Test') { parallel (jobDefinitions) } diff --git a/README.rst b/README.rst index c2e296e95..a9d3cacb2 100644 --- a/README.rst +++ b/README.rst @@ -214,3 +214,8 @@ 
Support Feel free to file an issue or submit a PR here for general cases. For official support, contact Snowflake support at: https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge + +Note +---------- + +This driver currently does not support GCP regional endpoints. Please ensure that any workloads using through this driver do not require support for regional endpoints on GCP. If you have questions about this, please contact Snowflake Support. diff --git a/TestOnly/pom.xml b/TestOnly/pom.xml index 109b03c74..509cb8925 100644 --- a/TestOnly/pom.xml +++ b/TestOnly/pom.xml @@ -4,7 +4,7 @@ net.snowflake snowflake-jdbc-test - 3.9.2 + 3.13.21 snowflake-jdbc-test http://maven.apache.org @@ -18,33 +18,82 @@ 0.8.4 true 5.13.0 + 5.11.1 + 3.5.1 3.5.6 net.snowflake.client.jdbc.internal - net.snowflake.client.category.AllTestCategory io.netty netty-common - 4.1.111.Final + 4.1.115.Final io.netty netty-buffer - 4.1.111.Final + 4.1.115.Final org.apache.maven.plugins maven-failsafe-plugin 3.0.0-M1 + test - junit - junit - 4.13.1 - jar + org.junit.jupiter + junit-jupiter-api + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-params + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-engine + ${junit.version} + test + + + org.junit.platform + junit-platform-suite + 1.11.1 + test + + + org.junit.platform + junit-platform-engine + 1.11.1 + test + + + org.junit.platform + junit-platform-runner + 1.11.1 + test + + + org.junit.platform + junit-platform-suite-api + 1.11.1 + test + + + org.junit.platform + junit-platform-suite-engine + 1.11.1 + test + + + org.junit.platform + junit-platform-launcher + 1.11.1 test @@ -371,7 +420,26 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M5 + + + org.apache.maven.surefire + surefire-junit-platform + ${surefire.version} + + + ${surefire.version} + + + org.apache.maven.plugins + maven-failsafe-plugin + + + org.apache.maven.surefire + surefire-junit-platform + ${surefire.version} 
+ + + ${surefire.version} @@ -387,35 +455,40 @@ org.apache.maven.plugins - maven-failsafe-plugin + maven-surefire-plugin - ${testCategory} + false + + + test + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + + verify + + DefaultIT integration-test - - **/DellBoomiCloudIT.java - - - net.snowflake.client.log.JDK14Logger - - - ${basedir}/../src/test/resources/logging.properties - + net.snowflake.client.log.JDK14Logger + ${basedir}/src/test/resources/logging.properties + ${integrationTestSuites} - - - verify - - diff --git a/ci/container/test_component.sh b/ci/container/test_component.sh index da245a627..65efed88d 100755 --- a/ci/container/test_component.sh +++ b/ci/container/test_component.sh @@ -68,9 +68,6 @@ echo "[INFO] Running Hang Web Server" kill -9 $(ps -ewf | grep hang_webserver | grep -v grep | awk '{print $2}') || true python3 $THIS_DIR/hang_webserver.py 12345& -IFS=',' -read -ra CATEGORY <<< "$JDBC_TEST_CATEGORY" - # Avoid connection timeouts export MAVEN_OPTS="$MAVEN_OPTS -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120" @@ -79,41 +76,39 @@ cd $SOURCE_ROOT # Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched $MVNW_EXE --batch-mode --show-version dependency:go-offline -for c in "${CATEGORY[@]}"; do - c=$(echo $c | sed 's/ *$//g') - if [[ "$is_old_driver" == "true" ]]; then - pushd TestOnly >& /dev/null - JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") - echo "[INFO] Run JDBC $JDBC_VERSION tests" - $MVNW_EXE -DjenkinsIT \ - -Djava.io.tmpdir=$WORKSPACE \ - -Djacoco.skip.instrument=false \ - -DtestCategory=net.snowflake.client.category.$c \ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ - verify \ - --batch-mode 
--show-version - popd >& /dev/null - elif [[ "$c" == "TestCategoryFips" ]]; then - pushd FIPS >& /dev/null - echo "[INFO] Run Fips tests" - $MVNW_EXE -DjenkinsIT \ - -Djava.io.tmpdir=$WORKSPACE \ - -Djacoco.skip.instrument=false \ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ - -Dnot-self-contained-jar \ - verify \ - --batch-mode --show-version - popd >& /dev/null - else - echo "[INFO] Run $c tests" +if [[ "$is_old_driver" == "true" ]]; then + pushd TestOnly >& /dev/null + JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") + echo "[INFO] Run JDBC $JDBC_VERSION tests" $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ - -DtestCategory=net.snowflake.client.category.$c \ + -DintegrationTestSuites="$JDBC_TEST_SUITES" \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ - -Dnot-self-contained-jar $ADDITIONAL_MAVEN_PROFILE \ verify \ --batch-mode --show-version - fi -done + popd >& /dev/null +elif [[ "$JDBC_TEST_SUITES" == "FipsTestSuite" ]]; then + pushd FIPS >& /dev/null + echo "[INFO] Run Fips tests" + $MVNW_EXE -DjenkinsIT \ + -Djava.io.tmpdir=$WORKSPACE \ + -Djacoco.skip.instrument=false \ + -DintegrationTestSuites=FipsTestSuite \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar \ + verify \ + --batch-mode --show-version + popd >& /dev/null +else + echo "[INFO] Run $JDBC_TEST_SUITES tests" + $MVNW_EXE -DjenkinsIT \ + -Djava.io.tmpdir=$WORKSPACE \ + -Djacoco.skip.instrument=false \ + -DintegrationTestSuites="$JDBC_TEST_SUITES" \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar $ADDITIONAL_MAVEN_PROFILE \ + verify \ + --batch-mode --show-version +fi IFS=' ' diff --git a/ci/log_analyze_setup.sh 
b/ci/log_analyze_setup.sh index fd573d194..63303964e 100755 --- a/ci/log_analyze_setup.sh +++ b/ci/log_analyze_setup.sh @@ -36,7 +36,7 @@ LOG_PROPERTY_FILE=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resour export CLIENT_DRIVER_NAME=JDBC function setup_log_env() { - if ["$WORKSPACE" == "/mnt/workspace"]; then + if [[ "$WORKSPACE" == "/mnt/workspace" ]]; then CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH_DOCKER CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH_DOCKER CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER @@ -53,7 +53,7 @@ function setup_log_env() { sed -i'' -e "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH|" ${LOG_PROPERTY_FILE} if [[ ! -d ${CLIENT_LOG_DIR_PATH} ]]; then - echo "[INFO] create clien log directory $CLIENT_LOG_DIR_PATH" + echo "[INFO] create client log directory $CLIENT_LOG_DIR_PATH" mkdir -p ${CLIENT_LOG_DIR_PATH} fi diff --git a/ci/scripts/check_content.sh b/ci/scripts/check_content.sh index a9c0768b6..1af33e56a 100755 --- a/ci/scripts/check_content.sh +++ b/ci/scripts/check_content.sh @@ -8,12 +8,12 @@ set -o pipefail DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" -if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types; then +if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -v -E "/$" | grep -v -E "^(net|com)/snowflake" | grep -v -E "(com|net)/\$" | grep -v -E "^META-INF" | grep -v -E "^mozilla" | grep -v -E "^com/sun/jna" | grep -v com/sun/ | grep -v mime.types | grep -v -E "^com/github/luben/zstd/" | grep -v -E "^aix/" | grep -v -E "^darwin/" | grep -v -E "^freebsd/" | grep -v -E "^linux/" | grep -v -E "^win/"; then echo "[ERROR] JDBC jar includes class not under the snowflake namespace" 
exit 1 fi -if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -E "^META-INF/versions/.*.class" | grep -v -E "^META-INF/versions/.*/(net|com)/snowflake"; then - echo "[ERROR] JDBC jar includes multi release classes not under the snowflake namespace" +if jar tvf $DIR/../../target/snowflake-jdbc${package_modifier}.jar | awk '{print $8}' | grep -E "^META-INF/versions/.*.class" | grep -v -E "^META-INF/versions/.*/(net|com)/snowflake"; then + echo "[ERROR] JDBC jar includes multi-release classes not under the snowflake namespace" exit 1 fi diff --git a/ci/test.sh b/ci/test.sh index 03c66c502..125e91d1f 100755 --- a/ci/test.sh +++ b/ci/test.sh @@ -30,8 +30,8 @@ else exit 2 fi -if [[ -z "$JDBC_TEST_CATEGORY" ]]; then - echo "[ERROR] Set JDBC_TEST_CATEGORY to the JDBC test category." +if [[ -z "$JDBC_TEST_SUITES" ]]; then + echo "[ERROR] Set JDBC_TEST_SUITES to the JDBC test category." find $THIS_DIR/../src/test/java -type f -exec grep -E "^import net.snowflake.client.category" {} \; | sort | uniq | awk -F. 
'{print $NF}' | awk -F\; '{print $1}' exit 2 fi @@ -56,7 +56,7 @@ for name in "${!TARGET_TEST_IMAGES[@]}"; do -e RUNNER_TRACKING_ID \ -e JOB_NAME \ -e BUILD_NUMBER \ - -e JDBC_TEST_CATEGORY \ + -e JDBC_TEST_SUITES \ -e ADDITIONAL_MAVEN_PROFILE \ -e CLOUD_PROVIDER \ -e is_old_driver \ diff --git a/ci/test_windows.bat b/ci/test_windows.bat index 4a5a8ebe3..0234b105c 100644 --- a/ci/test_windows.bat +++ b/ci/test_windows.bat @@ -111,47 +111,45 @@ echo "MAVEN OPTIONS %MAVEN_OPTS%" REM Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched cmd /c %MVNW_EXE% --batch-mode --show-version dependency:go-offline -echo list = "%JDBC_TEST_CATEGORY%" -for %%a in ("%JDBC_TEST_CATEGORY:,=" "%") do ( - echo "Current category to execute" %%a - if /i %%a=="TestCategoryFips" ( - pushd FIPS - echo "[INFO] Run Fips tests" - cmd /c %MVNW_EXE% -B -DjenkinsIT ^ - -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ - -Djacoco.skip.instrument=false ^ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ - -Dnot-self-contained-jar ^ - verify ^ - --batch-mode --show-version > log.txt & type log.txt - echo "[INFO] Check for test execution status" - find /i /c "BUILD FAILURE" log.txt > NUL - set isfound=!errorlevel! - if !isfound! equ 0 ( - echo [ERROR] Failed run %%a test - exit /b 1 - ) else ( - echo [INFO] Success run %%a test - ) - popd ) else ( - echo "[INFO] Run %%a tests" - cmd /c %MVNW_EXE% -B -DjenkinsIT ^ - -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ - -Djacoco.skip.instrument=false ^ - -DtestCategory=net.snowflake.client.category.%%a ^ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ - -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ - verify ^ - --batch-mode --show-version > log.txt & type log.txt - echo "[INFO] Check for test execution status" - find /i /c "BUILD FAILURE" log.txt > NUL - set isfound=!errorlevel! - if !isfound! 
equ 0 ( - echo [ERROR] Failed run %%a test - exit /b 1 - ) else ( - echo [INFO] Success run %%a test - ) +if "%JDBC_TEST_SUITES%"=="FipsTestSuite" ( + pushd FIPS + echo "[INFO] Run Fips tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DintegrationTestSuites=FipsTestSuite ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + popd +) else ( + echo "[INFO] Run %JDBC_TEST_SUITES% tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DintegrationTestSuites="%JDBC_TEST_SUITES%" ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test ) ) diff --git a/dependencies/Readme.md b/dependencies/Readme.md index 7b4a4c73c..28afe4031 100644 --- a/dependencies/Readme.md +++ b/dependencies/Readme.md @@ -1,2 +1,2 @@ -Arrow dependencies are built from internal branch `upgradeto17.0.0`. This build was applied the AIX fix. +Arrow dependencies are built from internal branch `upgradeTo17.0.0-v3`. This build was applied the AIX fix and the customer logger instead of slf4j logger. 
diff --git a/dependencies/arrow-format-17.0.0.jar b/dependencies/arrow-format-17.0.0.jar index 349272113..103c9c00d 100644 Binary files a/dependencies/arrow-format-17.0.0.jar and b/dependencies/arrow-format-17.0.0.jar differ diff --git a/dependencies/arrow-memory-core-17.0.0.jar b/dependencies/arrow-memory-core-17.0.0.jar index a218df5db..916e38238 100644 Binary files a/dependencies/arrow-memory-core-17.0.0.jar and b/dependencies/arrow-memory-core-17.0.0.jar differ diff --git a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar index 2004a461e..63f032a2b 100644 Binary files a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar and b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar differ diff --git a/dependencies/arrow-memory-unsafe-17.0.0.jar b/dependencies/arrow-memory-unsafe-17.0.0.jar index b9897fe47..174af274e 100644 Binary files a/dependencies/arrow-memory-unsafe-17.0.0.jar and b/dependencies/arrow-memory-unsafe-17.0.0.jar differ diff --git a/dependencies/arrow-vector-17.0.0.jar b/dependencies/arrow-vector-17.0.0.jar index 69ccfaf0d..73061da73 100644 Binary files a/dependencies/arrow-vector-17.0.0.jar and b/dependencies/arrow-vector-17.0.0.jar differ diff --git a/linkage-checker-exclusion-rules.xml b/linkage-checker-exclusion-rules.xml index 8bad89714..64b5860c2 100644 --- a/linkage-checker-exclusion-rules.xml +++ b/linkage-checker-exclusion-rules.xml @@ -19,11 +19,6 @@ Optional - - - - Optional - @@ -44,6 +39,16 @@ ? + + + + ? + + + + + ? 
+ + org.bouncycastle + bcutil-jdk18on + ${bouncycastle.version} + org.bouncycastle @@ -462,11 +542,6 @@ ${bouncycastle.bcpkixfips.version} provided - - org.threeten - threetenbp - ${threeten.version} - org.tukaani xz @@ -496,18 +571,6 @@ ${awaitility.version} test - - org.apache.maven.surefire - surefire-junit4 - ${version.plugin.surefire} - test - - - org.apache.maven.surefire - common-junit48 - ${version.plugin.surefire} - test - org.wiremock wiremock-standalone @@ -644,6 +707,10 @@ org.apache.httpcomponents httpcore + + com.github.luben + zstd-jni + org.apache.tika tika-core @@ -723,6 +790,46 @@ junit junit + + org.junit.jupiter + junit-jupiter + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + + + org.junit.jupiter + junit-jupiter-params + + + org.junit.platform + junit-platform-suite + + + org.junit.platform + junit-platform-engine + + + org.junit.platform + junit-platform-runner + + + org.junit.platform + junit-platform-suite-api + + + org.junit.platform + junit-platform-suite-engine + + + org.junit.platform + junit-platform-launcher + org.apache.avro avro @@ -755,15 +862,6 @@ org.awaitility awaitility - - - org.apache.maven.surefire - surefire-junit4 - - - org.apache.maven.surefire - common-junit48 - org.wiremock wiremock-standalone diff --git a/pom.xml b/pom.xml index 096641174..2cfb0425e 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.19.0 + 3.20.1-SNAPSHOT ./parent-pom.xml ${artifactId} - 3.19.0 + 3.20.1-SNAPSHOT jar ${artifactId} @@ -36,6 +36,10 @@ org.bouncycastle bcprov-jdk18on + + org.bouncycastle + bcutil-jdk18on + @@ -102,6 +106,13 @@ org.apache.maven.plugins maven-failsafe-plugin ${version.plugin.failsafe} + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + org.apache.maven.plugins @@ -142,6 +153,13 @@ org.apache.maven.plugins maven-surefire-plugin ${version.plugin.surefire} + + + org.apache.maven.surefire + 
surefire-junit-platform + ${version.plugin.surefire} + + org.codehaus.mojo @@ -720,6 +738,9 @@ + + META-INF/io.netty.versions.properties + @@ -819,14 +840,59 @@ com.google ${shadeBase}.google + + + google.api + ${shadeBase}.google.api + + + google.apps + ${shadeBase}.google.apps + + + google.cloud + ${shadeBase}.google.cloud + google.geo ${shadeBase}.google.geo + + google.iam + ${shadeBase}.google.iam + + + google.logging + ${shadeBase}.google.logging + + + google.longrunning + ${shadeBase}.google.longrunning + + + google.monitoring + ${shadeBase}.google.monitoring + + + google.protobuf + ${shadeBase}.google.protobuf + + + google.rpc + ${shadeBase}.google.rpc + + + google.shopping + ${shadeBase}.google.shopping + google.storage ${shadeBase}.google.storage + + google.type + ${shadeBase}.google.type + org.joda ${shadeBase}.joda @@ -875,49 +941,37 @@ com.carrotsearch ${shadeBase}.com.carrotsearch - - google.type - ${shadeBase}.google.type - - - google.rpc - ${shadeBase}.google.rpc - - - google.iam - ${shadeBase}.google.iam - io.opencensus ${shadeBase}.opencensus - org.threeten - ${shadeBase}.threeten + io.opentelemetry + ${shadeBase}.opentelemetry - google.protobuf - ${shadeBase}.google.protobuf - - - google.api - ${shadeBase}.google.api + org.threeten + ${shadeBase}.threeten io.grpc ${shadeBase}.grpc - google.longrunning - ${shadeBase}.google.longrunning + META-INF.native.io_grpc_netty_shaded_netty_tcnative + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_tcnative - google.cloud - ${shadeBase}.google.cloud + META-INF.native.libio_grpc_netty_shaded_netty_tcnative + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_tcnative - google.logging - ${shadeBase}.google.logging + META-INF.native.io_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll + + + META-INF.native.libio_grpc_netty_shaded_netty_transport_native_epoll + 
META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll org.checkerframework @@ -935,6 +989,18 @@ org.conscrypt ${shadeBase}.org.conscrypt + + conscrypt_openjdk_jni + ${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.conscrypt_openjdk_jni + META-INF.native.${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.libconscrypt_openjdk_jni + META-INF.native.lib${shadeNativeBase}_conscrypt_openjdk_jni + opencensus ${shadeBase}.opencensus @@ -1002,6 +1068,9 @@ + + META-INF/io.netty.versions.properties + @@ -1027,20 +1096,26 @@ + + + + + + @@ -1105,7 +1180,7 @@ @@ -1136,10 +1211,28 @@ org.apache.maven.plugins - maven-failsafe-plugin + maven-surefire-plugin - ${testCategory} + UnitTestSuite + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + + + + + test + + + + + + org.apache.maven.plugins + maven-failsafe-plugin @@ -1152,13 +1245,11 @@ integration-test - - **/DellBoomiCloudIT.java - net.snowflake.client.log.JDK14Logger ${basedir}/src/test/resources/logging.properties + ${integrationTestSuites} @@ -1277,27 +1368,24 @@ org.apache.maven.plugins maven-failsafe-plugin + + + **/*IT.java + + + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + verify - - ClientTelemetryIT - - integration-test - - - - **/ConnectionIT.java - **/SFTrustManagerIT.java - - - ${basedir}/src/test/resources/logback-test.xml - - - @@ -1315,21 +1403,24 @@ org.apache.maven.plugins maven-failsafe-plugin + + + **/*IT.java + + + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + verify - - DellBoomiIT - - integration-test - - - DellBoomiCloudIT.java - - @@ -1347,27 +1438,24 @@ org.apache.maven.plugins maven-failsafe-plugin + + + **/*IT.java + + + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + verify - - ClientTelemetryIT - - integration-test - - - - **/ConnectionIT.java - **/SFTrustManagerIT.java - - - 
${basedir}/src/test/resources/logback-test.xml - - - diff --git a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java index a0ca0fa11..45b38dbfa 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java @@ -33,6 +33,7 @@ public class SFClientConfigParser { * @param configFilePath SF_CLIENT_CONFIG_FILE parameter read from connection URL or connection * properties * @return SFClientConfig + * @throws IOException if exception encountered when reading config file. */ public static SFClientConfig loadSFClientConfig(String configFilePath) throws IOException { if (configFilePath != null) { diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java index 35698c557..1da9f766a 100644 --- a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java @@ -1,5 +1,6 @@ package net.snowflake.client.config; +import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemGetEnvToBooleanValue; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetEnv; import com.fasterxml.jackson.dataformat.toml.TomlMapper; @@ -34,6 +35,53 @@ public class SFConnectionConfigParser { "SNOWFLAKE_DEFAULT_CONNECTION_NAME"; public static final String DEFAULT = "default"; public static final String SNOWFLAKE_TOKEN_FILE_PATH = "/snowflake/session/token"; + public static final String SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION = + "SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION"; + + public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { + String defaultConnectionName = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); + Map fileConnectionConfiguration = + 
loadDefaultConnectionConfiguration(defaultConnectionName); + + if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { + Properties connectionProperties = new Properties(); + connectionProperties.putAll(fileConnectionConfiguration); + + String url = createUrl(fileConnectionConfiguration); + logger.debug("Url created using parameters from connection configuration file: {}", url); + + if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) + && fileConnectionConfiguration.get("token") == null) { + Path path = + Paths.get( + Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) + .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); + logger.debug("Token used in connect is read from file: {}", path); + try { + boolean shouldSkipTokenFilePermissionsVerification = + convertSystemGetEnvToBooleanValue(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, false); + if (!shouldSkipTokenFilePermissionsVerification) { + verifyFilePermissionSecure(path); + } else { + logger.debug("Skip token file permissions verification"); + } + String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); + if (!token.isEmpty()) { + putPropertyIfNotNull(connectionProperties, "token", token.trim()); + } else { + throw new SnowflakeSQLException( + "Non-empty token must be set when the authenticator type is OAUTH"); + } + } catch (Exception ex) { + throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); + } + } + return new ConnectionParameters(url, connectionProperties); + } else { + return null; + } + } private static Map loadDefaultConnectionConfiguration( String defaultConnectionName) throws SnowflakeSQLException { @@ -88,44 +136,6 @@ private static void verifyFilePermissionSecure(Path configFilePath) } } - public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { - String defaultConnectionName = - 
Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); - Map fileConnectionConfiguration = - loadDefaultConnectionConfiguration(defaultConnectionName); - - if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { - Properties conectionProperties = new Properties(); - conectionProperties.putAll(fileConnectionConfiguration); - - String url = createUrl(fileConnectionConfiguration); - logger.debug("Url created using parameters from connection configuration file: {}", url); - - if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) - && fileConnectionConfiguration.get("token") == null) { - Path path = - Paths.get( - Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) - .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); - logger.debug("Token used in connect is read from file: {}", path); - try { - verifyFilePermissionSecure(path); - String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); - if (!token.isEmpty()) { - putPropertyIfNotNull(conectionProperties, "token", token.trim()); - } else { - logger.warn("The token has empty value"); - } - } catch (Exception ex) { - throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); - } - } - return new ConnectionParameters(url, conectionProperties); - } else { - return null; - } - } - private static String createUrl(Map fileConnectionConfiguration) throws SnowflakeSQLException { Optional maybeAccount = Optional.ofNullable(fileConnectionConfiguration.get("account")); diff --git a/src/main/java/net/snowflake/client/core/CancellationReason.java b/src/main/java/net/snowflake/client/core/CancellationReason.java new file mode 100644 index 000000000..e3ae4e308 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/CancellationReason.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.core; + +@SnowflakeJdbcInternalApi +public enum CancellationReason { + UNKNOWN, + CLIENT_REQUESTED, + TIMEOUT +} diff --git a/src/main/java/net/snowflake/client/core/ChunkDownloader.java b/src/main/java/net/snowflake/client/core/ChunkDownloader.java index 8818c9c17..e881eb9a1 100644 --- a/src/main/java/net/snowflake/client/core/ChunkDownloader.java +++ b/src/main/java/net/snowflake/client/core/ChunkDownloader.java @@ -14,6 +14,8 @@ public interface ChunkDownloader { * be blocked if the chunk is not ready to be consumed (a.k.a not loaded into memory yet) * * @return result chunk with data loaded + * @throws InterruptedException if downloading thread was interrupted + * @throws SnowflakeSQLException if downloader encountered an error */ SnowflakeResultChunk getNextChunkToConsume() throws InterruptedException, SnowflakeSQLException; @@ -21,6 +23,7 @@ public interface ChunkDownloader { * Terminate the chunk downloader, release all resources allocated * * @return metrics measuring downloader performance + * @throws InterruptedException if error encountered */ DownloaderMetrics terminate() throws InterruptedException; } diff --git a/src/main/java/net/snowflake/client/core/CredentialManager.java b/src/main/java/net/snowflake/client/core/CredentialManager.java index a5b919d3d..08e9e6b9a 100644 --- a/src/main/java/net/snowflake/client/core/CredentialManager.java +++ b/src/main/java/net/snowflake/client/core/CredentialManager.java @@ -47,7 +47,7 @@ void resetSecureStorageManager() { /** * Testing purpose. Inject a mock manager. 
* - * @param manager + * @param manager SecureStorageManager */ void injectSecureStorageManager(SecureStorageManager manager) { logger.debug("Injecting secure storage manager"); diff --git a/src/main/java/net/snowflake/client/core/DataConversionContext.java b/src/main/java/net/snowflake/client/core/DataConversionContext.java index 86bba8208..d0f80e021 100644 --- a/src/main/java/net/snowflake/client/core/DataConversionContext.java +++ b/src/main/java/net/snowflake/client/core/DataConversionContext.java @@ -12,25 +12,42 @@ * to a single result set. a.k.a each result set object should have its own formatter info */ public interface DataConversionContext { - /** timestamp_ltz formatter */ + /** + * @return timestamp_ltz formatter + */ SnowflakeDateTimeFormat getTimestampLTZFormatter(); - /** timestamp_ntz formatter */ + /** + * @return timestamp_ntz formatter + */ SnowflakeDateTimeFormat getTimestampNTZFormatter(); - /** timestamp_tz formatter */ + /** + * @return timestamp_ntz formatter + */ SnowflakeDateTimeFormat getTimestampTZFormatter(); - /** date formatter */ + /** + * @return date formatter + */ SnowflakeDateTimeFormat getDateFormatter(); - /** time formatter */ + /** + * @return time formatter + */ SnowflakeDateTimeFormat getTimeFormatter(); - /** binary formatter */ + /** + * @return binary formatter + */ SFBinaryFormat getBinaryFormatter(); - /** get scale from Snowflake metadata */ + /** + * get scale from Snowflake metadata + * + * @param columnIndex column index + * @return scale value + */ int getScale(int columnIndex); /** diff --git a/src/main/java/net/snowflake/client/core/EventUtil.java b/src/main/java/net/snowflake/client/core/EventUtil.java index d45cd0676..ed25c5988 100644 --- a/src/main/java/net/snowflake/client/core/EventUtil.java +++ b/src/main/java/net/snowflake/client/core/EventUtil.java @@ -36,7 +36,7 @@ public class EventUtil { /** * Junit is not recognizing the system properties for EventTest, so overriding the value here * - * @param 
value + * @param value string value */ public static void setDumpPathPrefixForTesting(String value) { DUMP_PATH_PREFIX = value; diff --git a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java index 25ba5f946..6942a9e5a 100644 --- a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java +++ b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java @@ -67,6 +67,7 @@ private HeartbeatBackground() {} * @param session the session will be added * @param masterTokenValidityInSecs time interval for which client need to check validity of * master token with server + * @param heartbeatFrequencyInSecs heartbeat frequency in seconds */ protected synchronized void addSession( SFSession session, long masterTokenValidityInSecs, int heartbeatFrequencyInSecs) { diff --git a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java index f65b9e29d..d3a356e5a 100644 --- a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java +++ b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java @@ -122,7 +122,11 @@ public String getUserAgentSuffix() { return this.userAgentSuffix; } - /** Be careful of using this! Should only be called when password is later masked. */ + /** + * Be careful of using this! Should only be called when password is later masked. 
+ * + * @return proxy password + */ @SnowflakeJdbcInternalApi public String getProxyPassword() { return this.proxyPassword; diff --git a/src/main/java/net/snowflake/client/core/HttpUtil.java b/src/main/java/net/snowflake/client/core/HttpUtil.java index 166bd7e0a..23b83df09 100644 --- a/src/main/java/net/snowflake/client/core/HttpUtil.java +++ b/src/main/java/net/snowflake/client/core/HttpUtil.java @@ -65,6 +65,7 @@ import org.apache.http.ssl.SSLInitializationException; import org.apache.http.util.EntityUtils; +/** HttpUtil class */ public class HttpUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(HttpUtil.class); @@ -168,7 +169,7 @@ public static void setProxyForS3(HttpClientSettingsKey key, ClientConfiguration * * @param proxyProperties proxy properties * @param clientConfig the configuration needed by S3 to set the proxy - * @throws SnowflakeSQLException + * @throws SnowflakeSQLException when exception encountered * @deprecated Use {@link S3HttpUtil#setSessionlessProxyForS3(Properties, ClientConfiguration)} * instead */ @@ -184,7 +185,7 @@ public static void setSessionlessProxyForS3( * * @param proxyProperties proxy properties * @param opContext the configuration needed by Azure to set the proxy - * @throws SnowflakeSQLException + * @throws SnowflakeSQLException when invalid proxy properties encountered */ public static void setSessionlessProxyForAzure( Properties proxyProperties, OperationContext opContext) throws SnowflakeSQLException { @@ -723,6 +724,7 @@ public static String executeGeneralRequest( * @param includeRetryParameters whether to include retry parameters in retried requests * @param retryOnHTTP403 whether to retry on HTTP 403 or not * @param ocspAndProxyKey OCSP mode and proxy settings for httpclient + * @param execTimeData query execution time telemetry data object * @return response * @throws SnowflakeSQLException if Snowflake error occurs * @throws IOException raises if a general IO error occurs diff --git 
a/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java index 8d4a01742..e60f6859d 100644 --- a/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java +++ b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java @@ -6,6 +6,9 @@ public class PrivateLinkDetector { * We can only tell if private link is enabled for certain hosts when the hostname contains the * word 'privatelink' but we don't have a good way of telling if a private link connection is * expected for internal stages for example. + * + * @param host host + * @return true if host is considered as privatelink environment */ public static boolean isPrivateLink(String host) { return host.toLowerCase().contains(".privatelink.snowflakecomputing."); diff --git a/src/main/java/net/snowflake/client/core/QueryContextCache.java b/src/main/java/net/snowflake/client/core/QueryContextCache.java index 85fde42ac..60cd8501a 100644 --- a/src/main/java/net/snowflake/client/core/QueryContextCache.java +++ b/src/main/java/net/snowflake/client/core/QueryContextCache.java @@ -274,6 +274,8 @@ private static QueryContextElement deserializeQueryContextElement(JsonNode node) * Deserialize the QueryContext cache from a QueryContextDTO object. This function currently is * only used in QueryContextCacheTest.java where we check that after serialization and * deserialization, the cache is the same as before. + * + * @param queryContextDTO QueryContextDTO to deserialize. */ public void deserializeQueryContextDTO(QueryContextDTO queryContextDTO) { synchronized (this) { @@ -335,6 +337,8 @@ private static QueryContextElement deserializeQueryContextElementDTO( /** * Serialize the QueryContext cache to a QueryContextDTO object, which can be serialized to JSON * automatically later. 
+ * + * @return {@link QueryContextDTO} */ public QueryContextDTO serializeQueryContextDTO() { synchronized (this) { diff --git a/src/main/java/net/snowflake/client/core/QueryStatus.java b/src/main/java/net/snowflake/client/core/QueryStatus.java index bc16abf62..792f4b538 100644 --- a/src/main/java/net/snowflake/client/core/QueryStatus.java +++ b/src/main/java/net/snowflake/client/core/QueryStatus.java @@ -39,6 +39,7 @@ public String getDescription() { /** * @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead + * @return error message */ @Deprecated public String getErrorMessage() { @@ -47,6 +48,7 @@ public String getErrorMessage() { /** * @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead + * @return error code */ @Deprecated public int getErrorCode() { @@ -55,6 +57,7 @@ public int getErrorCode() { /** * @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead + * @param message the error message */ @Deprecated public void setErrorMessage(String message) { @@ -63,12 +66,19 @@ public void setErrorMessage(String message) { /** * @deprecated use {@link net.snowflake.client.jdbc.QueryStatusV2} instead + * @param errorCode the error code */ @Deprecated public void setErrorCode(int errorCode) { this.errorCode = errorCode; } + /** + * Check if query is still running. 
+ * + * @param status QueryStatus + * @return true if query is still running + */ public static boolean isStillRunning(QueryStatus status) { switch (status.getValue()) { case 0: // "RUNNING" @@ -83,6 +93,12 @@ public static boolean isStillRunning(QueryStatus status) { } } + /** + * Check if query status is an error + * + * @param status QueryStatus + * @return true if query status is an error status + */ public static boolean isAnError(QueryStatus status) { switch (status.getValue()) { case 1: // Aborting @@ -97,6 +113,12 @@ public static boolean isAnError(QueryStatus status) { } } + /** + * Get the query status from a string description + * + * @param description the status description + * @return QueryStatus + */ public static QueryStatus getStatusFromString(String description) { if (description != null) { for (QueryStatus st : QueryStatus.values()) { diff --git a/src/main/java/net/snowflake/client/core/ResultUtil.java b/src/main/java/net/snowflake/client/core/ResultUtil.java index b894f4259..20acee866 100644 --- a/src/main/java/net/snowflake/client/core/ResultUtil.java +++ b/src/main/java/net/snowflake/client/core/ResultUtil.java @@ -83,6 +83,12 @@ public static Object effectiveParamValue(Map parameters, String /** * Helper function building a formatter for a specialized timestamp type. Note that it will be * based on either the 'param' value if set, or the default format provided. 
+ * + * @param parameters keyed in parameter name and valued in parameter value + * @param id id + * @param param timestamp output format param + * @param defaultFormat default format + * @return {@link SnowflakeDateTimeFormat} */ public static SnowflakeDateTimeFormat specializedFormatter( Map parameters, String id, String param, String defaultFormat) { diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index f14e74e5d..dcea1d575 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -189,7 +189,8 @@ public SFArrowResultSet( * * @param resultSetSerializable data returned in query response * @param telemetryClient telemetryClient - * @throws SQLException + * @param sortResult set if results should be sorted + * @throws SQLException if exception encountered */ public SFArrowResultSet( SnowflakeResultSetSerializableV1 resultSetSerializable, diff --git a/src/main/java/net/snowflake/client/core/SFBaseSession.java b/src/main/java/net/snowflake/client/core/SFBaseSession.java index 382dcb877..9222b4a57 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseSession.java +++ b/src/main/java/net/snowflake/client/core/SFBaseSession.java @@ -162,6 +162,8 @@ public long getMemoryLimitForTesting() { * Part of the JDBC API, where client applications may fetch a Map of Properties to set various * attributes. This is not used internally by any driver component, but should be maintained by * the Session object. 
+ * + * @return client info as Properties */ public Properties getClientInfo() { // defensive copy to avoid client from changing the properties @@ -171,10 +173,20 @@ return copy; } + /** + * Set common parameters + * + * @param parameters the parameters to set + */ public void setCommonParameters(Map parameters) { this.commonParameters = parameters; } + /** + * Get common parameters + * + * @return Map of common parameters + */ public Map getCommonParameters() { return this.commonParameters; } @@ -183,12 +195,17 @@ public Map getCommonParameters() { * Gets the Property associated with the key 'name' in the ClientInfo map. * * @param name The key from which to fetch the Property. + * @return The ClientInfo entry property. */ public String getClientInfo(String name) { return this.clientInfo.getProperty(name); } - /** Returns a unique id for this session. */ + /** + * Returns a unique id for this session. + * + * @return unique id for session + */ public String getSessionId() { return sessionId; } @@ -202,86 +219,200 @@ public void setSessionId(String sessionId) { this.sessionId = sessionId; } + /** + * @return true if session is in SQLMode + */ public boolean isSfSQLMode() { return sfSQLMode; } + /** + * Set sfSQLMode + * + * @param sfSQLMode boolean + */ public void setSfSQLMode(boolean sfSQLMode) { this.sfSQLMode = sfSQLMode; } + /** + * Get the database version + * + * @return database version + */ public String getDatabaseVersion() { return databaseVersion; } + /** + * Set database version + * + * @param databaseVersion the version to set + */ public void setDatabaseVersion(String databaseVersion) { this.databaseVersion = databaseVersion; } + /** + * Get database major version + * + * @return the database major version + */ public int getDatabaseMajorVersion() { return databaseMajorVersion; } + /** + * Set database major version + * + * @param databaseMajorVersion the database major version + */ public void 
setDatabaseMajorVersion(int databaseMajorVersion) { this.databaseMajorVersion = databaseMajorVersion; } + /** + * Get the database minor version + * + * @return database minor version + */ public int getDatabaseMinorVersion() { return databaseMinorVersion; } + /** + * Set the database minor version + * + * @param databaseMinorVersion the minor version + */ public void setDatabaseMinorVersion(int databaseMinorVersion) { this.databaseMinorVersion = databaseMinorVersion; } + /** + * Gets the value of CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS if one has been set. False by + * default. + * + * @see CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS + * @return true if enabled + */ public boolean getPreparedStatementLogging() { return this.preparedStatementLogging; } + /** + * Set prepared statement logging + * + * @see SFBaseSession#getPreparedStatementLogging() + * @param value boolean + */ public void setPreparedStatementLogging(boolean value) { this.preparedStatementLogging = value; } + /** + * Get inject file upload failure. Note: Should only be used in internal tests! + * + * @return file to fail + */ public String getInjectFileUploadFailure() { return this.injectFileUploadFailure; } + /** + * Set inject file upload failure Note: Should only be used in internal tests! 
+ * + * @param fileToFail the file to fail + */ public void setInjectFileUploadFailure(String fileToFail) { this.injectFileUploadFailure = fileToFail; } + /** + * Get timestamp mapped type + * + * @see CLIENT_TIMESTAMP_TYPE_MAPPING + * @return {@link SnowflakeType} + */ public SnowflakeType getTimestampMappedType() { return timestampMappedType; } + /** + * Set the timestamp mapped type + * + * @see SFBaseSession#getTimestampMappedType() + * @param timestampMappedType SnowflakeType + */ public void setTimestampMappedType(SnowflakeType timestampMappedType) { this.timestampMappedType = timestampMappedType; } + /** + * Get if result column is case-insensitive + * + * @see SFBaseSession#setResultColumnCaseInsensitive(boolean) + * @return true if result column is case-insensitive + */ public boolean isResultColumnCaseInsensitive() { return isResultColumnCaseInsensitive; } + /** + * Set if result column is case-insensitive + * + * @see CLIENT_RESULT_COLUMN_CASE_INSENSITIVE + * @param resultColumnCaseInsensitive boolean + */ public void setResultColumnCaseInsensitive(boolean resultColumnCaseInsensitive) { isResultColumnCaseInsensitive = resultColumnCaseInsensitive; } + /** + * Check if we want to treat decimal as int JDBC types + * + * @see JDBC_TREAT_DECIMAL_AS_INT + * @return true if decimal is treated as int + */ public boolean isJdbcTreatDecimalAsInt() { return isJdbcTreatDecimalAsInt; } + /** + * Set if decimal should be treated as int type + * + * @see SFBaseSession#isJdbcTreatDecimalAsInt() + * @param jdbcTreatDecimalAsInt boolean + */ public void setJdbcTreatDecimalAsInt(boolean jdbcTreatDecimalAsInt) { isJdbcTreatDecimalAsInt = jdbcTreatDecimalAsInt; } + /** + * @return true if decimal should be treated as int for arrow types + */ public boolean isJdbcArrowTreatDecimalAsInt() { return isJdbcArrowTreatDecimalAsInt; } + /** + * Set if decimal should be treated as int for arrow types + * + * @param jdbcArrowTreatDecimalAsInt boolean + */ public void 
setJdbcArrowTreatDecimalAsInt(boolean jdbcArrowTreatDecimalAsInt) { isJdbcArrowTreatDecimalAsInt = jdbcArrowTreatDecimalAsInt; } + /** + * Get the server url + * + * @return the server url or null if it is not set + */ public String getServerUrl() { if (connectionPropertiesMap.containsKey(SFSessionProperty.SERVER_URL)) { return (String) connectionPropertiesMap.get(SFSessionProperty.SERVER_URL); @@ -289,6 +420,11 @@ public String getServerUrl() { return null; } + /** + * Get whether columns strings are quoted. + * + * @return value of 'stringsQuotedForColumnDef' connection property or false if not set. + */ public boolean isStringQuoted() { if (connectionPropertiesMap.containsKey(SFSessionProperty.STRINGS_QUOTED)) { return (Boolean) connectionPropertiesMap.get(SFSessionProperty.STRINGS_QUOTED); @@ -346,10 +482,21 @@ public void addProperty(String propertyName, Object propertyValue) throws SFExce } } + /** + * Get the connection properties map + * + * @return the connection properties map + */ public Map getConnectionPropertiesMap() { return connectionPropertiesMap; } + /** + * Get the http client key + * + * @return HttpClientSettingsKey + * @throws SnowflakeSQLException if exception encountered + */ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { // if key is already created, return it without making a new one if (ocspAndProxyAndGzipKey != null) { @@ -547,6 +694,7 @@ private void logHttpClientInitInfo(HttpClientSettingsKey key) { } } + /** Unset invalid proxy host and port values. */ public void unsetInvalidProxyHostAndPort() { // If proxyHost and proxyPort are used without http or https unset them, so they are not used // later by the ProxySelector. 
@@ -558,6 +706,11 @@ public void unsetInvalidProxyHostAndPort() { } } + /** + * Get OCSP mode + * + * @return {@link OCSPMode} + */ public OCSPMode getOCSPMode() { OCSPMode ret; @@ -576,18 +729,38 @@ public OCSPMode getOCSPMode() { return ret; } + /** + * Get the query timeout + * + * @return the query timeout value + */ public Integer getQueryTimeout() { return (Integer) this.connectionPropertiesMap.get(SFSessionProperty.QUERY_TIMEOUT); } + /** + * Get the user name + * + * @return user name + */ public String getUser() { return (String) this.connectionPropertiesMap.get(SFSessionProperty.USER); } + /** + * Get the server URL + * + * @return the server URL + */ public String getUrl() { return (String) this.connectionPropertiesMap.get(SFSessionProperty.SERVER_URL); } + /** + * Get inject wait input + * + * @return the value of 'inject_wait_in_put' or 0 if not set + */ public int getInjectWaitInPut() { Object retVal = this.connectionPropertiesMap.get(SFSessionProperty.INJECT_WAIT_IN_PUT); if (retVal != null) { @@ -600,42 +773,92 @@ public int getInjectWaitInPut() { return 0; } + /** + * Get whether the metadata request should use the session database. + * + * @return true if it should use the session database + */ public boolean getMetadataRequestUseSessionDatabase() { return metadataRequestUseSessionDatabase; } + /** + * Set to true if the metadata request should use the session database. 
+ * + * @param enabled boolean + */ public void setMetadataRequestUseSessionDatabase(boolean enabled) { this.metadataRequestUseSessionDatabase = enabled; } + /** + * Get if metadata request should use the connection ctx + * + * @return true if it should use the connection ctx + */ public boolean getMetadataRequestUseConnectionCtx() { return this.metadataRequestUseConnectionCtx; } + /** + * Set to true if metadata request should use connection ctx + * + * @param enabled boolean + */ public void setMetadataRequestUseConnectionCtx(boolean enabled) { this.metadataRequestUseConnectionCtx = enabled; } + /** + * Get injected delay + * + * @return {@link AtomicInteger} + */ AtomicInteger getInjectedDelay() { return _injectedDelay; } + /** + * Set the injected delay + * + * @param injectedDelay injectedDelay value + */ public void setInjectedDelay(int injectedDelay) { this._injectedDelay.set(injectedDelay); } + /** + * Get if NTZ should be treated as UTC + * + * @return true if NTZ should be treated as UTC + */ public boolean getTreatNTZAsUTC() { return treatNTZAsUTC; } + /** + * Set whether NTZ should be treated as UTC + * + * @param treatNTZAsUTC boolean + */ public void setTreatNTZAsUTC(boolean treatNTZAsUTC) { this.treatNTZAsUTC = treatNTZAsUTC; } + /** + * Get if heartbeat is enabled + * + * @return true if enabled + */ public boolean getEnableHeartbeat() { return enableHeartbeat; } + /** + * Set if heartbeat is enabled + * + * @param enableHeartbeat boolean + */ public void setEnableHeartbeat(boolean enableHeartbeat) { this.enableHeartbeat = enableHeartbeat; } @@ -656,39 +879,88 @@ public void setHeartbeatFrequency(int frequency) { } } - /** Retrieve session heartbeat frequency in seconds */ + /** + * Retrieve session heartbeat frequency in seconds + * + * @return the heartbeat frequency in seconds + */ public int getHeartbeatFrequency() { return this.heartbeatFrequency; } + /** + * autoCommit field specifies whether autocommit is enabled for the session. 
Autocommit determines + * whether a DML statement, when executed without an active transaction, is automatically + * committed after the statement successfully completes. default: true + * + * @see Transactions/Autocommit + * @return a boolean value of autocommit field + */ public boolean getAutoCommit() { return autoCommit.get(); } + /** + * Sets value of autoCommit field + * + * @see SFBaseSession#getAutoCommit() + * @param autoCommit boolean + */ public void setAutoCommit(boolean autoCommit) { this.autoCommit.set(autoCommit); } + /** + * Get if date should be formatted with timezone + * + * @return true if date should be formatted with timezone + */ public boolean getFormatDateWithTimezone() { return formatDateWithTimezone; } + /** + * Set if date should be formatted with timezone + * + * @param formatDateWithTimezone boolean + */ public void setFormatDateWithTimezone(boolean formatDateWithTimezone) { this.formatDateWithTimezone = formatDateWithTimezone; } + /** + * Get if session timezone should be used. + * + * @return true if using session timezone + */ public boolean getUseSessionTimezone() { return useSessionTimezone; } + /** + * Get if using default date format with timezone. + * + * @return true if using default date format with timezone. + */ public boolean getDefaultFormatDateWithTimezone() { return defaultFormatDateWithTimezone; } + /** + * Set if session timezone should be used. 
+ * + * @param useSessionTimezone boolean + */ public void setUseSessionTimezone(boolean useSessionTimezone) { this.useSessionTimezone = useSessionTimezone; } + /** + * Set if default date format with timezone should be used + * + * @param defaultFormatDateWithTimezone boolean + */ public void setDefaultFormatDateWithTimezone(boolean defaultFormatDateWithTimezone) { this.defaultFormatDateWithTimezone = defaultFormatDateWithTimezone; } @@ -906,6 +1178,7 @@ public void setSessionPropertyByKey(String propertyName, Object propertyValue) { * Fetch the value for a custom session property. * * @param propertyName The key of the session property to fetch. + * @return session property value */ public Object getSessionPropertyByKey(String propertyName) { return this.customSessionProperties.get(propertyName); @@ -914,6 +1187,8 @@ public Object getSessionPropertyByKey(String propertyName) { /** * Function that checks if the active session can be closed when the connection is closed. Called * by SnowflakeConnectionV1. + * + * @return true if the active session is safe to close. 
*/ public abstract boolean isSafeToClose(); @@ -921,7 +1196,7 @@ public Object getSessionPropertyByKey(String propertyName) { * @param queryID query ID of the query whose status is being investigated * @return enum of type QueryStatus indicating the query's status * @deprecated Use {@link #getQueryStatusV2(String)} - * @throws SQLException + * @throws SQLException if error encountered */ @Deprecated public abstract QueryStatus getQueryStatus(String queryID) throws SQLException; @@ -929,13 +1204,15 @@ public Object getSessionPropertyByKey(String propertyName) { /** * @param queryID query ID of the query whose status is being investigated * @return QueryStatusV2 indicating the query's status - * @throws SQLException + * @throws SQLException if error encountered */ public abstract QueryStatusV2 getQueryStatusV2(String queryID) throws SQLException; /** * Validates the connection properties used by this session, and returns a list of missing * properties. + * + * @return List of DriverPropertyInfo */ public abstract List checkProperties(); @@ -948,17 +1225,25 @@ public Object getSessionPropertyByKey(String propertyName) { public abstract void close() throws SFException, SnowflakeSQLException; /** - * Returns the telemetry client, if supported, by this session. If not, should return a - * NoOpTelemetryClient. + * @return Returns the telemetry client, if supported, by this session. If not, should return a + * NoOpTelemetryClient. */ public abstract Telemetry getTelemetryClient(); - /** Makes a heartbeat call to check for session validity. */ + /** + * Makes a heartbeat call to check for session validity. + * + * @param timeout timeout value + * @throws Exception if exception occurs + * @throws SFException if exception occurs + */ public abstract void callHeartBeat(int timeout) throws Exception, SFException; /** * JDBC API. Returns a list of warnings generated since starting this session, or the last time it * was cleared. 
+ * + * @return List of SFException's */ public List getSqlWarnings() { return sqlWarnings; @@ -972,29 +1257,59 @@ public void clearSqlWarnings() { sqlWarnings.clear(); } + /** + * Get the SFConnectionHandler + * + * @return {@link SFConnectionHandler} + */ public SFConnectionHandler getSfConnectionHandler() { return sfConnectionHandler; } + /** + * Get network timeout in milliseconds + * + * @return network timeout in milliseconds + */ public abstract int getNetworkTimeoutInMilli(); + /** + * @return auth timeout in seconds + */ public abstract int getAuthTimeout(); + /** + * @return max http retries + */ public abstract int getMaxHttpRetries(); + /** + * @return {@link SnowflakeConnectString} + */ public abstract SnowflakeConnectString getSnowflakeConnectionString(); + /** + * @return true if this is an async session + */ public abstract boolean isAsyncSession(); + /** + * @return QueryContextDTO containing opaque information shared with the cloud service. + */ public abstract QueryContextDTO getQueryContextDTO(); + /** + * Set query context + * + * @param queryContext the query context string + */ public abstract void setQueryContext(String queryContext); /** - * If true, JDBC will enable returning TIMESTAMP_WITH_TIMEZONE as column type, otherwise it will - * not. This function will always return true for JDBC client, so that the client JDBC will not - * have any behavior change. Stored proc JDBC will override this function to return the value of - * SP_JDBC_ENABLE_TIMESTAMP_WITH_TIMEZONE from server for backward compatibility. + * @return If true, JDBC will enable returning TIMESTAMP_WITH_TIMEZONE as column type, otherwise + * it will not. This function will always return true for JDBC client, so that the client JDBC + * will not have any behavior change. Stored proc JDBC will override this function to return + * the value of SP_JDBC_ENABLE_TIMESTAMP_WITH_TIMEZONE from server for backward compatibility. 
*/ public boolean getEnableReturnTimestampWithTimeZone() { return enableReturnTimestampWithTimeZone; diff --git a/src/main/java/net/snowflake/client/core/SFBaseStatement.java b/src/main/java/net/snowflake/client/core/SFBaseStatement.java index 17b2fd1b6..104d49387 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseStatement.java +++ b/src/main/java/net/snowflake/client/core/SFBaseStatement.java @@ -93,6 +93,7 @@ public abstract SFBaseResultSet execute( * @param sql sql statement. * @param parametersBinding parameters to bind * @param caller the JDBC interface method that called this method, if any + * @param execTimeData ExecTimeTelemetryData * @return whether there is result set or not * @throws SQLException if failed to execute sql * @throws SFException exception raised from Snowflake components @@ -116,9 +117,23 @@ public abstract SFBaseResultSet asyncExecute( * * @throws SFException if the statement is already closed. * @throws SQLException if there are server-side errors from trying to abort. + * @deprecated use {@link #cancel(CancellationReason)} instead */ + @Deprecated public abstract void cancel() throws SFException, SQLException; + /** + * Aborts the statement. + * + * @param cancellationReason reason for the cancellation + * @throws SFException if the statement is already closed. + * @throws SQLException if there are server-side errors from trying to abort. + */ + @SnowflakeJdbcInternalApi + public void cancel(CancellationReason cancellationReason) throws SFException, SQLException { + cancel(); // default cancel is called to keep interface backward compatibility + } + /** * Sets a property within session properties, i.e., if the sql is using set-sf-property * @@ -150,8 +165,6 @@ public void executeSetProperty(final String sql) { * A method to check if a sql is file upload statement with consideration for potential comments * in front of put keyword. * - *

- * * @param sql sql statement * @return true if the command is upload statement */ @@ -160,15 +173,25 @@ public static boolean isFileTransfer(String sql) { return statementType == SFStatementType.PUT || statementType == SFStatementType.GET; } - /** If this is a multi-statement, i.e., has child results. */ + /** + * If this is a multi-statement, i.e., has child results. + * + * @return true if has child results + */ public abstract boolean hasChildren(); - /** Returns the SFBaseSession associated with this SFBaseStatement. */ + /** + * Get the SFBaseSession associated with this SFBaseStatement. + * + * @return The SFBaseSession associated with this SFBaseStatement. + */ public abstract SFBaseSession getSFBaseSession(); /** * Retrieves the current result as a ResultSet, if any. This is invoked by SnowflakeStatement and * should return an SFBaseResultSet, which is then wrapped in a SnowflakeResultSet. + * + * @return {@link SFBaseResultSet} */ public abstract SFBaseResultSet getResultSet(); @@ -195,7 +218,9 @@ public enum CallingMethod { public abstract int getConservativePrefetchThreads(); /** + * @param queryID the queryID * @return the child query IDs for the multiple statements query. 
+ * @throws SQLException if an error occurs while getting child query ID's */ public abstract String[] getChildQueryIds(String queryID) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/core/SFException.java b/src/main/java/net/snowflake/client/core/SFException.java index 77c2b1355..a2ea0c551 100644 --- a/src/main/java/net/snowflake/client/core/SFException.java +++ b/src/main/java/net/snowflake/client/core/SFException.java @@ -24,24 +24,47 @@ public class SFException extends Throwable { private int vendorCode; private Object[] params; - /** use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} */ + /** + * Use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} + * + * @param errorCode the error code + * @param params additional params + */ @Deprecated public SFException(ErrorCode errorCode, Object... params) { this(null, null, errorCode, params); } - /** use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} */ + /** + * use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} + * + * @param queryID the query id + * @param errorCode the error code + * @param params additional params + */ @Deprecated public SFException(String queryID, ErrorCode errorCode, Object... params) { this(queryID, null, errorCode, params); } - /** use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} */ + /** + * use {@link SFException#SFException(String, Throwable, ErrorCode, Object...)} + * + * @param cause throwable + * @param errorCode error code + * @param params additional params + */ @Deprecated public SFException(Throwable cause, ErrorCode errorCode, Object... params) { this(null, cause, errorCode, params); } + /** + * @param queryId query ID + * @param cause throwable + * @param errorCode error code + * @param params additional params + */ public SFException(String queryId, Throwable cause, ErrorCode errorCode, Object... 
params) { super( errorResourceBundleManager.getLocalizedMessage( @@ -55,22 +78,47 @@ public SFException(String queryId, Throwable cause, ErrorCode errorCode, Object. this.params = params; } + /** + * Get the error cause + * + * @return Throwable + */ public Throwable getCause() { return cause; } + /** + * Get the query ID + * + * @return query ID string + */ public String getQueryId() { return queryId; } + /** + * Get the SQL state + * + * @return SQL state string + */ public String getSqlState() { return sqlState; } + /** + * Get the vendor code + * + * @return vendor code + */ public int getVendorCode() { return vendorCode; } + /** + * Get additional parameters + * + * @return parameter array + */ public Object[] getParams() { return params; } diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 1011870df..c32a16424 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -108,7 +108,7 @@ public Object getObject(int columnIndex) throws SFException { * * @param columnIndex the column index * @return an object of type long or BigDecimal depending on number size - * @throws SFException + * @throws SFException if an error occurs */ private Object getBigInt(int columnIndex, Object obj) throws SFException { return converters.getNumberConverter().getBigInt(obj, columnIndex); diff --git a/src/main/java/net/snowflake/client/core/SFLoginOutput.java b/src/main/java/net/snowflake/client/core/SFLoginOutput.java index 8daf81f10..3470076b9 100644 --- a/src/main/java/net/snowflake/client/core/SFLoginOutput.java +++ b/src/main/java/net/snowflake/client/core/SFLoginOutput.java @@ -18,6 +18,7 @@ public class SFLoginOutput { private int databaseMajorVersion; private int databaseMinorVersion; private Duration httpClientSocketTimeout; + private Duration httpClientConnectionTimeout; private String 
sessionDatabase; private String sessionSchema; private String sessionRole; @@ -53,6 +54,7 @@ public class SFLoginOutput { this.databaseMajorVersion = databaseMajorVersion; this.databaseMinorVersion = databaseMinorVersion; this.httpClientSocketTimeout = Duration.ofMillis(httpClientSocketTimeout); + this.httpClientConnectionTimeout = Duration.ofMillis(httpClientConnectionTimeout); this.sessionDatabase = sessionDatabase; this.sessionSchema = sessionSchema; this.sessionRole = sessionRole; @@ -113,7 +115,7 @@ Duration getHttpClientSocketTimeout() { } Duration getHttpClientConnectionTimeout() { - return httpClientSocketTimeout; + return httpClientConnectionTimeout; } Map getCommonParams() { diff --git a/src/main/java/net/snowflake/client/core/SFResultSet.java b/src/main/java/net/snowflake/client/core/SFResultSet.java index fee90a3ee..7c66e1f5a 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFResultSet.java @@ -30,8 +30,6 @@ /** * Snowflake ResultSet implementation * - *

- * * @author jhuang */ public class SFResultSet extends SFJsonResultSet { @@ -129,7 +127,7 @@ public SFResultSet( * @param resultSetSerializable data returned in query response * @param telemetryClient telemetryClient * @param sortResult should sorting take place - * @throws SQLException + * @throws SQLException if exception is encountered */ public SFResultSet( SnowflakeResultSetSerializableV1 resultSetSerializable, @@ -147,7 +145,7 @@ public SFResultSet( * @param session snowflake session * @param telemetryClient telemetryClient * @param sortResult should sorting take place - * @throws SQLException + * @throws SQLException if an exception is encountered. */ public SFResultSet( SnowflakeResultSetSerializableV1 resultSetSerializable, diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index 8e2e834a0..c3708ea7e 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -290,7 +290,7 @@ else if (ex instanceof SFException) { /** * @param queryID query ID of the query whose status is being investigated * @return enum of type QueryStatus indicating the query's status - * @throws SQLException + * @throws SQLException if an error is encountered * @deprecated the returned enum is error-prone, use {@link #getQueryStatusV2} instead */ @Deprecated @@ -337,7 +337,7 @@ else if (isAnError(result)) { /** * @param queryID query ID of the query whose status is being investigated * @return a QueryStatusV2 instance indicating the query's status - * @throws SQLException + * @throws SQLException if an error is encountered */ public QueryStatusV2 getQueryStatusV2(String queryID) throws SQLException { JsonNode queryNode = getQueryMetadata(queryID); diff --git a/src/main/java/net/snowflake/client/core/SFSqlInput.java b/src/main/java/net/snowflake/client/core/SFSqlInput.java index 2b3d6ba95..6ca9988d9 100644 --- 
a/src/main/java/net/snowflake/client/core/SFSqlInput.java +++ b/src/main/java/net/snowflake/client/core/SFSqlInput.java @@ -37,6 +37,7 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * * @param the type of the class modeled by this Class object * @param type Class representing the Java data type to convert the attribute to. + * @param tz timezone to consider. * @return the attribute at the head of the stream as an {@code Object} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs diff --git a/src/main/java/net/snowflake/client/core/SFStatement.java b/src/main/java/net/snowflake/client/core/SFStatement.java index 6142b8eb9..173ecf21f 100644 --- a/src/main/java/net/snowflake/client/core/SFStatement.java +++ b/src/main/java/net/snowflake/client/core/SFStatement.java @@ -298,7 +298,7 @@ private TimeBombTask(SFStatement statement) { @Override public Void call() throws SQLException { try { - statement.cancel(); + statement.cancel(CancellationReason.TIMEOUT); } catch (SFException ex) { throw new SnowflakeSQLLoggedException( session, ex.getSqlState(), ex.getVendorCode(), ex, ex.getParams()); @@ -318,6 +318,8 @@ public Void call() throws SQLException { * @param bindValues map of binding values * @param describeOnly whether only show the result set metadata * @param internal run internal query not showing up in history + * @param asyncExec is async execute + * @param execTimeData ExecTimeTelemetryData * @return raw json response * @throws SFException if query is canceled * @throws SnowflakeSQLException if query is already running @@ -711,10 +713,11 @@ private void reauthenticate() throws SFException, SnowflakeSQLException { * * @param sql sql statement * @param mediaType media type + * @param cancellationReason reason for the cancellation * @throws SnowflakeSQLException if failed to cancel the statement * @throws SFException if statement is already closed */ - private void 
cancelHelper(String sql, String mediaType) + private void cancelHelper(String sql, String mediaType, CancellationReason cancellationReason) throws SnowflakeSQLException, SFException { synchronized (this) { if (isClosed) { @@ -734,7 +737,7 @@ private void cancelHelper(String sql, String mediaType) .setMaxRetries(session.getMaxHttpRetries()) .setHttpClientSettingsKey(session.getHttpClientKey()); - StmtUtil.cancel(stmtInput); + StmtUtil.cancel(stmtInput, cancellationReason); synchronized (this) { /* @@ -751,8 +754,10 @@ private void cancelHelper(String sql, String mediaType) * Execute sql * * @param sql sql statement. + * @param asyncExec is async exec * @param parametersBinding parameters to bind * @param caller the JDBC interface method that called this method, if any + * @param execTimeData ExecTimeTelemetryData * @return whether there is result set or not * @throws SQLException if failed to execute sql * @throws SFException exception raised from Snowflake components @@ -842,6 +847,12 @@ public void close() { @Override public void cancel() throws SFException, SQLException { logger.trace("void cancel()", false); + cancel(CancellationReason.UNKNOWN); + } + + @Override + public void cancel(CancellationReason cancellationReason) throws SFException, SQLException { + logger.trace("void cancel(CancellationReason)", false); if (canceling.get()) { logger.debug("Query is already cancelled", false); @@ -866,7 +877,7 @@ public void cancel() throws SFException, SQLException { } // cancel the query on the server side if it has been issued - cancelHelper(this.sqlText, StmtUtil.SF_MEDIA_TYPE); + cancelHelper(this.sqlText, StmtUtil.SF_MEDIA_TYPE, cancellationReason); } } diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index a12f20ce3..de0eb3a87 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -953,11 +953,22 @@ 
private static String nullStringAsEmptyString(String value) { return value; } - /** Delete the id token cache */ + /** + * Delete the id token cache + * + * @param host The host string + * @param user The user + */ public static void deleteIdTokenCache(String host, String user) { CredentialManager.getInstance().deleteIdTokenCache(host, user); } + /** + * Delete the mfa token cache + * + * @param host The host string + * @param user The user + */ public static void deleteMfaTokenCache(String host, String user) { CredentialManager.getInstance().deleteMfaTokenCache(host, user); } @@ -1710,6 +1721,7 @@ enum TokenRequestType { * private link, do nothing. * * @param serverUrl The Snowflake URL includes protocol such as "https://" + * @throws IOException If exception encountered */ public static void resetOCSPUrlIfNecessary(String serverUrl) throws IOException { if (PrivateLinkDetector.isPrivateLink(serverUrl)) { diff --git a/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java b/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java index 3b371536d..31e6af391 100644 --- a/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java +++ b/src/main/java/net/snowflake/client/core/SnowflakeMutableProxyRoutePlanner.java @@ -29,6 +29,10 @@ public class SnowflakeMutableProxyRoutePlanner implements HttpRoutePlanner, Seri /** * @deprecated Use {@link #SnowflakeMutableProxyRoutePlanner(String, int, HttpProtocol, String)} * instead + * @param host host + * @param proxyPort proxy port + * @param proxyProtocol proxy protocol + * @param nonProxyHosts non-proxy hosts */ @Deprecated public SnowflakeMutableProxyRoutePlanner( @@ -36,6 +40,12 @@ public SnowflakeMutableProxyRoutePlanner( this(host, proxyPort, toSnowflakeProtocol(proxyProtocol), nonProxyHosts); } + /** + * @param host host + * @param proxyPort proxy port + * @param proxyProtocol proxy protocol + * @param nonProxyHosts non-proxy hosts + */ public 
SnowflakeMutableProxyRoutePlanner( String host, int proxyPort, HttpProtocol proxyProtocol, String nonProxyHosts) { proxyRoutePlanner = @@ -46,12 +56,20 @@ public SnowflakeMutableProxyRoutePlanner( this.protocol = proxyProtocol; } + /** + * Set non-proxy hosts + * + * @param nonProxyHosts non-proxy hosts + */ public void setNonProxyHosts(String nonProxyHosts) { this.nonProxyHosts = nonProxyHosts; proxyRoutePlanner = new SdkProxyRoutePlanner(host, proxyPort, toAwsProtocol(protocol), nonProxyHosts); } + /** + * @return non-proxy hosts string + */ public String getNonProxyHosts() { return nonProxyHosts; } diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java index 96fefe5dc..18b7ae7f7 100644 --- a/src/main/java/net/snowflake/client/core/StmtUtil.java +++ b/src/main/java/net/snowflake/client/core/StmtUtil.java @@ -270,6 +270,7 @@ public JsonNode getResult() { * submission, but continue the ping pong process. * * @param stmtInput input statement + * @param execTimeData ExecTimeTelemetryData * @return StmtOutput output statement * @throws SFException exception raised from Snowflake components * @throws SnowflakeSQLException exception raised from Snowflake components @@ -584,8 +585,6 @@ protected static String getQueryResult( /** * Issue get-result call to get query result given an in-progress response. * - *

- * * @param getResultPath path to results * @param stmtInput object with context information * @return results in string form @@ -645,8 +644,6 @@ protected static String getQueryResult(String getResultPath, StmtInput stmtInput /** * Issue get-result call to get query result given an in progress response. * - *

- * * @param queryId id of query to get results for * @param session the current session * @return results in JSON @@ -681,8 +678,23 @@ protected static JsonNode getQueryResultJSON(String queryId, SFSession session) * @param stmtInput input statement * @throws SFException if there is an internal exception * @throws SnowflakeSQLException if failed to cancel the statement + * @deprecated use {@link #cancel(StmtInput, CancellationReason)} instead */ + @Deprecated public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLException { + cancel(stmtInput, CancellationReason.UNKNOWN); + } + + /** + * Cancel a statement identifiable by a request id + * + * @param stmtInput input statement + * @param cancellationReason reason for the cancellation + * @throws SFException if there is an internal exception + * @throws SnowflakeSQLException if failed to cancel the statement + */ + public static void cancel(StmtInput stmtInput, CancellationReason cancellationReason) + throws SFException, SnowflakeSQLException { HttpPost httpRequest = null; AssertUtil.assertTrue( @@ -701,7 +713,7 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE try { URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl); - + logger.warn("Cancelling query {} with reason {}", stmtInput.requestId, cancellationReason); logger.debug("Aborting query: {}", stmtInput.sql); uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1); diff --git a/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java b/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java index c7054442c..855f128b2 100644 --- a/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/AbstractArrowVectorConverter.java @@ -43,9 +43,18 @@ abstract class AbstractArrowVectorConverter implements ArrowVectorConverter { /** Field names of the struct vectors used by timestamp */ public static 
final String FIELD_NAME_EPOCH = "epoch"; // seconds since epoch + /** Timezone index */ public static final String FIELD_NAME_TIME_ZONE_INDEX = "timezone"; // time zone index + + /** Fraction in nanoseconds */ public static final String FIELD_NAME_FRACTION = "fraction"; // fraction in nanoseconds + /** + * @param logicalTypeStr snowflake logical type of the target arrow vector. + * @param valueVector value vector + * @param vectorIndex value index + * @param context DataConversionContext + */ AbstractArrowVectorConverter( String logicalTypeStr, ValueVector valueVector, @@ -153,6 +162,11 @@ public BigDecimal toBigDecimal(int index) throws SFException { ErrorCode.INVALID_VALUE_CONVERT, logicalTypeStr, SnowflakeUtil.BIG_DECIMAL_STR, ""); } + /** + * True if should treat decimal as int type. + * + * @return true or false if decimal should be treated as int type. + */ boolean shouldTreatDecimalAsInt() { return shouldTreatDecimalAsInt; } diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java b/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java index 08ce23eec..ad9926eac 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrayConverter.java @@ -2,13 +2,22 @@ import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.arrow.tostringhelpers.ArrowArrayStringRepresentationBuilder; +import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeType; +import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.complex.ListVector; +/** Array type converter. 
*/ public class ArrayConverter extends AbstractArrowVectorConverter { private final ListVector vector; + /** + * @param valueVector ListVector + * @param vectorIndex vector index + * @param context DataConversionContext + */ public ArrayConverter(ListVector valueVector, int vectorIndex, DataConversionContext context) { super(SnowflakeType.ARRAY.name(), valueVector, vectorIndex, context); this.vector = valueVector; @@ -21,6 +30,25 @@ public Object toObject(int index) throws SFException { @Override public String toString(int index) throws SFException { - return vector.getObject(index).toString(); + FieldVector vectorUnpacked = vector.getChildrenFromFields().get(0); + SnowflakeType logicalType = + ArrowVectorConverterUtil.getSnowflakeTypeFromFieldMetadata(vectorUnpacked.getField()); + + ArrowArrayStringRepresentationBuilder builder = + new ArrowArrayStringRepresentationBuilder(logicalType); + + final ArrowVectorConverter converter; + + try { + converter = ArrowVectorConverterUtil.initConverter(vectorUnpacked, context, columnIndex); + } catch (SnowflakeSQLException e) { + return vector.getObject(index).toString(); + } + + for (int i = vector.getElementStartIndex(index); i < vector.getElementEndIndex(index); i++) { + builder.appendValue(converter.toString(i)); + } + + return builder.toString(); } } diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java index 0478b2996..5966447e8 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultChunkIndexSorter.java @@ -41,7 +41,7 @@ private void initIndices() { * This method is only used when sf-property sort is on * * @return sorted indices - * @throws SFException + * @throws SFException when exception encountered */ public IntVector sort() throws SFException { quickSort(0, resultChunk.get(0).getValueCount() - 1); 
diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java index 2ad5c3ef2..03d5c03e8 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java @@ -46,7 +46,7 @@ public static String getStringFormat(int scale) { /** * new method to get Date from integer * - * @param day + * @param day The day to convert. * @return Date */ public static Date getDate(int day) { @@ -57,11 +57,11 @@ public static Date getDate(int day) { /** * Method to get Date from integer using timezone offsets * - * @param day - * @param oldTz - * @param newTz - * @return - * @throws SFException + * @param day The day to convert. + * @param oldTz The old timezone. + * @param newTz The new timezone. + * @return Date + * @throws SFException if date value is invalid */ public static Date getDate(int day, TimeZone oldTz, TimeZone newTz) throws SFException { try { @@ -90,10 +90,10 @@ public static Date getDate(int day, TimeZone oldTz, TimeZone newTz) throws SFExc /** * simplified moveToTimeZone method * - * @param milliSecsSinceEpoch - * @param oldTZ - * @param newTZ - * @return offset + * @param milliSecsSinceEpoch milliseconds since Epoch + * @param oldTZ old timezone + * @param newTZ new timezone + * @return offset offset value */ private static long moveToTimeZoneOffset( long milliSecsSinceEpoch, TimeZone oldTZ, TimeZone newTZ) { @@ -128,9 +128,9 @@ private static long moveToTimeZoneOffset( /** * move the input timestamp form oldTZ to newTZ * - * @param ts - * @param oldTZ - * @param newTZ + * @param ts Timestamp + * @param oldTZ Old timezone + * @param newTZ New timezone * @return timestamp in newTZ */ public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone newTZ) { @@ -149,7 +149,7 @@ public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone ne * * @param epoch the value 
since epoch time * @param scale the scale of the value - * @return + * @return Timestamp */ public static Timestamp toJavaTimestamp(long epoch, int scale) { return toJavaTimestamp(epoch, scale, TimeZone.getDefault(), false); @@ -160,7 +160,9 @@ public static Timestamp toJavaTimestamp(long epoch, int scale) { * * @param epoch the value since epoch time * @param scale the scale of the value - * @return + * @param sessionTimezone the session timezone + * @param useSessionTimezone should the session timezone be used + * @return Timestamp */ @SnowflakeJdbcInternalApi public static Timestamp toJavaTimestamp( @@ -178,8 +180,8 @@ public static Timestamp toJavaTimestamp( /** * check whether the input seconds out of the scope of Java timestamp * - * @param seconds - * @return + * @param seconds long value to check + * @return true if value is out of the scope of Java timestamp. */ public static boolean isTimestampOverflow(long seconds) { return seconds < Long.MIN_VALUE / powerOfTen(3) || seconds > Long.MAX_VALUE / powerOfTen(3); @@ -191,10 +193,10 @@ public static boolean isTimestampOverflow(long seconds) { * represents as epoch = -1233 and fraction = 766,000,000 For example, -0.13 represents as epoch = * -1 and fraction = 870,000,000 * - * @param seconds - * @param fraction - * @param timezone - The timezone being used for the toString() formatting - * @param timezone - + * @param seconds seconds value + * @param fraction fraction + * @param timezone The timezone being used for the toString() formatting + * @param useSessionTz boolean useSessionTz * @return java timestamp object */ public static Timestamp createTimestamp( diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java index f61e9954d..1a1cff542 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverter.java @@ -16,7 
+16,7 @@ public interface ArrowVectorConverter { /** * Set to true when time value should be displayed in wallclock time (no timezone offset) * - * @param useSessionTimezone + * @param useSessionTimezone boolean value indicating if there is a timezone offset. */ void setUseSessionTimezone(boolean useSessionTimezone); @@ -160,6 +160,8 @@ public interface ArrowVectorConverter { Object toObject(int index) throws SFException; /** + * Set to true if NTZ timestamp should be set to UTC + * * @param isUTC true or false value of whether NTZ timestamp should be set to UTC */ void setTreatNTZAsUTC(boolean isUTC); diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java index 45185072f..a6799b223 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrowVectorConverterUtil.java @@ -9,12 +9,14 @@ import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.client.jdbc.SnowflakeType; import net.snowflake.common.core.SqlState; +import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.complex.FixedSizeListVector; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.complex.MapVector; import org.apache.arrow.vector.complex.StructVector; import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.pojo.Field; @SnowflakeJdbcInternalApi public final class ArrowVectorConverterUtil { @@ -34,13 +36,20 @@ public static int getScale(ValueVector vector, SFBaseSession session) } } + public static SnowflakeType getSnowflakeTypeFromFieldMetadata(Field field) { + Map customMeta = field.getMetadata(); + if (customMeta != null && customMeta.containsKey("logicalType")) { + return SnowflakeType.valueOf(customMeta.get("logicalType")); + } + + return null; + } + /** * Given 
an arrow vector (a single column in a single record batch), return an arrow vector * converter. Note, converter is built on top of arrow vector, so that arrow data can be converted * back to java data * - *

- * *

Arrow converter mappings for Snowflake fixed-point numbers * ----------------------------------------------------------------------------------------- Max * position and scale Converter @@ -57,6 +66,7 @@ public static int getScale(ValueVector vector, SFBaseSession session) * @param session SFBaseSession for purposes of logging * @param idx the index of the vector in its batch * @return A converter on top og the vector + * @throws SnowflakeSQLException if error encountered */ public static ArrowVectorConverter initConverter( ValueVector vector, DataConversionContext context, SFBaseSession session, int idx) @@ -65,12 +75,11 @@ public static ArrowVectorConverter initConverter( Types.MinorType type = Types.getMinorTypeForArrowType(vector.getField().getType()); // each column's metadata - Map customMeta = vector.getField().getMetadata(); + SnowflakeType st = getSnowflakeTypeFromFieldMetadata(vector.getField()); if (type == Types.MinorType.DECIMAL) { // Note: Decimal vector is different from others return new DecimalToScaledFixedConverter(vector, idx, context); - } else if (!customMeta.isEmpty()) { - SnowflakeType st = SnowflakeType.valueOf(customMeta.get("logicalType")); + } else if (st != null) { switch (st) { case ANY: case CHAR: @@ -229,4 +238,10 @@ public static ArrowVectorConverter initConverter( "Unexpected Arrow Field for ", type.toString()); } + + public static ArrowVectorConverter initConverter( + FieldVector vector, DataConversionContext context, int columnIndex) + throws SnowflakeSQLException { + return initConverter(vector, context, context.getSession(), columnIndex); + } } diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java index 71bd123a0..13a026e4f 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToFixedConverter.java @@ -23,6 +23,11 @@ public class 
BigIntToFixedConverter extends AbstractArrowVectorConverter { protected ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public BigIntToFixedConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super( diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java index 74d01f98a..87b3d43d1 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimeConverter.java @@ -18,10 +18,16 @@ import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.ValueVector; +/** BigInt to Time type converter. */ public class BigIntToTimeConverter extends AbstractArrowVectorConverter { private BigIntVector bigIntVector; protected ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public BigIntToTimeConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIME.name(), fieldVector, columnIndex, context); @@ -49,6 +55,15 @@ public Time toTime(int index) throws SFException { } } + /** + * Return the long value as a Time object. 
+ * + * @param value long value to represent as Time + * @param scale the scale + * @param useSessionTimezone boolean indicating use of session timezone + * @return Time object representing the value + * @throws SFException invalid data conversion + */ public static Time getTime(long value, int scale, boolean useSessionTimezone) throws SFException { SFTime sfTime = SFTime.fromFractionalSeconds(value, scale); Time ts = diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java index e2bba45ab..774a0cd74 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java @@ -23,6 +23,11 @@ public class BigIntToTimestampLTZConverter extends AbstractArrowVectorConverter private BigIntVector bigIntVector; private ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public BigIntToTimestampLTZConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIMESTAMP_LTZ.name(), fieldVector, columnIndex, context); @@ -97,7 +102,7 @@ public boolean toBoolean(int index) throws SFException { * @param val epoch * @param scale scale * @return Timestamp value without timezone take into account - * @throws SFException + * @throws SFException if exception encountered */ @Deprecated public static Timestamp getTimestamp(long val, int scale) throws SFException { diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java index cec64d59e..82d107209 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java +++ 
b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverter.java @@ -24,6 +24,11 @@ public class BigIntToTimestampNTZConverter extends AbstractArrowVectorConverter private static final TimeZone NTZ = TimeZone.getTimeZone("UTC"); private ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public BigIntToTimestampNTZConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIMESTAMP_NTZ.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java b/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java index 2f5a8cf83..cddc7b3b4 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/BitToBooleanConverter.java @@ -14,6 +14,11 @@ public class BitToBooleanConverter extends AbstractArrowVectorConverter { private BitVector bitVector; + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public BitToBooleanConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.BOOLEAN.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/DateConverter.java b/src/main/java/net/snowflake/client/core/arrow/DateConverter.java index a6f50e388..7d18417e2 100644 --- a/src/main/java/net/snowflake/client/core/arrow/DateConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/DateConverter.java @@ -31,6 +31,12 @@ public DateConverter(ValueVector fieldVector, int columnIndex, DataConversionCon this.useDateFormat = false; } + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + * @param 
useDateFormat boolean indicates whether to use session timezone + */ public DateConverter( ValueVector fieldVector, int columnIndex, diff --git a/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java index 259913d95..b6d9b7a0b 100644 --- a/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/DecimalToScaledFixedConverter.java @@ -17,6 +17,11 @@ public class DecimalToScaledFixedConverter extends AbstractArrowVectorConverter { protected DecimalVector decimalVector; + /** + * @param fieldVector ValueVector + * @param vectorIndex vector index + * @param context DataConversionContext + */ public DecimalToScaledFixedConverter( ValueVector fieldVector, int vectorIndex, DataConversionContext context) { super( diff --git a/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java b/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java index d2f925867..731407861 100644 --- a/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/DoubleToRealConverter.java @@ -13,10 +13,16 @@ import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.ValueVector; +/** Convert from Arrow Float8Vector to Real. 
*/ public class DoubleToRealConverter extends AbstractArrowVectorConverter { private Float8Vector float8Vector; private ByteBuffer byteBuf = ByteBuffer.allocate(Float8Vector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public DoubleToRealConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.REAL.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java index 8055081ef..8cca3c930 100644 --- a/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/IntToFixedConverter.java @@ -18,6 +18,11 @@ public class IntToFixedConverter extends AbstractArrowVectorConverter { protected int sfScale; protected ByteBuffer byteBuf = ByteBuffer.allocate(IntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public IntToFixedConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super( diff --git a/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java index d704e31bd..27ca0b4ad 100644 --- a/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/IntToTimeConverter.java @@ -18,10 +18,16 @@ import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.ValueVector; +/** Convert from Arrow IntVector to Time. 
*/ public class IntToTimeConverter extends AbstractArrowVectorConverter { private IntVector intVector; private ByteBuffer byteBuf = ByteBuffer.allocate(IntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public IntToTimeConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIME.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/MapConverter.java b/src/main/java/net/snowflake/client/core/arrow/MapConverter.java index 433792294..4099cd5fb 100644 --- a/src/main/java/net/snowflake/client/core/arrow/MapConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/MapConverter.java @@ -4,14 +4,23 @@ import java.util.stream.Collectors; import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.arrow.tostringhelpers.ArrowObjectStringRepresentationBuilder; +import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeType; +import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.complex.MapVector; import org.apache.arrow.vector.util.JsonStringHashMap; +/** Arrow MapVector converter. 
*/ public class MapConverter extends AbstractArrowVectorConverter { private final MapVector vector; + /** + * @param valueVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public MapConverter(MapVector valueVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.MAP.name(), valueVector, columnIndex, context); this.vector = valueVector; @@ -28,6 +37,30 @@ public Object toObject(int index) throws SFException { @Override public String toString(int index) throws SFException { - return vector.getObject(index).toString(); + ArrowObjectStringRepresentationBuilder builder = new ArrowObjectStringRepresentationBuilder(); + + FieldVector vectorUnpacked = vector.getChildrenFromFields().get(0); + + FieldVector keys = vectorUnpacked.getChildrenFromFields().get(0); + FieldVector values = vectorUnpacked.getChildrenFromFields().get(1); + final ArrowVectorConverter keyConverter; + final ArrowVectorConverter valueConverter; + + SnowflakeType valueLogicalType = + ArrowVectorConverterUtil.getSnowflakeTypeFromFieldMetadata(values.getField()); + + try { + keyConverter = ArrowVectorConverterUtil.initConverter(keys, context, columnIndex); + valueConverter = ArrowVectorConverterUtil.initConverter(values, context, columnIndex); + } catch (SnowflakeSQLException e) { + return vector.getObject(index).toString(); + } + + for (int i = vector.getElementStartIndex(index); i < vector.getElementEndIndex(index); i++) { + builder.appendKeyValue( + keyConverter.toString(i), valueConverter.toString(i), valueLogicalType); + } + + return builder.toString(); } } diff --git a/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java index bfa398d88..13aa87db5 100644 --- a/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/SmallIntToFixedConverter.java @@ -18,6 +18,11 
@@ public class SmallIntToFixedConverter extends AbstractArrowVectorConverter { protected SmallIntVector smallIntVector; ByteBuffer byteBuf = ByteBuffer.allocate(SmallIntVector.TYPE_WIDTH); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public SmallIntToFixedConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super( diff --git a/src/main/java/net/snowflake/client/core/arrow/StructConverter.java b/src/main/java/net/snowflake/client/core/arrow/StructConverter.java index 84ccd7c0f..4c0516c51 100644 --- a/src/main/java/net/snowflake/client/core/arrow/StructConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/StructConverter.java @@ -3,7 +3,10 @@ import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.core.arrow.tostringhelpers.ArrowObjectStringRepresentationBuilder; +import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeType; +import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.complex.StructVector; @SnowflakeJdbcInternalApi @@ -23,6 +26,19 @@ public Object toObject(int index) throws SFException { @Override public String toString(int index) throws SFException { - return structVector.getObject(index).toString(); + ArrowObjectStringRepresentationBuilder builder = new ArrowObjectStringRepresentationBuilder(); + for (String childName : structVector.getChildFieldNames()) { + FieldVector fieldVector = structVector.getChild(childName); + SnowflakeType logicalType = + ArrowVectorConverterUtil.getSnowflakeTypeFromFieldMetadata(fieldVector.getField()); + try { + ArrowVectorConverter converter = + ArrowVectorConverterUtil.initConverter(fieldVector, context, columnIndex); + builder.appendKeyValue(childName, converter.toString(index), logicalType); + } catch 
(SnowflakeSQLException e) { + return structVector.getObject(index).toString(); + } + } + return builder.toString(); } } diff --git a/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java index 88d3e53ba..929045dd1 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverter.java @@ -29,6 +29,11 @@ public class ThreeFieldStructToTimestampTZConverter extends AbstractArrowVectorC private IntVector timeZoneIndices; private TimeZone timeZone = TimeZone.getTimeZone("UTC"); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public ThreeFieldStructToTimestampTZConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIMESTAMP_LTZ.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java b/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java index 26c90c228..ace873f7f 100644 --- a/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/TinyIntToFixedConverter.java @@ -17,6 +17,11 @@ public class TinyIntToFixedConverter extends AbstractArrowVectorConverter { protected TinyIntVector tinyIntVector; protected int sfScale = 0; + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public TinyIntToFixedConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super( diff --git a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java 
b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java index 86eeb93b8..6e3904751 100644 --- a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverter.java @@ -26,6 +26,11 @@ public class TwoFieldStructToTimestampLTZConverter extends AbstractArrowVectorCo private BigIntVector epochs; private IntVector fractions; + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public TwoFieldStructToTimestampLTZConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIMESTAMP_LTZ.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java index f4d0d9417..30467169e 100644 --- a/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverter.java @@ -27,6 +27,11 @@ public class TwoFieldStructToTimestampNTZConverter extends AbstractArrowVectorCo private static final TimeZone NTZ = TimeZone.getTimeZone("UTC"); + /** + * @param fieldVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public TwoFieldStructToTimestampNTZConverter( ValueVector fieldVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TIMESTAMP_NTZ.name(), fieldVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java b/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java index f45e561f4..2c4774fb0 100644 --- a/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java +++ 
b/src/main/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverter.java @@ -11,9 +11,15 @@ import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.VarBinaryVector; +/** Converter from Arrow VarBinaryVector to Binary. */ public class VarBinaryToBinaryConverter extends AbstractArrowVectorConverter { private VarBinaryVector varBinaryVector; + /** + * @param valueVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public VarBinaryToBinaryConverter( ValueVector valueVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.BINARY.name(), valueVector, columnIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java b/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java index 8a6ce64e5..b53595d42 100644 --- a/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/VarCharConverter.java @@ -22,6 +22,11 @@ public class VarCharConverter extends AbstractArrowVectorConverter { private VarCharVector varCharVector; + /** + * @param valueVector ValueVector + * @param columnIndex column index + * @param context DataConversionContext + */ public VarCharConverter(ValueVector valueVector, int columnIndex, DataConversionContext context) { super(SnowflakeType.TEXT.name(), valueVector, columnIndex, context); this.varCharVector = (VarCharVector) valueVector; diff --git a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java index ae7a492a0..8d1ae2942 100644 --- a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java @@ -6,10 +6,16 @@ import net.snowflake.client.jdbc.SnowflakeType; import org.apache.arrow.vector.complex.FixedSizeListVector; +/** Arrow FixedSizeListVector converter. 
*/ public class VectorTypeConverter extends AbstractArrowVectorConverter { private final FixedSizeListVector vector; + /** + * @param valueVector ValueVector + * @param vectorIndex vector index + * @param context DataConversionContext + */ public VectorTypeConverter( FixedSizeListVector valueVector, int vectorIndex, DataConversionContext context) { super(SnowflakeType.ARRAY.name(), valueVector, vectorIndex, context); diff --git a/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowArrayStringRepresentationBuilder.java b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowArrayStringRepresentationBuilder.java new file mode 100644 index 000000000..7ee6a07aa --- /dev/null +++ b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowArrayStringRepresentationBuilder.java @@ -0,0 +1,19 @@ +package net.snowflake.client.core.arrow.tostringhelpers; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeType; + +@SnowflakeJdbcInternalApi +public class ArrowArrayStringRepresentationBuilder extends ArrowStringRepresentationBuilderBase { + + private final SnowflakeType valueType; + + public ArrowArrayStringRepresentationBuilder(SnowflakeType valueType) { + super(",", "[", "]"); + this.valueType = valueType; + } + + public ArrowStringRepresentationBuilderBase appendValue(String value) { + return add(quoteIfNeeded(value, valueType)); + } +} diff --git a/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowObjectStringRepresentationBuilder.java b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowObjectStringRepresentationBuilder.java new file mode 100644 index 000000000..53513836b --- /dev/null +++ b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowObjectStringRepresentationBuilder.java @@ -0,0 +1,21 @@ +package net.snowflake.client.core.arrow.tostringhelpers; + +import java.util.StringJoiner; +import 
net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeType; + +@SnowflakeJdbcInternalApi +public class ArrowObjectStringRepresentationBuilder extends ArrowStringRepresentationBuilderBase { + + public ArrowObjectStringRepresentationBuilder() { + super(",", "{", "}"); + } + + public ArrowStringRepresentationBuilderBase appendKeyValue( + String key, String value, SnowflakeType valueType) { + StringJoiner joiner = new StringJoiner(": "); + joiner.add('"' + key + '"'); + joiner.add(quoteIfNeeded(value, valueType)); + return add(joiner.toString()); + } +} diff --git a/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowStringRepresentationBuilderBase.java b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowStringRepresentationBuilderBase.java new file mode 100644 index 000000000..cc25bb7e0 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/arrow/tostringhelpers/ArrowStringRepresentationBuilderBase.java @@ -0,0 +1,65 @@ +package net.snowflake.client.core.arrow.tostringhelpers; + +import java.util.HashSet; +import java.util.Set; +import java.util.StringJoiner; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeType; + +/** + * StringBuilder like class to aggregate the string representation of snowflake Native ARROW + * structured types as JSON one-liners. Provides some additional snowflake-specific logic in order + * to determine whether the value should be quoted or case should be changed. 
+ */ +@SnowflakeJdbcInternalApi +public abstract class ArrowStringRepresentationBuilderBase { + private final StringJoiner joiner; + private static final Set quotableTypes; + + static { + quotableTypes = new HashSet<>(); + quotableTypes.add(SnowflakeType.ANY); + quotableTypes.add(SnowflakeType.CHAR); + quotableTypes.add(SnowflakeType.TEXT); + quotableTypes.add(SnowflakeType.VARIANT); + quotableTypes.add(SnowflakeType.BINARY); + quotableTypes.add(SnowflakeType.DATE); + quotableTypes.add(SnowflakeType.TIME); + quotableTypes.add(SnowflakeType.TIMESTAMP_LTZ); + quotableTypes.add(SnowflakeType.TIMESTAMP_NTZ); + quotableTypes.add(SnowflakeType.TIMESTAMP_TZ); + } + + public ArrowStringRepresentationBuilderBase(String delimiter, String prefix, String suffix) { + joiner = new StringJoiner(delimiter, prefix, suffix); + } + + protected ArrowStringRepresentationBuilderBase add(String string) { + joiner.add(string); + return this; + } + + private boolean shouldQuoteValue(SnowflakeType type) { + return quotableTypes.contains(type); + } + + protected String quoteIfNeeded(String string, SnowflakeType type) { + // Turn Boolean string representations lowercase to make the output JSON-compatible + // this should be changed on the converter level, but it would be a breaking change thus + // for now only structured types will be valid JSONs while in NATIVE ARROW mode + if (type == SnowflakeType.BOOLEAN) { + string = string.toLowerCase(); + } + + if (shouldQuoteValue(type)) { + return '"' + string + '"'; + } + + return string; + } + + @Override + public String toString() { + return joiner.toString(); + } +} diff --git a/src/main/java/net/snowflake/client/core/bind/BindUploader.java b/src/main/java/net/snowflake/client/core/bind/BindUploader.java index 6b901da44..ed1f11249 100644 --- a/src/main/java/net/snowflake/client/core/bind/BindUploader.java +++ b/src/main/java/net/snowflake/client/core/bind/BindUploader.java @@ -187,7 +187,13 @@ public static synchronized BindUploader 
newInstance(SFBaseSession session, Strin return new BindUploader(session, stageDir); } - /** Wrapper around upload() with default compression to true. */ + /** + * Wrapper around upload() with default compression to true. + * + * @param bindValues the bind map to upload + * @throws BindException if there is an error when uploading bind values + * @throws SQLException if any error occurs + */ public void upload(Map bindValues) throws BindException, SQLException { upload(bindValues, true); @@ -199,8 +205,8 @@ public void upload(Map bindValues) * * @param bindValues the bind map to upload * @param compressData whether or not to compress data - * @throws BindException - * @throws SQLException + * @throws BindException if there is an error when uploading bind values + * @throws SQLException if any error occurs */ public void upload(Map bindValues, boolean compressData) throws BindException, SQLException { @@ -254,6 +260,7 @@ public void upload(Map bindValues, boolean compress * @param destFileName destination file name to use * @param compressData whether compression is requested fore uploading data * @throws SQLException raises if any error occurs + * @throws BindException if there is an error when uploading bind values */ private void uploadStreamInternal( InputStream inputStream, String destFileName, boolean compressData) diff --git a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java index c080a2f36..896437def 100644 --- a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java +++ b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java @@ -147,12 +147,16 @@ public void freeData() { } /** + * @param dataConversionContext DataConversionContext * @return an iterator to iterate over current chunk */ public ArrowChunkIterator getIterator(DataConversionContext dataConversionContext) { return new ArrowChunkIterator(dataConversionContext); } + /** + * @return an empty iterator to 
iterate over current chunk + */ public static ArrowChunkIterator getEmptyChunkIterator() { return new EmptyArrowResultChunk().new ArrowChunkIterator(null); } @@ -209,7 +213,12 @@ private List initConverters(List vectors) return converters; } - /** advance to next row */ + /** + * Advance to next row. + * + * @return true if there is a next row + * @throws SnowflakeSQLException if an error is encountered. + */ public boolean next() throws SnowflakeSQLException { currentRowInRecordBatch++; if (currentRowInRecordBatch < rowCountInCurrentRecordBatch) { @@ -279,6 +288,8 @@ public int getCurrentRowInRecordBatch() { /** * merge arrow result chunk with more than one batches into one record batch (Only used for the * first chunk when client side sorting is required) + * + * @throws SnowflakeSQLException if failed to merge first result chunk */ public void mergeBatchesIntoOne() throws SnowflakeSQLException { try { diff --git a/src/main/java/net/snowflake/client/jdbc/CompressedStreamFactory.java b/src/main/java/net/snowflake/client/jdbc/CompressedStreamFactory.java new file mode 100644 index 000000000..ebb376db9 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/CompressedStreamFactory.java @@ -0,0 +1,38 @@ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.core.Constants.MB; +import static net.snowflake.common.core.FileCompressionType.GZIP; +import static net.snowflake.common.core.FileCompressionType.ZSTD; + +import com.github.luben.zstd.ZstdInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.zip.GZIPInputStream; +import net.snowflake.common.core.SqlState; +import org.apache.http.Header; + +class CompressedStreamFactory { + + private static final int STREAM_BUFFER_SIZE = MB; + + /** + * Determine the format of the response, if it is not either plain text or gzip, raise an error. 
+ */ + public InputStream createBasedOnEncodingHeader(InputStream is, Header encoding) + throws IOException, SnowflakeSQLException { + if (encoding != null) { + if (GZIP.name().equalsIgnoreCase(encoding.getValue())) { + return new GZIPInputStream(is, STREAM_BUFFER_SIZE); + } else if (ZSTD.name().equalsIgnoreCase(encoding.getValue())) { + return new ZstdInputStream(is); + } else { + throw new SnowflakeSQLException( + SqlState.INTERNAL_ERROR, + ErrorCode.INTERNAL_ERROR.getMessageCode(), + "Exception: unexpected compression got " + encoding.getValue()); + } + } else { + return DefaultResultStreamProvider.detectGzipAndGetStream(is); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java index 3ee556bb4..e7a1e8a0c 100644 --- a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java +++ b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java @@ -1,7 +1,5 @@ package net.snowflake.client.jdbc; -import static net.snowflake.client.core.Constants.MB; - import java.io.IOException; import java.io.InputStream; import java.io.PushbackInputStream; @@ -34,7 +32,11 @@ public class DefaultResultStreamProvider implements ResultStreamProvider { // SSE-C algorithm value private static final String SSE_C_AES = "AES256"; - private static final int STREAM_BUFFER_SIZE = MB; + private CompressedStreamFactory compressedStreamFactory; + + public DefaultResultStreamProvider() { + this.compressedStreamFactory = new CompressedStreamFactory(); + } @Override public InputStream getInputStream(ChunkDownloadContext context) throws Exception { @@ -71,9 +73,11 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception InputStream inputStream; final HttpEntity entity = response.getEntity(); + Header encoding = response.getFirstHeader("Content-Encoding"); try { - // read the chunk data - inputStream = 
detectContentEncodingAndGetInputStream(response, entity.getContent()); + // create stream based on compression type + inputStream = + compressedStreamFactory.createBasedOnEncodingHeader(entity.getContent(), encoding); } catch (Exception ex) { logger.error("Failed to decompress data: {}", response); @@ -144,28 +148,6 @@ else if (context.getQrmk() != null) { return response; } - private InputStream detectContentEncodingAndGetInputStream(HttpResponse response, InputStream is) - throws IOException, SnowflakeSQLException { - InputStream inputStream = is; // Determine the format of the response, if it is not - // either plain text or gzip, raise an error. - Header encoding = response.getFirstHeader("Content-Encoding"); - if (encoding != null) { - if ("gzip".equalsIgnoreCase(encoding.getValue())) { - /* specify buffer size for GZIPInputStream */ - inputStream = new GZIPInputStream(is, STREAM_BUFFER_SIZE); - } else { - throw new SnowflakeSQLException( - SqlState.INTERNAL_ERROR, - ErrorCode.INTERNAL_ERROR.getMessageCode(), - "Exception: unexpected compression got " + encoding.getValue()); - } - } else { - inputStream = detectGzipAndGetStream(is); - } - - return inputStream; - } - public static InputStream detectGzipAndGetStream(InputStream is) throws IOException { PushbackInputStream pb = new PushbackInputStream(is, 2); byte[] signature = new byte[2]; diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java index 6bb62c82f..67151bea8 100644 --- a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java +++ b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java @@ -81,6 +81,7 @@ public DefaultSFConnectionHandler(SnowflakeConnectString conStr, boolean skipOpe * schemaName from the URL if it is specified there. 
* * @param conStr Connection string object + * @return a map containing accountName, databaseName and schemaName if specified */ public static Map mergeProperties(SnowflakeConnectString conStr) { conStr.getParameters().remove("SSL"); diff --git a/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java b/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java index 743447ef0..a40976461 100644 --- a/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java +++ b/src/main/java/net/snowflake/client/jdbc/QueryStatusV2.java @@ -127,7 +127,11 @@ public String getWarehouseServerType() { return warehouseServerType; } - /** To preserve compatibility with {@link QueryStatus} */ + /** + * To preserve compatibility with {@link QueryStatus} + * + * @return name + */ public String getDescription() { return name; } diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java index 5be46c5de..35e61efd9 100644 --- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java +++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java @@ -106,6 +106,7 @@ public static CloseableHttpResponse execute( * @param includeRequestGuid whether to include request_guid parameter * @param retryHTTP403 whether to retry on HTTP 403 or not * @param noRetry should we disable retry on non-successful http resp code + * @param execTimeData ExecTimeTelemetryData * @return HttpResponse Object get from server * @throws net.snowflake.client.jdbc.SnowflakeSQLException Request timeout Exception or Illegal * State Exception i.e. 
connection is already shutdown etc @@ -283,7 +284,14 @@ public static CloseableHttpResponse execute( // if an SSL issue occurs like an SSLHandshakeException then fail // immediately and stop retrying the requests - throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, ex.getMessage()); + String formattedMsg = + ex.getMessage() + + "\n" + + "Verify that the hostnames and portnumbers in SYSTEM$ALLOWLIST are added to your firewall's allowed list.\n" + + "To troubleshoot your connection further, you can refer to this article:\n" + + "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview"; + + throw new SnowflakeSQLLoggedException(null, ErrorCode.NETWORK_ERROR, ex, formattedMsg); } catch (Exception ex) { diff --git a/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java b/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java index 795cf94ff..4cc748876 100644 --- a/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java +++ b/src/main/java/net/snowflake/client/jdbc/ResultJsonParserV2.java @@ -59,6 +59,10 @@ public void startParsing(JsonResultChunk resultChunk, SFBaseSession session) /** * Check if the chunk has been parsed correctly. 
After calling this it is safe to acquire the * output data + * + * @param in byte buffer + * @param session SFBaseSession + * @throws SnowflakeSQLException if parsing fails */ public void endParsing(ByteBuffer in, SFBaseSession session) throws SnowflakeSQLException { continueParsingInternal(in, true, session); @@ -79,6 +83,9 @@ public void endParsing(ByteBuffer in, SFBaseSession session) throws SnowflakeSQL * * @param in readOnly byteBuffer backed by an array (the data to be reed is from position to * limit) + * @param session SFBaseSession + * @return int remaining number of elements in byteBuffer + * @throws SnowflakeSQLException if an error is encountered during parsing */ public int continueParsing(ByteBuffer in, SFBaseSession session) throws SnowflakeSQLException { if (state == State.UNINITIALIZED) { @@ -95,6 +102,7 @@ public int continueParsing(ByteBuffer in, SFBaseSession session) throws Snowflak /** * @param in readOnly byteBuffer backed by an array (the data is from position to limit) * @param lastData If true, this signifies this is the last data in parsing + * @param session SFBaseSession * @throws SnowflakeSQLException Will be thrown if parsing the chunk data fails */ private void continueParsingInternal(ByteBuffer in, boolean lastData, SFBaseSession session) diff --git a/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java index 64297ff57..959754fd9 100644 --- a/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java +++ b/src/main/java/net/snowflake/client/jdbc/SFConnectionHandler.java @@ -17,25 +17,47 @@ public interface SFConnectionHandler { /** - * Whether this Connection supports asynchronous queries. If yes, createAsyncResultSet may be - * called. + * @return Whether this Connection supports asynchronous queries. If yes, createAsyncResultSet may + * be called. 
*/ boolean supportsAsyncQuery(); - /** Initializes the SnowflakeConnection */ + /** + * Initializes the SnowflakeConnection + * + * @param url url string + * @param info connection parameters + * @throws SQLException if any error is encountered + */ void initializeConnection(String url, Properties info) throws SQLException; - /** Gets the SFBaseSession implementation for this connection implementation */ + /** + * @return Gets the SFBaseSession implementation for this connection implementation + */ SFBaseSession getSFSession(); - /** Returns the SFStatementInterface implementation for this connection implementation */ + /** + * @return Returns the SFStatementInterface implementation for this connection implementation + * @throws SQLException if any error occurs + */ SFBaseStatement getSFStatement() throws SQLException; - /** Creates a result set from a query id. */ + /** + * Creates a result set from a query id. + * + * @param queryID the query ID + * @param statement Statement object + * @return ResultSet + * @throws SQLException if any error occurs + */ ResultSet createResultSet(String queryID, Statement statement) throws SQLException; /** - * Creates a SnowflakeResultSet from a base SFBaseResultSet for this connection implementation. + * @param resultSet SFBaseResultSet + * @param statement Statement + * @return Creates a SnowflakeResultSet from a base SFBaseResultSet for this connection + * implementation. + * @throws SQLException if an error occurs */ SnowflakeBaseResultSet createResultSet(SFBaseResultSet resultSet, Statement statement) throws SQLException; @@ -43,6 +65,11 @@ SnowflakeBaseResultSet createResultSet(SFBaseResultSet resultSet, Statement stat /** * Creates an asynchronous result set from a base SFBaseResultSet for this connection * implementation. 
+ * + * @param resultSet SFBaseResultSet + * @param statement Statement + * @return An asynchronous result set from SFBaseResultSet + * @throws SQLException if an error occurs */ SnowflakeBaseResultSet createAsyncResultSet(SFBaseResultSet resultSet, Statement statement) throws SQLException; @@ -50,6 +77,9 @@ SnowflakeBaseResultSet createAsyncResultSet(SFBaseResultSet resultSet, Statement /** * @param command The command to parse for this file transfer (e.g., PUT/GET) * @param statement The statement to use for this file transfer + * @return SFBaseFileTransferAgent + * @throws SQLNonTransientConnectionException if a connection error occurs + * @throws SnowflakeSQLException if any other exception occurs */ SFBaseFileTransferAgent getFileTransferAgent(String command, SFBaseStatement statement) throws SQLNonTransientConnectionException, SnowflakeSQLException; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index d9149412e..ced00e325 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -93,6 +93,7 @@ private static SFBaseSession maybeGetSession(Statement statement) { * * @param resultSetSerializable The result set serializable object which includes all metadata to * create the result set + * @throws SQLException if an error occurs */ public SnowflakeBaseResultSet(SnowflakeResultSetSerializableV1 resultSetSerializable) throws SQLException { @@ -108,7 +109,7 @@ public SnowflakeBaseResultSet(SnowflakeResultSetSerializableV1 resultSetSerializ /** * This should never be used. 
Simply needed this for SFAsynchronousResult subclass * - * @throws SQLException + * @throws SQLException if an error occurs */ protected SnowflakeBaseResultSet() throws SQLException { this.resultSetType = 0; @@ -139,6 +140,14 @@ protected void raiseSQLExceptionIfResultSetIsClosed() throws SQLException { @Override public abstract byte[] getBytes(int columnIndex) throws SQLException; + /** + * Get Date value + * + * @param columnIndex column index + * @param tz timezone + * @return Date value at column index + * @throws SQLException if data at column index is incompatible with Date type + */ public abstract Date getDate(int columnIndex, TimeZone tz) throws SQLException; private boolean getGetDateUseNullTimezone() { @@ -168,6 +177,14 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException { return getTimestamp(columnIndex, (TimeZone) null); } + /** + * Get timestamp value + * + * @param columnIndex column index + * @param tz timezone + * @return timestamp value at column index + * @throws SQLException if data at column index is incompatible with timestamp + */ public abstract Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SQLException; @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java index 6db9aede0..5163f3299 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java @@ -212,6 +212,7 @@ public void uncaughtException(Thread t, Throwable e) { * * @param resultSetSerializable the result set serializable object which includes required * metadata to start chunk downloader + * @throws SnowflakeSQLException if an error is encountered */ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerializable) throws SnowflakeSQLException { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java index 10f06dafa..13bad3195 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java @@ -13,21 +13,21 @@ /** * (Optional) The name for a column in database, * - *

The default value is empty string. Provided name can override SqlData field name + * @return The default value is empty string. Provided name can override SqlData field name. */ String name() default ""; /** * (Optional) The snowflake type for a column * - *

The default value is empty string Provided type can override default type + * @return The default value is empty string Provided type can override default type. */ String type() default ""; /** * (Optional) The snowflake nullable flag for a column * - *

The default value is true Provided value can override default nullable value + * @return The default value is true Provided value can override default nullable value. */ boolean nullable() default true; @@ -37,7 +37,8 @@ * *

Applies only to columns of exact varchar and binary type. * - *

The default value {@code -1} indicates that a provider-determined length should be inferred. + * @return The default value {@code -1} indicates that a provider-determined length should be + * inferred. */ int length() default -1; /** @@ -46,8 +47,8 @@ * *

Applies only to columns of exact varchar and binary type. * - *

The default value {@code -1} indicates that a provider-determined byteLength should be - * inferred. + * @return The default value {@code -1} indicates that a provider-determined byteLength should be + * inferred. */ int byteLength() default -1; @@ -57,8 +58,8 @@ * *

Applies only to columns of exact numeric type. * - *

The default value {@code -1} indicates that a provider-determined precision should be - * inferred. + * @return The default value {@code -1} indicates that a provider-determined precision should be + * inferred. */ int precision() default -1; @@ -68,7 +69,8 @@ * *

Applies only to columns of exact numeric type. * - *

The default value {@code 0} indicates that a provider-determined scale should be inferred. + * @return The default value {@code 0} indicates that a provider-determined scale should be + * inferred. */ int scale() default -1; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java index 4525c2efb..69f467b90 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java @@ -76,6 +76,19 @@ public SnowflakeColumnMetadata( * @deprecated Use {@link SnowflakeColumnMetadata#SnowflakeColumnMetadata(String, int, boolean, * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean, int)} * instead + * @param name name + * @param type type + * @param nullable is nullable + * @param length length + * @param precision precision + * @param scale scale + * @param typeName type name + * @param fixed is fixed + * @param base SnowflakeType + * @param columnSrcDatabase column source database + * @param columnSrcSchema column source schema + * @param columnSrcTable column source table + * @param isAutoIncrement is auto-increment */ @Deprecated public SnowflakeColumnMetadata( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java index e997b053e..6edc510f8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnection.java @@ -47,7 +47,7 @@ InputStream downloadStream(String stageName, String sourceFileName, boolean deco * Return unique session ID from current session generated by making connection * * @return a unique alphanumeric value representing current session ID - * @throws SQLException + * @throws SQLException if an error occurs */ String getSessionID() throws SQLException; @@ -56,12 +56,16 @@ 
InputStream downloadStream(String stageName, String sourceFileName, boolean deco * of corresponding query. Used when original ResultSet object is no longer available, such as * when original connection has been closed. * - * @param queryID - * @return - * @throws SQLException + * @param queryID the query ID + * @return ResultSet based off the query ID + * @throws SQLException if an error occurs */ ResultSet createResultSet(String queryID) throws SQLException; - /** Returns the SnowflakeConnectionImpl from the connection object. */ + /** + * Returns the SnowflakeConnectionImpl from the connection object. + * + * @return SFConnectionHandler + */ SFConnectionHandler getHandler(); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java index 498e6393b..1f55c83f4 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java @@ -91,6 +91,7 @@ public class SnowflakeConnectionV1 implements Connection, SnowflakeConnection { * Instantiates a SnowflakeConnectionV1 with the passed-in SnowflakeConnectionImpl. * * @param sfConnectionHandler The SnowflakeConnectionImpl. + * @throws SQLException if failed to instantiate a SnowflakeConnectionV1. */ public SnowflakeConnectionV1(SFConnectionHandler sfConnectionHandler) throws SQLException { initConnectionWithImpl(sfConnectionHandler, null, null); @@ -100,6 +101,9 @@ public SnowflakeConnectionV1(SFConnectionHandler sfConnectionHandler) throws SQL * Instantiates a SnowflakeConnectionV1 with the passed-in SnowflakeConnectionImpl. * * @param sfConnectionHandler The SnowflakeConnectionImpl. + * @param url The URL string. + * @param info Connection properties. + * @throws SQLException if failed to instantiate connection. 
*/ public SnowflakeConnectionV1(SFConnectionHandler sfConnectionHandler, String url, Properties info) throws SQLException { @@ -195,9 +199,9 @@ public Statement createStatement() throws SQLException { /** * Get an instance of a ResultSet object * - * @param queryID - * @return - * @throws SQLException + * @param queryID the query ID + * @return ResultSet + * @throws SQLException if connection is closed */ public ResultSet createResultSet(String queryID) throws SQLException { raiseSQLExceptionIfConnectionIsClosed(); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 05566da82..298b64ee7 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -37,7 +37,7 @@ public class SnowflakeDriver implements Driver { static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.19.0"; + public static String implementVersion = "3.20.1"; static int majorVersion = 0; static int minorVersion = 0; @@ -167,7 +167,7 @@ public static String getDisableArrowResultFormatMessage() { /** * Utility method to verify if the standard or fips snowflake-jdbc driver is being used. * - * @return + * @return the title of the implementation, null is returned if it is not known. */ public static String getImplementationTitle() { Package pkg = Package.getPackage("net.snowflake.client.jdbc"); @@ -177,7 +177,7 @@ public static String getImplementationTitle() { /** * Utility method to get the complete jar name with version. 
* - * @return + * @return the jar name with version */ public static String getJdbcJarname() { return String.format("%s-%s", getImplementationTitle(), implementVersion); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java index bd5a3945e..4213b33b0 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java @@ -791,6 +791,7 @@ public static Callable getDownloadFileCallable( * @param encMat remote store encryption material * @param parallel number of parallel threads for downloading * @param presignedUrl Presigned URL for file download + * @param queryId the query ID * @return a callable responsible for downloading files */ public static Callable getDownloadFileCallable( @@ -925,11 +926,13 @@ private void parseCommand() throws SnowflakeSQLException { // get source file locations as array (apply to both upload and download) JsonNode locationsNode = jsonNode.path("data").path("src_locations"); + if (!locationsNode.isArray()) { + throw new SnowflakeSQLException( + queryID, ErrorCode.INTERNAL_ERROR, "src_locations must be an array"); + } queryID = jsonNode.path("data").path("queryId").asText(); - assert locationsNode.isArray(); - String[] src_locations; try { @@ -1108,8 +1111,16 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf // specifically // for FIPS or VPCE S3 endpoint. 
SNOW-652696 String endPoint = null; - if ("AZURE".equalsIgnoreCase(stageLocationType) || "S3".equalsIgnoreCase(stageLocationType)) { + if ("AZURE".equalsIgnoreCase(stageLocationType) + || "S3".equalsIgnoreCase(stageLocationType) + || "GCS".equalsIgnoreCase(stageLocationType)) { endPoint = jsonNode.path("data").path("stageInfo").findValue("endPoint").asText(); + if ("GCS".equalsIgnoreCase(stageLocationType) + && endPoint != null + && (endPoint.trim().isEmpty() || "null".equals(endPoint))) { + // setting to null to preserve previous behaviour for GCS + endPoint = null; + } } String stgAcct = null; @@ -1176,6 +1187,8 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf } } + setupUseRegionalUrl(jsonNode, stageInfo); + if (stageInfo.getStageType() == StageInfo.StageType.S3) { if (session == null) { // This node's value is set if PUT is used without Session. (For Snowpipe Streaming, we rely @@ -1197,6 +1210,18 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf return stageInfo; } + private static void setupUseRegionalUrl(JsonNode jsonNode, StageInfo stageInfo) { + if (stageInfo.getStageType() != StageInfo.StageType.GCS + && stageInfo.getStageType() != StageInfo.StageType.S3) { + return; + } + JsonNode useRegionalURLNode = jsonNode.path("data").path("stageInfo").path("useRegionalUrl"); + if (!useRegionalURLNode.isMissingNode()) { + boolean useRegionalURL = useRegionalURLNode.asBoolean(false); + stageInfo.setUseRegionalUrl(useRegionalURL); + } + } + /** * A helper method to verify if the local file path from GS matches what's parsed locally. This is * for security purpose as documented in SNOW-15153. 
@@ -1459,7 +1484,13 @@ public static List getFileTransferMetadatas( } // For UPLOAD we expect encryptionMaterial to have length 1 - assert encryptionMaterial.size() == 1; + if (encryptionMaterial.size() != 1) { + throw new SnowflakeSQLException( + queryId, + ErrorCode.INTERNAL_ERROR, + "Encryption material for UPLOAD should have size 1 but have " + + encryptionMaterial.size()); + } final Set sourceFiles = expandFileNames(srcLocations, queryId); @@ -1649,6 +1680,7 @@ private void uploadStream() throws SnowflakeSQLException { /** Download a file from remote, and return an input stream */ @Override public InputStream downloadStream(String fileName) throws SnowflakeSQLException { + logger.debug("Downloading file as stream: {}", fileName); if (stageInfo.getStageType() == StageInfo.StageType.LOCAL_FS) { logger.error("downloadStream function doesn't support local file system", false); @@ -1662,14 +1694,32 @@ public InputStream downloadStream(String fileName) throws SnowflakeSQLException remoteLocation remoteLocation = extractLocationAndPath(stageInfo.getLocation()); - String stageFilePath = fileName; + // when downloading files as stream there should be only one file in source files + String sourceLocation = + sourceFiles.stream() + .findFirst() + .orElseThrow( + () -> + new SnowflakeSQLException( + queryID, + SqlState.NO_DATA, + ErrorCode.FILE_NOT_FOUND.getMessageCode(), + session, + "File not found: " + fileName)); + + if (!fileName.equals(sourceLocation)) { + // filename may be different from source location e.g. 
in git repositories + logger.debug("Changing file to download location from {} to {}", fileName, sourceLocation); + } + String stageFilePath = sourceLocation; if (!remoteLocation.path.isEmpty()) { - stageFilePath = SnowflakeUtil.concatFilePathNames(remoteLocation.path, fileName, "/"); + stageFilePath = SnowflakeUtil.concatFilePathNames(remoteLocation.path, sourceLocation, "/"); } + logger.debug("Stage file path for {} is {}", sourceLocation, stageFilePath); - RemoteStoreFileEncryptionMaterial encMat = srcFileToEncMat.get(fileName); - String presignedUrl = srcFileToPresignedUrl.get(fileName); + RemoteStoreFileEncryptionMaterial encMat = srcFileToEncMat.get(sourceLocation); + String presignedUrl = srcFileToPresignedUrl.get(sourceLocation); return storageFactory .createClient(stageInfo, parallel, encMat, session) @@ -3346,7 +3396,7 @@ public static void throwJCEMissingError(String operation, Exception ex, String q * @param session the current session * @param operation the operation i.e. GET * @param ex the exception caught - * @throws SnowflakeSQLLoggedException + * @throws SnowflakeSQLLoggedException if not enough space left on device to download file. */ @Deprecated public static void throwNoSpaceLeftError(SFSession session, String operation, Exception ex) @@ -3361,7 +3411,8 @@ public static void throwNoSpaceLeftError(SFSession session, String operation, Ex * @param session the current session * @param operation the operation i.e. GET * @param ex the exception caught - * @throws SnowflakeSQLLoggedException + * @param queryId the query ID + * @throws SnowflakeSQLLoggedException if not enough space left on device to download file. 
*/ public static void throwNoSpaceLeftError( SFSession session, String operation, Exception ex, String queryId) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java index 60ca632ad..438abb4b2 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferConfig.java @@ -190,7 +190,12 @@ public Builder setUseS3RegionalUrl(boolean useS3RegUrl) { return this; } - /** Streaming ingest client name, used to calculate streaming ingest billing per client */ + /** + * Streaming ingest client name, used to calculate streaming ingest billing per client + * + * @param streamingIngestClientName streaming ingest client name + * @return Builder + */ public Builder setStreamingIngestClientName(String streamingIngestClientName) { this.streamingIngestClientName = streamingIngestClientName; return this; @@ -199,6 +204,9 @@ public Builder setStreamingIngestClientName(String streamingIngestClientName) { /** * Streaming ingest client key provided by Snowflake, used to calculate streaming ingest billing * per client + * + * @param streamingIngestClientKey streaming ingest client key + * @return Builder */ public Builder setStreamingIngestClientKey(String streamingIngestClientKey) { this.streamingIngestClientKey = streamingIngestClientKey; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java index ee3dc3ec8..2f7ec66f4 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java @@ -8,6 +8,7 @@ public interface SnowflakePreparedStatement { /** * @return the Snowflake query ID of the latest executed query + * @throws SQLException if an error occurs */ String getQueryID() throws SQLException; 
@@ -15,25 +16,27 @@ public interface SnowflakePreparedStatement { * Execute a query asynchronously * * @return ResultSet containing results - * @throws SQLException + * @throws SQLException if an error occurs */ ResultSet executeAsyncQuery() throws SQLException; /** * Sets the designated parameter to the given BigInteger value. * - * @param parameterIndex - * @param x - * @throws SQLException + * @param parameterIndex the parameter index + * @param x the BigInteger value + * @throws SQLException if an error occurs */ void setBigInteger(int parameterIndex, BigInteger x) throws SQLException; /** * Sets the designated parameter to the given Map instance. * - * @param parameterIndex - * @param map - * @throws SQLException + * @param parameterIndex the parameter index + * @param map the map instance + * @param type the type + * @param generic type + * @throws SQLException if an error occurs */ void setMap(int parameterIndex, Map map, int type) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java index 000d4634d..cb293690d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java @@ -58,14 +58,19 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1 implements PreparedStatement, SnowflakePreparedStatement { private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class); + /** Error code returned when describing a statement that is binding table name */ private static final Integer ERROR_CODE_TABLE_BIND_VARIABLE_NOT_SET = 2128; + /** Error code when preparing statement with binding object names */ private static final Integer ERROR_CODE_OBJECT_BIND_NOT_SET = 2129; + /** Error code returned when describing a ddl command */ private static final Integer 
ERROR_CODE_STATEMENT_CANNOT_BE_PREPARED = 7; + /** snow-44393 Workaround for compiler cannot prepare to_timestamp(?, 3) */ private static final Integer ERROR_CODE_FORMAT_ARGUMENT_NOT_STRING = 1026; + /** A hash set that contains the error code that will not lead to exception in describe mode */ private static final Set errorCodesIgnoredInDescribeMode = new HashSet<>( @@ -88,10 +93,12 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1 *

Currently, bind name is just value index */ private Map parameterBindings = new HashMap<>(); + /** map of bind values for batch query executions */ private Map batchParameterBindings = new HashMap<>(); private Map wasPrevValueNull = new HashMap<>(); + /** Counter for batch size if we are executing a statement with array bind supported */ private int batchSize = 0; @@ -133,6 +140,12 @@ private void describeSqlIfNotTried() throws SQLException { if (!alreadyDescribed) { try { this.preparedStatementMetaData = sfBaseStatement.describe(sql); + if (preparedStatementMetaData != null + && !preparedStatementMetaData.isArrayBindSupported()) { + logger.debug( + "Array bind is not supported - each batch entry will be executed as a single request for query: {}", + sql); + } } catch (SFException e) { throw new SnowflakeSQLLoggedException(connection.getSFBaseSession(), e); } catch (SnowflakeSQLException e) { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java index 03171a599..5b6304bdf 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSet.java @@ -12,6 +12,7 @@ public interface SnowflakeResultSet { /** * @return the Snowflake query ID of the query which generated this result set + * @throws SQLException if an error is encountered */ String getQueryID() throws SQLException; @@ -23,7 +24,7 @@ public interface SnowflakeResultSet { * query statuses. QueryStatus = SUCCESS means results can be retrieved. * * @return QueryStatus enum showing status of query - * @throws SQLException + * @throws SQLException if an error is encountered */ QueryStatus getStatus() throws SQLException; @@ -33,7 +34,7 @@ public interface SnowflakeResultSet { * returned. 
* * @return String value of query's error message - * @throws SQLException + * @throws SQLException if an error is encountered */ String getQueryErrorMessage() throws SQLException; @@ -45,7 +46,7 @@ public interface SnowflakeResultSet { *

status.isSuccess() means that results can be retrieved. * * @return an instance containing query metadata - * @throws SQLException + * @throws SQLException if an error is encountered */ QueryStatusV2 getStatusV2() throws SQLException; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java index f5a9aa97c..2a1ba82a1 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializable.java @@ -71,6 +71,7 @@ public Builder setSfFullURL(String sfFullURL) { * sc:2.8.2/jdbc:3.12.12 since Sept 2020. It is safe to remove it after Sept 2022. * * @return a ResultSet which represents for the data wrapped in the object + * @throws SQLException if an error occurs * @deprecated Use {@link #getResultSet(ResultSetRetrieveConfig)} instead */ @Deprecated @@ -84,6 +85,7 @@ public Builder setSfFullURL(String sfFullURL) { * * @param info The proxy server information if proxy is necessary. * @return a ResultSet which represents for the data wrapped in the object + * @throws SQLException if an error occurs * @deprecated Use {@link #getResultSet(ResultSetRetrieveConfig)} instead */ @Deprecated @@ -94,6 +96,7 @@ public Builder setSfFullURL(String sfFullURL) { * * @param resultSetRetrieveConfig The extra info to retrieve the result set. * @return a ResultSet which represents for the data wrapped in the object + * @throws SQLException if an error occurs */ ResultSet getResultSet(ResultSetRetrieveConfig resultSetRetrieveConfig) throws SQLException; @@ -101,6 +104,7 @@ public Builder setSfFullURL(String sfFullURL) { * Retrieve total row count included in the ResultSet Serializable object. 
* * @return the total row count from metadata + * @throws SQLException if an error occurs */ long getRowCount() throws SQLException; @@ -108,6 +112,7 @@ public Builder setSfFullURL(String sfFullURL) { * Retrieve compressed data size included in the ResultSet Serializable object. * * @return the total compressed data size in bytes from metadata + * @throws SQLException if an error occurs */ long getCompressedDataSizeInBytes() throws SQLException; @@ -115,6 +120,7 @@ public Builder setSfFullURL(String sfFullURL) { * Retrieve uncompressed data size included in the ResultSet Serializable object. * * @return the total uncompressed data size in bytes from metadata + * @throws SQLException if an error occurs */ long getUncompressedDataSizeInBytes() throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java index 082dc2e30..2baf8027a 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java @@ -282,6 +282,7 @@ private SnowflakeResultSetSerializableV1(SnowflakeResultSetSerializableV1 toCopy * @param sfStatement the Snowflake statement * @param resultStreamProvider a ResultStreamProvider for computing a custom data source for * result-file streams + * @param disableChunksPrefetch is prefetch disabled * @throws SnowflakeSQLException if failed to parse the result JSON node */ protected SnowflakeResultSetSerializableV1( @@ -754,6 +755,12 @@ public static SnowflakeResultSetSerializableV1 create( /** * A factory function for internal usage only. It creates SnowflakeResultSetSerializableV1 with * NoOpChunksDownloader which disables chunks prefetch. 
+ * + * @param rootNode JSON root node + * @param sfSession SFBaseSession + * @param sfStatement SFBaseStatement + * @return SnowflakeResultSetSerializableV1 with NoOpChunksDownloader + * @throws SnowflakeSQLException if an error occurs */ @SnowflakeJdbcInternalApi public static SnowflakeResultSetSerializableV1 createWithChunksPrefetchDisabled( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java index 085c8bcc3..e290db3f2 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java @@ -64,7 +64,7 @@ public SnowflakeResultSetV1(SFBaseResultSet sfBaseResultSet, Statement statement * This function is not supported for synchronous queries * * @return no return value; exception is always thrown - * @throws SQLFeatureNotSupportedException + * @throws SQLFeatureNotSupportedException always thrown because feature is not supported */ public QueryStatus getStatus() throws SQLException { throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -74,7 +74,7 @@ public QueryStatus getStatus() throws SQLException { * This function is not supported for synchronous queries * * @return no return value; exception is always thrown - * @throws SQLFeatureNotSupportedException + * @throws SQLFeatureNotSupportedException always thrown because feature is not supported */ @Override public QueryStatusV2 getStatusV2() throws SQLException { @@ -86,7 +86,7 @@ public QueryStatusV2 getStatusV2() throws SQLException { * This function is not supported for synchronous queries * * @return no return value; exception is always thrown - * @throws SQLFeatureNotSupportedException + * @throws SQLFeatureNotSupportedException always thrown because feature is not supported */ @Override public String getQueryErrorMessage() throws SQLException { diff --git 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java index 194748317..084e62ba8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeRichResultSetSerializableV1.java @@ -38,6 +38,12 @@ public class SnowflakeRichResultSetSerializableV1 extends SnowflakeResultSetSeri /** * A factory function for internal usage only. It creates SnowflakeRichResultSetSerializableV1 * with NoOpChunksDownloader which disables chunks prefetch. + * + * @param rootNode JSON root node + * @param sfSession SFBaseSession + * @param sfStatement SFBaseStatement + * @return SnowflakeRichResultSetSerializableV1 with NoOpChunksDownloader + * @throws SnowflakeSQLException if an error occurs */ public static SnowflakeRichResultSetSerializableV1 createWithChunksPrefetchDisabled( JsonNode rootNode, SFBaseSession sfSession, SFBaseStatement sfStatement) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java index ebe84c13c..48faec24c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java @@ -51,12 +51,22 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState, int queryId); } - /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, String)} */ + /** + * use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, String)} + * + * @param reason exception reason + * @param sqlState the SQL state + */ @Deprecated public SnowflakeSQLException(String reason, String sqlState) { this((String) null, reason, sqlState); } + /** + * @param queryId the queryID + * @param reason exception reason + * @param sqlState the SQL state + */ public SnowflakeSQLException(String 
queryId, String reason, String sqlState) { super(reason, sqlState); this.queryId = queryId; @@ -64,12 +74,22 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState) { logger.debug("Snowflake exception: {}, sqlState:{}", reason, sqlState); } - /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int)} */ + /** + * use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int)} + * + * @param sqlState the SQL state + * @param vendorCode the vendor code + */ @Deprecated public SnowflakeSQLException(String sqlState, int vendorCode) { this((String) null, sqlState, vendorCode); } + /** + * @param queryId query ID + * @param sqlState SQL state + * @param vendorCode vendor code + */ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode) { super( errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode)), @@ -83,12 +103,24 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode) { vendorCode); } - /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int, Object...)} */ + /** + * use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int, Object...)} + * + * @param sqlState the SQL state + * @param vendorCode the vendor code + * @param params additional parameters + */ @Deprecated public SnowflakeSQLException(String sqlState, int vendorCode, Object... params) { this((String) null, sqlState, vendorCode, params); } + /** + * @param queryId query ID + * @param sqlState the SQL state + * @param vendorCode the vendor code + * @param params additional parameters + */ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode, Object... 
params) { super( errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode), params), @@ -102,6 +134,11 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode, Ob vendorCode); } + /** + * @param ex Throwable exception + * @param sqlState the SQL state + * @param vendorCode the vendor code + */ public SnowflakeSQLException(Throwable ex, String sqlState, int vendorCode) { super( errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode)), @@ -115,6 +152,11 @@ public SnowflakeSQLException(Throwable ex, String sqlState, int vendorCode) { ex); } + /** + * @param ex Throwable exception + * @param errorCode the error code + * @param params additional parameters + */ public SnowflakeSQLException(Throwable ex, ErrorCode errorCode, Object... params) { this(ex, errorCode.getSqlState(), errorCode.getMessageCode(), params); } @@ -122,12 +164,23 @@ public SnowflakeSQLException(Throwable ex, ErrorCode errorCode, Object... params /** * @deprecated use {@link SnowflakeSQLException#SnowflakeSQLException(String, Throwable, String, * int, Object...)} + * @param ex Throwable exception + * @param sqlState the SQL state + * @param vendorCode the vendor code + * @param params additional parameters */ @Deprecated public SnowflakeSQLException(Throwable ex, String sqlState, int vendorCode, Object... params) { this(null, ex, sqlState, vendorCode, params); } + /** + * @param queryId query ID + * @param ex Throwable exception + * @param sqlState the SQL state + * @param vendorCode the vendor code + * @param params additional parameters + */ public SnowflakeSQLException( String queryId, Throwable ex, String sqlState, int vendorCode, Object... params) { super( @@ -143,6 +196,10 @@ public SnowflakeSQLException( ex); } + /** + * @param errorCode the error code + * @param params additional parameters + */ public SnowflakeSQLException(ErrorCode errorCode, Object... 
params) { super( errorResourceBundleManager.getLocalizedMessage( @@ -151,6 +208,11 @@ public SnowflakeSQLException(ErrorCode errorCode, Object... params) { errorCode.getMessageCode()); } + /** + * @param queryId query ID + * @param errorCode error code + * @param params additional parameters + */ public SnowflakeSQLException(String queryId, ErrorCode errorCode, Object... params) { super( errorResourceBundleManager.getLocalizedMessage( @@ -160,6 +222,12 @@ public SnowflakeSQLException(String queryId, ErrorCode errorCode, Object... para this.queryId = queryId; } + /** + * @param errorCode error code + * @param retryCount retry count + * @param issocketTimeoutNoBackoff issocketTimeoutNoBackoff + * @param elapsedSeconds time elapsed in seconds + */ public SnowflakeSQLException( ErrorCode errorCode, int retryCount, boolean issocketTimeoutNoBackoff, long elapsedSeconds) { super( @@ -171,6 +239,9 @@ public SnowflakeSQLException( this.elapsedSeconds = elapsedSeconds; } + /** + * @param e the SFException + */ public SnowflakeSQLException(SFException e) { this(e.getQueryId(), e.getMessage(), e.getSqlState(), e.getVendorCode()); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java index 9f8f2d7a9..78d4fb971 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java @@ -100,7 +100,7 @@ private static Future sendInBandTelemetryMessage( * Helper function to remove sensitive data (error message, reason) from the stacktrace. 
* * @param stackTrace original stacktrace - * @return + * @return stack trace with sensitive data removed */ static String maskStacktrace(String stackTrace) { Pattern STACKTRACE_BEGINNING = @@ -118,9 +118,9 @@ static String maskStacktrace(String stackTrace) { /** * Helper function to create JSONObject node for OOB telemetry log * - * @param queryId - * @param SQLState - * @param vendorCode + * @param queryId query ID + * @param SQLState the SQL state + * @param vendorCode the vendor code * @return JSONObject with data about SQLException */ static JSONObject createOOBValue(String queryId, String SQLState, int vendorCode) { @@ -143,10 +143,10 @@ static JSONObject createOOBValue(String queryId, String SQLState, int vendorCode /** * Helper function to create ObjectNode for IB telemetry log * - * @param queryId - * @param SQLState - * @param vendorCode - * @return + * @param queryId query ID + * @param SQLState the SQL state + * @param vendorCode the vendor code + * @return ObjectNode for IB telemetry log */ static ObjectNode createIBValue(String queryId, String SQLState, int vendorCode) { ObjectNode ibValue = mapper.createObjectNode(); @@ -224,17 +224,35 @@ public static void sendTelemetryData( } } + /** + * @param session SFBaseSession + * @param reason exception reason + * @param SQLState the SQL state + * @param vendorCode the vendor code + * @param queryId the query ID + */ public SnowflakeSQLLoggedException( SFBaseSession session, String reason, String SQLState, int vendorCode, String queryId) { super(queryId, reason, SQLState, vendorCode); sendTelemetryData(queryId, SQLState, vendorCode, session, this); } + /** + * @param session SFBaseSession + * @param vendorCode the vendor code + * @param SQLState the SQL state + */ public SnowflakeSQLLoggedException(SFBaseSession session, int vendorCode, String SQLState) { super(SQLState, vendorCode); sendTelemetryData(null, SQLState, vendorCode, session, this); } + /** + * @param queryId the query ID + * @param session 
SFBaseSession + * @param vendorCode the vendor code + * @param SQLState the SQL state + */ public SnowflakeSQLLoggedException( String queryId, SFBaseSession session, int vendorCode, String SQLState) { super(queryId, SQLState, vendorCode); @@ -244,41 +262,85 @@ public SnowflakeSQLLoggedException( /** * use {@link SnowflakeSQLLoggedException#SnowflakeSQLLoggedException(String, SFBaseSession, * String, String)} + * + * @param session SFBaseSession + * @param SQLState the SQL state + * @param reason exception reason */ @Deprecated public SnowflakeSQLLoggedException(SFBaseSession session, String SQLState, String reason) { this(null, session, SQLState, reason); } + /** + * @param queryId the query ID + * @param session SFBaseSession + * @param SQLState the SQL state + * @param reason the exception reason + */ public SnowflakeSQLLoggedException( String queryId, SFBaseSession session, String SQLState, String reason) { super(reason, SQLState); sendTelemetryData(queryId, SQLState, -1, session, this); } + /** + * @param session SFBaseSession + * @param vendorCode the vendor code + * @param SQLState the SQL state + * @param params additional parameters + */ public SnowflakeSQLLoggedException( SFBaseSession session, int vendorCode, String SQLState, Object... params) { this(null, session, vendorCode, SQLState, params); } + /** + * @param queryId the query ID + * @param session SFBaseSession + * @param vendorCode the vendor code + * @param SQLState the SQL state + * @param params additional parameters + */ public SnowflakeSQLLoggedException( String queryId, SFBaseSession session, int vendorCode, String SQLState, Object... 
params) { super(queryId, SQLState, vendorCode, params); sendTelemetryData(queryId, SQLState, vendorCode, session, this); } + /** + * @param session SFBaseSession + * @param errorCode the error code + * @param ex Throwable exception + * @param params additional parameters + */ public SnowflakeSQLLoggedException( SFBaseSession session, ErrorCode errorCode, Throwable ex, Object... params) { super(ex, errorCode, params); sendTelemetryData(null, errorCode.getSqlState(), errorCode.getMessageCode(), session, this); } + /** + * @param session SFBaseSession + * @param SQLState the SQL state + * @param vendorCode the vendor code + * @param ex Throwable exception + * @param params additional parameters + */ public SnowflakeSQLLoggedException( SFBaseSession session, String SQLState, int vendorCode, Throwable ex, Object... params) { super(ex, SQLState, vendorCode, params); sendTelemetryData(null, SQLState, vendorCode, session, this); } + /** + * @param queryId the query ID + * @param session SFBaseSession + * @param SQLState the SQL state + * @param vendorCode the vendor code + * @param ex Throwable exception + * @param params additional parameters + */ public SnowflakeSQLLoggedException( String queryId, SFBaseSession session, @@ -293,18 +355,32 @@ public SnowflakeSQLLoggedException( /** * use {@link SnowflakeSQLLoggedException#SnowflakeSQLLoggedException(String, SFBaseSession, * ErrorCode, Object...)} + * + * @param session SFBaseSession + * @param errorCode the error code + * @param params additional parameters */ @Deprecated public SnowflakeSQLLoggedException(SFBaseSession session, ErrorCode errorCode, Object... params) { this(null, session, errorCode, params); } + /** + * @param queryId the query ID + * @param session SFBaseSession + * @param errorCode the error code + * @param params additional parameters + */ public SnowflakeSQLLoggedException( String queryId, SFBaseSession session, ErrorCode errorCode, Object... 
params) { super(queryId, errorCode, params); sendTelemetryData(queryId, null, -1, session, this); } + /** + * @param session SFBaseSession + * @param e throwable exception + */ public SnowflakeSQLLoggedException(SFBaseSession session, SFException e) { super(e); sendTelemetryData(null, null, -1, session, this); @@ -313,12 +389,20 @@ public SnowflakeSQLLoggedException(SFBaseSession session, SFException e) { /** * use {@link SnowflakeSQLLoggedException#SnowflakeSQLLoggedException(String, SFBaseSession, * String)} + * + * @param session SFBaseSession + * @param reason exception reason */ @Deprecated public SnowflakeSQLLoggedException(SFBaseSession session, String reason) { this(null, session, reason); } + /** + * @param queryId the query ID + * @param session SFBaseSession + * @param reason exception reason + */ public SnowflakeSQLLoggedException(String queryId, SFBaseSession session, String reason) { super(queryId, reason, null); sendTelemetryData(queryId, null, -1, session, this); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java index f1f41d4d0..d684c3d27 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatement.java @@ -14,11 +14,13 @@ public interface SnowflakeStatement { /** * @return the Snowflake query ID of the latest executed query (even failed one) or null when the * last query ID is not available + * @throws SQLException if an error is encountered */ String getQueryID() throws SQLException; /** * @return the Snowflake query IDs of the latest executed batch queries + * @throws SQLException if an error is encountered */ List getBatchQueryIDs() throws SQLException; @@ -27,9 +29,15 @@ public interface SnowflakeStatement { * * @param name parameter name * @param value parameter value + * @throws SQLException if an error is encountered */ void setParameter(String name, Object value) throws 
SQLException; + /** + * Set batch ID + * + * @param batchID the batch ID + */ void setBatchID(String batchID); /** @@ -46,8 +54,8 @@ public interface SnowflakeStatement { * required as SnowflakeStatementV1 doesn't directly expose ResultSet to the sub-classes making it * challenging to get additional information from the previously executed query. * - * @param resultSet - * @throws SQLException + * @param resultSet SFBaseResultSet + * @throws SQLException if an error is encountered */ void resultSetMetadataHandler(SFBaseResultSet resultSet) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java index 5016c175b..08cb3fac7 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java @@ -20,6 +20,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import net.snowflake.client.core.CancellationReason; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.ResultUtil; @@ -952,7 +953,7 @@ public void cancel() throws SQLException { raiseSQLExceptionIfStatementIsClosed(); try { - sfBaseStatement.cancel(); + sfBaseStatement.cancel(CancellationReason.CLIENT_REQUESTED); } catch (SFException ex) { throw new SnowflakeSQLException(ex, ex.getSqlState(), ex.getVendorCode(), ex.getParams()); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java index ea958c551..5e59dcbc4 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java @@ -102,7 +102,12 @@ public static JavaDataType getJavaType(SnowflakeType type, boolean isStructuredT } } - /** Converts text of data type (returned from SQL query) into 
Types type, represented by an int */ + /** + * Converts text of data type (returned from SQL query) into Types type, represented by an int + * + * @param typeName type name + * @return int representation of type + */ public static int convertStringToType(String typeName) { int retval = Types.NULL; if (typeName == null || typeName.trim().isEmpty()) { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 1485249b3..8e9a683a0 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -4,6 +4,7 @@ package net.snowflake.client.jdbc; +import static java.util.Arrays.stream; import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY; import com.fasterxml.jackson.core.JsonProcessingException; @@ -32,10 +33,12 @@ import java.util.Optional; import java.util.Properties; import java.util.Random; +import java.util.TreeMap; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import net.snowflake.client.core.Constants; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; @@ -53,6 +56,7 @@ import org.apache.commons.io.IOUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; +import org.apache.http.NameValuePair; /** * @author jhuang @@ -172,7 +176,15 @@ private static void checkErrorAndThrowExceptionSub( throw new SnowflakeSQLException(queryId, errorMessage, sqlState, errorCode); } - /** This method should only be used internally */ + /** + * This method should only be used internally + * + * @param colNode JsonNode + * @param jdbcTreatDecimalAsInt true if should treat Decimal as Int + * @param session SFBaseSession + * @return SnowflakeColumnMetadata + * @throws SnowflakeSQLException if an error 
occurs + */ @Deprecated public static SnowflakeColumnMetadata extractColumnMetadata( JsonNode colNode, boolean jdbcTreatDecimalAsInt, SFBaseSession session) @@ -661,7 +673,12 @@ public static String systemGetEnv(String env) { return null; } - /** System.setEnv function. Can be used for unit tests. */ + /** + * System.setEnv function. Can be used for unit tests. + * + * @param key key + * @param value value + */ public static void systemSetEnv(String key, String value) { try { Map env = System.getenv(); @@ -692,7 +709,7 @@ public static void systemSetEnv(String key, String value) { /** * System.unsetEnv function to remove a system environment parameter in the map * - * @param key + * @param key key value */ public static void systemUnsetEnv(String key) { try { @@ -714,6 +731,8 @@ public static void systemUnsetEnv(String key) { * * @param mode OCSP mode * @param info proxy server properties. + * @return HttpClientSettingsKey + * @throws SnowflakeSQLException if an error occurs */ public static HttpClientSettingsKey convertProxyPropertiesToHttpClientKey( OCSPMode mode, Properties info) throws SnowflakeSQLException { @@ -769,8 +788,8 @@ public static HttpClientSettingsKey convertProxyPropertiesToHttpClientKey( * SimpleDateFormatter. Negative values have to be rounded to the next negative value, while * positive values should be cut off with no rounding. 
* - * @param millis - * @return + * @param millis milliseconds + * @return seconds as long value */ public static long getSecondsFromMillis(long millis) { long returnVal; @@ -818,6 +837,22 @@ public static boolean convertSystemPropertyToBooleanValue( } return defaultValue; } + /** + * Helper function to convert environment variable to boolean + * + * @param envVariableKey property name of the environment variable + * @param defaultValue default value used + * @return the value of the environment variable as boolean, else the default value + */ + @SnowflakeJdbcInternalApi + public static boolean convertSystemGetEnvToBooleanValue( + String envVariableKey, boolean defaultValue) { + String environmentVariableValue = systemGetEnv(envVariableKey); + if (environmentVariableValue != null) { + return Boolean.parseBoolean(environmentVariableValue); + } + return defaultValue; + } @SnowflakeJdbcInternalApi public static T mapSFExceptionToSQLException(ThrowingCallable action) @@ -835,4 +870,37 @@ public static String getJsonNodeStringValue(JsonNode node) throws SFException { } return node.isValueNode() ? node.asText() : node.toString(); } + + /** + * Method introduced to avoid inconsistencies in custom headers handling, since these are defined + * on drivers side e.g. some drivers might internally convert headers to canonical form. 
+ * + * @param input map input + * @return case insensitive map + */ + @SnowflakeJdbcInternalApi + public static Map createCaseInsensitiveMap(Map input) { + Map caseInsensitiveMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + if (input != null) { + caseInsensitiveMap.putAll(input); + } + return caseInsensitiveMap; + } + + /** + * toCaseInsensitiveMap, but adjusted to Headers[] argument type + * + * @param headers array of headers + * @return case insensitive map + */ + @SnowflakeJdbcInternalApi + public static Map createCaseInsensitiveMap(Header[] headers) { + if (headers != null) { + return createCaseInsensitiveMap( + stream(headers) + .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue))); + } else { + return new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + } + } } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java index 93646e104..c3602fcf7 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/CommonObjectMetadata.java @@ -3,8 +3,9 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import java.util.HashMap; import java.util.Map; +import java.util.TreeMap; +import net.snowflake.client.jdbc.SnowflakeUtil; /** * Implements platform-independent interface Azure BLOB and GCS object metadata @@ -16,11 +17,11 @@ */ public class CommonObjectMetadata implements StorageObjectMetadata { private long contentLength; - private Map userDefinedMetadata; + private final Map userDefinedMetadata; private String contentEncoding; CommonObjectMetadata() { - userDefinedMetadata = new HashMap<>(); + userDefinedMetadata = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); } /* @@ -31,7 +32,7 @@ public class CommonObjectMetadata implements StorageObjectMetadata { long contentLength, String contentEncoding, Map userDefinedMetadata) { 
this.contentEncoding = contentEncoding; this.contentLength = contentLength; - this.userDefinedMetadata = userDefinedMetadata; + this.userDefinedMetadata = SnowflakeUtil.createCaseInsensitiveMap(userDefinedMetadata); } /** @@ -41,7 +42,6 @@ public class CommonObjectMetadata implements StorageObjectMetadata { public Map getUserMetadata() { return userDefinedMetadata; } - ; /** * @return returns the size of object in bytes diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java index d9999457d..7acb2fc4a 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProvider.java @@ -43,7 +43,22 @@ public class EncryptionProvider { private static final int BUFFER_SIZE = 2 * 1024 * 1024; // 2 MB private static SecureRandom secRnd; - /** Decrypt a InputStream */ + /** + * Decrypt a InputStream + * + * @param inputStream input stream + * @param keyBase64 keyBase64 + * @param ivBase64 ivBase64 + * @param encMat RemoteStoreFileEncryptionMaterial + * @return InputStream + * @throws NoSuchPaddingException when padding mechanism is not available for this environment + * @throws NoSuchAlgorithmException when the requested algorithm is not available for this + * environment + * @throws InvalidKeyException when there is an issue with the key value + * @throws BadPaddingException when the data is not padded as expected + * @throws IllegalBlockSizeException when the length of data is incorrect + * @throws InvalidAlgorithmParameterException when the provided KeyStore has no trustAnchors + */ public static InputStream decryptStream( InputStream inputStream, String keyBase64, diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java index 
fa31a6a0c..c3f53c0ea 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProvider.java @@ -30,7 +30,8 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; class GcmEncryptionProvider { - private static final int TAG_LENGTH = 128; + private static final int TAG_LENGTH_IN_BITS = 128; + private static final int IV_LENGTH_IN_BYTES = 12; private static final String AES = "AES"; private static final String FILE_CIPHER = "AES/GCM/NoPadding"; private static final String KEY_CIPHER = "AES/GCM/NoPadding"; @@ -64,8 +65,8 @@ static InputStream encrypt( byte[] kek = base64Decoder.decode(encMat.getQueryStageMasterKey()); int keySize = kek.length; byte[] keyBytes = new byte[keySize]; - byte[] dataIvBytes = new byte[blockSize]; - byte[] keyIvBytes = new byte[blockSize]; + byte[] dataIvBytes = new byte[IV_LENGTH_IN_BYTES]; + byte[] keyIvBytes = new byte[IV_LENGTH_IN_BYTES]; initRandomIvsAndFileKey(dataIvBytes, keyIvBytes, keyBytes); byte[] encryptedKey = encryptKey(kek, keyBytes, keyIvBytes, keyAad); CipherInputStream cis = encryptContent(src, keyBytes, dataIvBytes, dataAad); @@ -94,7 +95,7 @@ private static byte[] encryptKey(byte[] kekBytes, byte[] keyBytes, byte[] keyIvD throws InvalidKeyException, InvalidAlgorithmParameterException, IllegalBlockSizeException, BadPaddingException, NoSuchPaddingException, NoSuchAlgorithmException { SecretKey kek = new SecretKeySpec(kekBytes, 0, kekBytes.length, AES); - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, keyIvData); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, keyIvData); Cipher keyCipher = Cipher.getInstance(KEY_CIPHER); keyCipher.init(Cipher.ENCRYPT_MODE, kek, gcmParameterSpec); if (aad != null) { @@ -108,7 +109,7 @@ private static CipherInputStream encryptContent( throws InvalidKeyException, InvalidAlgorithmParameterException, 
NoSuchPaddingException, NoSuchAlgorithmException { SecretKey fileKey = new SecretKeySpec(keyBytes, 0, keyBytes.length, AES); - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, dataIvBytes); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, dataIvBytes); Cipher fileCipher = Cipher.getInstance(FILE_CIPHER); fileCipher.init(Cipher.ENCRYPT_MODE, fileKey, gcmParameterSpec); if (aad != null) { @@ -180,7 +181,7 @@ private static CipherInputStream decryptContentFromStream( InputStream inputStream, byte[] ivBytes, byte[] fileKeyBytes, byte[] aad) throws InvalidKeyException, InvalidAlgorithmParameterException, NoSuchPaddingException, NoSuchAlgorithmException { - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, ivBytes); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, ivBytes); SecretKey fileKey = new SecretKeySpec(fileKeyBytes, AES); Cipher fileCipher = Cipher.getInstance(FILE_CIPHER); fileCipher.init(Cipher.DECRYPT_MODE, fileKey, gcmParameterSpec); @@ -195,7 +196,7 @@ private static void decryptContentFromFile( throws InvalidKeyException, InvalidAlgorithmParameterException, IOException, NoSuchPaddingException, NoSuchAlgorithmException { SecretKey fileKey = new SecretKeySpec(fileKeyBytes, AES); - GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, cekIvBytes); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, cekIvBytes); byte[] buffer = new byte[BUFFER_SIZE]; Cipher fileCipher = Cipher.getInstance(FILE_CIPHER); fileCipher.init(Cipher.DECRYPT_MODE, fileKey, gcmParameterSpec); @@ -224,7 +225,7 @@ private static byte[] decryptKey(byte[] kekBytes, byte[] ivBytes, byte[] keyByte throws InvalidKeyException, InvalidAlgorithmParameterException, IllegalBlockSizeException, BadPaddingException, NoSuchPaddingException, NoSuchAlgorithmException { SecretKey kek = new SecretKeySpec(kekBytes, 0, kekBytes.length, AES); - GCMParameterSpec 
gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH, ivBytes); + GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(TAG_LENGTH_IN_BITS, ivBytes); Cipher keyCipher = Cipher.getInstance(KEY_CIPHER); keyCipher.init(Cipher.DECRYPT_MODE, kek, gcmParameterSpec); if (aad != null) { diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java index 49b3542fd..565db0210 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java @@ -67,7 +67,7 @@ public static void setProxyForS3(HttpClientSettingsKey key, ClientConfiguration * * @param proxyProperties proxy properties * @param clientConfig the configuration needed by S3 to set the proxy - * @throws SnowflakeSQLException + * @throws SnowflakeSQLException when an error is encountered */ public static void setSessionlessProxyForS3( Properties proxyProperties, ClientConfiguration clientConfig) throws SnowflakeSQLException { diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java index ec54508f9..38f20cf65 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3ObjectMetadata.java @@ -5,6 +5,7 @@ import com.amazonaws.services.s3.model.ObjectMetadata; import java.util.Map; +import net.snowflake.client.jdbc.SnowflakeUtil; /** * s3 implementation of platform independent StorageObjectMetadata interface, wraps an S3 @@ -28,7 +29,7 @@ public class S3ObjectMetadata implements StorageObjectMetadata { @Override public Map getUserMetadata() { - return objectMetadata.getUserMetadata(); + return SnowflakeUtil.createCaseInsensitiveMap(objectMetadata.getUserMetadata()); } @Override diff --git 
a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java index 3bb209c48..853d461b5 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3StorageObjectMetadata.java @@ -5,6 +5,7 @@ import com.amazonaws.services.s3.model.ObjectMetadata; import java.util.Map; +import net.snowflake.client.jdbc.SnowflakeUtil; /** * Implementation of StorageObjectMetadata for S3 for remote storage object metadata. @@ -26,7 +27,7 @@ public S3StorageObjectMetadata(ObjectMetadata s3Metadata) { */ @Override public Map getUserMetadata() { - return this.s3Metadata.getUserMetadata(); + return SnowflakeUtil.createCaseInsensitiveMap(this.s3Metadata.getUserMetadata()); } /** diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java index cdf303bbd..0f8014ef6 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java @@ -4,6 +4,8 @@ package net.snowflake.client.jdbc.cloud.storage; import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED; +import static net.snowflake.client.core.HttpUtil.setProxyForAzure; +import static net.snowflake.client.core.HttpUtil.setSessionlessProxyForAzure; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import com.fasterxml.jackson.core.JsonFactory; @@ -41,7 +43,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFSession; @@ -154,9 +155,9 @@ private void setupAzureClient( 
this.azStorageClient = new CloudBlobClient(storageEndpoint, azCreds); opContext = new OperationContext(); if (session != null) { - HttpUtil.setProxyForAzure(session.getHttpClientKey(), opContext); + setProxyForAzure(session.getHttpClientKey(), opContext); } else { - HttpUtil.setSessionlessProxyForAzure(stage.getProxyProperties(), opContext); + setSessionlessProxyForAzure(stage.getProxyProperties(), opContext); } } catch (URISyntaxException ex) { throw new IllegalArgumentException("invalid_azure_credentials"); @@ -273,7 +274,8 @@ public StorageObjectMetadata getObjectMetadata(String remoteStorageLocation, Str blob.downloadAttributes(null, null, opContext); // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); // Get the BLOB system properties we care about BlobProperties properties = blob.getProperties(); @@ -348,7 +350,8 @@ public void download( blob.downloadAttributes(null, transferOptions, opContext); // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId); @@ -447,13 +450,11 @@ public InputStream downloadToStream( InputStream stream = blob.openInputStream(null, null, opContext); stopwatch.stop(); long downloadMillis = stopwatch.elapsedMillis(); - Map userDefinedMetadata = blob.getMetadata(); - + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId); - String key = encryptionData.getKey(); - String iv = encryptionData.getValue(); if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) { @@ -574,7 +575,7 @@ public void upload( CloudBlockBlob blob = 
container.getBlockBlobReference(destFileName); // Set the user-defined/Snowflake metadata and upload the BLOB - blob.setMetadata((HashMap) meta.getUserMetadata()); + blob.setMetadata(new HashMap<>(meta.getUserMetadata())); BlobRequestOptions transferOptions = new BlobRequestOptions(); transferOptions.setConcurrentRequestCount(parallelism); @@ -694,7 +695,7 @@ private SFPair createUploadStream( final InputStream stream; FileInputStream srcFileStream = null; try { - if (isEncrypting() && getEncryptionKeySize() < 256) { + if (isEncrypting() && getEncryptionKeySize() <= 256) { try { final InputStream uploadStream = uploadFromStream diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java index d907973ac..d6bf6ba84 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java @@ -4,6 +4,10 @@ package net.snowflake.client.jdbc.cloud.storage; import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED; +import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemPropertyToBooleanValue; +import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap; +import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause; +import static net.snowflake.client.jdbc.SnowflakeUtil.isBlank; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import com.fasterxml.jackson.core.JsonFactory; @@ -14,9 +18,11 @@ import com.google.api.gax.rpc.FixedHeaderProvider; import com.google.auth.oauth2.AccessToken; import com.google.auth.oauth2.GoogleCredentials; +import com.google.cloud.NoCredentials; import com.google.cloud.storage.Blob; import com.google.cloud.storage.BlobId; import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.HttpStorageOptions; import 
com.google.cloud.storage.Storage; import com.google.cloud.storage.Storage.BlobListOption; import com.google.cloud.storage.StorageException; @@ -62,7 +68,6 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; -import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.HttpResponseException; import org.apache.http.client.methods.HttpGet; @@ -183,8 +188,8 @@ public void shutdown() { * * @param remoteStorageLocation bucket name * @param prefix Path - * @return - * @throws StorageProviderException + * @return a collection of storage summary objects + * @throws StorageProviderException cloud storage provider error */ @Override public StorageObjectSummaryCollection listObjects(String remoteStorageLocation, String prefix) @@ -310,18 +315,14 @@ public void download( outStream.close(); bodyStream.close(); if (isEncrypting()) { - for (Header header : response.getAllHeaders()) { - if (header - .getName() - .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) { - AbstractMap.SimpleEntry encryptionData = - parseEncryptionData(header.getValue(), queryId); - - key = encryptionData.getKey(); - iv = encryptionData.getValue(); - break; - } - } + Map userDefinedHeaders = + createCaseInsensitiveMap(response.getAllHeaders()); + AbstractMap.SimpleEntry encryptionData = + parseEncryptionData( + userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP), + queryId); + key = encryptionData.getKey(); + iv = encryptionData.getValue(); } stopwatch.stop(); downloadMillis = stopwatch.elapsedMillis(); @@ -355,9 +356,10 @@ public void download( logger.debug("Download successful", false); // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); if (isEncrypting()) { - if (userDefinedMetadata != null) { + if 
(!userDefinedMetadata.isEmpty()) { AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId); @@ -499,18 +501,14 @@ public InputStream downloadToStream( inputStream = response.getEntity().getContent(); if (isEncrypting()) { - for (Header header : response.getAllHeaders()) { - if (header - .getName() - .equalsIgnoreCase(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP)) { - AbstractMap.SimpleEntry encryptionData = - parseEncryptionData(header.getValue(), queryId); - - key = encryptionData.getKey(); - iv = encryptionData.getValue(); - break; - } - } + Map userDefinedHeaders = + createCaseInsensitiveMap(response.getAllHeaders()); + AbstractMap.SimpleEntry encryptionData = + parseEncryptionData( + userDefinedHeaders.get(GCS_METADATA_PREFIX + GCS_ENCRYPTIONDATAPROP), + queryId); + key = encryptionData.getKey(); + iv = encryptionData.getValue(); } stopwatch.stop(); downloadMillis = stopwatch.elapsedMillis(); @@ -538,7 +536,8 @@ public InputStream downloadToStream( inputStream = Channels.newInputStream(blob.reader()); if (isEncrypting()) { // Get the user-defined BLOB metadata - Map userDefinedMetadata = blob.getMetadata(); + Map userDefinedMetadata = + SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata()); AbstractMap.SimpleEntry encryptionData = parseEncryptionData(userDefinedMetadata.get(GCS_ENCRYPTIONDATAPROP), queryId); @@ -1039,7 +1038,7 @@ private SFPair createUploadStream( final InputStream stream; FileInputStream srcFileStream = null; try { - if (isEncrypting() && getEncryptionKeySize() < 256) { + if (isEncrypting() && getEncryptionKeySize() <= 256) { try { final InputStream uploadStream = uploadFromStream @@ -1121,7 +1120,7 @@ public void handleStorageException( // If there is no space left in the download location, java.io.IOException is thrown. // Don't retry. 
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) { + if (getRootCause(ex) instanceof IOException) { SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId); } @@ -1181,7 +1180,7 @@ public void handleStorageException( } } } else if (ex instanceof InterruptedException - || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) { + || getRootCause(ex) instanceof SocketTimeoutException) { if (retryCount > getMaxRetries()) { throw new SnowflakeSQLLoggedException( queryId, @@ -1278,7 +1277,7 @@ private AbstractMap.SimpleEntry parseEncryptionData( /** Adds digest metadata to the StorageObjectMetadata object */ @Override public void addDigestMetadata(StorageObjectMetadata meta, String digest) { - if (!SnowflakeUtil.isBlank(digest)) { + if (!isBlank(digest)) { meta.addUserMetadata("sfc-digest", digest); } } @@ -1315,6 +1314,8 @@ private void setupGCSClient( if (accessToken != null) { // We are authenticated with an oauth access token. StorageOptions.Builder builder = StorageOptions.newBuilder(); + stage.gcsCustomEndpoint().ifPresent(builder::setHost); + if (areDisabledGcsDefaultCredentials(session)) { logger.debug( "Adding explicit credentials to avoid default credential lookup by the GCS client"); @@ -1332,7 +1333,10 @@ private void setupGCSClient( .getService(); } else { // Use anonymous authentication. 
- this.gcsClient = StorageOptions.getUnauthenticatedInstance().getService(); + HttpStorageOptions.Builder builder = + HttpStorageOptions.newBuilder().setCredentials(NoCredentials.getInstance()); + stage.gcsCustomEndpoint().ifPresent(builder::setHost); + this.gcsClient = builder.build().getService(); } if (encMat != null) { @@ -1355,7 +1359,7 @@ private void setupGCSClient( private static boolean areDisabledGcsDefaultCredentials(SFSession session) { return session != null && session.getDisableGcsDefaultCredentials() - || SnowflakeUtil.convertSystemPropertyToBooleanValue( + || convertSystemPropertyToBooleanValue( DISABLE_GCS_DEFAULT_CREDENTIALS_PROPERTY_NAME, false); } @@ -1374,13 +1378,11 @@ public void addStreamingIngestMetadata( meta.addUserMetadata(GCS_STREAMING_INGEST_CLIENT_KEY, clientKey); } - /** Gets streaming ingest client name to the StorageObjectMetadata object */ @Override public String getStreamingIngestClientName(StorageObjectMetadata meta) { return meta.getUserMetadata().get(GCS_STREAMING_INGEST_CLIENT_NAME); } - /** Gets streaming ingest client key to the StorageObjectMetadata object */ @Override public String getStreamingIngestClientKey(StorageObjectMetadata meta) { return meta.getUserMetadata().get(GCS_STREAMING_INGEST_CLIENT_KEY); diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java index 3b33b60f0..f1a2392bb 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java @@ -5,6 +5,8 @@ package net.snowflake.client.jdbc.cloud.storage; import static net.snowflake.client.core.Constants.CLOUD_STORAGE_CREDENTIALS_EXPIRED; +import static net.snowflake.client.jdbc.SnowflakeUtil.createDefaultExecutorService; +import static net.snowflake.client.jdbc.SnowflakeUtil.getRootCause; import static 
net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import com.amazonaws.AmazonClientException; @@ -368,7 +370,7 @@ public void download( new ExecutorFactory() { @Override public ExecutorService newExecutor() { - return SnowflakeUtil.createDefaultExecutorService( + return createDefaultExecutorService( "s3-transfer-manager-downloader-", parallelism); } }) @@ -379,7 +381,8 @@ public ExecutorService newExecutor() { // Pull object metadata from S3 ObjectMetadata meta = amazonClient.getObjectMetadata(remoteStorageLocation, stageFilePath); - Map metaMap = meta.getUserMetadata(); + Map metaMap = + SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata()); String key = metaMap.get(AMZ_KEY); String iv = metaMap.get(AMZ_IV); @@ -481,7 +484,8 @@ public InputStream downloadToStream( InputStream stream = file.getObjectContent(); stopwatch.stop(); long downloadMillis = stopwatch.elapsedMillis(); - Map metaMap = meta.getUserMetadata(); + Map metaMap = + SnowflakeUtil.createCaseInsensitiveMap(meta.getUserMetadata()); String key = metaMap.get(AMZ_KEY); String iv = metaMap.get(AMZ_IV); @@ -611,7 +615,7 @@ public void upload( new ExecutorFactory() { @Override public ExecutorService newExecutor() { - return SnowflakeUtil.createDefaultExecutorService( + return createDefaultExecutorService( "s3-transfer-manager-uploader-", parallelism); } }) @@ -821,7 +825,7 @@ private static void handleS3Exception( // If there is no space left in the download location, java.io.IOException is thrown. // Don't retry. 
- if (SnowflakeUtil.getRootCause(ex) instanceof IOException) { + if (getRootCause(ex) instanceof IOException) { SnowflakeFileTransferAgent.throwNoSpaceLeftError(session, operation, ex, queryId); } @@ -912,7 +916,7 @@ private static void handleS3Exception( } } else { if (ex instanceof InterruptedException - || SnowflakeUtil.getRootCause(ex) instanceof SocketTimeoutException) { + || getRootCause(ex) instanceof SocketTimeoutException) { if (retryCount > s3Client.getMaxRetries()) { throw new SnowflakeSQLLoggedException( queryId, @@ -940,7 +944,12 @@ private static void handleS3Exception( } } - /** Checks the status code of the exception to see if it's a 400 or 404 */ + /** + * Checks the status code of the exception to see if it's a 400 or 404 + * + * @param ex exception + * @return true if it's a 400 or 404 status code + */ public boolean isClientException400Or404(Exception ex) { if (ex instanceof AmazonServiceException) { AmazonServiceException asEx = (AmazonServiceException) (ex); @@ -950,13 +959,13 @@ public boolean isClientException400Or404(Exception ex) { return false; } - /** Returns the material descriptor key */ + /* Returns the material descriptor key */ @Override public String getMatdescKey() { return "x-amz-matdesc"; } - /** Adds encryption metadata to the StorageObjectMetadata object */ + /* Adds encryption metadata to the StorageObjectMetadata object */ @Override public void addEncryptionMetadata( StorageObjectMetadata meta, @@ -970,13 +979,13 @@ public void addEncryptionMetadata( meta.setContentLength(contentLength); } - /** Adds digest metadata to the StorageObjectMetadata object */ + /* Adds digest metadata to the StorageObjectMetadata object */ @Override public void addDigestMetadata(StorageObjectMetadata meta, String digest) { meta.addUserMetadata("sfc-digest", digest); } - /** Gets digest metadata to the StorageObjectMetadata object */ + /* Gets digest metadata to the StorageObjectMetadata object */ @Override public String 
getDigestMetadata(StorageObjectMetadata meta) { return meta.getUserMetadata().get("sfc-digest"); @@ -1001,7 +1010,7 @@ private static SSLConnectionSocketFactory getSSLConnectionSocketFactory() { return s3ConnectionSocketFactory; } - /** + /* * Adds streaming ingest metadata to the StorageObjectMetadata object, used for streaming ingest * per client billing calculation */ @@ -1012,13 +1021,11 @@ public void addStreamingIngestMetadata( meta.addUserMetadata(S3_STREAMING_INGEST_CLIENT_KEY, clientKey); } - /** Gets streaming ingest client name to the StorageObjectMetadata object */ @Override public String getStreamingIngestClientName(StorageObjectMetadata meta) { return meta.getUserMetadata().get(S3_STREAMING_INGEST_CLIENT_NAME); } - /** Gets streaming ingest client key to the StorageObjectMetadata object */ @Override public String getStreamingIngestClientKey(StorageObjectMetadata meta) { return meta.getUserMetadata().get(S3_STREAMING_INGEST_CLIENT_KEY); diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java index 4be936763..ba74ac7d2 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeStorageClient.java @@ -523,9 +523,19 @@ default void addEncryptionMetadataForGcm( */ void addStreamingIngestMetadata(StorageObjectMetadata meta, String clientName, String clientKey); - /** Gets streaming ingest client name to the StorageObjectMetadata object */ + /** + * Gets streaming ingest client name to the StorageObjectMetadata object + * + * @param meta StorageObjectMetadata + * @return Client name + */ String getStreamingIngestClientName(StorageObjectMetadata meta); - /** Gets streaming ingest client key to the StorageObjectMetadata object */ + /** + * Gets streaming ingest client key to the StorageObjectMetadata object + * + * @param meta 
StorageObjectMetadata + * @return Client key + */ String getStreamingIngestClientKey(StorageObjectMetadata meta); } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java index 7a8bf4d36..3a14b8fa0 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java @@ -2,10 +2,17 @@ import java.io.Serializable; import java.util.Map; +import java.util.Optional; import java.util.Properties; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; -/** Encapsulates all the required stage properties used by GET/PUT for Azure and S3 stages */ +/** Encapsulates all the required stage properties used by GET/PUT for Azure, GCS and S3 stages */ public class StageInfo implements Serializable { + + // me-central2 GCS region always use regional urls + // TODO SNOW-1818804: the value is hardcoded now, but it should be server driven + private static final String GCS_REGION_ME_CENTRAL_2 = "me-central2"; + public enum StageType { S3, AZURE, @@ -17,12 +24,18 @@ public enum StageType { private StageType stageType; // The stage type private String location; // The container or bucket private Map credentials; // the credentials required for the stage - private String region; // AWS/S3/GCS region (S3/GCS only) - private String endPoint; // The Azure Storage endpoint (Azure only) + private String region; // S3/GCS region + // An endpoint (Azure, AWS FIPS and GCS custom endpoint override) + private String endPoint; private String storageAccount; // The Azure Storage account (Azure only) private String presignedUrl; // GCS gives us back a presigned URL instead of a cred private boolean isClientSideEncrypted; // whether to encrypt/decrypt files on the stage - private boolean useS3RegionalUrl; // whether to use s3 regional URL (AWS Only) + // whether to use s3 regional URL (AWS Only) + // TODO 
SNOW-1818804: this field will be deprecated when the server returns {@link + // #useRegionalUrl} + private boolean useS3RegionalUrl; + // whether to use regional URL (AWS and GCS only) + private boolean useRegionalUrl; private Properties proxyProperties; /* @@ -166,6 +179,16 @@ public boolean getUseS3RegionalUrl() { return useS3RegionalUrl; } + @SnowflakeJdbcInternalApi + public void setUseRegionalUrl(boolean useRegionalUrl) { + this.useRegionalUrl = useRegionalUrl; + } + + @SnowflakeJdbcInternalApi + public boolean getUseRegionalUrl() { + return useRegionalUrl; + } + private static boolean isSpecified(String arg) { return !(arg == null || arg.equalsIgnoreCase("")); } @@ -173,9 +196,22 @@ private static boolean isSpecified(String arg) { public void setProxyProperties(Properties proxyProperties) { this.proxyProperties = proxyProperties; } - ; public Properties getProxyProperties() { return proxyProperties; } + + @SnowflakeJdbcInternalApi + public Optional gcsCustomEndpoint() { + if (stageType != StageType.GCS) { + return Optional.empty(); + } + if (endPoint != null && !endPoint.trim().isEmpty() && !"null".equals(endPoint)) { + return Optional.of(endPoint); + } + if (GCS_REGION_ME_CENTRAL_2.equalsIgnoreCase(region) || useRegionalUrl) { + return Optional.of(String.format("storage.%s.rep.googleapis.com", region.toLowerCase())); + } + return Optional.empty(); + } } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java index ac7de73a6..69d56e195 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java @@ -47,6 +47,7 @@ public static StorageClientFactory getFactory() { * @param stage the stage properties * @param parallel the degree of parallelism to be used by the client * @param encMat encryption material for the client + * @param 
session SFSession * @return a SnowflakeStorageClient interface to the instance created * @throws SnowflakeSQLException if any error occurs */ @@ -58,8 +59,9 @@ public SnowflakeStorageClient createClient( switch (stage.getStageType()) { case S3: boolean useS3RegionalUrl = - (stage.getUseS3RegionalUrl() - || (session != null && session.getUseRegionalS3EndpointsForPresignedURL())); + stage.getUseS3RegionalUrl() + || stage.getUseRegionalUrl() + || session != null && session.getUseRegionalS3EndpointsForPresignedURL(); return createS3Client( stage.getCredentials(), parallel, diff --git a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java index ed360789e..5e163c8bf 100644 --- a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java +++ b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java @@ -158,7 +158,11 @@ public JSONObject getContext() { return context; } - /** Note: Only used for IT */ + /** + * Note: Only used for IT + * + * @param params parameter map + */ public void updateContextForIT(Map params) { Properties info = new Properties(); for (String key : params.keySet()) { @@ -247,7 +251,11 @@ private void configureDeployment(SnowflakeConnectString conStr) { this.setDeployment(deployment); } - /** whether the telemetry service is enabled for current deployment */ + /** + * whether the telemetry service is enabled for current deployment + * + * @return true if the telemetry service is enabled for current deployment + */ public boolean isDeploymentEnabled() { return ENABLED_DEPLOYMENT.contains(this.serverDeployment.name); } @@ -372,7 +380,11 @@ public void count() { eventCnt.incrementAndGet(); } - /** Report the event to the telemetry server in a new thread */ + /** + * Report the event to the telemetry server in a new thread + * + * @param event TelemetryEvent + */ public void report(TelemetryEvent event) { 
reportChooseEvent(event, /* isHTAP */ false); } @@ -389,7 +401,12 @@ public void reportChooseEvent(TelemetryEvent event, boolean isHTAP) { TelemetryThreadPool.getInstance().execute(runUpload); } - /** Convert an event to a payload in string */ + /** + * Convert an event to a payload in string + * + * @param event TelemetryEvent + * @return the string payload + */ public String exportQueueToString(TelemetryEvent event) { JSONArray logs = new JSONArray(); logs.add(event); @@ -509,7 +526,13 @@ private void uploadPayload() { } } - /** log OCSP exception to telemetry */ + /** + * log OCSP exception to telemetry + * + * @param eventType event type + * @param telemetryData JSON telemetry data + * @param ex CertificateException + */ public void logOCSPExceptionTelemetryEvent( String eventType, JSONObject telemetryData, CertificateException ex) { if (enabled) { @@ -533,7 +556,24 @@ public void logOCSPExceptionTelemetryEvent( } } - /** log error http response to telemetry */ + /** + * log error http response to telemetry + * + * @param eventName the event name + * @param request the HttpRequestBase + * @param injectSocketTimeout the socket timeout + * @param canceling cancelling + * @param withoutCookies without cookies + * @param includeRetryParameters include retry parameters + * @param includeRequestGuid include rest GUID + * @param response the CloseableHttpResponse + * @param savedEx the saved exception + * @param breakRetryReason the break retry reason + * @param retryTimeout the retry timeout + * @param retryCount retry count + * @param sqlState the SQL state + * @param errorCode the error code + */ public void logHttpRequestTelemetryEvent( String eventName, HttpRequestBase request, @@ -593,7 +633,12 @@ public void logHttpRequestTelemetryEvent( } } - /** log execution times from various processing slices */ + /** + * log execution times from various processing slices + * + * @param telemetryData JSON telemetry data + * @param eventName the event name + */ public void 
logExecutionTimeTelemetryEvent(JSONObject telemetryData, String eventName) { if (htapEnabled) { TelemetryEvent.LogBuilder logBuilder = new TelemetryEvent.LogBuilder(); diff --git a/src/main/java/net/snowflake/client/log/ArgSupplier.java b/src/main/java/net/snowflake/client/log/ArgSupplier.java index f7fef53a6..adead308d 100644 --- a/src/main/java/net/snowflake/client/log/ArgSupplier.java +++ b/src/main/java/net/snowflake/client/log/ArgSupplier.java @@ -11,5 +11,10 @@ */ @FunctionalInterface public interface ArgSupplier { + /** + * Get value + * + * @return Object value. + */ Object get(); } diff --git a/src/main/java/net/snowflake/client/log/JDK14Logger.java b/src/main/java/net/snowflake/client/log/JDK14Logger.java index d70009e16..e9ae25696 100644 --- a/src/main/java/net/snowflake/client/log/JDK14Logger.java +++ b/src/main/java/net/snowflake/client/log/JDK14Logger.java @@ -185,7 +185,9 @@ public static Level getLevel() { /** * This is way to enable logging in JDBC through TRACING parameter or sf client config file. * - * @param level + * @param level log level + * @param logPath log path + * @throws IOException if there is an error writing to the log */ public static synchronized void instantiateLogger(Level level, String logPath) throws IOException { @@ -212,6 +214,9 @@ public static synchronized void instantiateLogger(Level level, String logPath) * places. * *

This method will convert string in ex.1 to ex.2 + * + * @param original original string + * @return refactored string */ private String refactorString(String original) { StringBuilder sb = new StringBuilder(); diff --git a/src/main/java/net/snowflake/client/log/SFLogLevel.java b/src/main/java/net/snowflake/client/log/SFLogLevel.java index 18aeeb2a6..94e530af2 100644 --- a/src/main/java/net/snowflake/client/log/SFLogLevel.java +++ b/src/main/java/net/snowflake/client/log/SFLogLevel.java @@ -23,8 +23,8 @@ public enum SFLogLevel { * Method to parse the input loglevel string and returns corresponding loglevel. This method uses * case in-sensitive matching. * - * @param levelStr - * @return + * @param levelStr log level string + * @return SFLogLevel */ public static SFLogLevel getLogLevel(String levelStr) { for (SFLogLevel level : SFLogLevel.values()) { diff --git a/src/main/java/net/snowflake/client/util/SecretDetector.java b/src/main/java/net/snowflake/client/util/SecretDetector.java index 3ae48defa..3c0727de7 100644 --- a/src/main/java/net/snowflake/client/util/SecretDetector.java +++ b/src/main/java/net/snowflake/client/util/SecretDetector.java @@ -95,7 +95,8 @@ public class SecretDetector { /** * Check whether the name is sensitive * - * @param name + * @param name the name + * @return true if the name is sensitive. */ public static boolean isSensitive(String name) { return SENSITIVE_NAME_SET.contains(name.toLowerCase()); diff --git a/src/main/java/net/snowflake/client/util/TimeMeasurement.java b/src/main/java/net/snowflake/client/util/TimeMeasurement.java index 390294236..797f454c1 100644 --- a/src/main/java/net/snowflake/client/util/TimeMeasurement.java +++ b/src/main/java/net/snowflake/client/util/TimeMeasurement.java @@ -12,7 +12,11 @@ public class TimeMeasurement { private long start; private long end; - /** Get the start time as epoch time in microseconds. */ + /** + * Get the start time as epoch time in microseconds. 
+ * + * @return the start time as epoch time in microseconds. + */ public long getStart() { return start; } @@ -22,7 +26,11 @@ public void setStart() { this.start = SnowflakeUtil.getEpochTimeInMicroSeconds(); } - /** Get the stop time as epoch time in microseconds. */ + /** + * Get the stop time as epoch time in microseconds. + * + * @return the stop time as epoch time in microseconds. + */ public long getEnd() { return end; } diff --git a/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java index 8cf16c6bd..f4f226fa9 100644 --- a/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -1,12 +1,15 @@ package com.snowflake.client.jdbc; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.AbstractDriverIT; -import org.junit.Test; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +@Tag(TestTags.CONNECTION) public class SnowflakeDriverIT extends AbstractDriverIT { @Test diff --git a/src/test/java/net/snowflake/client/AbstractDriverIT.java b/src/test/java/net/snowflake/client/AbstractDriverIT.java index 4a3acea23..3104ce7e9 100644 --- a/src/test/java/net/snowflake/client/AbstractDriverIT.java +++ b/src/test/java/net/snowflake/client/AbstractDriverIT.java @@ -24,12 +24,9 @@ import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; -import org.junit.Rule; /** Base test class with common constants, data structures and methods */ public class AbstractDriverIT { - // This is required to use ConditionalIgnore annotation. 
- @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; public static final String DRIVER_CLASS_COM = "com.snowflake.client.jdbc.SnowflakeDriver"; diff --git a/src/test/java/net/snowflake/client/AssumptionUtils.java b/src/test/java/net/snowflake/client/AssumptionUtils.java new file mode 100644 index 000000000..73ae13fbb --- /dev/null +++ b/src/test/java/net/snowflake/client/AssumptionUtils.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client; + +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import static org.junit.jupiter.api.Assumptions.assumeFalse; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +import net.snowflake.client.core.Constants; + +public class AssumptionUtils { + public static void assumeNotRunningOnGithubActionsMac() { + assumeFalse(isRunningOnGithubActions() && Constants.getOS() == Constants.OS.MAC); + } + + public static void assumeNotRunningOnJava8() { + assumeFalse(systemGetProperty("java.version").startsWith("1.8.0")); + } + + public static void assumeNotRunningOnJava21() { + assumeFalse(systemGetProperty("java.version").startsWith("21.")); + } + + public static void assumeRunningOnGithubActions() { + assumeTrue(isRunningOnGithubActions()); + } + + public static boolean isRunningOnGithubActions() { + return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; + } + + public static void assumeRunningOnLinuxMac() { + assumeTrue(Constants.getOS() == Constants.OS.LINUX || Constants.getOS() == Constants.OS.MAC); + } +} diff --git a/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java b/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java deleted file mode 100644 index fe20883db..000000000 --- a/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java +++ /dev/null @@ -1,125 +0,0 @@ -package net.snowflake.client; 
- -/* - * Created by hyu on 1/22/18. - */ - -/* -Copyright (c) 2013,2014 RĂ¼diger Herrmann -All rights reserved. This program and the accompanying materials -are made available under the terms of the Eclipse Public License v1.0 -which accompanies this distribution, and is available at -http://www.eclipse.org/legal/epl-v10.html - -Contributors: -RĂ¼diger Herrmann - initial API and implementation -Matt Morrissette - allow to use non-static inner IgnoreConditions -*/ - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.lang.reflect.Modifier; -import org.junit.Assume; -import org.junit.rules.MethodRule; -import org.junit.runners.model.FrameworkMethod; -import org.junit.runners.model.Statement; - -public class ConditionalIgnoreRule implements MethodRule { - - public interface IgnoreCondition { - boolean isSatisfied(); - } - - @Retention(RetentionPolicy.RUNTIME) - @Target({ElementType.METHOD}) - public @interface ConditionalIgnore { - Class condition(); - } - - @Override - public Statement apply(Statement base, FrameworkMethod method, Object target) { - Statement result = base; - if (hasConditionalIgnoreAnnotation(method)) { - IgnoreCondition condition = getIgnoreCondition(target, method); - if (condition.isSatisfied()) { - result = new IgnoreStatement(condition); - } - } - return result; - } - - private static boolean hasConditionalIgnoreAnnotation(FrameworkMethod method) { - return method.getAnnotation(ConditionalIgnore.class) != null; - } - - private static IgnoreCondition getIgnoreCondition(Object target, FrameworkMethod method) { - ConditionalIgnore annotation = method.getAnnotation(ConditionalIgnore.class); - return new IgnoreConditionCreator(target, annotation).create(); - } - - private static class IgnoreConditionCreator { - private final Object target; - private final Class conditionType; - - IgnoreConditionCreator(Object target, 
ConditionalIgnore annotation) { - this.target = target; - this.conditionType = annotation.condition(); - } - - IgnoreCondition create() { - checkConditionType(); - try { - return createCondition(); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private IgnoreCondition createCondition() throws Exception { - IgnoreCondition result; - if (isConditionTypeStandalone()) { - result = conditionType.newInstance(); - } else { - result = conditionType.getDeclaredConstructor(target.getClass()).newInstance(target); - } - return result; - } - - private void checkConditionType() { - if (!isConditionTypeStandalone() && !isConditionTypeDeclaredInTarget()) { - String msg = - "Conditional class '%s' is a member class " - + "but was not declared inside the test case using it.\n" - + "Either make this class a static class, " - + "standalone class (by declaring it in it's own file) " - + "or move it inside the test case using it"; - throw new IllegalArgumentException(String.format(msg, conditionType.getName())); - } - } - - private boolean isConditionTypeStandalone() { - return !conditionType.isMemberClass() || Modifier.isStatic(conditionType.getModifiers()); - } - - private boolean isConditionTypeDeclaredInTarget() { - return target.getClass().isAssignableFrom(conditionType.getDeclaringClass()); - } - } - - private static class IgnoreStatement extends Statement { - private final IgnoreCondition condition; - - IgnoreStatement(IgnoreCondition condition) { - this.condition = condition; - } - - @Override - public void evaluate() { - Assume.assumeTrue("Ignored by " + condition.getClass().getSimpleName(), false); - } - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAWS.java b/src/test/java/net/snowflake/client/RunningNotOnAWS.java deleted file mode 100644 index 70f54ab8f..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnAWS.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 
Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningNotOnAWS implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && !cloudProvider.equalsIgnoreCase("AWS"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAzure.java b/src/test/java/net/snowflake/client/RunningNotOnAzure.java deleted file mode 100644 index e2a00966c..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnAzure.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningNotOnAzure implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && !cloudProvider.equalsIgnoreCase("Azure"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGCP.java b/src/test/java/net/snowflake/client/RunningNotOnGCP.java deleted file mode 100644 index 7a5c7aafb..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnGCP.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningNotOnGCP implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && !cloudProvider.equalsIgnoreCase("GCP"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java b/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java deleted file mode 100644 index 9b872fc8b..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnGithubActionsMac implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return isRunningOnGithubActionsMac(); - } - - public static boolean isRunningOnGithubActionsMac() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null && Constants.getOS() == Constants.OS.MAC; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnJava21.java b/src/test/java/net/snowflake/client/RunningNotOnJava21.java deleted file mode 100644 index 4e2e3e03c..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnJava21.java +++ /dev/null @@ -1,13 +0,0 @@ -package net.snowflake.client; - -import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; - -public class RunningNotOnJava21 implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return isRunningOnJava21(); - } - - public static boolean isRunningOnJava21() { - return systemGetProperty("java.version").startsWith("21."); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnJava8.java b/src/test/java/net/snowflake/client/RunningNotOnJava8.java deleted file mode 100644 index 
8ee4b3e40..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnJava8.java +++ /dev/null @@ -1,13 +0,0 @@ -package net.snowflake.client; - -import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; - -public class RunningNotOnJava8 implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return isRunningOnJava8(); - } - - public static boolean isRunningOnJava8() { - return systemGetProperty("java.version").startsWith("1.8.0"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinux.java b/src/test/java/net/snowflake/client/RunningNotOnLinux.java deleted file mode 100644 index 3cbaf1339..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnLinux.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnLinux implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.LINUX; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java deleted file mode 100644 index a99eaa3b7..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java +++ /dev/null @@ -1,13 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnLinuxMac implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; - } - - public static boolean isNotRunningOnLinuxMac() { - return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java b/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java deleted file mode 100644 index 596f5ca55..000000000 --- 
a/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java +++ /dev/null @@ -1,10 +0,0 @@ -package net.snowflake.client; - -import static net.snowflake.client.RunningOnGithubAction.isRunningOnGithubAction; - -public class RunningNotOnTestaccount implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return (!("testaccount".equals(TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT"))) - || isRunningOnGithubAction()); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnWin.java b/src/test/java/net/snowflake/client/RunningNotOnWin.java deleted file mode 100644 index ce5cdf7d1..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnWin.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnWin implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.WINDOWS; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnWinMac.java b/src/test/java/net/snowflake/client/RunningNotOnWinMac.java deleted file mode 100644 index 9d1c32bdc..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnWinMac.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnWinMac implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.MAC && Constants.getOS() != Constants.OS.WINDOWS; - } -} diff --git a/src/test/java/net/snowflake/client/RunningOnGithubAction.java b/src/test/java/net/snowflake/client/RunningOnGithubAction.java deleted file mode 100644 index 0326c4fca..000000000 --- a/src/test/java/net/snowflake/client/RunningOnGithubAction.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -/** Run tests on CI */ -public class RunningOnGithubAction implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; - } - - public static boolean isRunningOnGithubAction() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; - } -} diff --git a/src/test/java/net/snowflake/client/RunningOnTestaccount.java b/src/test/java/net/snowflake/client/RunningOnTestaccount.java deleted file mode 100644 index 186496977..000000000 --- a/src/test/java/net/snowflake/client/RunningOnTestaccount.java +++ /dev/null @@ -1,7 +0,0 @@ -package net.snowflake.client; - -public class RunningOnTestaccount implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT").contains("testaccount"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningOnWin.java b/src/test/java/net/snowflake/client/RunningOnWin.java deleted file mode 100644 index 025ab1e04..000000000 --- a/src/test/java/net/snowflake/client/RunningOnWin.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningOnWin implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() == Constants.OS.WINDOWS; - } -} diff --git a/src/test/java/net/snowflake/client/SkipOnThinJar.java b/src/test/java/net/snowflake/client/SkipOnThinJar.java deleted file mode 100644 index d02d104dd..000000000 --- a/src/test/java/net/snowflake/client/SkipOnThinJar.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -/** Skip tests on CI when thin jar is tested */ -public class SkipOnThinJar implements ConditionalIgnoreRule.IgnoreCondition { - @Override - public boolean isSatisfied() { - return "-Dthin-jar".equals(TestUtil.systemGetEnv("ADDITIONAL_MAVEN_PROFILE")); - } -} diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index 76487bcb4..7f4b8d90a 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -5,10 +5,10 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.hamcrest.Matchers.matchesPattern; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.SQLException; import java.sql.Statement; @@ -19,7 +19,7 @@ import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; -import org.junit.Assert; +import org.hamcrest.MatcherAssert; public class TestUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(TestUtil.class); @@ -53,7 +53,7 @@ public static boolean isSchemaGeneratedInTests(String schema) { public static void assertSFException(int errorCode, TestRunInterface testCode) { try { testCode.run(); - Assert.fail(); + fail(); } catch (SFException e) { assertThat(e.getVendorCode(), is(errorCode)); } @@ -91,8 +91,8 @@ public static String systemGetEnv(String env) { public static void assertValidQueryId(String queryId) { assertNotNull(queryId); - assertTrue( - "Expecting " + queryId + " is a valid UUID", QUERY_ID_REGEX.matcher(queryId).matches()); + 
MatcherAssert.assertThat( + "Expecting " + queryId + " is a valid UUID", queryId, matchesPattern(QUERY_ID_REGEX)); } /** @@ -144,4 +144,14 @@ public static void expectSnowflakeLoggedFeatureNotSupportedException(MethodRaise assertEquals(ex.getClass().getSimpleName(), "SnowflakeLoggedFeatureNotSupportedException"); } } + + /** + * Compares two string values both values are cleaned of whitespaces + * + * @param expected expected value + * @param actual actual value + */ + public static void assertEqualsIgnoringWhitespace(String expected, String actual) { + assertEquals(expected.replaceAll("\\s+", ""), actual.replaceAll("\\s+", "")); + } } diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java b/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java new file mode 100644 index 000000000..993d9d6ad --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +public @interface DontRunOnGithubActions {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java b/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java new file mode 100644 index 000000000..29374b837 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnJre; +import org.junit.jupiter.api.condition.JRE; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledOnJre(JRE.JAVA_21) +public @interface DontRunOnJava21 {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java b/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java new file mode 100644 index 000000000..81a3a0c03 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnJre; +import org.junit.jupiter.api.condition.JRE; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledOnJre(JRE.JAVA_8) +public @interface DontRunOnJava8 {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java b/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java new file mode 100644 index 000000000..5c9fff944 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "SNOWFLAKE_TEST_ACCOUNT", matches = "testaccount") +public @interface DontRunOnTestaccount {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java b/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java new file mode 100644 index 000000000..bb254a2c4 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "ADDITIONAL_MAVEN_PROFILE", matches = "-Dthin-jar") +public @interface DontRunOnThinJar {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java b/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java new file mode 100644 index 000000000..140f0d752 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledOnOs(OS.WINDOWS) +public @interface DontRunOnWindows {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnAWS.java b/src/test/java/net/snowflake/client/annotations/RunOnAWS.java new file mode 100644 index 000000000..fd3acc546 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnAWS.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)AWS(?-i)") +public @interface RunOnAWS {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnAzure.java b/src/test/java/net/snowflake/client/annotations/RunOnAzure.java new file mode 100644 index 000000000..13c8379b3 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnAzure.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)Azure(?-i)") +public @interface RunOnAzure {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnGCP.java b/src/test/java/net/snowflake/client/annotations/RunOnGCP.java new file mode 100644 index 000000000..e361aa808 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnGCP.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)GCP(?-i)") +public @interface RunOnGCP {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java b/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java new file mode 100644 index 000000000..f133022e3 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +@DisabledOnOs(OS.MAC) +public @interface RunOnGithubActionsNotMac {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnLinux.java b/src/test/java/net/snowflake/client/annotations/RunOnLinux.java new file mode 100644 index 000000000..33231effe --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnLinux.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs({OS.LINUX, OS.AIX}) +public @interface RunOnLinux {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java b/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java new file mode 100644 index 000000000..6c6013154 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs({OS.MAC, OS.LINUX, OS.AIX}) +public @interface RunOnLinuxOrMac {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnMac.java b/src/test/java/net/snowflake/client/annotations/RunOnMac.java new file mode 100644 index 000000000..a5f18a345 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnMac.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs(OS.MAC) +public @interface RunOnMac {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java b/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java new file mode 100644 index 000000000..6dacdb993 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "SNOWFLAKE_TEST_ACCOUNT", matches = "testaccount") +@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +public @interface RunOnTestaccountNotOnGithubActions {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnWindows.java b/src/test/java/net/snowflake/client/annotations/RunOnWindows.java new file mode 100644 index 000000000..69a2ee7ff --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnWindows.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs(OS.WINDOWS) +public @interface RunOnWindows {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java b/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java new file mode 100644 index 000000000..77d50109c --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs({OS.WINDOWS, OS.MAC}) +public @interface RunOnWindowsOrMac {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryArrow.java b/src/test/java/net/snowflake/client/category/TestCategoryArrow.java deleted file mode 100644 index 59a8396cd..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryArrow.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryArrow {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryConnection.java b/src/test/java/net/snowflake/client/category/TestCategoryConnection.java deleted file mode 100644 index cfa5bfd30..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryConnection.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryConnection {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryCore.java b/src/test/java/net/snowflake/client/category/TestCategoryCore.java deleted file mode 100644 index 7c97c58ef..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryCore.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryCore {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java deleted file mode 100644 index ecb5c0509..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public 
interface TestCategoryDiagnostic {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryLoader.java b/src/test/java/net/snowflake/client/category/TestCategoryLoader.java deleted file mode 100644 index eac9e7bef..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryLoader.java +++ /dev/null @@ -1,4 +0,0 @@ -package net.snowflake.client.category; - -/** Test category Loader */ -public interface TestCategoryLoader {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryOthers.java b/src/test/java/net/snowflake/client/category/TestCategoryOthers.java deleted file mode 100644 index 7f11baaa9..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryOthers.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryOthers {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java b/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java deleted file mode 100644 index 7d9824823..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryResultSet {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryStatement.java b/src/test/java/net/snowflake/client/category/TestCategoryStatement.java deleted file mode 100644 index 5381cbb00..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryStatement.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryStatement {} diff --git a/src/test/java/net/snowflake/client/category/TestTags.java b/src/test/java/net/snowflake/client/category/TestTags.java new file mode 100644 index 000000000..92cd7ce3b --- /dev/null +++ b/src/test/java/net/snowflake/client/category/TestTags.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.category; + +public class TestTags { + private TestTags() {} + + public static final String ARROW = "arrow"; + public static final String CONNECTION = "connection"; + public static final String CORE = "core"; + public static final String DIAGNOSTIC = "diagnostic"; + public static final String LOADER = "loader"; + public static final String OTHERS = "others"; + public static final String RESULT_SET = "resultSet"; + public static final String STATEMENT = "statement"; +} diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index a00784f68..f570cfb7f 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -7,10 +7,10 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static net.snowflake.client.jdbc.SnowflakeUtil.systemSetEnv; import static net.snowflake.client.jdbc.SnowflakeUtil.systemUnsetEnv; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mockStatic; import java.io.IOException; @@ -18,8 +18,8 @@ import java.nio.file.Path; import java.nio.file.Paths; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; public class SFClientConfigParserTest { @@ -30,7 +30,7 @@ public class SFClientConfigParserTest { private Path configFilePath; - @After + @AfterEach 
public void cleanup() throws IOException { if (configFilePath != null) { Files.deleteIfExists(configFilePath); diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java index 01da714e5..50dd75ff2 100644 --- a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -1,12 +1,13 @@ package net.snowflake.client.config; +import static net.snowflake.client.AssumptionUtils.assumeRunningOnLinuxMac; +import static net.snowflake.client.config.SFConnectionConfigParser.SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assume.assumeFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.fasterxml.jackson.dataformat.toml.TomlMapper; import java.io.File; @@ -17,35 +18,52 @@ import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; -import net.snowflake.client.RunningNotOnLinuxMac; import net.snowflake.client.core.Constants; import net.snowflake.client.jdbc.SnowflakeSQLException; import 
net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class SFConnectionConfigParserTest { + private static final List ENV_VARIABLES_KEYS = + new ArrayList<>( + Arrays.asList( + SNOWFLAKE_HOME_KEY, + SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, + SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION)); private Path tempPath = null; private TomlMapper tomlMapper = new TomlMapper(); + private Map envVariables = new HashMap(); - @Before + @BeforeEach public void setUp() throws IOException { tempPath = Files.createTempDirectory(".snowflake"); + ENV_VARIABLES_KEYS.stream() + .forEach( + key -> { + if (SnowflakeUtil.systemGetEnv(key) != null) { + envVariables.put(key, SnowflakeUtil.systemGetEnv(key)); + } + }); } - @After + @AfterEach public void close() throws IOException { SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY); SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + SnowflakeUtil.systemUnsetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION); Files.walk(tempPath).map(Path::toFile).forEach(File::delete); Files.delete(tempPath); + envVariables.forEach((key, value) -> SnowflakeUtil.systemSetEnv(key, value)); } @Test @@ -87,7 +105,7 @@ public void testThrowErrorWhenWrongPermissionsForConnectionConfigurationFile() File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); prepareConnectionConfigurationTomlFile( Collections.singletonMap("token_file_path", tokenFile.toString()), false, false); - assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assumeRunningOnLinuxMac(); assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } @@ -98,11 +116,26 @@ public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException File tokenFile = new File(Paths.get(tempPath.toString(), 
"token").toUri()); prepareConnectionConfigurationTomlFile( Collections.singletonMap("token_file_path", tokenFile.toString()), true, false); - assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assumeRunningOnLinuxMac(); assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } + @Test + public void testNoThrowErrorWhenWrongPermissionsForTokenFileButSkippingFlagIsEnabled() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + SnowflakeUtil.systemSetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, "true"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true, false); + + ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals(tokenFile.toString(), data.getParams().get("token_file_path")); + } + @Test public void testLoadSFConnectionConfigWithHostConfigured() throws SnowflakeSQLException, IOException { @@ -129,7 +162,20 @@ public void shouldThrowExceptionIfNoneOfHostAndAccountIsSet() throws IOException extraparams.put("host", null); extraparams.put("account", null); prepareConnectionConfigurationTomlFile(extraparams); - Assert.assertThrows( + assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } + + @Test + public void shouldThrowExceptionIfTokenIsNotSetForOauth() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + SnowflakeUtil.systemSetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, "true"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + 
prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true, false, ""); + + assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } @@ -144,6 +190,16 @@ private void prepareConnectionConfigurationTomlFile(Map moreParameters) throws I private void prepareConnectionConfigurationTomlFile( Map moreParameters, boolean onlyUserPermissionConnection, boolean onlyUserPermissionToken) throws IOException { + prepareConnectionConfigurationTomlFile( + moreParameters, onlyUserPermissionConnection, onlyUserPermissionToken, "token_from_file"); + } + + private void prepareConnectionConfigurationTomlFile( + Map moreParameters, + boolean onlyUserPermissionConnection, + boolean onlyUserPermissionToken, + String token) + throws IOException { Path path = Paths.get(tempPath.toString(), "connections.toml"); Path filePath = createFilePathWithPermission(path, onlyUserPermissionConnection); File file = filePath.toFile(); @@ -166,7 +222,16 @@ private void prepareConnectionConfigurationTomlFile( createFilePathWithPermission( Paths.get(configurationParams.get("token_file_path").toString()), onlyUserPermissionToken); - Files.write(tokenFilePath, "token_from_file".getBytes()); + Files.write(tokenFilePath, token.getBytes()); + Path emptyTokenFilePath = + createFilePathWithPermission( + Paths.get( + configurationParams + .get("token_file_path") + .toString() + .replaceAll("token", "emptytoken")), + onlyUserPermissionToken); + Files.write(emptyTokenFilePath, "".getBytes()); } } diff --git a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java index 92ec8a624..f5e41e260 100644 --- a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java +++ b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java @@ -1,79 +1,55 @@ package net.snowflake.client.config; -import static org.junit.Assert.fail; +import 
static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.attribute.PosixFilePermissions; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnWin; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.annotations.DontRunOnWindows; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; -@RunWith(Parameterized.class) public class SFPermissionsTest { - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); - - @Parameterized.Parameters(name = "permission={0}") - public static Set> data() { - Map testConfigFilePermissions = - new HashMap() { - { - put("rwx------", false); - put("rw-------", false); - put("r-x------", false); - put("r--------", false); - put("rwxrwx---", true); - put("rwxrw----", true); - put("rwxr-x---", false); - put("rwxr-----", false); - put("rwx-wx---", true); - put("rwx-w----", true); - put("rwx--x---", false); - put("rwx---rwx", true); - put("rwx---rw-", true); - put("rwx---r-x", false); - put("rwx---r--", false); - put("rwx----wx", true); - put("rwx----w-", true); - put("rwx-----x", false); - } - }; - return testConfigFilePermissions.entrySet(); - } - Path configFilePath = Paths.get("config.json"); String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}"; - String permission; - Boolean isSucceed; - - public SFPermissionsTest(Map.Entry permission) { - this.permission = permission.getKey(); - this.isSucceed = permission.getValue(); - } - @Before + @BeforeEach public void createConfigFile() throws 
IOException { Files.write(configFilePath, configJson.getBytes()); } - @After + @AfterEach public void cleanupConfigFile() throws IOException { Files.deleteIfExists(configFilePath); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnWin.class) - public void testLogDirectoryPermissions() throws IOException { + @ParameterizedTest + @CsvSource({ + "rwx------,false", + "rw-------,false", + "r-x------,false", + "r--------,false", + "rwxrwx---,true", + "rwxrw----,true", + "rwxr-x---,false", + "rwxr-----,false", + "rwx-wx---,true", + "rwx-w----,true", + "rwx--x---,false", + "rwx---rwx,true", + "rwx---rw-,true", + "rwx---r-x,false", + "rwx---r--,false", + "rwx----wx,true", + "rwx----w-,true", + "rwx-----x,false" + }) + @DontRunOnWindows + public void testLogDirectoryPermissions(String permission, boolean isSucceed) throws IOException { // TODO: SNOW-1503722 Change to check for thrown exceptions // Don't run on Windows Files.setPosixFilePermissions(configFilePath, PosixFilePermissions.fromString(permission)); diff --git a/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java b/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java index f11614c8b..beb0ad292 100644 --- a/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java +++ b/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java @@ -4,10 +4,10 @@ package net.snowflake.client.core; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazonaws.ClientConfiguration; import 
com.amazonaws.Protocol; @@ -16,13 +16,12 @@ import java.net.Proxy; import java.util.HashMap; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.annotations.DontRunOnGithubActions; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.cloud.storage.S3HttpUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class CoreUtilsMiscellaneousTest { @@ -41,7 +40,7 @@ public void testSnowflakeAssertTrue() { /** Test that Constants.getOS function is working as expected */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testgetOS() { Constants.clearOSForTesting(); String originalOS = systemGetProperty("os.name"); diff --git a/src/test/java/net/snowflake/client/core/EventHandlerTest.java b/src/test/java/net/snowflake/client/core/EventHandlerTest.java index eb930f7c6..56b48b987 100644 --- a/src/test/java/net/snowflake/client/core/EventHandlerTest.java +++ b/src/test/java/net/snowflake/client/core/EventHandlerTest.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -14,18 +14,17 @@ import java.util.logging.LogRecord; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; public class EventHandlerTest { - @Rule public TemporaryFolder 
tmpFolder = new TemporaryFolder(); + @TempDir File tmpFolder; - @Before + @BeforeEach public void setUp() throws IOException { - tmpFolder.newFolder("snowflake_dumps"); - System.setProperty("snowflake.dump_path", tmpFolder.getRoot().getCanonicalPath()); + new File(tmpFolder, "snowflake_dumps").mkdirs(); + System.setProperty("snowflake.dump_path", tmpFolder.getCanonicalPath()); } @Test diff --git a/src/test/java/net/snowflake/client/core/EventTest.java b/src/test/java/net/snowflake/client/core/EventTest.java index e9ee978e5..7ca041744 100644 --- a/src/test/java/net/snowflake/client/core/EventTest.java +++ b/src/test/java/net/snowflake/client/core/EventTest.java @@ -5,8 +5,8 @@ package net.snowflake.client.core; import static net.snowflake.client.core.EventUtil.DUMP_PATH_PROP; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -14,24 +14,25 @@ import java.nio.file.Files; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; public class EventTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir File tmpFolder; private File homeDirectory; private File dmpDirectory; - @Before + @BeforeEach public void setUp() throws IOException { - homeDirectory = tmpFolder.newFolder("homedir"); - dmpDirectory = tmpFolder.newFolder("homedir", "snowflake_dumps"); + homeDirectory = new File(tmpFolder, "homedir"); + homeDirectory.mkdirs(); + dmpDirectory = new File(homeDirectory, "snowflake_dumps"); + 
dmpDirectory.mkdirs(); } - @After + @AfterEach public void tearDown() { dmpDirectory.delete(); } @@ -58,7 +59,7 @@ public void testWriteEventDumpLine() throws IOException { // created String dmpPath1 = EventUtil.getDumpPathPrefix(); String dmpPath2 = dmpDirectory.getCanonicalPath(); - assertEquals("dump path is: " + EventUtil.getDumpPathPrefix(), dmpPath2, dmpPath1); + assertEquals(dmpPath2, dmpPath1, "dump path is: " + EventUtil.getDumpPathPrefix()); File dumpFile = new File( EventUtil.getDumpPathPrefix() diff --git a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java index f7ad06b46..04cec29fb 100644 --- a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java +++ b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java @@ -1,14 +1,14 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import net.minidev.json.JSONObject; import net.minidev.json.parser.JSONParser; import net.minidev.json.parser.ParseException; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ExecTimeTelemetryDataTest { diff --git a/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java b/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java index 34892843c..00c318227 100644 --- a/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java @@ -3,21 +3,22 @@ */ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.net.SocketTimeoutException; import java.time.Duration; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class HttpUtilLatestIT { private static final String HANG_WEBSERVER_ADDRESS = "http://localhost:12345/hang"; @@ -30,7 +31,8 @@ public void shouldGetDefaultConnectionAndSocketTimeouts() { } /** Added in > 3.14.5 */ - @Test(timeout = 1000L) + @Test + @Timeout(1) public void shouldOverrideConnectionAndSocketTimeouts() { // it's hard to test connection timeout so there is only a test for socket timeout HttpUtil.setConnectionTimeout(100); diff --git a/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java b/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java index cd2e89806..5ffe7c5d3 100644 --- a/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java @@ -6,23 +6,23 @@ import static net.snowflake.client.core.IncidentUtil.INC_DUMP_FILE_EXT; import static net.snowflake.client.core.IncidentUtil.INC_DUMP_FILE_NAME; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.StringWriter; import java.util.zip.GZIPInputStream; -import net.snowflake.client.category.TestCategoryCore; +import 
net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import org.apache.commons.io.IOUtils; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class IncidentUtilLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir File tmpFolder; private static final String FILE_NAME = "sf_incident_123456.dmp.gz"; @Test @@ -34,7 +34,9 @@ public void testOneLinerDescription() { /** Tests dumping JVM metrics for the current process */ @Test public void testDumpVmMetrics() throws IOException { - String dumpPath = tmpFolder.newFolder().getCanonicalPath(); + File dumpDir = new File(tmpFolder, "dump"); + dumpDir.mkdirs(); + String dumpPath = dumpDir.getCanonicalPath(); System.setProperty("snowflake.dump_path", dumpPath); String incidentId = "123456"; @@ -47,13 +49,15 @@ public void testDumpVmMetrics() throws IOException { EventUtil.getDumpPathPrefix() + "/" + INC_DUMP_FILE_NAME + incidentId + INC_DUMP_FILE_EXT; // Read back the file contents - GZIPInputStream gzip = new GZIPInputStream(new FileInputStream(targetVMFileLocation)); - StringWriter sWriter = new StringWriter(); - IOUtils.copy(gzip, sWriter, "UTF-8"); - String output = sWriter.toString(); - assertEquals( - "\n\n\n--------------------------- METRICS " + "---------------------------\n\n", - output.substring(0, 69)); - sWriter.close(); + try (FileInputStream fis = new FileInputStream(targetVMFileLocation); + GZIPInputStream gzip = new GZIPInputStream(fis)) { + StringWriter sWriter = new StringWriter(); + IOUtils.copy(gzip, sWriter, "UTF-8"); + String output = sWriter.toString(); + assertEquals( + "\n\n\n--------------------------- METRICS " + 
"---------------------------\n\n", + output.substring(0, 69)); + sWriter.close(); + } } } diff --git a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java index 9a5af03b2..37bfea5c6 100644 --- a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java +++ b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java @@ -1,93 +1,76 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class OCSPCacheServerTest { - @Parameterized.Parameters( - name = "For host {0} cache server fetch url should be {1} and retry url {2}") - public static Object[][] data() { - return new Object[][] { - { - "bla-12345.global.snowflakecomputing.com", - "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch", - "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry" - }, - { - "bla-12345.global.snowflakecomputing.cn", - "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch", - "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry" - }, - { - "bla-12345.global.snowflakecomputing.xyz", - "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch", - "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry" - }, - { - "bla-12345.GLOBAL.snowflakecomputing.xyz", - "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch", - "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry" - }, - { - 
"bla-12345.snowflakecomputing.com", - "https://ocspssd.snowflakecomputing.com/ocsp/fetch", - "https://ocspssd.snowflakecomputing.com/ocsp/retry" - }, - { - "bla-12345.snowflakecomputing.cn", - "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", - "https://ocspssd.snowflakecomputing.cn/ocsp/retry" - }, - { - "bla-12345.snowflakecomputing.xyz", - "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch", - "https://ocspssd.snowflakecomputing.xyz/ocsp/retry" - }, - { - "bla-12345.SNOWFLAKEcomputing.xyz", - "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch", - "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry" - }, - { - "s3.amazoncomaws.com", - "https://ocspssd.snowflakecomputing.com/ocsp/fetch", - "https://ocspssd.snowflakecomputing.com/ocsp/retry" - }, - { - "s3.amazoncomaws.COM", - "https://ocspssd.snowflakecomputing.COM/ocsp/fetch", - "https://ocspssd.snowflakecomputing.COM/ocsp/retry" - }, - { - "s3.amazoncomaws.com.cn", - "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", - "https://ocspssd.snowflakecomputing.cn/ocsp/retry" - }, - { - "S3.AMAZONCOMAWS.COM.CN", - "https://ocspssd.snowflakecomputing.CN/ocsp/fetch", - "https://ocspssd.snowflakecomputing.CN/ocsp/retry" - }, - }; - } - - private final String host; - private final String expectedFetchUrl; - private final String expectedRetryUrl; + static class URLProvider implements ArgumentsProvider { - public OCSPCacheServerTest(String host, String expectedFetchUrl, String expectedRetryUrl) { - this.host = host; - this.expectedFetchUrl = expectedFetchUrl; - this.expectedRetryUrl = expectedRetryUrl; + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of( + "bla-12345.global.snowflakecomputing.com", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry"), + Arguments.of( + "bla-12345.global.snowflakecomputing.cn", + 
"https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry"), + Arguments.of( + "bla-12345.global.snowflakecomputing.xyz", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry"), + Arguments.of( + "bla-12345.GLOBAL.snowflakecomputing.xyz", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry"), + Arguments.of( + "bla-12345.snowflakecomputing.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry"), + Arguments.of( + "bla-12345.snowflakecomputing.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry"), + Arguments.of( + "bla-12345.snowflakecomputing.xyz", + "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd.snowflakecomputing.xyz/ocsp/retry"), + Arguments.of( + "bla-12345.SNOWFLAKEcomputing.xyz", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry"), + Arguments.of( + "s3.amazoncomaws.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry"), + Arguments.of( + "s3.amazoncomaws.COM", + "https://ocspssd.snowflakecomputing.COM/ocsp/fetch", + "https://ocspssd.snowflakecomputing.COM/ocsp/retry"), + Arguments.of( + "s3.amazoncomaws.com.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry"), + Arguments.of( + "S3.AMAZONCOMAWS.COM.CN", + "https://ocspssd.snowflakecomputing.CN/ocsp/fetch", + "https://ocspssd.snowflakecomputing.CN/ocsp/retry")); + } } - @Test - public void shouldChooseOcspCacheServerUrls() { + @ParameterizedTest(name = "For host {0} cache server fetch url should be {1} and retry url {2}") + @ArgumentsSource(URLProvider.class) + 
public void shouldChooseOcspCacheServerUrls( + String host, String expectedFetchUrl, String expectedRetryUrl) { SFTrustManager.OCSPCacheServer ocspCacheServer = new SFTrustManager.OCSPCacheServer(); ocspCacheServer.resetOCSPResponseCacheServer(host); diff --git a/src/test/java/net/snowflake/client/core/ObjectMapperTest.java b/src/test/java/net/snowflake/client/core/ObjectMapperTest.java index 6868d186e..e0a9e11ab 100644 --- a/src/test/java/net/snowflake/client/core/ObjectMapperTest.java +++ b/src/test/java/net/snowflake/client/core/ObjectMapperTest.java @@ -4,52 +4,58 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.nio.charset.StandardCharsets; import java.sql.SQLException; -import java.util.ArrayList; import java.util.Base64; -import java.util.Collection; -import java.util.List; +import java.util.stream.Stream; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.After; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class ObjectMapperTest { private static final int jacksonDefaultMaxStringLength = 20_000_000; + static String originalLogger; - @Parameterized.Parameters(name = "lobSizeInMB={0}, maxJsonStringLength={1}") - public static Collection data() { - int[] lobSizeInMB = new 
int[] {16, 16, 32, 64, 128}; - // maxJsonStringLength to be set for the corresponding LOB size - int[] maxJsonStringLengths = - new int[] {jacksonDefaultMaxStringLength, 23_000_000, 45_000_000, 90_000_000, 180_000_000}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizeInMB.length; i++) { - ret.add(new Object[] {lobSizeInMB[i], maxJsonStringLengths[i]}); + static class DataProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of(16 * 1024 * 1024, jacksonDefaultMaxStringLength), + Arguments.of(16 * 1024 * 1024, 23_000_000), + Arguments.of(32 * 1024 * 1024, 45_000_000), + Arguments.of(64 * 1024 * 1024, 90_000_000), + Arguments.of(128 * 1024 * 1024, 180_000_000)); } - return ret; } - private final int lobSizeInBytes; - private final int maxJsonStringLength; + @BeforeAll + public static void setProperty() { + originalLogger = System.getProperty("net.snowflake.jdbc.loggerImpl"); + System.setProperty("net.snowflake.jdbc.loggerImpl", "net.snowflake.client.log.JDK14Logger"); + } - @After - public void clearProperty() { + @AfterAll + public static void clearProperty() { + if (originalLogger != null) { + System.setProperty("net.snowflake.jdbc.loggerImpl", originalLogger); + } else { + System.clearProperty("net.snowflake.jdbc.loggerImpl"); + } System.clearProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM); } - public ObjectMapperTest(int lobSizeInMB, int maxJsonStringLength) { - // convert LOB size from MB to bytes - this.lobSizeInBytes = lobSizeInMB * 1024 * 1024; - this.maxJsonStringLength = maxJsonStringLength; + private static void setJacksonDefaultMaxStringLength(int maxJsonStringLength) { System.setProperty( ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString(maxJsonStringLength)); } @@ -61,15 +67,17 @@ public void testInvalidMaxJsonStringLength() throws SQLException { // default maxJsonStringLength value will be used ObjectMapper 
mapper = ObjectMapperFactory.getObjectMapper(); int stringLengthInMapper = mapper.getFactory().streamReadConstraints().getMaxStringLength(); - Assert.assertEquals(ObjectMapperFactory.DEFAULT_MAX_JSON_STRING_LEN, stringLengthInMapper); + assertEquals(ObjectMapperFactory.DEFAULT_MAX_JSON_STRING_LEN, stringLengthInMapper); } - @Test - public void testObjectMapperWithLargeJsonString() { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testObjectMapperWithLargeJsonString(int lobSizeInBytes, int maxJsonStringLength) { + setJacksonDefaultMaxStringLength(maxJsonStringLength); ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); try { JsonNode jsonNode = mapper.readTree(generateBase64EncodedJsonString(lobSizeInBytes)); - Assert.assertNotNull(jsonNode); + assertNotNull(jsonNode); } catch (Exception e) { // exception is expected when jackson's default maxStringLength value is used while retrieving // 16M string data diff --git a/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java index b3af68011..d5afb1af5 100644 --- a/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java +++ b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java @@ -1,42 +1,38 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class PrivateLinkDetectorTest { + static class DataProvider implements ArgumentsProvider { - 
@Parameterized.Parameters(name = "Host {0} is private link: {1}") - public static Object[][] data() { - return new Object[][] { - {"snowhouse.snowflakecomputing.com", false}, - {"snowhouse.privatelink.snowflakecomputing.com", true}, - {"snowhouse.PRIVATELINK.snowflakecomputing.com", true}, - {"snowhouse.snowflakecomputing.cn", false}, - {"snowhouse.privatelink.snowflakecomputing.cn", true}, - {"snowhouse.PRIVATELINK.snowflakecomputing.cn", true}, - {"snowhouse.snowflakecomputing.xyz", false}, - {"snowhouse.privatelink.snowflakecomputing.xyz", true}, - {"snowhouse.PRIVATELINK.snowflakecomputing.xyz", true}, - }; + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of("snowhouse.snowflakecomputing.com", false), + Arguments.of("snowhouse.privatelink.snowflakecomputing.com", true), + Arguments.of("snowhouse.PRIVATELINK.snowflakecomputing.com", true), + Arguments.of("snowhouse.snowflakecomputing.cn", false), + Arguments.of("snowhouse.privatelink.snowflakecomputing.cn", true), + Arguments.of("snowhouse.PRIVATELINK.snowflakecomputing.cn", true), + Arguments.of("snowhouse.snowflakecomputing.xyz", false), + Arguments.of("snowhouse.privatelink.snowflakecomputing.xyz", true), + Arguments.of("snowhouse.PRIVATELINK.snowflakecomputing.xyz", true)); + } } - private final String host; - private final boolean expectedToBePrivateLink; - - public PrivateLinkDetectorTest(String host, boolean expectedToBePrivateLink) { - this.host = host; - this.expectedToBePrivateLink = expectedToBePrivateLink; - } - - @Test - public void shouldDetectPrivateLinkHost() { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void shouldDetectPrivateLinkHost(String host, boolean expectedToBePrivateLink) { assertEquals( - String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), expectedToBePrivateLink, - PrivateLinkDetector.isPrivateLink(host)); + PrivateLinkDetector.isPrivateLink(host), + 
String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink)); } } diff --git a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java index 862dd1c40..e13ecd673 100644 --- a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java +++ b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java @@ -5,12 +5,12 @@ package net.snowflake.client.core; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class QueryContextCacheTest { private QueryContextCache qcc = null; diff --git a/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java b/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java index af6ac5219..8be8fd471 100644 --- a/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java +++ b/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.File; import java.io.FileInputStream; @@ -26,9 +26,8 @@ import java.util.List; import java.util.Map; import java.util.Random; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.SkipOnThinJar; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.annotations.DontRunOnThinJar; +import net.snowflake.client.category.TestTags; import 
net.snowflake.client.jdbc.ArrowResultChunk; import net.snowflake.client.jdbc.BaseJDBCWithSharedConnectionIT; import net.snowflake.client.jdbc.ErrorCode; @@ -63,17 +62,12 @@ import org.apache.arrow.vector.types.pojo.Schema; import org.apache.arrow.vector.util.Text; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class SFArrowResultSetIT extends BaseJDBCWithSharedConnectionIT { - - /** Necessary to conditional ignore tests */ - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); - private Random random = new Random(); /** @@ -83,11 +77,11 @@ public class SFArrowResultSetIT extends BaseJDBCWithSharedConnectionIT { protected BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); /** temporary folder to store result files */ - @Rule public TemporaryFolder resultFolder = new TemporaryFolder(); + @TempDir File tempDir; /** Test the case that all results are returned in first chunk */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testNoOfflineData() throws Throwable { List fieldList = new ArrayList<>(); Map customFieldMeta = new HashMap<>(); @@ -103,8 +97,9 @@ public void testNoOfflineData() throws Throwable { int dataSize = (int) file.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(file); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(file)) { + is.read(dataBytes, 0, dataSize); + } SnowflakeResultSetSerializableV1 resultSetSerializable = new SnowflakeResultSetSerializableV1(); resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE)); @@ -149,7 +144,7 @@ public void 
testEmptyResultSet() throws Throwable { /** Testing the case that all data comes from chunk downloader */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testOnlyOfflineData() throws Throwable { final int colCount = 2; final int chunkCount = 10; @@ -199,7 +194,7 @@ public void testOnlyOfflineData() throws Throwable { /** Testing the case that all data comes from chunk downloader */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testFirstResponseAndOfflineData() throws Throwable { final int colCount = 2; final int chunkCount = 10; @@ -229,8 +224,9 @@ public void testFirstResponseAndOfflineData() throws Throwable { int dataSize = (int) arrowFile.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(arrowFile); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(arrowFile)) { + is.read(dataBytes, 0, dataSize); + } SnowflakeResultSetSerializableV1 resultSetSerializable = new SnowflakeResultSetSerializableV1(); resultSetSerializable.setFirstChunkStringData(Base64.getEncoder().encodeToString(dataBytes)); @@ -280,8 +276,7 @@ private class MockChunkDownloader implements ChunkDownloader { public SnowflakeResultChunk getNextChunkToConsume() throws SnowflakeSQLException { if (currentFileIndex < resultFileNames.size()) { ArrowResultChunk resultChunk = new ArrowResultChunk("", 0, 0, 0, rootAllocator, null); - try { - InputStream is = new FileInputStream(resultFileNames.get(currentFileIndex)); + try (InputStream is = new FileInputStream(resultFileNames.get(currentFileIndex))) { resultChunk.readArrowStream(is); currentFileIndex++; @@ -380,12 +375,13 @@ Object[][] generateData(Schema schema, int rowCount) { File createArrowFile(String fileName, Schema schema, Object[][] data, int rowsPerRecordBatch) throws IOException { - File file = resultFolder.newFile(fileName); + File file = new 
File(tempDir, fileName); + file.createNewFile(); VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator); - try (ArrowWriter writer = - new ArrowStreamWriter( - root, new DictionaryProvider.MapDictionaryProvider(), new FileOutputStream(file))) { + try (FileOutputStream fos = new FileOutputStream(file); + ArrowWriter writer = + new ArrowStreamWriter(root, new DictionaryProvider.MapDictionaryProvider(), fos)) { writer.start(); for (int i = 0; i < data[0].length; ) { @@ -592,7 +588,7 @@ private void writeTimestampStructToField( /** Test that first chunk containing struct vectors (used for timestamps) can be sorted */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testSortedResultChunkWithStructVectors() throws Throwable { try (Statement statement = connection.createStatement()) { statement.execute("create or replace table teststructtimestamp (t1 timestamp_ltz)"); @@ -638,8 +634,9 @@ public void testSortedResultChunkWithStructVectors() throws Throwable { int dataSize = (int) file.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(file); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(file)) { + is.read(dataBytes, 0, dataSize); + } resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE)); resultSetSerializable.setFirstChunkStringData( @@ -663,7 +660,7 @@ public void testSortedResultChunkWithStructVectors() throws Throwable { /** Test that the first chunk can be sorted */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testSortedResultChunk() throws Throwable { try (Statement statement = connection.createStatement()) { statement.execute( @@ -725,8 +722,9 @@ public void testSortedResultChunk() throws Throwable { int dataSize = (int) file.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(file); - 
is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(file)) { + is.read(dataBytes, 0, dataSize); + } resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE)); resultSetSerializable.setFirstChunkStringData( diff --git a/src/test/java/net/snowflake/client/core/SFLoginInputTest.java b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java index 7d8a5b67b..b34eebc02 100644 --- a/src/test/java/net/snowflake/client/core/SFLoginInputTest.java +++ b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SFLoginInputTest { diff --git a/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java b/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java index 8c7a6fb1f..142f92217 100644 --- a/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java +++ b/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java @@ -7,10 +7,10 @@ import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.fail; import net.snowflake.client.jdbc.ErrorCode; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SFSessionPropertyTest { @Test @@ -28,7 +28,7 @@ public void testCheckApplicationName() throws SFException { for (String invalid : invalidApplicationName) { try { SFSessionProperty.checkPropertyValue(SFSessionProperty.APPLICATION, invalid); - Assert.fail(); + fail(); } catch (SFException e) { assertThat(e.getVendorCode(), is(ErrorCode.INVALID_PARAMETER_VALUE.getMessageCode())); } @@ -48,7 +48,7 @@ public void testCustomSuffixForUserAgentHeaders() { public void testInvalidMaxRetries() { try { 
SFSessionProperty.checkPropertyValue(SFSessionProperty.MAX_HTTP_RETRIES, "invalidValue"); - Assert.fail("testInvalidMaxRetries"); + fail("testInvalidMaxRetries"); } catch (SFException e) { assertThat(e.getVendorCode(), is(ErrorCode.INVALID_PARAMETER_VALUE.getMessageCode())); } @@ -67,7 +67,7 @@ public void testvalidMaxRetries() throws SFException { public void testInvalidPutGetMaxRetries() { try { SFSessionProperty.checkPropertyValue(SFSessionProperty.PUT_GET_MAX_RETRIES, "invalidValue"); - Assert.fail("testInvalidMaxRetries"); + fail("testInvalidMaxRetries"); } catch (SFException e) { assertThat(e.getVendorCode(), is(ErrorCode.INVALID_PARAMETER_VALUE.getMessageCode())); } diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java index f30cd88e1..2645277b1 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java @@ -20,8 +20,9 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; import javax.net.ssl.SSLHandshakeException; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.SFLogger; @@ -29,45 +30,41 @@ import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SFTrustManagerIT extends BaseJDBCTest { private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManagerIT.class); - public SFTrustManagerIT(String host) { - this.host = host; - } - - @Parameterized.Parameters(name = "host={0}") - public static Object[][] data() { - return new Object[][] { - // this host generates many "SSLHandshake Certificate Revocation - // check failed. Could not retrieve OCSP Response." when running in parallel CI builds - // {"storage.googleapis.com"}, - {"ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"}, - {"sfcsupport.snowflakecomputing.com"}, - {"sfcsupport.us-east-1.snowflakecomputing.com"}, - {"sfcsupport.eu-central-1.snowflakecomputing.com"}, - {"sfc-dev1-regression.s3.amazonaws.com"}, - {"sfc-ds2-customer-stage.s3.amazonaws.com"}, - {"snowflake.okta.com"}, - {"sfcdev2.blob.core.windows.net"} - }; + private static class HostProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + // this host generates many "SSLHandshake Certificate Revocation + // check failed. Could not retrieve OCSP Response." 
when running in parallel CI builds + // Arguments.of("storage.googleapis.com"), + Arguments.of("ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"), + Arguments.of("sfcsupport.snowflakecomputing.com"), + Arguments.of("sfcsupport.us-east-1.snowflakecomputing.com"), + Arguments.of("sfcsupport.eu-central-1.snowflakecomputing.com"), + Arguments.of("sfc-dev1-regression.s3.amazonaws.com"), + Arguments.of("sfc-ds2-customer-stage.s3.amazonaws.com"), + Arguments.of("snowflake.okta.com"), + Arguments.of("sfcdev2.blob.core.windows.net")); + } } private boolean defaultState; - private final String host; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); service.updateContextForIT(getConnectionParameters()); @@ -76,7 +73,7 @@ public void setUp() { service.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); // wait 5 seconds while the service is flushing @@ -90,15 +87,16 @@ public void tearDown() throws InterruptedException { System.clearProperty(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL); } - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir File tmpFolder; /** * OCSP tests for the Snowflake and AWS S3 HTTPS connections. * *

Whatever the default method is used. */ - @Test - public void testOcsp() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcsp(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); HttpClient client = @@ -115,11 +113,13 @@ public void testOcsp() throws Throwable { * *

Specifying an non-existing file will force to fetch OCSP response. */ - @Test - public void testOcspWithFileCache() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcspWithFileCache(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); - File ocspCacheFile = tmpFolder.newFile(); + File ocspCacheFile = new File(tmpFolder, "ocsp-cache"); + ocspCacheFile.createNewFile(); HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -130,11 +130,13 @@ public void testOcspWithFileCache() throws Throwable { } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ - @Test - public void testOcspWithServerCache() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcspWithServerCache(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); - File ocspCacheFile = tmpFolder.newFile(); + File ocspCacheFile = new File(tmpFolder, "ocsp-cache"); + ocspCacheFile.createNewFile(); HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -148,11 +150,13 @@ public void testOcspWithServerCache() throws Throwable { * OCSP tests for the Snowflake and AWS S3 HTTPS connections without using the server cache. This * test should always pass - even with OCSP Outage. 
*/ - @Test - public void testOcspWithoutServerCache() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcspWithoutServerCache(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); - File ocspCacheFile = tmpFolder.newFile(); + File ocspCacheFile = new File(tmpFolder, "ocsp-cache"); + ocspCacheFile.createNewFile(); HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), @@ -163,8 +167,9 @@ public void testOcspWithoutServerCache() throws Throwable { } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ - @Test - public void testInvalidCacheFile() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testInvalidCacheFile(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); // a file under never exists. 
diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java index 862f4867e..077ec6829 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java @@ -14,32 +14,32 @@ import javax.net.ssl.TrustManager; import javax.net.ssl.TrustManagerFactory; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.MockedStatic; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SFTrustManagerMockitoMockLatestIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /* * Test SF_OCSP_RESPONSE_CACHE_DIR environment variable changes the * location of the OCSP cache directory. 
*/ @Test - @Ignore("static initialization block of SFTrustManager class doesn't run sometimes") + @Disabled("static initialization block of SFTrustManager class doesn't run sometimes") public void testUnitOCSPWithCustomCacheDirectory() throws IOException { try (MockedStatic mockedTrustManagerFactory = mockStatic(TrustManagerFactory.class); MockedStatic mockedSnowflakeUtil = mockStatic(SnowflakeUtil.class)) { - File cacheFolder = tmpFolder.newFolder(); + File cacheFolder = new File(tmpFolder, "cache"); + cacheFolder.mkdirs(); mockedSnowflakeUtil .when(() -> TestUtil.systemGetEnv("SF_OCSP_RESPONSE_CACHE_DIR")) .thenReturn(cacheFolder.getCanonicalPath()); diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java index 6a55b2cd4..77a06cb2a 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java @@ -11,10 +11,24 @@ import java.util.Properties; import net.snowflake.client.jdbc.SnowflakeResultSetSerializable; import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class SFTrustManagerTest { /** Test building OCSP retry URL */ + static String originalRetryUrlPattern; + + @BeforeAll + public static void saveStaticValues() { + originalRetryUrlPattern = SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN; + } + + @AfterAll + public static void restoreStaticValues() { + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = originalRetryUrlPattern; + } + @Test public void testBuildRetryURL() throws Exception { // private link diff --git a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java index 346d43c34..f8224a8eb 100644 --- 
a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java +++ b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java @@ -4,7 +4,7 @@ import static org.mockito.Mockito.mock; import java.sql.SQLData; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SQLInputOutputTest { diff --git a/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java b/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java index b6f8a16ac..b79875038 100644 --- a/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java +++ b/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java @@ -16,11 +16,11 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningNotOnLinux; -import net.snowflake.client.RunningNotOnWinMac; -import org.junit.Rule; -import org.junit.Test; +import net.snowflake.client.annotations.RunOnLinux; +import net.snowflake.client.annotations.RunOnMac; +import net.snowflake.client.annotations.RunOnWindows; +import net.snowflake.client.annotations.RunOnWindowsOrMac; +import org.junit.jupiter.api.Test; class MockAdvapi32Lib implements SecureStorageWindowsManager.Advapi32Lib { @Override @@ -213,8 +213,6 @@ Pointer getPointer() { } public class SecureStorageManagerTest { - // This is required to use ConditionalIgnore annotation - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); private static final String host = "fakeHost"; private static final String user = "fakeUser"; @@ -227,7 +225,7 @@ public class SecureStorageManagerTest { private static final String MFA_TOKEN = "MFATOKEN"; @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnWinMac.class) + @RunOnWindowsOrMac public void testLoadNativeLibrary() { // Only run on Mac or Windows. Make sure the loading of native platform library won't break. 
if (Constants.getOS() == Constants.OS.MAC) { @@ -240,6 +238,7 @@ public void testLoadNativeLibrary() { } @Test + @RunOnWindows public void testWindowsManager() { SecureStorageWindowsManager.Advapi32LibManager.setInstance(new MockAdvapi32Lib()); SecureStorageManager manager = SecureStorageWindowsManager.builder(); @@ -249,6 +248,7 @@ public void testWindowsManager() { } @Test + @RunOnMac public void testMacManager() { SecureStorageAppleManager.SecurityLibManager.setInstance(new MockSecurityLib()); SecureStorageManager manager = SecureStorageAppleManager.builder(); @@ -258,7 +258,7 @@ public void testMacManager() { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnLinux.class) + @RunOnLinux public void testLinuxManager() { SecureStorageManager manager = SecureStorageLinuxManager.getInstance(); diff --git a/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java b/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java index 2ba00f378..02f6193d6 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java @@ -5,10 +5,9 @@ package net.snowflake.client.core; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; import static org.mockito.Mockito.when; @@ -32,8 +31,9 @@ import net.snowflake.common.core.ClientAuthnDTO; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Ignore; -import org.junit.Test; +import 
org.hamcrest.MatcherAssert; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; @@ -164,11 +164,13 @@ public void testSessionUtilExternalBrowser() throws Throwable { SessionUtilExternalBrowser sub = FakeSessionUtilExternalBrowser.createInstance(loginInput, false); sub.authenticate(); - assertThat("", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); + MatcherAssert.assertThat( + "", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); sub = FakeSessionUtilExternalBrowser.createInstance(loginInput, true); sub.authenticate(); - assertThat("", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); + MatcherAssert.assertThat( + "", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); } } @@ -200,7 +202,7 @@ public void testSessionUtilExternalBrowserFail() throws Throwable { sub.authenticate(); fail("should have failed with an exception."); } catch (SnowflakeSQLException ex) { - assertThat("Error is expected", ex.getErrorCode(), equalTo(123456)); + MatcherAssert.assertThat("Error is expected", ex.getErrorCode(), equalTo(123456)); } } } @@ -248,7 +250,7 @@ private SFLoginInput initMockLoginInput() { // Run this test manually to test disabling storing temporary credetials with external browser // auth. This is valid for versions after 3.18.0. @Test - @Ignore + @Disabled public void testEnableClientStoreTemporaryCredential() throws Exception { Map params = AbstractDriverIT.getConnectionParameters(); SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); @@ -270,7 +272,7 @@ public void testEnableClientStoreTemporaryCredential() throws Exception { // open a browser window for authentication, close the window, and you should get the expected // error message within the set timeout. Valid for driver versions after 3.18.0. 
@Test - @Ignore + @Disabled public void testExternalBrowserTimeout() throws Exception { Map params = AbstractDriverIT.getConnectionParameters(); SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); diff --git a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java index be6c03b01..57dde2a7b 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java @@ -5,8 +5,8 @@ package net.snowflake.client.core; import static net.snowflake.client.TestUtil.systemGetEnv; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; @@ -21,7 +21,7 @@ import java.util.Map.Entry; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeSQLException; @@ -33,14 +33,14 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.MockedStatic.Verification; import org.mockito.Mockito; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SessionUtilLatestIT extends BaseJDBCTest { /** @@ -50,7 +50,7 @@ public class SessionUtilLatestIT 
extends BaseJDBCTest { * @throws SFException * @throws SnowflakeSQLException */ - @Ignore + @Disabled @Test public void testJwtAuthTimeoutRetry() throws SFException, SnowflakeSQLException { final SFLoginInput loginInput = initMockLoginInput(); diff --git a/src/test/java/net/snowflake/client/core/SessionUtilTest.java b/src/test/java/net/snowflake/client/core/SessionUtilTest.java index cab5fb68f..86819dc5b 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilTest.java @@ -5,9 +5,9 @@ package net.snowflake.client.core; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.node.BooleanNode; import java.io.IOException; @@ -20,9 +20,25 @@ import net.snowflake.client.jdbc.MockConnectionTest; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class SessionUtilTest { + private static String originalUrlValue; + private static String originalRetryUrlPattern; + + @BeforeAll + public static void saveStaticValues() { + originalUrlValue = SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE; + originalRetryUrlPattern = SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN; + } + + @AfterAll + public static void restoreStaticValues() { + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = originalUrlValue; + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = originalRetryUrlPattern; + } /** Test isPrefixEqual */ @Test diff --git 
a/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java b/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java index f1f0a3e73..0524ab6b8 100644 --- a/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java +++ b/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java @@ -4,8 +4,9 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; @@ -29,9 +30,8 @@ import net.snowflake.client.jdbc.SnowflakeSQLException; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.HttpPost; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -216,7 +216,7 @@ public String answer(InvocationOnMock invocation) throws Throwable { // This connection would receive an exception and then should clean up the mfa cache try { Connection con3 = DriverManager.getConnection(url, prop); - Assert.fail(); + fail(); } catch (SnowflakeSQLException ex) { // An exception is forced to happen by mocking. Do nothing. } @@ -336,7 +336,7 @@ public void testUnavailableLocalSecureStorage() throws SQLException { // Run this test manually to test disabling the client request MFA token. Use an MFA // authentication enabled user. This is valid for versions after 3.18.0. 
@Test - @Ignore + @Disabled public void testEnableClientRequestMfaToken() throws SQLException { Map params = AbstractDriverIT.getConnectionParameters(); SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); diff --git a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java index 752229fc9..305f5563d 100644 --- a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Timestamp; import java.time.LocalDateTime; @@ -8,12 +8,12 @@ import java.util.Map; import java.util.TimeZone; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Ignore +@Disabled public class SqlInputTimestampUtilTest { private static final String TIMESTAMP_IN_FORMAT_1 = "2021-12-22 09:43:44.000 +0100"; @@ -24,7 +24,7 @@ public class SqlInputTimestampUtilTest { private static SFBaseSession mockSession; - @BeforeClass + @BeforeAll public static void setup() { CONNECTION_PARAMS.put("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM"); CONNECTION_PARAMS.put("TIMESTAMP_TZ_OUTPUT_FORMAT", "DY, DD MON YYYY HH24:MI:SS TZHTZM"); diff --git a/src/test/java/net/snowflake/client/core/StmtUtilTest.java b/src/test/java/net/snowflake/client/core/StmtUtilTest.java index 75daa9a03..7075416e4 100644 --- a/src/test/java/net/snowflake/client/core/StmtUtilTest.java +++ b/src/test/java/net/snowflake/client/core/StmtUtilTest.java @@ -13,17 +13,17 @@ import java.util.Map.Entry; import java.util.UUID; import 
java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.StmtUtil.StmtInput; import net.snowflake.client.jdbc.BaseJDBCTest; import org.apache.http.Header; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.MockedStatic.Verification; import org.mockito.Mockito; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class StmtUtilTest extends BaseJDBCTest { /** SNOW-862760 Verify that additional headers are added to request */ diff --git a/src/test/java/net/snowflake/client/core/URLUtilTest.java b/src/test/java/net/snowflake/client/core/URLUtilTest.java index b61324eee..d2903b2c5 100644 --- a/src/test/java/net/snowflake/client/core/URLUtilTest.java +++ b/src/test/java/net/snowflake/client/core/URLUtilTest.java @@ -3,11 +3,11 @@ */ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class URLUtilTest { diff --git a/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java b/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java index 75b24cc07..4dc6855b1 100644 --- a/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java @@ -4,43 +4,42 @@ package net.snowflake.client.core.arrow; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Timestamp; 
+import java.util.ArrayList; +import java.util.List; import java.util.Random; import java.util.TimeZone; +import java.util.stream.Stream; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; -import org.junit.After; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.providers.TimezoneProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class ArrowResultUtilTest { - // test on multiple time zones - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, {"America/Los_Angeles"}, {"America/New_York"}, {"Asia/Singapore"}, {"MEZ"}, - }; - } - - @After - public void clearTimeZone() { + @AfterAll + public static void clearTimeZone() { System.clearProperty("user.timezone"); } - public ArrowResultUtilTest(String tz) { - System.setProperty("user.timezone", tz); + public static void setTimeZone(String string) { + System.setProperty("user.timezone", string); } - @Test - @Ignore + @ParameterizedTest(name = "Timezone = {0}") + @ArgumentsSource(TimezoneProvider.class) + @Disabled /** This is to show we can have 30X improvement using new API */ - public void testGetDatePerformance() throws SFException { + public void testGetDatePerformance(String timezone) throws SFException { + setTimeZone(timezone); Random random = new Random(); int dateBound = 50000; int times = 100000; @@ -71,17 +70,43 @@ public void testGetDatePerformance() throws SFException { System.out.println(duration1 + " " + duration2 + " " + 
duration3); } - @Test - public void testToJavaTimestamp() { + private static class testCasesProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("UTC"); + add("America/Los_Angeles"); + add("America/New_York"); + add("Asia/Singapore"); + add("MEZ"); + } + }; + + long[] cases = {-1123456789, -123456789, 123456789, 123123456789L, -123123456789L}; + long[] millisecs = {-1124, -124, 123, 123123, -123124}; + int[] nanos = {876543211, 876543211, 123456789, 123456789, 876543211}; + + List args = new ArrayList<>(); + for (String timezone : timezones) { + for (int i = 0; i < cases.length; i++) { + args.add(Arguments.of(timezone, cases[i], millisecs[i], nanos[i])); + } + } + + return args.stream(); + } + } + + @ParameterizedTest + @ArgumentsSource(testCasesProvider.class) + public void testToJavaTimestamp(String timezone, long cas, long millisecs, int nanos) { // ex: -1.123456789, -0.123456789, 0.123456789, 123.123456789, -123.123456789 - long[] cases = {-1123456789, -123456789, 123456789, 123123456789l, -123123456789l}; - long[] millisecs = {-1124, -124, 123, 123123, -123124}; - int[] nanos = {876543211, 876543211, 123456789, 123456789, 876543211}; + setTimeZone(timezone); int scale = 9; - for (int i = 0; i < cases.length; i++) { - Timestamp ts = ArrowResultUtil.toJavaTimestamp(cases[i], scale); - assertEquals(millisecs[i], ts.getTime()); - assertEquals(nanos[i], ts.getNanos()); - } + Timestamp ts = ArrowResultUtil.toJavaTimestamp(cas, scale); + assertEquals(millisecs, ts.getTime()); + assertEquals(nanos, ts.getNanos()); } } diff --git a/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java index 20a07a655..a738676fb 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java +++ 
b/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java @@ -3,13 +3,16 @@ */ package net.snowflake.client.core.arrow; +import java.nio.ByteOrder; import java.util.TimeZone; import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFSession; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; -import org.junit.After; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; public class BaseConverterTest implements DataConversionContext { private SnowflakeDateTimeFormat dateTimeFormat = @@ -27,11 +30,18 @@ public class BaseConverterTest implements DataConversionContext { private boolean honorClientTZForTimestampNTZ; protected final int invalidConversionErrorCode = ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(); - @After + @AfterEach public void clearTimeZone() { System.clearProperty("user.timezone"); } + @BeforeEach + public void assumeLittleEndian() { + Assumptions.assumeTrue( + ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN), + "Arrow doesn't support cross endianness"); + } + @Override public SnowflakeDateTimeFormat getTimestampLTZFormatter() { return timestampLTZFormat; diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java index 74eabad29..230288f4a 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java @@ -7,8 +7,8 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -27,7 +27,7 @@ import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BigIntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java index 9248440bb..b2be8f8cd 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java @@ -7,8 +7,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Time; import java.util.HashMap; @@ -20,32 +20,24 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) 
public class BigIntToTimeConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + public void setTimezone(String tz) { + System.setProperty("user.timezone", tz); } - public BigIntToTimeConverterTest(String tz) { - System.setProperty("user.timezone", tz); + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); } /** allocator for arrow */ @@ -55,8 +47,10 @@ public BigIntToTimeConverterTest(String tz) { private int scale = 9; - @Test - public void testTime() throws SFException { + @ParameterizedTest(name = "{0}") + @ArgumentsSource(TimezoneProvider.class) + public void testTime(String tz) throws SFException { + setTimezone(tz); // test old and new dates long[] testTimesInt64 = {12345678000000L}; diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java index 26fdbc052..298bf443b 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java @@ -8,8 +8,8 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; @@ -23,34 +23,17 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.providers.TimezoneProvider; 
import net.snowflake.common.core.SFTimestamp; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class BigIntToTimestampLTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - - public BigIntToTimestampLTZConverterTest(String tz) { - System.setProperty("user.timezone", tz); - } /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -59,8 +42,10 @@ public BigIntToTimestampLTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void testTimestampLTZ() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTimestampLTZ(String timezone) throws SFException { + System.setProperty("user.timezone", timezone); // test old and new dates long[] testTimestampsInt64 = { 1546391837, diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java index df4370641..6f2c0420d 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java @@ -4,18 +4,21 @@ package net.snowflake.client.core.arrow; +import static net.snowflake.client.providers.ProvidersUtil.cartesianProduct; import static 
org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; @@ -23,33 +26,32 @@ import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; import net.snowflake.common.core.SFTimestamp; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class BigIntToTimestampNTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + static class FlagProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return 
Arrays.asList(Arguments.of(true), Arguments.of(false)); + } } - public BigIntToTimestampNTZConverterTest(String tz) { - System.setProperty("user.timezone", tz); + static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return cartesianProduct(context, new TimezoneProvider(), new FlagProvider()); + } } /** allocator for arrow */ @@ -59,25 +61,18 @@ public BigIntToTimestampNTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void testHonorClientTZForTimestampNTZDisabled() throws SFException { - this.setHonorClientTZForTimestampNTZ(false); - testTimestampNTZ(); - } - - @Test - public void testHonorClientTZForTimestampNTZEnabled() throws SFException { - this.setHonorClientTZForTimestampNTZ(true); - testTimestampNTZ(); - } - - @Test - public void testWithNullTimezone() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testWithNullTimezone(String tz) throws SFException { + System.setProperty("user.timezone", tz); testTimestampNTZ(null); } - @Test - public void testTimestampNTZ() throws SFException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZ(String tz, boolean flag) throws SFException { + this.setHonorClientTZForTimestampNTZ(flag); + System.setProperty("user.timezone", tz); testTimestampNTZ(TimeZone.getDefault()); } diff --git a/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java index e5091d6fc..c30bbd0e6 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java @@ -3,8 +3,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static 
org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.HashMap; @@ -19,7 +19,7 @@ import org.apache.arrow.vector.BitVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BitToBooleanConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java index b63ae9a2d..6857394fc 100644 --- a/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java @@ -3,8 +3,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.util.Arrays; @@ -18,33 +18,20 @@ import net.snowflake.client.TestUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.json.DateTimeConverter; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.DateDayVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class DateConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - public DateConverterTest(String tz) { + private static void setTimeZone(String tz) { System.setProperty("user.timezone", tz); } @@ -78,26 +65,28 @@ public DateConverterTest(String tz) { put("America/New_York", Arrays.asList("2016-04-20", -4)); put("Pacific/Honolulu", Arrays.asList("2016-04-20", -10)); put("Asia/Singapore", Arrays.asList("2016-04-19", 8)); - put("MEZ", Arrays.asList("2016-04-20", 0)); - put("MESZ", Arrays.asList("2016-04-20", 0)); + put("CET", Arrays.asList("2016-04-19", 2)); // because of daylight savings + put("GMT+0200", Arrays.asList("2016-04-19", 2)); } }; public static final int MILLIS_IN_ONE_HOUR = 3600000; private TimeZone defaultTimeZone; - @Before + @BeforeEach public void getDefaultTimeZone() { this.defaultTimeZone = TimeZone.getDefault(); } - @After + @AfterEach public void restoreDefaultTimeZone() { TimeZone.setDefault(defaultTimeZone); } - @Test - public void testDate() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testDate(String tz) throws SFException { + setTimeZone(tz); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "DATE"); Set nullValIndex = new HashSet<>(); @@ -153,8 +142,10 @@ public void testDate() throws SFException { vector.clear(); } - @Test - public void testRandomDates() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testRandomDates(String tz) throws SFException { + setTimeZone(tz); int dateBound = 50000; int rowCount = 50000; Map 
customFieldMeta = new HashMap<>(); @@ -196,8 +187,10 @@ public void testRandomDates() throws SFException { } } - @Test - public void testTimezoneDates() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTimezoneDates(String tz) throws SFException { + setTimeZone(tz); int testDay = 16911; Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "DATE"); @@ -211,7 +204,6 @@ public void testTimezoneDates() throws SFException { // Test JDBC_FORMAT_DATE_WITH_TIMEZONE=TRUE with different session timezones TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - String tz = System.getProperty("user.timezone"); ArrowVectorConverter converter = new DateConverter(vector, 0, this, true); converter.setUseSessionTimezone(true); converter.setSessionTimeZone(TimeZone.getTimeZone(tz)); diff --git a/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java index b242a2be8..718daa69c 100644 --- a/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -25,7 +25,7 @@ import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DoubleToRealConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git 
a/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java index c11d8275d..fc4db1875 100644 --- a/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java @@ -7,9 +7,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -29,7 +29,7 @@ import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class IntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java index 92c560db3..1f4bd955f 100644 --- a/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java @@ -8,10 +8,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.nio.ByteBuffer; import 
java.sql.Time; import java.util.HashMap; import java.util.HashSet; @@ -22,46 +21,31 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class IntToTimeConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - - private ByteBuffer bb; - - public IntToTimeConverterTest(String tz) { - System.setProperty("user.timezone", tz); - this.setScale(scale); - } - /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); private Random random = new Random(); + public IntToTimeConverterTest() { + this.setScale(scale); + } + private int scale = 3; - @Test - public void testTime() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTime(String timezone) throws SFException { + System.setProperty("user.timezone", timezone); // test old and new dates int[] testTimesInt = {12345678}; diff --git a/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java index 5513a420b..d37b005b1 100644 --- a/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java 
+++ b/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java @@ -7,9 +7,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -29,7 +29,7 @@ import org.apache.arrow.vector.SmallIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SmallIntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java index 10721fbc1..09cd4a587 100644 --- a/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java @@ -4,15 +4,17 @@ package net.snowflake.client.core.arrow; +import static java.util.stream.Stream.concat; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; import 
java.util.HashSet; import java.util.LinkedList; @@ -20,6 +22,7 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.stream.Stream; import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; @@ -33,29 +36,73 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class ThreeFieldStructToTimestampTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + private static class TimezoneProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("America/Los_Angeles"); + add("America/New_York"); + add("Pacific/Honolulu"); + add("Asia/Singapore"); + add("MESZ"); + add("MEZ"); + add("UTC"); + } + }; + + Stream args = Stream.empty(); + + for (String timezone : timezones) { + args = + concat( + args, + Stream.of( + Arguments.argumentSet( + timezone, + timezone, + new long[] {1546391837, 1546391837, 0, 123, -12346, -12345}, + new int[] {0, 10, 100, 456, 876543211, 0}, + new int[] {960, 1440, 960, 960, 1440, 1440}, + new String[] { + "1546391837.000000000 960", + "1546391837.000000010 1440", + 
"0.000000100 960", + "123.000000456 960", + "-12345.123456789 1440", + "-12345.000000000 1440" + }), + Arguments.argumentSet( + timezone + " Overflow", + timezone, + new long[] {1546391837}, + new int[] {0}, + new int[] {960}, + new String[] {"1546391837.000000000 960"}))); + } + + return args; + } } - public ThreeFieldStructToTimestampTZConverterTest(String tz) { + private static void setTimezone(String tz) { System.setProperty("user.timezone", tz); } + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); + } + /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -63,42 +110,16 @@ public ThreeFieldStructToTimestampTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void simpleTest() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 1546391837, 0, 123, -12346, -12345}; - - int[] testNanos = {0, 10, 100, 456, 876543211, 0}; - - int[] testTimeZoneIndices = {960, 1440, 960, 960, 1440, 1440}; - - String[] testTimesJson = { - "1546391837.000000000 960", - "1546391837.000000010 1440", - "0.000000100 960", - "123.000000456 960", - "-12345.123456789 1440", - "-12345.000000000 1440" - }; - testTimestampTZ(testSecondsInt64, testNanos, testTimeZoneIndices, testTimesJson); - } - - @Test - public void timestampOverflowTest() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837}; - - int[] testNanos = {0}; - - int[] testTimeZoneIndices = {960}; - - String[] testTimesJson = {"1546391837.000000000 960"}; - testTimestampTZ(testSecondsInt64, testNanos, testTimeZoneIndices, testTimesJson); - } - + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) public void testTimestampTZ( - long[] testSecondsInt64, int[] testNanos, int[] testTimeZoneIndices, String[] testTimesJson) + String tz, + long[] testSecondsInt64, + int[] testNanos, + int[] testTimeZoneIndices, + String[] testTimesJson) throws 
SFException { + setTimezone(tz); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "TIMESTAMP"); diff --git a/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java index 8000ec885..8a1e9b359 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java @@ -6,9 +6,9 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.util.ArrayList; @@ -27,7 +27,7 @@ import org.apache.arrow.vector.TinyIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TinyIntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java index 8ce93fb6a..4fd4f07f3 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java @@ -4,15 +4,17 @@ package net.snowflake.client.core.arrow; +import static java.util.stream.Stream.concat; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import 
static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; @@ -20,6 +22,7 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.stream.Stream; import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; @@ -33,29 +36,70 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class TwoFieldStructToTimestampLTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + + static class DataProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("America/Los_Angeles"); + add("America/New_York"); + add("Pacific/Honolulu"); + add("Asia/Singapore"); + add("MESZ"); + add("MEZ"); + add("UTC"); + } + }; + + Stream args = 
Stream.empty(); + + for (String timezone : timezones) { + args = + concat( + args, + Stream.of( + Arguments.argumentSet( + timezone, + timezone, + new long[] {1546391837, 0, -1546391838, -1546391838, -1546391838}, + new int[] {0, 1, 999999990, 876543211, 1}, + new String[] { + "1546391837.000000000", + "0.000000001", + "-1546391837.000000010", + "-1546391837.123456789", + "-1546391837.999999999" + }), + Arguments.argumentSet( + timezone + " Overflow", + timezone, + new long[] {154639183700000L}, + new int[] {0}, + new String[] {"154639183700000.000000000"}))); + } + return args; + } } - public TwoFieldStructToTimestampLTZConverterTest(String tz) { + private static void setTimezone(String tz) { System.setProperty("user.timezone", tz); } + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); + } + /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -63,37 +107,13 @@ public TwoFieldStructToTimestampLTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void simpleTests() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 0, -1546391838, -1546391838, -1546391838}; - - int[] testNanoSecs = {0, 1, 999999990, 876543211, 1}; - - String[] testTimesJson = { - "1546391837.000000000", - "0.000000001", - "-1546391837.000000010", - "-1546391837.123456789", - "-1546391837.999999999" - }; - testTimestampLTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } - - @Test - public void timestampOverflowTests() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {154639183700000l}; - - int[] testNanoSecs = {0}; - - String[] testTimesJson = {"154639183700000.000000000"}; - testTimestampLTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } - - public void testTimestampLTZ(long[] testSecondsInt64, int[] testNanoSecs, String[] testTimesJson) + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void 
testTimestampLTZ( + String timezone, long[] testSecondsInt64, int[] testNanoSecs, String[] testTimesJson) throws SFException { + setTimezone(timezone); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "TIMESTAMP"); Set nullValIndex = new HashSet<>(); diff --git a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java index 2b5bf0e16..3b84176e4 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java @@ -4,15 +4,17 @@ package net.snowflake.client.core.arrow; +import static java.util.stream.Stream.concat; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; @@ -21,6 +23,7 @@ import java.util.Random; import java.util.Set; import java.util.TimeZone; +import java.util.stream.Stream; import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; @@ -33,29 +36,24 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class TwoFieldStructToTimestampNTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - public TwoFieldStructToTimestampNTZConverterTest(String tz) { + private static void setTimezone(String tz) { System.setProperty("user.timezone", tz); } + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); + } + /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -63,56 +61,80 @@ public TwoFieldStructToTimestampNTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void timestampOverflowTest() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {154639183700000l}; - - int[] testNanoSecs = {0}; - - String[] testTimesJson = {"154639183700000.000000000"}; - this.setHonorClientTZForTimestampNTZ(false); - testTimestampNTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } - - @Test - public void testHonorClientTZForTimestampNTZDisabled() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 0, -1546391838, -1546391838, -1546391838}; - - int[] testNanoSecs = {0, 1, 999999990, 876543211, 1}; - - String[] testTimesJson = { - "1546391837.000000000", - "0.000000001", - "-1546391837.000000010", - "-1546391837.123456789", - "-1546391837.999999999" - }; - this.setHonorClientTZForTimestampNTZ(false); - testTimestampNTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } + static class DataProvider implements ArgumentsProvider { + + @Override + public Stream 
provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("America/Los_Angeles"); + add("America/New_York"); + add("Pacific/Honolulu"); + add("Asia/Singapore"); + add("MESZ"); + add("MEZ"); + add("UTC"); + } + }; + + Stream args = Stream.empty(); + + for (String timezone : timezones) { + args = + concat( + args, + Stream.of( + Arguments.argumentSet( + timezone + " Overflow", + timezone, + false, + new long[] {154639183700000L}, + new int[] {0}, + new String[] {"154639183700000.000000000"}), + Arguments.argumentSet( + timezone + " HonorClientTZForTimestampNTZ Disabled", + timezone, + false, + new long[] {1546391837, 0, -1546391838, -1546391838, -1546391838}, + new int[] {0, 1, 999999990, 876543211, 1}, + new String[] { + "1546391837.000000000", + "0.000000001", + "-1546391837.000000010", + "-1546391837.123456789", + "-1546391837.999999999" + }), + Arguments.argumentSet( + timezone + " HonorClientTZForTimestampNTZ Enabled", + timezone, + true, + new long[] {1546391837, 1546391837, 1546391837, 1546391837, 1546391837}, + new int[] {0, 1, 10, 100, 999999999}, + new String[] { + "1546391837.000000000", + "1546391837.000000001", + "1546391837.000000010", + "1546391837.000000100", + "1546391837.999999999" + }))); + } - @Test - public void testHonorClientTZForTimestampNTZEnabled() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 1546391837, 1546391837, 1546391837, 1546391837}; - - int[] testNanoSecs = {0, 1, 10, 100, 999999999}; - - String[] testTimesJson = { - "1546391837.000000000", - "1546391837.000000001", - "1546391837.000000010", - "1546391837.000000100", - "1546391837.999999999" - }; - this.setHonorClientTZForTimestampNTZ(true); - testTimestampNTZ(testSecondsInt64, testNanoSecs, testTimesJson); + return args; + } } - public void testTimestampNTZ(long[] testSecondsInt64, int[] testNanoSecs, String[] testTimesJson) + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + 
public void testTimestampNTZ( + String timezone, + boolean honorClientTZForTimestampNTZ, + long[] testSecondsInt64, + int[] testNanoSecs, + String[] testTimesJson) throws SFException { + this.setHonorClientTZForTimestampNTZ(honorClientTZForTimestampNTZ); + setTimezone(timezone); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "TIMESTAMP"); diff --git a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java index 742b82751..767938d06 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java @@ -7,8 +7,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; @@ -24,6 +24,7 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.providers.TimezoneProvider; import net.snowflake.common.core.SFTimestamp; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; @@ -33,27 +34,18 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; 
-@RunWith(Parameterized.class) public class TwoFieldStructToTimestampTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + public static void setTimezone(String tz) { + System.setProperty("user.timezone", tz); } - public TwoFieldStructToTimestampTZConverterTest(String tz) { - System.setProperty("user.timezone", tz); + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); } /** allocator for arrow */ @@ -63,8 +55,10 @@ public TwoFieldStructToTimestampTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void testTimestampTZ() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTimestampTZ(String tz) throws SFException { + setTimezone(tz); // test old and new dates long[] testEpochesInt64 = {1546391837, 1546391837, 0, 123, -12345, -12345678}; diff --git a/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java index b6ea49f05..231df247c 100644 --- a/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Base64; @@ -25,7 +25,7 @@ import org.apache.arrow.vector.types.Types; import 
org.apache.arrow.vector.types.pojo.FieldType; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class VarBinaryToBinaryConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java index 6569c0309..692e171d0 100644 --- a/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.charset.StandardCharsets; import java.sql.Date; @@ -27,7 +27,7 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class VarCharConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java index f3ae88eee..d50118ff8 100644 --- a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java +++ b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java @@ -1,9 +1,9 @@ package net.snowflake.client.core.bind; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import net.snowflake.client.jdbc.telemetry.TelemetryField; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class 
BindExceptionTest { diff --git a/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java b/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java index 2162d651a..292c3862f 100644 --- a/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java @@ -2,10 +2,11 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.sql.Types; import net.snowflake.client.core.SFException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BooleanConverterTest { private final BooleanConverter booleanConverter = new BooleanConverter(); @@ -44,8 +45,8 @@ public void testConvertString() throws SFException { assertThat(booleanConverter.getBoolean("FALSE", Types.CHAR), equalTo(false)); } - @Test(expected = SFException.class) - public void testConvertOtherType() throws SFException { - booleanConverter.getBoolean("1", Types.BINARY); + @Test + public void testConvertOtherType() { + assertThrows(SFException.class, () -> booleanConverter.getBoolean("1", Types.BINARY)); } } diff --git a/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java b/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java index 47e898486..3f7956ad7 100644 --- a/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.core.json; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import java.math.BigInteger; import java.nio.ByteBuffer; @@ -8,7 +8,7 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; import org.apache.arrow.vector.Float8Vector; -import org.junit.Test; +import org.junit.jupiter.api.Test; 
public class BytesConverterTest { private final Converters converters = diff --git a/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java b/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java index 985264f3e..21fe82043 100644 --- a/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java @@ -1,7 +1,7 @@ package net.snowflake.client.core.json; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.sql.Date; import java.sql.Time; @@ -15,7 +15,7 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DateTimeConverterTest { private final TimeZone honoluluTimeZone = diff --git a/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java b/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java index c37573b72..41f6460b4 100644 --- a/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java @@ -6,7 +6,7 @@ import java.math.BigDecimal; import java.sql.Types; import net.snowflake.client.core.SFException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class NumberConverterTest { private final NumberConverter numberConverter = new NumberConverter(); diff --git a/src/test/java/net/snowflake/client/core/json/StringConverterTest.java b/src/test/java/net/snowflake/client/core/json/StringConverterTest.java index 5fe3dd2cb..d2ddb3eee 100644 --- a/src/test/java/net/snowflake/client/core/json/StringConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/StringConverterTest.java @@ -1,6 
+1,6 @@ package net.snowflake.client.core.json; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import java.sql.Types; @@ -12,8 +12,8 @@ import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class StringConverterTest { private final TimeZone honoluluTimeZone = @@ -24,7 +24,7 @@ public class StringConverterTest { private StringConverter stringConverter; - @Before + @BeforeEach public void init() { SnowflakeDateTimeFormat timestampNTZFormatter = SnowflakeDateTimeFormat.fromSqlFormat("YYYY-MM-DD HH24:MI:SS.FF3"); diff --git a/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java b/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java index 2c37ddf5d..59e2b30a2 100644 --- a/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java @@ -6,7 +6,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ArrowResultChunkTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java index a326dea12..c1abedf68 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import 
static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.InputStream; diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java index 5602bffca..f05d45afe 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java @@ -2,22 +2,29 @@ import java.sql.Connection; import java.sql.SQLException; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import java.sql.Statement; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; public class BaseJDBCWithSharedConnectionIT extends BaseJDBCTest { protected static Connection connection; - @BeforeClass + @BeforeAll public static void setUpConnection() throws SQLException { connection = getConnection(); } - @AfterClass + @AfterAll public static void closeConnection() throws SQLException { if (connection != null && !connection.isClosed()) { connection.close(); } } + + public Statement createStatement(String queryResultFormat) throws SQLException { + Statement stmt = connection.createStatement(); + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + return stmt; + } } diff --git a/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java b/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java index 5a2fe8e96..08069b95c 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java @@ -1,12 +1,12 @@ package net.snowflake.client.jdbc; -import static junit.framework.TestCase.assertEquals; import static net.snowflake.client.AbstractDriverIT.getConnectionParameters; +import static net.snowflake.client.AssumptionUtils.assumeNotRunningOnGithubActionsMac; +import static 
net.snowflake.client.AssumptionUtils.assumeNotRunningOnJava21; +import static net.snowflake.client.AssumptionUtils.assumeNotRunningOnJava8; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.awaitility.Awaitility.await; -import static org.junit.Assume.assumeFalse; -import static org.junit.Assume.assumeNoException; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.File; import java.io.IOException; @@ -16,9 +16,6 @@ import java.time.Duration; import java.util.Map; import java.util.Properties; -import net.snowflake.client.RunningNotOnGithubActionsMac; -import net.snowflake.client.RunningNotOnJava21; -import net.snowflake.client.RunningNotOnJava8; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -28,11 +25,12 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeAll; -public abstract class BaseWiremockTest { +abstract class BaseWiremockTest { protected static final SFLogger logger = SFLoggerFactory.getLogger(BaseWiremockTest.class); protected static final String WIREMOCK_HOME_DIR = ".wiremock"; @@ -45,27 +43,25 @@ public abstract class BaseWiremockTest { private static String originalTrustStorePath; protected static Process wiremockStandalone; - @BeforeClass + @BeforeAll public static void setUpClass() { - assumeFalse(RunningNotOnJava8.isRunningOnJava8()); - assumeFalse(RunningNotOnJava21.isRunningOnJava21()); - assumeFalse( - RunningNotOnGithubActionsMac - .isRunningOnGithubActionsMac()); // disabled until issue with access to localhost 
+ assumeNotRunningOnJava8(); + assumeNotRunningOnJava21(); + assumeNotRunningOnGithubActionsMac(); // disabled until issue with access to localhost // (https://github.com/snowflakedb/snowflake-jdbc/pull/1807#discussion_r1686229430) is fixed on // github actions mac image. Ticket to enable when fixed: SNOW-1555950 originalTrustStorePath = systemGetProperty(TRUST_STORE_PROPERTY); startWiremockStandAlone(); } - @After + @AfterEach public void tearDown() { restoreTrustStorePathProperty(); resetWiremock(); HttpUtil.httpClient.clear(); } - @AfterClass + @AfterAll public static void tearDownClass() { stopWiremockStandAlone(); } @@ -225,10 +221,10 @@ protected void importMapping(String mappingImport) { HttpPost request = createWiremockPostRequest(mappingImport, "/__admin/mappings/import"); try (CloseableHttpClient httpClient = HttpClients.createDefault(); CloseableHttpResponse response = httpClient.execute(request)) { - assumeTrue(response.getStatusLine().getStatusCode() == 200); + Assumptions.assumeTrue(response.getStatusLine().getStatusCode() == 200); } catch (Exception e) { logger.error("Importing mapping failed", e); - assumeNoException(e); + Assumptions.abort("Importing mapping failed"); } } diff --git a/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java b/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java index dec8bd6aa..80bd20724 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java @@ -4,9 +4,9 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.Connection; @@ -20,18 +20,18 @@ import java.util.List; import java.util.Map; 
import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.bind.BindUploader; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryOthers.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(TestTags.OTHERS) public class BindUploaderIT extends BaseJDBCTest { BindUploader bindUploader; Connection conn; @@ -86,21 +86,21 @@ public class BindUploaderIT extends BaseJDBCTest { + STAGE_DIR + "' ORDER BY $1 ASC"; - @BeforeClass + @BeforeAll public static void classSetUp() throws Exception { Connection connection = getConnection(); connection.createStatement().execute(createTableSQL); connection.close(); } - @AfterClass + @AfterAll public static void classTearDown() throws Exception { Connection connection = getConnection(); connection.createStatement().execute(deleteTableSQL); connection.close(); } - @Before + @BeforeEach public void setUp() throws Exception { conn = getConnection(); session = conn.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -109,7 +109,7 @@ public void setUp() throws Exception { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); } - @After + @AfterEach public void tearDown() throws SQLException { conn.close(); bindUploader.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java index 41c409d8b..badd3fee0 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java @@ -12,8 +12,8 @@ import static net.snowflake.client.jdbc.BindUploaderIT.getBindings; import static net.snowflake.client.jdbc.BindUploaderIT.parseRow; import static net.snowflake.client.jdbc.BindUploaderIT.row1; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.sql.Connection; import java.sql.ResultSet; @@ -21,16 +21,16 @@ import java.sql.Statement; import java.util.Map; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.bind.BindUploader; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Bind Uploader tests for the latest JDBC driver. This doesn't work for the oldest supported @@ -38,24 +38,24 @@ * tests still is not applicable. If it is applicable, move tests to BindUploaderIT so that both the * latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class BindUploaderLatestIT extends BaseJDBCTest { BindUploader bindUploader; Connection conn; SFSession session; TimeZone prevTimeZone; // store last time zone and restore after tests - @BeforeClass + @BeforeAll public static void classSetUp() throws Exception { BindUploaderIT.classSetUp(); } - @AfterClass + @AfterAll public static void classTearDown() throws Exception { BindUploaderIT.classTearDown(); } - @Before + @BeforeEach public void setUp() throws Exception { conn = getConnection(); session = conn.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -64,7 +64,7 @@ public void setUp() throws Exception { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); } - @After + @AfterEach public void tearDown() throws SQLException { conn.close(); bindUploader.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java index a408e5d5a..55cdf9996 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java @@ -3,11 +3,11 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; @@ -30,40 +30,22 @@ import java.util.TimeZone; import 
java.util.stream.Collectors; import java.util.stream.Stream; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; -import org.junit.After; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +import net.snowflake.client.providers.ResultFormatProvider; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.RESULT_SET) public class BindingAndInsertingStructuredTypesLatestIT extends BaseJDBCTest { - - @Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - return new Object[][] { - {ResultSetFormatType.JSON}, - {ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES}, - {ResultSetFormatType.NATIVE_ARROW} - }; - } - - private final ResultSetFormatType queryResultFormat; - - public BindingAndInsertingStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - public Connection init() throws SQLException { + public Connection init(ResultSetFormatType queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = 
conn.createStatement()) { stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); @@ -84,25 +66,26 @@ public Connection init() throws SQLException { return conn; } - @Before + @BeforeEach public void setup() { SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); } - @After + @AfterEach public void clean() { SnowflakeObjectTypeFactories.unregister(SimpleClass.class); SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); } // TODO Structured types feature exists only on QA environments - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteObject() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteObject(ResultSetFormatType queryResultFormat) throws SQLException { SimpleClass sc = new SimpleClass("text1", 2); SimpleClass sc2 = new SimpleClass("text2", 3); - try (Connection connection = init()) { + try (Connection connection = init(queryResultFormat)) { Statement statement = connection.createStatement(); statement.execute( "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); @@ -133,11 +116,12 @@ public void testWriteObject() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteNullObject() throws SQLException { - Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteNullObject(ResultSetFormatType queryResultFormat) throws SQLException { + Assumptions.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(queryResultFormat); Statement statement = 
connection.createStatement(); SnowflakePreparedStatementV1 stmtement2 = (SnowflakePreparedStatementV1) @@ -158,10 +142,12 @@ public void testWriteNullObject() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteObjectBindingNull() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteObjectBindingNull(ResultSetFormatType queryResultFormat) + throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -181,11 +167,12 @@ public void testWriteObjectBindingNull() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteObjectAllTypes() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteObjectAllTypes(ResultSetFormatType queryResultFormat) throws SQLException { TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC)); - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -271,10 +258,11 @@ public static Timestamp toTimestamp(ZonedDateTime dateTime) { return new Timestamp(dateTime.toInstant().getEpochSecond() * 1000L); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteArray() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteArray(ResultSetFormatType queryResultFormat) throws SQLException { + try 
(Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -298,10 +286,11 @@ public void testWriteArray() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteArrayNoBinds() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteArrayNoBinds(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -322,10 +311,11 @@ public void testWriteArrayNoBinds() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteMapOfSqlData() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteMapOfSqlData(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -358,10 +348,11 @@ public void testWriteMapOfSqlData() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteMapOfInteger() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteMapOfInteger(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = 
connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) diff --git a/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java b/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java index c2a8bc3ee..86a3b4613 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.sql.Date; import java.sql.PreparedStatement; @@ -18,21 +18,37 @@ import java.sql.Types; import java.util.Calendar; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.experimental.theories.DataPoints; -import org.junit.experimental.theories.Theories; -import org.junit.experimental.theories.Theory; -import org.junit.runner.RunWith; +import java.util.stream.Stream; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; +import org.junit.jupiter.params.provider.ValueSource; /** Integration tests for binding variable */ -@RunWith(Theories.class) -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class BindingDataIT extends 
BaseJDBCWithSharedConnectionIT { - @DataPoints public static short[] shortValues = {0, 1, -1, Short.MIN_VALUE, Short.MAX_VALUE}; + static TimeZone timeZone; - @Theory + @BeforeAll + public static void setTimeZone() { + timeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + } + + @AfterAll + public static void resetTimeZone() { + TimeZone.setDefault(timeZone); + } + + @ParameterizedTest + @ValueSource(shorts = {0, 1, -1, Short.MIN_VALUE, Short.MAX_VALUE}) public void testBindShort(short shortValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -58,7 +74,8 @@ public void testBindShort(short shortValue) throws SQLException { } } - @Theory + @ParameterizedTest + @ValueSource(shorts = {0, 1, -1, Short.MIN_VALUE, Short.MAX_VALUE}) public void testBindShortViaSetObject(short shortValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -84,9 +101,8 @@ public void testBindShortViaSetObject(short shortValue) throws SQLException { } } - @DataPoints public static int[] intValues = {0, 1, -1, Integer.MAX_VALUE, Integer.MIN_VALUE}; - - @Theory + @ParameterizedTest + @ValueSource(ints = {0, 1, -1, Integer.MIN_VALUE, Integer.MAX_VALUE}) public void testBindInt(int intValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -113,9 +129,8 @@ public void testBindInt(int intValue) throws SQLException { } } - @DataPoints public static byte[] byteValues = {0, 1, -1, Byte.MAX_VALUE, Byte.MIN_VALUE}; - - @Theory + @ParameterizedTest + @ValueSource(bytes = {0, 1, -1, Byte.MAX_VALUE, Byte.MIN_VALUE}) public void testBindByte(byte byteValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -184,18 +199,21 @@ public void testBindNull() throws SQLException { } } - @DataPoints - public static Time[] timeValues = { - Time.valueOf("00:00:00"), - Time.valueOf("12:34:56"), - Time.valueOf("12:00:00"), - 
Time.valueOf("11:59:59"), - Time.valueOf("15:30:00"), - Time.valueOf("13:01:01"), - Time.valueOf("12:00:00"), - }; - - @Theory + static class TimeProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of(Time.valueOf("00:00:00")), + Arguments.of(Time.valueOf("12:34:56")), + Arguments.of(Time.valueOf("12:00:00")), + Arguments.of(Time.valueOf("11:59:59")), + Arguments.of(Time.valueOf("15:30:00")), + Arguments.of(Time.valueOf("13:01:01"))); + } + } + + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTime(Time timeVal) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -225,7 +243,8 @@ public void testBindTime(Time timeVal) throws SQLException { * Bind time with calendar is not supported now. Everything is in UTC, need to revisit in the * future */ - @Theory + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTimeWithCalendar(Time timeVal) throws SQLException { Calendar utcCal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); Calendar laCal = Calendar.getInstance(TimeZone.getTimeZone("PST")); @@ -256,7 +275,8 @@ public void testBindTimeWithCalendar(Time timeVal) throws SQLException { } } - @Theory + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTimeViaSetObject(Time timeVal) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -282,7 +302,8 @@ public void testBindTimeViaSetObject(Time timeVal) throws SQLException { } } - @Theory + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTimeViaSetObjectCast(Time timeVal) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -308,18 +329,22 @@ public void testBindTimeViaSetObjectCast(Time timeVal) throws SQLException { } } - @DataPoints - public static Date[] dateValues = { - 
Date.valueOf("2000-01-01"), - Date.valueOf("3000-01-01"), - Date.valueOf("1970-01-01"), - Date.valueOf("1969-01-01"), - Date.valueOf("1500-01-01"), - Date.valueOf("1400-01-01"), - Date.valueOf("1000-01-01") - }; - - @Theory + static class DateProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of(Date.valueOf("2000-01-01")), + Arguments.of(Date.valueOf("3000-01-01")), + Arguments.of(Date.valueOf("1970-01-01")), + Arguments.of(Date.valueOf("1969-01-01")), + Arguments.of(Date.valueOf("1500-01-01")), + Arguments.of(Date.valueOf("1400-01-01")), + Arguments.of(Date.valueOf("1000-01-01"))); + } + } + + @ParameterizedTest + @ArgumentsSource(DateProvider.class) public void testBindDate(Date dateValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -346,7 +371,8 @@ public void testBindDate(Date dateValue) throws SQLException { } } - @Theory + @ParameterizedTest + @ArgumentsSource(DateProvider.class) public void testBindDateWithCalendar(Date dateValue) throws SQLException { Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); @@ -375,7 +401,8 @@ public void testBindDateWithCalendar(Date dateValue) throws SQLException { } } - @Theory + @ParameterizedTest + @ValueSource(ints = {0, 1, -1, Integer.MIN_VALUE, Integer.MAX_VALUE}) public void testBindObjectWithScaleZero(int intValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -429,7 +456,7 @@ public void testBindNullForAllTypes() throws Throwable { while (result.next()) { String testType = result.getString(1); for (int i = 2; i <= 13; ++i) { - assertNull(String.format("Java Type: %s is not null", testType), result.getString(i)); + assertNull(result.getString(i), String.format("Java Type: %s is not null", testType)); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java index 71c556686..58298df8a 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java @@ -5,8 +5,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.PreparedStatement; @@ -17,11 +17,10 @@ import java.util.Calendar; import java.util.TimeZone; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Binding Data integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -29,7 +28,7 @@ * to examine if the tests still are not applicable. If it is applicable, move tests to * BindingDataIT so that both the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class BindingDataLatestIT extends AbstractDriverIT { TimeZone origTz = TimeZone.getDefault(); TimeZone tokyoTz = TimeZone.getTimeZone("Asia/Tokyo"); @@ -67,7 +66,7 @@ public void testBindTimestampTZ() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testTimestampBindingWithNTZType() throws SQLException { TimeZone.setDefault(tokyoTz); try (Connection connection = getConnection(); @@ -124,7 +123,7 @@ public void testTimestampBindingWithNTZType() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testTimestampBindingWithLTZType() throws SQLException { TimeZone.setDefault(tokyoTz); try (Connection connection = getConnection(); @@ -188,7 +187,7 @@ public void testTimestampBindingWithLTZType() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testTimestampBindingWithLTZTypeForDayLightSavingTimeZone() throws SQLException { Calendar australia = Calendar.getInstance(australiaTz); TimeZone.setDefault(australiaTz); diff --git a/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java b/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java index d6536dc93..1c2900958 100644 --- a/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java @@ -5,8 +5,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.net.URL; @@ -15,74 +15,26 @@ import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.Calendar; import java.util.HashMap; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) -public class CallableStatementIT extends BaseJDBCTest { - @Parameterized.Parameters - public static Object[][] data() { - // all tests in this class need to run for both query result formats json and arrow - return new Object[][] {{"JSON"}, {"arrow"}}; - } - - private static String queryResultFormat; - - public CallableStatementIT(String format) { - queryResultFormat = format; - } - - public static Connection getConnection() throws SQLException { - Connection conn = BaseJDBCTest.getConnection(); - try (Statement stmt = conn.createStatement()) { - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - } - return conn; - } - - private final String createStoredProcedure = - "create or replace procedure square_it(num FLOAT) returns float not " - + "null language javascript as $$ return NUM * NUM; $$"; - private final String createSecondStoredProcedure = - "create or replace procedure add_nums(x DOUBLE, y DOUBLE) " - + "returns double not null language javascript as $$ 
return X + Y; $$"; - private final String deleteStoredProcedure = "drop procedure if exists square_it(FLOAT)"; - private final String deleteSecondStoredProcedure = "drop procedure if exists add_nums(INT, INT)"; +@Tag(TestTags.STATEMENT) +public class CallableStatementIT extends CallableStatementITBase { - @Before - public void setUp() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - statement.execute(createStoredProcedure); - statement.execute(createSecondStoredProcedure); - } - } - - @After - public void tearDown() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - statement.execute(deleteStoredProcedure); - statement.execute(deleteSecondStoredProcedure); - } - } - - @Test - public void testPrepareCall() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareCall(String queryResultFormat) throws SQLException { // test CallableStatement with no binding parameters - try (Connection connection = getConnection()) { + try (Connection connection = getConnection(queryResultFormat)) { try (CallableStatement callableStatement = connection.prepareCall("call square_it(5)")) { assertThat(callableStatement.getParameterMetaData().getParameterCount(), is(0)); } diff --git a/src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java b/src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java new file mode 100644 index 000000000..8635d4246 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java @@ -0,0 +1,48 @@ +package net.snowflake.client.jdbc; + +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; + +public class CallableStatementITBase extends BaseJDBCTest { + public static Connection getConnection() throws 
SQLException { + return BaseJDBCTest.getConnection(); + } + + public static Connection getConnection(String queryResultFormat) throws SQLException { + Connection conn = BaseJDBCTest.getConnection(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } + return conn; + } + + private final String createStoredProcedure = + "create or replace procedure square_it(num FLOAT) returns float not " + + "null language javascript as $$ return NUM * NUM; $$"; + private final String createSecondStoredProcedure = + "create or replace procedure add_nums(x DOUBLE, y DOUBLE) " + + "returns double not null language javascript as $$ return X + Y; $$"; + private final String deleteStoredProcedure = "drop procedure if exists square_it(FLOAT)"; + private final String deleteSecondStoredProcedure = "drop procedure if exists add_nums(INT, INT)"; + + @BeforeEach + public void setUp() throws SQLException { + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + statement.execute(createStoredProcedure); + statement.execute(createSecondStoredProcedure); + } + } + + @AfterEach + public void tearDown() throws SQLException { + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + statement.execute(deleteStoredProcedure); + statement.execute(deleteSecondStoredProcedure); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java index a4aaea709..af33e102c 100644 --- a/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java @@ -2,24 +2,23 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@Category(TestCategoryStatement.class) -public class CallableStatementLatestIT extends CallableStatementIT { - - public CallableStatementLatestIT(String format) { - super(format); - } +@Tag(TestTags.STATEMENT) +public class CallableStatementLatestIT extends CallableStatementITBase { /** * Test that function that removes curly brackets from outside of call statements works properly @@ -44,10 +43,11 @@ public void testParseSqlEscapeSyntaxFunction() { * * @throws SQLException */ - @Test - public void testPrepareCallWithCurlyBracketSyntax() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareCallWithCurlyBracketSyntax(String queryResultFormat) throws SQLException { // test CallableStatement with no binding parameters - try (Connection connection = getConnection()) { + try (Connection connection = getConnection(queryResultFormat)) { try (CallableStatement callableStatement = connection.prepareCall("{call square_it(5)}")) { assertThat(callableStatement.getParameterMetaData().getParameterCount(), is(0)); } diff --git a/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java index cfb8e086d..7824c9a01 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.sql.Connection; import java.sql.ResultSet; @@ -12,7 +12,7 @@ import java.util.List; import java.util.Map; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFBaseSession; @@ -20,18 +20,18 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.utils.URIBuilder; import org.apache.http.impl.client.CloseableHttpClient; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class ChunkDownloaderS3RetryUrlLatestIT extends AbstractDriverIT { private SFStatement sfStatement; private SFBaseSession sfBaseSession; private ChunkDownloadContext sfContext; - @Before + @BeforeEach public void setup() throws SQLException, InterruptedException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java b/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java index 22a33286d..b90868b39 100644 --- a/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java @@ -1,27 +1,27 @@ package net.snowflake.client.jdbc; import static 
net.snowflake.client.AbstractDriverIT.getConnection; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author azhan attempts to test the CLIENT_MEMORY_LIMIT working in multi-threading */ -@Category(TestCategoryOthers.class) -public class ClientMemoryLimitParallelIT { +@Tag(TestTags.OTHERS) +public class ClientMemoryLimitParallelIT extends BaseJDBCWithSharedConnectionIT { private static Logger LOGGER = LoggerFactory.getLogger(ClientMemoryLimitParallelIT.class.getName()); @@ -62,18 +62,16 @@ public class ClientMemoryLimitParallelIT { + rowCount + "));"; - @Before + @BeforeEach public void setUp() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { statement.execute(createTestTableSQL); } } - @After + @AfterEach public void tearDown() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { statement.execute("drop table if exists testtable_cml"); } } @@ -83,8 +81,8 @@ public void tearDown() throws SQLException { * in multi-threading */ @Test - @Ignore("Long term high memory usage test") - public void testParallelQueries() throws 
Exception { + @Disabled("Long term high memory usage test") + void testParallelQueries() throws Exception { Runnable testQuery = new Runnable() { public void run() { @@ -124,10 +122,8 @@ public void run() { * make sure there is no hanging */ @Test - public void testQueryNotHanging() throws SQLException { - Properties paramProperties = new Properties(); - try (Connection connection = getConnection(paramProperties); - Statement statement = connection.createStatement()) { + void testQueryNotHanging() throws SQLException { + try (Statement statement = connection.createStatement()) { queryRows(statement, 100, 160); } } diff --git a/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java b/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java new file mode 100644 index 000000000..0c3f69470 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java @@ -0,0 +1,80 @@ +package net.snowflake.client.jdbc; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.github.luben.zstd.ZstdInputStream; +import com.github.luben.zstd.ZstdOutputStream; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; +import org.apache.commons.io.IOUtils; +import org.apache.http.Header; +import org.apache.http.message.BasicHeader; +import org.junit.jupiter.api.Test; + +public class CompressedStreamFactoryTest { + + private final CompressedStreamFactory factory = new CompressedStreamFactory(); + + @Test + public void testDetectContentEncodingAndGetInputStream_Gzip() throws Exception { + // Original data to compress and validate + String originalData = "Some data in GZIP"; + + // Creating encoding header + Header encodingHeader = new BasicHeader("Content-Encoding", "gzip"); + + // 
Creating a gzip byte array using GZIPOutputStream + byte[] gzipData; + try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); + GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream)) { + gzipOutputStream.write(originalData.getBytes(StandardCharsets.UTF_8)); + gzipOutputStream.close(); // close to flush and finish the compression + gzipData = byteArrayOutputStream.toByteArray(); + } + + // Mocking input stream with the gzip data + InputStream gzipStream = new ByteArrayInputStream(gzipData); + + // Call the private method using reflection + InputStream resultStream = factory.createBasedOnEncodingHeader(gzipStream, encodingHeader); + + // Decompress and validate the data matches original + assertTrue(resultStream instanceof GZIPInputStream); + String decompressedData = IOUtils.toString(resultStream, StandardCharsets.UTF_8); + assertEquals(originalData, decompressedData); + } + + @Test + public void testDetectContentEncodingAndGetInputStream_Zstd() throws Exception { + // Original data to compress and validate + String originalData = "Some data in ZSTD"; + + // Creating encoding header + Header encodingHeader = new BasicHeader("Content-Encoding", "zstd"); + + // Creating a zstd byte array using ZstdOutputStream + byte[] zstdData; + try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); + ZstdOutputStream zstdOutputStream = new ZstdOutputStream(byteArrayOutputStream)) { + zstdOutputStream.write(originalData.getBytes(StandardCharsets.UTF_8)); + zstdOutputStream.close(); // close to flush and finish the compression + zstdData = byteArrayOutputStream.toByteArray(); + } + + // Mocking input stream with the zstd data + InputStream zstdStream = new ByteArrayInputStream(zstdData); + + // Call the private method using reflection + InputStream resultStream = factory.createBasedOnEncodingHeader(zstdStream, encodingHeader); + + // Decompress and validate the data matches original + assertTrue(resultStream 
instanceof ZstdInputStream); + String decompressedData = IOUtils.toString(resultStream, StandardCharsets.UTF_8); + assertEquals(originalData, decompressedData); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java b/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java index 871a6cfcd..c2f7eeb5c 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java @@ -2,11 +2,11 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Properties; import net.snowflake.client.core.SFSessionProperty; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ConnectStringParseTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java index fd0b69488..98826eaa8 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java @@ -5,11 +5,11 @@ import java.sql.Connection; import java.util.Properties; -import net.snowflake.client.category.TestCategoryConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionAlreadyClosedIT extends BaseJDBCTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java index f91eee092..b0b120683 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java @@ -8,11 +8,11 @@ import java.sql.SQLException; import java.sql.Savepoint; import java.util.HashMap; -import net.snowflake.client.category.TestCategoryConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java index 43c5c7f81..9d99e01a1 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java @@ -3,16 +3,16 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.AssumptionUtils.assumeRunningOnGithubActions; import static net.snowflake.client.core.SessionUtil.CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.FileInputStream; @@ -42,24 +42,21 @@ import 
java.util.Properties; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import net.snowflake.client.ConditionalIgnoreRule.ConditionalIgnore; -import net.snowflake.client.RunningNotOnTestaccount; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.RunOnTestaccountNotOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.common.core.SqlState; import org.apache.commons.codec.binary.Base64; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** Connection integration tests */ -@Category(TestCategoryConnection.class) -public class ConnectionIT extends BaseJDBCTest { +@Tag(TestTags.CONNECTION) +public class ConnectionIT extends BaseJDBCWithSharedConnectionIT { // create a local constant for this code for testing purposes (already defined in GS) public static final int INVALID_CONNECTION_INFO_CODE = 390100; private static final int SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED = 390201; @@ -70,7 +67,7 @@ public class ConnectionIT extends BaseJDBCTest { String errorMessage = null; - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; @Test public void testSimpleConnection() throws SQLException { @@ -86,14 +83,13 @@ public void testSimpleConnection() throws SQLException { } @Test - @Ignore + @Disabled public void test300ConnectionsWithSingleClientInstance() throws SQLException { // concurrent testing int size = 300; - 
try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - String database = con.getCatalog(); - String schema = con.getSchema(); + try (Statement statement = connection.createStatement()) { + String database = connection.getCatalog(); + String schema = connection.getSchema(); statement.execute( "create or replace table bigTable(rowNum number,rando " + "number) as (select seq4()," @@ -168,8 +164,7 @@ public void testProdConnectivity() throws SQLException { @Test public void testSetCatalogSchema() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String db = connection.getCatalog(); String schema = connection.getSchema(); connection.setCatalog(db); @@ -216,35 +211,34 @@ public void testDataCompletenessInLowMemory() throws Exception { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testConnectionGetAndSetDBAndSchema() throws SQLException { final String SECOND_DATABASE = "SECOND_DATABASE"; final String SECOND_SCHEMA = "SECOND_SCHEMA"; - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { try { final String database = TestUtil.systemGetEnv("SNOWFLAKE_TEST_DATABASE").toUpperCase(); final String schema = TestUtil.systemGetEnv("SNOWFLAKE_TEST_SCHEMA").toUpperCase(); - assertEquals(database, con.getCatalog()); - assertEquals(schema, con.getSchema()); + assertEquals(database, connection.getCatalog()); + assertEquals(schema, connection.getSchema()); statement.execute(String.format("create or replace database %s", SECOND_DATABASE)); statement.execute(String.format("create or replace schema %s", SECOND_SCHEMA)); statement.execute(String.format("use database %s", database)); - con.setCatalog(SECOND_DATABASE); - assertEquals(SECOND_DATABASE, con.getCatalog()); - 
assertEquals("PUBLIC", con.getSchema()); + connection.setCatalog(SECOND_DATABASE); + assertEquals(SECOND_DATABASE, connection.getCatalog()); + assertEquals("PUBLIC", connection.getSchema()); - con.setSchema(SECOND_SCHEMA); - assertEquals(SECOND_SCHEMA, con.getSchema()); + connection.setSchema(SECOND_SCHEMA); + assertEquals(SECOND_SCHEMA, connection.getSchema()); statement.execute(String.format("use database %s", database)); statement.execute(String.format("use schema %s", schema)); - assertEquals(database, con.getCatalog()); - assertEquals(schema, con.getSchema()); + assertEquals(database, connection.getCatalog()); + assertEquals(schema, connection.getSchema()); } finally { statement.execute(String.format("drop database if exists %s", SECOND_DATABASE)); } @@ -253,40 +247,39 @@ public void testConnectionGetAndSetDBAndSchema() throws SQLException { @Test public void testConnectionClientInfo() throws SQLException { - try (Connection con = getConnection()) { - Properties property = con.getClientInfo(); - assertEquals(0, property.size()); - Properties clientInfo = new Properties(); - clientInfo.setProperty("name", "Peter"); - clientInfo.setProperty("description", "SNOWFLAKE JDBC"); - try { - con.setClientInfo(clientInfo); - fail("setClientInfo should fail for any parameter."); - } catch (SQLClientInfoException e) { - assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); - assertEquals(200047, e.getErrorCode()); - assertEquals(2, e.getFailedProperties().size()); - } - try { - con.setClientInfo("ApplicationName", "valueA"); - fail("setClientInfo should fail for any parameter."); - } catch (SQLClientInfoException e) { - assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); - assertEquals(200047, e.getErrorCode()); - assertEquals(1, e.getFailedProperties().size()); - } + Properties property = connection.getClientInfo(); + assertEquals(0, property.size()); + Properties clientInfo = new Properties(); + clientInfo.setProperty("name", "Peter"); + 
clientInfo.setProperty("description", "SNOWFLAKE JDBC"); + try { + connection.setClientInfo(clientInfo); + fail("setClientInfo should fail for any parameter."); + } catch (SQLClientInfoException e) { + assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); + assertEquals(200047, e.getErrorCode()); + assertEquals(2, e.getFailedProperties().size()); + } + try { + connection.setClientInfo("ApplicationName", "valueA"); + fail("setClientInfo should fail for any parameter."); + } catch (SQLClientInfoException e) { + assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); + assertEquals(200047, e.getErrorCode()); + assertEquals(1, e.getFailedProperties().size()); } } // only support get and set @Test public void testNetworkTimeout() throws SQLException { - try (Connection con = getConnection()) { - int millis = con.getNetworkTimeout(); - assertEquals(0, millis); - con.setNetworkTimeout(null, 200); - assertEquals(200, con.getNetworkTimeout()); - } + int millis = connection.getNetworkTimeout(); + assertEquals(0, millis); + connection.setNetworkTimeout(null, 200); + assertEquals(200, connection.getNetworkTimeout()); + // Reset timeout to 0 since we are reusing connection in tests + connection.setNetworkTimeout(null, 0); + assertEquals(0, millis); } @Test @@ -354,7 +347,7 @@ public void testConnectViaDataSource() throws SQLException { } @Test - @Ignore + @Disabled public void testDataSourceOktaSerialization() throws Exception { // test with username/password authentication // set up DataSource object and ensure connection works @@ -372,7 +365,8 @@ public void testDataSourceOktaSerialization() throws Exception { ResultSet resultSet = statement.executeQuery("select 1")) { resultSet.next(); assertThat("select 1", resultSet.getInt(1), equalTo(1)); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try 
(FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -395,7 +389,7 @@ public void testDataSourceOktaSerialization() throws Exception { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testConnectUsingKeyPair() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -453,7 +447,7 @@ public void testConnectUsingKeyPair() throws Exception { DriverManager.getConnection(uri, properties); fail(); } catch (SQLException e) { - Assert.assertEquals(390144, e.getErrorCode()); + assertEquals(390144, e.getErrorCode()); } // test multiple key pair try (Connection connection = getConnection(); @@ -510,7 +504,7 @@ public void testBadPrivateKey() throws Exception { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDifferentKeyLength() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -725,18 +719,14 @@ public void testHeartbeatFrequencyTooLarge() throws Exception { @Test public void testNativeSQL() throws Throwable { - try (Connection connection = getConnection()) { - // today returning the source SQL. - assertEquals("select 1", connection.nativeSQL("select 1")); - } + // today returning the source SQL. + assertEquals("select 1", connection.nativeSQL("select 1")); } @Test public void testGetTypeMap() throws Throwable { - try (Connection connection = getConnection()) { - // return an empty type map. setTypeMap is not supported. - assertEquals(Collections.emptyMap(), connection.getTypeMap()); - } + // return an empty type map. setTypeMap is not supported. 
+ assertEquals(Collections.emptyMap(), connection.getTypeMap()); } @Test @@ -829,7 +819,6 @@ public void testReadDateAfterSplittingResultSet() throws Exception { @Test public void testResultSetsClosedByStatement() throws SQLException { - Connection connection = getConnection(); Statement statement2 = connection.createStatement(); ResultSet rs1 = statement2.executeQuery("select 2;"); ResultSet rs2 = statement2.executeQuery("select 2;"); @@ -846,11 +835,10 @@ public void testResultSetsClosedByStatement() throws SQLException { assertTrue(rs2.isClosed()); assertTrue(rs3.isClosed()); assertTrue(rs4.isClosed()); - connection.close(); } @Test - @ConditionalIgnore(condition = RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testOKTAConnection() throws Throwable { Map params = getConnectionParameters(); Properties properties = new Properties(); @@ -867,7 +855,7 @@ public void testOKTAConnection() throws Throwable { } @Test - @ConditionalIgnore(condition = RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testOKTAConnectionWithOktauserParam() throws Throwable { Map params = getConnectionParameters(); Properties properties = new Properties(); @@ -898,7 +886,7 @@ public void testValidateDefaultParameters() throws Throwable { fail("should fail"); } catch (SQLException ex) { assertEquals( - "error code", ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED); + ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED, "error code"); } // schema is invalid @@ -909,7 +897,7 @@ public void testValidateDefaultParameters() throws Throwable { fail("should fail"); } catch (SQLException ex) { assertEquals( - "error code", ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED); + ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED, "error code"); } // warehouse is invalid @@ -920,7 +908,7 @@ public void testValidateDefaultParameters() throws Throwable 
{ fail("should fail"); } catch (SQLException ex) { assertEquals( - "error code", ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED); + ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED, "error code"); } // role is invalid @@ -930,7 +918,7 @@ public void testValidateDefaultParameters() throws Throwable { DriverManager.getConnection(params.get("uri"), props); fail("should fail"); } catch (SQLException ex) { - assertEquals("error code", ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST); + assertEquals(ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST, "error code"); } } @@ -960,7 +948,7 @@ public void testNoValidateDefaultParameters() throws Throwable { DriverManager.getConnection(params.get("uri"), props); fail("should fail"); } catch (SQLException ex) { - assertEquals("error code", ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST); + assertEquals(ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST, "error code"); } } @@ -971,7 +959,7 @@ public void testNoValidateDefaultParameters() throws Throwable { * * @throws SQLException */ - @Ignore + @Disabled @Test public void testOrgAccountUrl() throws SQLException { Properties props = new Properties(); @@ -997,7 +985,7 @@ public void testOrgAccountUrl() throws SQLException { * @throws SQLException * @throws NoSuchAlgorithmException */ - @Ignore + @Disabled @Test public void testOrgAccountUrlWithKeyPair() throws SQLException, NoSuchAlgorithmException { @@ -1052,7 +1040,7 @@ private Properties setCommonConnectionParameters(boolean validateDefaultParamete @Test public void testFailOverOrgAccount() throws SQLException { // only when set_git_info.sh picks up a SOURCE_PARAMETER_FILE - assumeTrue(RunningOnGithubAction.isRunningOnGithubAction()); + assumeRunningOnGithubActions(); Map kvParams = getConnectionParameters(null, "ORG"); Properties connProps = kvMap2Properties(kvParams, false); diff --git 
a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index efed33896..68cd101bf 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -12,14 +12,15 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.AnyOf.anyOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -49,11 +50,11 @@ import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningNotOnAWS; -import net.snowflake.client.RunningOnGithubAction; +import javax.net.ssl.SSLHandshakeException; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.RunOnAWS; +import net.snowflake.client.category.TestTags; import 
net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.ObjectMapperFactory; @@ -71,13 +72,12 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; import org.apache.http.entity.StringEntity; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * Connection integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -85,14 +85,14 @@ * if the tests still is not applicable. If it is applicable, move tests to ConnectionIT so that * both the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private static final SFLogger logger = SFLoggerFactory.getLogger(ConnectionLatestIT.class); private boolean defaultState; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); service.updateContextForIT(getConnectionParameters()); @@ -101,7 +101,7 @@ public void setUp() { TelemetryService.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); // wait 5 seconds while the service is flushing @@ -191,12 +191,13 @@ public void testHeartbeatFrequencyTooSmall() throws Exception { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void putGetStatementsHaveQueryID() throws Throwable { try (Connection con = getConnection(); Statement statement = con.createStatement()) { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); SnowflakeStatement snowflakeStatement = statement.unwrap(SnowflakeStatement.class); @@ -208,7 +209,7 @@ public void putGetStatementsHaveQueryID() throws Throwable { String statementPutQueryId = snowflakeStatement.getQueryID(); TestUtil.assertValidQueryId(statementPutQueryId); assertNotEquals( - "create query id is override by put query id", createStageQueryId, statementPutQueryId); + createStageQueryId, statementPutQueryId, "create query id is override by put query id"); resultSetPutQueryId = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); 
TestUtil.assertValidQueryId(resultSetPutQueryId); assertEquals(resultSetPutQueryId, statementPutQueryId); @@ -220,7 +221,7 @@ public void putGetStatementsHaveQueryID() throws Throwable { String resultSetGetQueryId = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); TestUtil.assertValidQueryId(resultSetGetQueryId); assertNotEquals( - "put and get query id should be different", resultSetGetQueryId, resultSetPutQueryId); + resultSetGetQueryId, resultSetPutQueryId, "put and get query id should be different"); assertEquals(resultSetGetQueryId, statementGetQueryId); } } @@ -228,12 +229,13 @@ public void putGetStatementsHaveQueryID() throws Throwable { /** Added in > 3.14.4 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void putGetStatementsHaveQueryIDEvenWhenFail() throws Throwable { try (Connection con = getConnection(); Statement statement = con.createStatement()) { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); SnowflakeStatement snowflakeStatement = statement.unwrap(SnowflakeStatement.class); try { @@ -253,7 +255,7 @@ public void putGetStatementsHaveQueryIDEvenWhenFail() throws Throwable { assertEquals(snowflakeStatement.getQueryID(), e.getQueryId()); } String getQueryId = snowflakeStatement.getQueryID(); - assertNotEquals("put and get query id should be different", putQueryId, getQueryId); + assertNotEquals(putQueryId, getQueryId, "put and get query id should be different"); String stageName = "stage_" + SnowflakeUtil.randomAlphaNumeric(10); statement.execute("CREATE OR REPLACE STAGE " + stageName); TestUtil.assertValidQueryId(snowflakeStatement.getQueryID()); @@ -272,7 +274,6 @@ public void testAsyncQueryOpenAndCloseConnection() throws SQLException, IOException, InterruptedException { 
// open connection and run asynchronous query String queryID = null; - QueryStatusV2 statusV2 = null; try (Connection con = getConnection(); Statement statement = con.createStatement(); ResultSet rs1 = @@ -286,7 +287,7 @@ public void testAsyncQueryOpenAndCloseConnection() await() .atMost(Duration.ofSeconds(5)) .until(() -> sfrs.getStatusV2().getStatus(), not(equalTo(QueryStatus.NO_DATA))); - statusV2 = sfrs.getStatusV2(); + QueryStatusV2 statusV2 = sfrs.getStatusV2(); // Query should take 60 seconds so should be running assertEquals(QueryStatus.RUNNING, statusV2.getStatus()); assertEquals(QueryStatus.RUNNING.name(), statusV2.getName()); @@ -303,7 +304,7 @@ public void testAsyncQueryOpenAndCloseConnection() assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); } try (ResultSet rs = con.unwrap(SnowflakeConnection.class).createResultSet(queryID)) { - statusV2 = rs.unwrap(SnowflakeResultSet.class).getStatusV2(); + QueryStatusV2 statusV2 = rs.unwrap(SnowflakeResultSet.class).getStatusV2(); // Assert status of query is a success assertEquals(QueryStatus.SUCCESS, statusV2.getStatus()); assertEquals("No error reported", statusV2.getErrorMessage()); @@ -316,27 +317,16 @@ public void testAsyncQueryOpenAndCloseConnection() .unwrap(SnowflakeStatement.class) .executeAsyncQuery("select * from nonexistentTable")) { Thread.sleep(100); - statusV2 = rs1.unwrap(SnowflakeResultSet.class).getStatusV2(); - // when GS response is slow, allow up to 1 second of retries to get final query status SnowflakeResultSet sfrs1 = rs1.unwrap(SnowflakeResultSet.class); await() .atMost(Duration.ofSeconds(10)) - .until( - () -> { - QueryStatus qs = sfrs1.getStatusV2().getStatus(); - return !(qs == QueryStatus.NO_DATA || qs == QueryStatus.RUNNING); - }); - // If GS response is too slow to return data, do nothing to avoid flaky test failure. If - // response has returned, - // assert it is the error message that we are expecting. 
- if (statusV2.getStatus() != QueryStatus.NO_DATA) { - assertEquals(QueryStatus.FAILED_WITH_ERROR, statusV2.getStatus()); - assertEquals(2003, statusV2.getErrorCode()); - assertEquals( - "SQL compilation error:\n" - + "Object 'NONEXISTENTTABLE' does not exist or not authorized.", - statusV2.getErrorMessage()); - } + .until(() -> sfrs1.getStatusV2().getStatus() == QueryStatus.FAILED_WITH_ERROR); + statusV2 = sfrs1.getStatusV2(); + assertEquals(2003, statusV2.getErrorCode()); + assertEquals( + "SQL compilation error:\n" + + "Object 'NONEXISTENTTABLE' does not exist or not authorized.", + statusV2.getErrorMessage()); } } } @@ -717,7 +707,7 @@ public void testHttpsLoginTimeoutWithSSL() throws InterruptedException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testKeyPairFileDataSourceSerialization() throws Exception { // test with key/pair authentication where key is in file // set up DataSource object and ensure connection works @@ -737,7 +727,8 @@ public void testKeyPairFileDataSourceSerialization() throws Exception { connectAndExecuteSelect1(ds); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -762,7 +753,7 @@ private static String readPrivateKeyFileToBase64Content(String fileName) throws /** Works in > 3.18.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testKeyPairBase64DataSourceSerialization() throws Exception { // test with key/pair authentication where key is passed as a Base64 string value // set up DataSource object and ensure connection works @@ -782,7 +773,8 @@ public void 
testKeyPairBase64DataSourceSerialization() throws Exception { connectAndExecuteSelect1(ds); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -805,7 +797,7 @@ public void testKeyPairBase64DataSourceSerialization() throws Exception { * executions */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyInConnectionString() throws SQLException, IOException { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -908,7 +900,7 @@ private static void unsetPublicKey(String testUser) throws SQLException { // This will only work with JDBC driver versions higher than 3.15.1 @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyInConnectionStringWithBouncyCastle() throws SQLException, IOException { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); testPrivateKeyInConnectionString(); @@ -921,7 +913,7 @@ public void testPrivateKeyInConnectionStringWithBouncyCastle() throws SQLExcepti * executions */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyBase64InConnectionString() throws SQLException, IOException { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -1009,7 +1001,7 @@ private static void connectExpectingInvalidOrUnsupportedPrivateKey( /** Works in > 3.18.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void 
testPrivateKeyBase64InConnectionStringWithBouncyCastle() throws SQLException, IOException { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); @@ -1017,7 +1009,7 @@ public void testPrivateKeyBase64InConnectionStringWithBouncyCastle() } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testBasicDataSourceSerialization() throws Exception { // test with username/password authentication // set up DataSource object and ensure connection works @@ -1032,7 +1024,8 @@ public void testBasicDataSourceSerialization() throws Exception { connectAndExecuteSelect1(ds); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -1243,7 +1236,7 @@ public void testGetChildQueryIdsNegativeTestQueryFailed() throws Exception { * likely not having the test account we used here. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAuthenticatorEndpointWithDashInAccountName() throws Exception { Map params = getConnectionParameters(); String serverUrl = @@ -1302,7 +1295,7 @@ public void testReadOnly() throws Throwable { * the error code is ErrorCode.S3_OPERATION_ERROR so only runs on AWS. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnAWS.class) + @RunOnAWS public void testDownloadStreamWithFileNotFoundException() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -1313,7 +1306,7 @@ public void testDownloadStreamWithFileNotFoundException() throws SQLException { .unwrap(SnowflakeConnection.class) .downloadStream("@testDownloadStream_stage", "/fileNotExist.gz", true); } catch (SQLException ex) { - assertThat(ex.getErrorCode(), is(ErrorCode.S3_OPERATION_ERROR.getMessageCode())); + assertThat(ex.getErrorCode(), is(ErrorCode.FILE_NOT_FOUND.getMessageCode())); } long endDownloadTime = System.currentTimeMillis(); // S3Client retries some exception for a default timeout of 5 minutes @@ -1364,19 +1357,19 @@ private Boolean isPbes2KeySupported() throws SQLException, IOException, Security String passphrase = System.getenv(passphraseEnv); assertNotNull( + passphrase, privateKeyFileNameEnv + " environment variable can't be empty. " - + "Please provide the filename for your private key located in the resource folder", - passphrase); + + "Please provide the filename for your private key located in the resource folder"); assertNotNull( + passphrase, publicKeyFileNameEnv + " environment variable can't be empty. 
" - + "Please provide the filename for your public key located in the resource folder", - passphrase); + + "Please provide the filename for your public key located in the resource folder"); assertNotNull( - passphraseEnv + " environment variable is required to decrypt private key.", passphrase); + passphrase, passphraseEnv + " environment variable is required to decrypt private key."); Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); Properties properties = new Properties(); @@ -1434,8 +1427,8 @@ private Boolean isPbes2KeySupported() throws SQLException, IOException, Security * @throws IOException */ @Test - @Ignore - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled + @DontRunOnGithubActions public void testPbes2Support() throws SQLException, IOException { System.clearProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM); boolean pbes2Supported = isPbes2KeySupported(); @@ -1448,7 +1441,7 @@ public void testPbes2Support() throws SQLException, IOException { String failureMessage = "The failure means that the JDK version can decrypt a private key generated by OpenSSL v3 and " + "BouncyCastle shouldn't be needed anymore"; - assertFalse(failureMessage, pbes2Supported); + assertFalse(pbes2Supported, failureMessage); // The expectation is that this is going to pass once we add Bouncy Castle in the list of // providers @@ -1458,12 +1451,12 @@ public void testPbes2Support() throws SQLException, IOException { "Bouncy Castle Provider should have been loaded with the -D" + SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM + "JVM argument and this should have decrypted the private key generated by OpenSSL v3"; - assertTrue(failureMessage, pbes2Supported); + assertTrue(pbes2Supported, failureMessage); } // Test for regenerating okta one-time token for versions > 3.15.1 @Test - @Ignore + @Disabled public void testDataSourceOktaGenerates429StatusCode() throws Exception { // test with username/password 
authentication // set up DataSource object and ensure connection works @@ -1561,26 +1554,26 @@ public void shouldGetDifferentTimestampLtzConsistentBetweenFormats() throws Exce arrowResultSet.getTimestamp(column).getTimezoneOffset(), arrowResultSet.getTimestamp(column).getClass()); assertEquals( + jsonResultSet.getString(column), + arrowResultSet.getString(column), "Expecting that string representation are the same for row " + rowIdx + " and column " - + column, - jsonResultSet.getString(column), - arrowResultSet.getString(column)); + + column); assertEquals( + jsonResultSet.getTimestamp(column).toString(), + arrowResultSet.getTimestamp(column).toString(), "Expecting that string representation (via toString) are the same for row " + rowIdx + " and column " - + column, - jsonResultSet.getTimestamp(column).toString(), - arrowResultSet.getTimestamp(column).toString()); + + column); assertEquals( + jsonResultSet.getTimestamp(column), + arrowResultSet.getTimestamp(column), "Expecting that timestamps are the same for row " + rowIdx + " and column " - + column, - jsonResultSet.getTimestamp(column), - arrowResultSet.getTimestamp(column)); + + column); } rowIdx++; } @@ -1618,4 +1611,29 @@ public void shouldGetOverridenConnectionAndSocketTimeouts() throws Exception { assertEquals(Duration.ofMillis(200), HttpUtil.getSocketTimeout()); } } + + /** Added in > 3.19.0 */ + @Test + public void shouldFailOnSslExceptionWithLinkToTroubleShootingGuide() throws InterruptedException { + Properties properties = new Properties(); + properties.put("user", "fakeuser"); + properties.put("password", "testpassword"); + properties.put("ocspFailOpen", Boolean.FALSE.toString()); + + try { + DriverManager.getConnection("jdbc:snowflake://expired.badssl.com/", properties); + fail("should fail"); + } catch (SQLException e) { + // *.badssl.com may fail with timeout + if (!(e.getCause() instanceof SSLHandshakeException) + && e.getCause().getMessage().toLowerCase().contains("timed out")) { + return; + } + 
assertThat(e.getCause(), instanceOf(SSLHandshakeException.class)); + assertTrue( + e.getMessage() + .contains( + "https://docs.snowflake.com/en/user-guide/client-connectivity-troubleshooting/overview")); + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java b/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java index 91d5f7bc8..4b7d569d5 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java @@ -1,6 +1,6 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DriverManager; diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java index 770acda0a..a539dc7f9 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.mchange.v2.c3p0.ComboPooledDataSource; import com.zaxxer.hikari.HikariConfig; @@ -15,17 +15,17 @@ import java.sql.Statement; import java.util.Map; import java.util.Properties; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.category.TestTags; import org.apache.commons.dbcp.BasicDataSource; import org.apache.commons.dbcp.PoolingDataSource; import org.apache.commons.pool.impl.GenericObjectPool; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Connection pool interface test */ 
-@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionPoolingIT { private BasicDataSource bds = null; private ComboPooledDataSource cpds = null; @@ -48,7 +48,7 @@ public ConnectionPoolingIT() { ssl = params.get("ssl"); } - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection connection = BaseJDBCTest.getConnection(); Statement statement = connection.createStatement()) { @@ -57,7 +57,7 @@ public void setUp() throws SQLException { } } - @After + @AfterEach public void tearDown() throws SQLException { try (Connection connection = BaseJDBCTest.getConnection(); Statement statement = connection.createStatement(); ) { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java index 04c9c9311..96b896247 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java @@ -9,8 +9,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; import java.net.SocketTimeoutException; import java.security.cert.CertificateExpiredException; @@ -19,17 +19,16 @@ import java.util.Properties; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLPeerUnverifiedException; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFOCSPException; import 
net.snowflake.client.core.SFTrustManager; import org.hamcrest.Matcher; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for connection with OCSP mode mainly negative cases by injecting errors. @@ -38,7 +37,7 @@ * *

hang_webserver.py 12345 */ -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionWithOCSPModeIT extends BaseJDBCTest { private final String testUser = "fakeuser"; private final String testPassword = "testpassword"; @@ -46,12 +45,12 @@ public class ConnectionWithOCSPModeIT extends BaseJDBCTest { private static int nameCounter = 0; - @Before + @BeforeEach public void setUp() { SFTrustManager.deleteCache(); } - @After + @AfterEach public void tearDown() { SFTrustManager.cleanTestSystemParameters(); } @@ -109,7 +108,7 @@ public void testValidityExpiredOCSPResponseFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } @@ -147,7 +146,7 @@ public void testNoOCSPResponderURLFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } @@ -184,7 +183,7 @@ public void testValidityExpiredOCSPResponseInsecure() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } @@ -199,7 +198,7 @@ public void testCertAttachedInvalidFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); 
assertNull(ex.getCause()); } } @@ -235,7 +234,7 @@ public void testUnknownOCSPCertFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } @@ -294,7 +293,7 @@ public void testOCSPCacheServerTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } @@ -333,14 +332,14 @@ public void testOCSPResponderTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } /** Test OCSP Responder hang and timeout. SocketTimeoutException exception should be raised. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOCSPResponderTimeoutFailClosed() { System.setProperty(SFTrustManager.SF_OCSP_TEST_OCSP_RESPONDER_TIMEOUT, "1000"); System.setProperty(SFTrustManager.SF_OCSP_TEST_RESPONDER_URL, "http://localhost:12345/hang"); @@ -369,7 +368,7 @@ public void testOCSPResponder403FailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), httpStatus403Or513()); + assertThat(ex.getMessage(), httpStatus403Or404Or513()); assertNull(ex.getCause()); } } @@ -380,7 +379,7 @@ public void testOCSPResponder403FailOpen() { * is invalid. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOCSPResponder403FailClosed() { System.setProperty(SFTrustManager.SF_OCSP_TEST_RESPONDER_URL, "http://localhost:12345/403"); System.setProperty( @@ -397,7 +396,7 @@ public void testOCSPResponder403FailClosed() { /** Test Certificate Expired. Will fail in both FAIL_OPEN and FAIL_CLOSED. */ @Test - @Ignore("Issuer of root CA expired") + @Disabled("Issuer of root CA expired") // https://support.sectigo.com/articles/Knowledge/Sectigo-AddTrust-External-CA-Root-Expiring-May-30-2020 public void testExpiredCert() { try { @@ -412,26 +411,39 @@ public void testExpiredCert() { /** Test Wrong host. Will fail in both FAIL_OPEN and FAIL_CLOSED. 
*/ @Test - public void testWrongHost() { + public void testWrongHost() throws InterruptedException { try { DriverManager.getConnection( "jdbc:snowflake://wrong.host.badssl.com/", OCSPFailClosedProperties()); fail("should fail"); } catch (SQLException ex) { + // *.badssl.com may fail with timeout + if (!(ex.getCause() instanceof SSLPeerUnverifiedException) + && !(ex.getCause() instanceof SSLHandshakeException) + && ex.getCause().getMessage().toLowerCase().contains("timed out")) { + return; + } assertThat(ex, instanceOf(SnowflakeSQLException.class)); // The certificates used by badssl.com expired around 05/17/2022, - // https://github.com/chromium/badssl.com/issues/504. After the certificates had been updated, - // the exception seems to be changed from SSLPeerUnverifiedException to SSLHandshakeException. + // https://github.com/chromium/badssl.com/issues/504. After the certificates had been + // updated, + // the exception seems to be changed from SSLPeerUnverifiedException to + // SSLHandshakeException. 
assertThat( ex.getCause(), anyOf( instanceOf(SSLPeerUnverifiedException.class), instanceOf(SSLHandshakeException.class))); + return; } + fail("All retries failed"); } - private static Matcher httpStatus403Or513() { - return anyOf(containsString("HTTP status=403"), containsString("HTTP status=513")); + private static Matcher httpStatus403Or404Or513() { + return anyOf( + containsString("HTTP status=403"), + containsString("HTTP status=404"), + containsString("HTTP status=513")); } } diff --git a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java index c6fb29bf4..2673d543c 100644 --- a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java @@ -1,13 +1,13 @@ package net.snowflake.client.jdbc; -import static junit.framework.TestCase.assertEquals; -import static junit.framework.TestCase.fail; import static net.snowflake.client.AbstractDriverIT.getFullPathFileInResource; import static net.snowflake.client.jdbc.SnowflakeDriverIT.findFile; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.net.Authenticator; @@ -18,17 +18,16 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.HttpProtocol; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFSession; import net.snowflake.common.core.SqlState; 
-import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; // To run these tests, you must: // 1.) Start up a proxy connection. The simplest ways are via Squid or BurpSuite. Confluence doc on @@ -37,9 +36,9 @@ // 2.) Enter your own username and password for the account you're connecting to // 3.) Adjust parameters like role, database, schema, etc to match with account accordingly -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class CustomProxyLatestIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /** * Before running this test, change the user and password to appropriate values. Set up 2 @@ -51,7 +50,7 @@ public class CustomProxyLatestIT { * @throws SQLException */ @Test - @Ignore + @Disabled public void test2ProxiesWithSameJVM() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -107,7 +106,7 @@ public void test2ProxiesWithSameJVM() throws SQLException { * @throws SQLException */ @Test - @Ignore + @Disabled public void testTLSIssue() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -149,7 +148,7 @@ public void testTLSIssue() throws SQLException { * http instead of https proxy parameters for non-TLS proxy */ @Test - @Ignore + @Disabled public void testJVMParamsWithNonProxyHostsHonored() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -172,7 +171,7 @@ public void testJVMParamsWithNonProxyHostsHonored() throws SQLException { /** Test TLS issue against S3 client to ensure proxy works with PUT/GET statements */ @Test - @Ignore + @Disabled public void testTLSIssueWithConnectionStringAgainstS3() throws 
ClassNotFoundException, SQLException { @@ -193,7 +192,7 @@ public void testTLSIssueWithConnectionStringAgainstS3() * @throws SQLException */ @Test - @Ignore + @Disabled public void testNonProxyHostAltering() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -243,7 +242,7 @@ public void testNonProxyHostAltering() throws SQLException { * @throws SQLException */ @Test - @Ignore + @Disabled public void testSizeOfHttpClientNoProxies() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -279,7 +278,7 @@ public void testSizeOfHttpClientNoProxies() throws SQLException { } @Test - @Ignore + @Disabled public void testCorrectProxySettingFromConnectionString() throws ClassNotFoundException, SQLException { String connectionUrl = @@ -299,7 +298,7 @@ public void testCorrectProxySettingFromConnectionString() } @Test - @Ignore + @Disabled public void testWrongProxyPortSettingFromConnectionString() throws ClassNotFoundException, SQLException { @@ -313,7 +312,7 @@ public void testWrongProxyPortSettingFromConnectionString() } @Test - @Ignore + @Disabled public void testWrongProxyPasswordSettingFromConnectionString() throws ClassNotFoundException, SQLException { @@ -334,7 +333,7 @@ public void testWrongProxyPasswordSettingFromConnectionString() } @Test - @Ignore + @Disabled public void testInvalidProxyPortFromConnectionString() throws ClassNotFoundException, SQLException { @@ -355,7 +354,7 @@ public void testInvalidProxyPortFromConnectionString() } @Test - @Ignore + @Disabled public void testNonProxyHostsFromConnectionString() throws ClassNotFoundException, SQLException { String connectionUrl = @@ -368,7 +367,7 @@ public void testNonProxyHostsFromConnectionString() throws ClassNotFoundExceptio } @Test - @Ignore + @Disabled public void testWrongNonProxyHostsFromConnectionString() throws ClassNotFoundException, SQLException { @@ -383,7 +382,7 @@ public void testWrongNonProxyHostsFromConnectionString() } @Test - 
@Ignore + @Disabled public void testUnsetJvmPropertiesForInvalidSettings() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -435,11 +434,9 @@ public PasswordAuthentication getPasswordAuthentication() { stmt.execute("use warehouse TINY_WAREHOUSE"); stmt.execute("CREATE OR REPLACE STAGE testPutGet_stage"); assertTrue( - "Failed to put a file", stmt.execute( - "PUT file://" - + getFullPathFileInResource("orders_100.csv") - + " @testPutGet_stage")); + "PUT file://" + getFullPathFileInResource("orders_100.csv") + " @testPutGet_stage"), + "Failed to put a file"); String sql = "select $1 from values(1),(3),(5),(7)"; try (ResultSet res = stmt.executeQuery(sql)) { while (res.next()) { @@ -454,7 +451,7 @@ public PasswordAuthentication getPasswordAuthentication() { } @Test - @Ignore + @Disabled public void testProxyConnectionWithAzure() throws ClassNotFoundException, SQLException { String connectionUrl = "jdbc:snowflake://aztestaccount.east-us-2.azure.snowflakecomputing.com/?tracing=ALL"; @@ -463,7 +460,7 @@ public void testProxyConnectionWithAzure() throws ClassNotFoundException, SQLExc } @Test - @Ignore + @Disabled public void testProxyConnectionWithAzureWithConnectionString() throws ClassNotFoundException, SQLException { String connectionUrl = @@ -476,7 +473,7 @@ public void testProxyConnectionWithAzureWithConnectionString() } @Test - @Ignore + @Disabled public void testProxyConnectionWithoutProxyPortOrHost() throws ClassNotFoundException, SQLException { // proxyPort is empty @@ -553,7 +550,7 @@ public void testProxyConnectionWithoutProxyPortOrHost() * @throws SQLException */ @Test - @Ignore + @Disabled public void testProxyConnectionWithJVMParameters() throws SQLException, ClassNotFoundException { String connectionUrl = "jdbc:snowflake://aztestaccount.east-us-2.azure.snowflakecomputing.com/?tracing=ALL"; @@ -571,7 +568,7 @@ public void testProxyConnectionWithJVMParameters() throws SQLException, ClassNot } @Test - @Ignore + @Disabled 
public void testProxyConnectionWithAzureWithWrongConnectionString() throws ClassNotFoundException { String connectionUrl = @@ -598,7 +595,7 @@ public void testProxyConnectionWithAzureWithWrongConnectionString() * is specified. Set up a http proxy and change the settings below. */ @Test - @Ignore + @Disabled public void testSetJVMProxyHttp() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -624,7 +621,7 @@ public void testSetJVMProxyHttp() throws SQLException { * below. */ @Test - @Ignore + @Disabled public void testSetJVMProxyHttps() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -649,7 +646,7 @@ public void testSetJVMProxyHttps() throws SQLException { * https proxy and change the settings below. */ @Test - @Ignore + @Disabled public void testSetJVMProxyDefaultHttps() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -725,19 +722,20 @@ public PasswordAuthentication getPasswordAuthentication() { String TEST_DATA_FILE = "orders_100.csv"; String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; assertTrue( - "Failed to put a file", - stmt.execute("PUT file://" + sourceFilePath + " @testPutGet_stage")); + stmt.execute("PUT file://" + sourceFilePath + " @testPutGet_stage"), + "Failed to put a file"); findFile(stmt, "ls @testPutGet_stage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", stmt.execute( - "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be 
gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java index 2ea144f3c..8f1f5b964 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java @@ -5,15 +5,15 @@ import static java.sql.DatabaseMetaData.procedureReturnsResult; import static java.sql.ResultSetMetaData.columnNullableUnknown; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.google.common.base.Strings; import java.sql.Connection; @@ -28,16 +28,15 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Database Metadata IT 
*/ -@Category(TestCategoryOthers.class) -public class DatabaseMetaDataIT extends BaseJDBCTest { +@Tag(TestTags.OTHERS) +public class DatabaseMetaDataIT extends BaseJDBCWithSharedConnectionIT { private static final Pattern VERSION_PATTERN = Pattern.compile("^(\\d+)\\.(\\d+)(?:\\.\\d+)+\\s*.*"); private static final String PI_PROCEDURE = @@ -65,161 +64,150 @@ public class DatabaseMetaDataIT extends BaseJDBCTest { @Test public void testGetConnection() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - assertEquals(connection, metaData.getConnection()); - } + DatabaseMetaData metaData = connection.getMetaData(); + assertEquals(connection, metaData.getConnection()); } @Test public void testDatabaseAndDriverInfo() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - - // identifiers - assertEquals("Snowflake", metaData.getDatabaseProductName()); - assertEquals("Snowflake", metaData.getDriverName()); - - // Snowflake JDBC driver version - String driverVersion = metaData.getDriverVersion(); - Matcher m = VERSION_PATTERN.matcher(driverVersion); - assertTrue(m.matches()); - int majorVersion = metaData.getDriverMajorVersion(); - int minorVersion = metaData.getDriverMinorVersion(); - assertEquals(m.group(1), String.valueOf(majorVersion)); - assertEquals(m.group(2), String.valueOf(minorVersion)); - } + DatabaseMetaData metaData = connection.getMetaData(); + + // identifiers + assertEquals("Snowflake", metaData.getDatabaseProductName()); + assertEquals("Snowflake", metaData.getDriverName()); + + // Snowflake JDBC driver version + String driverVersion = metaData.getDriverVersion(); + Matcher m = VERSION_PATTERN.matcher(driverVersion); + assertTrue(m.matches()); + int majorVersion = metaData.getDriverMajorVersion(); + int minorVersion = metaData.getDriverMinorVersion(); + assertEquals(m.group(1), String.valueOf(majorVersion)); 
+ assertEquals(m.group(2), String.valueOf(minorVersion)); } @Test public void testGetCatalogs() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - assertEquals(".", metaData.getCatalogSeparator()); - assertEquals("database", metaData.getCatalogTerm()); - - ResultSet resultSet = metaData.getCatalogs(); - verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_CATALOGS); - assertTrue(resultSet.isBeforeFirst()); - - int cnt = 0; - Set allVisibleDatabases = new HashSet<>(); - while (resultSet.next()) { - allVisibleDatabases.add(resultSet.getString(1)); - if (cnt == 0) { - assertTrue(resultSet.isFirst()); - } - ++cnt; - try { - resultSet.isLast(); - fail("No isLast support for query based metadata"); - } catch (SQLFeatureNotSupportedException ex) { - // nop - } - try { - resultSet.isAfterLast(); - fail("No isAfterLast support for query based metadata"); - } catch (SQLFeatureNotSupportedException ex) { - // nop - } + DatabaseMetaData metaData = connection.getMetaData(); + assertEquals(".", metaData.getCatalogSeparator()); + assertEquals("database", metaData.getCatalogTerm()); + + ResultSet resultSet = metaData.getCatalogs(); + verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_CATALOGS); + assertTrue(resultSet.isBeforeFirst()); + + int cnt = 0; + Set allVisibleDatabases = new HashSet<>(); + while (resultSet.next()) { + allVisibleDatabases.add(resultSet.getString(1)); + if (cnt == 0) { + assertTrue(resultSet.isFirst()); } - assertThat(cnt, greaterThanOrEqualTo(1)); + ++cnt; try { - assertTrue(resultSet.isAfterLast()); - fail("The result set is automatically closed when all rows are fetched."); - } catch (SQLException ex) { - assertEquals((int) ErrorCode.RESULTSET_ALREADY_CLOSED.getMessageCode(), ex.getErrorCode()); + resultSet.isLast(); + fail("No isLast support for query based metadata"); + } catch (SQLFeatureNotSupportedException ex) { + // nop } try { 
resultSet.isAfterLast(); fail("No isAfterLast support for query based metadata"); - } catch (SQLException ex) { - assertEquals((int) ErrorCode.RESULTSET_ALREADY_CLOSED.getMessageCode(), ex.getErrorCode()); + } catch (SQLFeatureNotSupportedException ex) { + // nop } - resultSet.close(); // double closing does nothing. - resultSet.next(); // no exception + } + assertThat(cnt, greaterThanOrEqualTo(1)); + try { + assertTrue(resultSet.isAfterLast()); + fail("The result set is automatically closed when all rows are fetched."); + } catch (SQLException ex) { + assertEquals((int) ErrorCode.RESULTSET_ALREADY_CLOSED.getMessageCode(), ex.getErrorCode()); + } + try { + resultSet.isAfterLast(); + fail("No isAfterLast support for query based metadata"); + } catch (SQLException ex) { + assertEquals((int) ErrorCode.RESULTSET_ALREADY_CLOSED.getMessageCode(), ex.getErrorCode()); + } + resultSet.close(); // double closing does nothing. + resultSet.next(); // no exception - List allAccessibleDatabases = - getInfoBySQL("select database_name from information_schema.databases"); + List allAccessibleDatabases = + getInfoBySQL("select database_name from information_schema.databases"); - assertTrue(allVisibleDatabases.containsAll(allAccessibleDatabases)); - } + assertTrue(allVisibleDatabases.containsAll(allAccessibleDatabases)); } @Test public void testGetSchemas() throws Throwable { // CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX = false - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - String currentSchema = connection.getSchema(); - assertEquals("schema", metaData.getSchemaTerm()); - Set schemas = new HashSet<>(); - try (ResultSet resultSet = metaData.getSchemas()) { - verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_SCHEMAS); - while (resultSet.next()) { - String schema = resultSet.getString(1); - if (currentSchema.equals(schema) || !TestUtil.isSchemaGeneratedInTests(schema)) { - schemas.add(schema); - } + 
DatabaseMetaData metaData = connection.getMetaData(); + String currentSchema = connection.getSchema(); + assertEquals("schema", metaData.getSchemaTerm()); + Set schemas = new HashSet<>(); + try (ResultSet resultSet = metaData.getSchemas()) { + verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_SCHEMAS); + while (resultSet.next()) { + String schema = resultSet.getString(1); + if (currentSchema.equals(schema) || !TestUtil.isSchemaGeneratedInTests(schema)) { + schemas.add(schema); } } - assertThat(schemas.size(), greaterThanOrEqualTo(1)); + } + assertThat(schemas.size(), greaterThanOrEqualTo(1)); - Set schemasInDb = new HashSet<>(); - try (ResultSet resultSet = metaData.getSchemas(connection.getCatalog(), "%")) { - while (resultSet.next()) { - String schema = resultSet.getString(1); - if (currentSchema.equals(schema) || !TestUtil.isSchemaGeneratedInTests(schema)) { - schemasInDb.add(schema); - } + Set schemasInDb = new HashSet<>(); + try (ResultSet resultSet = metaData.getSchemas(connection.getCatalog(), "%")) { + while (resultSet.next()) { + String schema = resultSet.getString(1); + if (currentSchema.equals(schema) || !TestUtil.isSchemaGeneratedInTests(schema)) { + schemasInDb.add(schema); } } - assertThat(schemasInDb.size(), greaterThanOrEqualTo(1)); - assertThat(schemas.size(), greaterThanOrEqualTo(schemasInDb.size())); - schemasInDb.forEach(schemaInDb -> assertThat(schemas, hasItem(schemaInDb))); - assertTrue(schemas.contains(currentSchema)); - assertTrue(schemasInDb.contains(currentSchema)); } + assertThat(schemasInDb.size(), greaterThanOrEqualTo(1)); + assertThat(schemas.size(), greaterThanOrEqualTo(schemasInDb.size())); + schemasInDb.forEach(schemaInDb -> assertThat(schemas, hasItem(schemaInDb))); + assertTrue(schemas.contains(currentSchema)); + assertTrue(schemasInDb.contains(currentSchema)); // CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX = true try (Connection connection = getConnection(); Statement statement = 
connection.createStatement()) { statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - DatabaseMetaData metaData = connection.getMetaData(); - assertEquals("schema", metaData.getSchemaTerm()); - try (ResultSet resultSet = metaData.getSchemas()) { - Set schemas = new HashSet<>(); + DatabaseMetaData metaData2 = connection.getMetaData(); + assertEquals("schema", metaData2.getSchemaTerm()); + try (ResultSet resultSet = metaData2.getSchemas()) { + Set schemas2 = new HashSet<>(); while (resultSet.next()) { - schemas.add(resultSet.getString(1)); + schemas2.add(resultSet.getString(1)); } - assertThat(schemas.size(), equalTo(1)); + assertThat(schemas2.size(), equalTo(1)); } } } @Test public void testGetTableTypes() throws Throwable { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - try (ResultSet resultSet = metaData.getTableTypes()) { - Set types = new HashSet<>(); - while (resultSet.next()) { - types.add(resultSet.getString(1)); - } - assertEquals(2, types.size()); - assertTrue(types.contains("TABLE")); - assertTrue(types.contains("VIEW")); + DatabaseMetaData metaData = connection.getMetaData(); + try (ResultSet resultSet = metaData.getTableTypes()) { + Set types = new HashSet<>(); + while (resultSet.next()) { + types.add(resultSet.getString(1)); } + assertEquals(2, types.size()); + assertTrue(types.contains("TABLE")); + assertTrue(types.contains("VIEW")); } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTables() throws Throwable { Set tables = null; - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable = "T0"; @@ -271,8 +259,7 @@ public void testGetTables() throws Throwable { 
@Test public void testGetPrimarykeys() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable = "T0"; @@ -340,8 +327,7 @@ static void verifyResultSetMetaDataColumns( @Test public void testGetImportedKeys() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable1 = "T0"; @@ -386,8 +372,7 @@ public void testGetImportedKeys() throws Throwable { @Test public void testGetExportedKeys() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement(); ) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable1 = "T0"; @@ -433,8 +418,7 @@ public void testGetExportedKeys() throws Throwable { @Test public void testGetCrossReferences() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable1 = "T0"; @@ -482,8 +466,7 @@ public void testGetCrossReferences() throws Throwable { @Test public void testGetObjectsDoesNotExists() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable = 
"T0"; @@ -544,50 +527,45 @@ public void testGetObjectsDoesNotExists() throws Throwable { @Test public void testTypeInfo() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - ResultSet resultSet = metaData.getTypeInfo(); - resultSet.next(); - assertEquals("NUMBER", resultSet.getString(1)); - resultSet.next(); - assertEquals("INTEGER", resultSet.getString(1)); - resultSet.next(); - assertEquals("DOUBLE", resultSet.getString(1)); - resultSet.next(); - assertEquals("VARCHAR", resultSet.getString(1)); - resultSet.next(); - assertEquals("DATE", resultSet.getString(1)); - resultSet.next(); - assertEquals("TIME", resultSet.getString(1)); - resultSet.next(); - assertEquals("TIMESTAMP", resultSet.getString(1)); - resultSet.next(); - assertEquals("BOOLEAN", resultSet.getString(1)); - assertFalse(resultSet.next()); - } + DatabaseMetaData metaData = connection.getMetaData(); + ResultSet resultSet = metaData.getTypeInfo(); + resultSet.next(); + assertEquals("NUMBER", resultSet.getString(1)); + resultSet.next(); + assertEquals("INTEGER", resultSet.getString(1)); + resultSet.next(); + assertEquals("DOUBLE", resultSet.getString(1)); + resultSet.next(); + assertEquals("VARCHAR", resultSet.getString(1)); + resultSet.next(); + assertEquals("DATE", resultSet.getString(1)); + resultSet.next(); + assertEquals("TIME", resultSet.getString(1)); + resultSet.next(); + assertEquals("TIMESTAMP", resultSet.getString(1)); + resultSet.next(); + assertEquals("BOOLEAN", resultSet.getString(1)); + assertFalse(resultSet.next()); } @Test public void testProcedure() throws Throwable { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - assertEquals("procedure", metaData.getProcedureTerm()); - // no stored procedure support - assertTrue(metaData.supportsStoredProcedures()); - try (ResultSet resultSet = metaData.getProcedureColumns("%", "%", "%", "%")) { - assertEquals(0, 
getSizeOfResultSet(resultSet)); - } - try (ResultSet resultSet = metaData.getProcedures("%", "%", "%")) { - assertEquals(0, getSizeOfResultSet(resultSet)); - } + DatabaseMetaData metaData = connection.getMetaData(); + assertEquals("procedure", metaData.getProcedureTerm()); + // no stored procedure support + assertTrue(metaData.supportsStoredProcedures()); + try (ResultSet resultSet = metaData.getProcedureColumns("%", "%", "%", "%")) { + assertEquals(0, getSizeOfResultSet(resultSet)); + } + try (ResultSet resultSet = metaData.getProcedures("%", "%", "%")) { + assertEquals(0, getSizeOfResultSet(resultSet)); } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTablePrivileges() throws Exception { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); try { @@ -641,8 +619,7 @@ public void testGetTablePrivileges() throws Exception { @Test public void testGetProcedures() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { try { String database = connection.getCatalog(); String schema = connection.getSchema(); @@ -670,197 +647,189 @@ public void testGetProcedures() throws SQLException { @Test public void testDatabaseMetadata() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - - String dbVersion = metaData.getDatabaseProductVersion(); - Matcher m = VERSION_PATTERN.matcher(dbVersion); - assertTrue(m.matches()); - int majorVersion = metaData.getDatabaseMajorVersion(); - int minorVersion = metaData.getDatabaseMinorVersion(); - assertEquals(m.group(1), String.valueOf(majorVersion)); - 
assertEquals(m.group(2), String.valueOf(minorVersion)); - - assertFalse(Strings.isNullOrEmpty(metaData.getSQLKeywords())); - assertFalse(Strings.isNullOrEmpty(metaData.getNumericFunctions())); - assertFalse(Strings.isNullOrEmpty(metaData.getStringFunctions())); - assertFalse(Strings.isNullOrEmpty(metaData.getSystemFunctions())); - assertFalse(Strings.isNullOrEmpty(metaData.getTimeDateFunctions())); - - assertEquals("\\", metaData.getSearchStringEscape()); - - assertTrue(metaData.getURL().startsWith("jdbc:snowflake://")); - assertFalse(metaData.allProceduresAreCallable()); - assertTrue(metaData.allTablesAreSelectable()); - assertTrue(metaData.dataDefinitionCausesTransactionCommit()); - assertFalse(metaData.dataDefinitionIgnoredInTransactions()); - assertFalse(metaData.deletesAreDetected(1)); - assertTrue(metaData.doesMaxRowSizeIncludeBlobs()); - assertTrue(metaData.supportsTransactions()); - assertEquals( - Connection.TRANSACTION_READ_COMMITTED, metaData.getDefaultTransactionIsolation()); - assertEquals("$", metaData.getExtraNameCharacters()); - assertEquals("\"", metaData.getIdentifierQuoteString()); - assertEquals(0, getSizeOfResultSet(metaData.getIndexInfo(null, null, null, true, true))); - assertEquals(EXPECTED_MAX_BINARY_LENGTH, metaData.getMaxBinaryLiteralLength()); - assertEquals(255, metaData.getMaxCatalogNameLength()); - assertEquals(EXPECTED_MAX_CHAR_LENGTH, metaData.getMaxCharLiteralLength()); - assertEquals(255, metaData.getMaxColumnNameLength()); - assertEquals(0, metaData.getMaxColumnsInGroupBy()); - assertEquals(0, metaData.getMaxColumnsInIndex()); - assertEquals(0, metaData.getMaxColumnsInOrderBy()); - assertEquals(0, metaData.getMaxColumnsInSelect()); - assertEquals(0, metaData.getMaxColumnsInTable()); - assertEquals(0, metaData.getMaxConnections()); - assertEquals(0, metaData.getMaxCursorNameLength()); - assertEquals(0, metaData.getMaxIndexLength()); - assertEquals(0, metaData.getMaxProcedureNameLength()); - assertEquals(0, 
metaData.getMaxRowSize()); - assertEquals(255, metaData.getMaxSchemaNameLength()); - assertEquals(0, metaData.getMaxStatementLength()); - assertEquals(0, metaData.getMaxStatements()); - assertEquals(255, metaData.getMaxTableNameLength()); - assertEquals(0, metaData.getMaxTablesInSelect()); - assertEquals(255, metaData.getMaxUserNameLength()); - assertEquals(0, getSizeOfResultSet(metaData.getTablePrivileges(null, null, null))); - // assertEquals("", metaData.getTimeDateFunctions()); - assertEquals(TestUtil.systemGetEnv("SNOWFLAKE_TEST_USER"), metaData.getUserName()); - assertFalse(metaData.insertsAreDetected(1)); - assertTrue(metaData.isCatalogAtStart()); - assertFalse(metaData.isReadOnly()); - assertTrue(metaData.nullPlusNonNullIsNull()); - assertFalse(metaData.nullsAreSortedAtEnd()); - assertFalse(metaData.nullsAreSortedAtStart()); - assertTrue(metaData.nullsAreSortedHigh()); - assertFalse(metaData.nullsAreSortedLow()); - assertFalse(metaData.othersDeletesAreVisible(1)); - assertFalse(metaData.othersInsertsAreVisible(1)); - assertFalse(metaData.othersUpdatesAreVisible(1)); - assertFalse(metaData.ownDeletesAreVisible(1)); - assertFalse(metaData.ownInsertsAreVisible(1)); - assertFalse(metaData.ownUpdatesAreVisible(ResultSet.TYPE_SCROLL_INSENSITIVE)); - assertFalse(metaData.storesLowerCaseIdentifiers()); - assertFalse(metaData.storesLowerCaseQuotedIdentifiers()); - assertFalse(metaData.storesMixedCaseIdentifiers()); - assertTrue(metaData.storesMixedCaseQuotedIdentifiers()); - assertTrue(metaData.storesUpperCaseIdentifiers()); - assertFalse(metaData.storesUpperCaseQuotedIdentifiers()); - assertTrue(metaData.supportsAlterTableWithAddColumn()); - assertTrue(metaData.supportsAlterTableWithDropColumn()); - assertTrue(metaData.supportsANSI92EntryLevelSQL()); - assertFalse(metaData.supportsANSI92FullSQL()); - assertFalse(metaData.supportsANSI92IntermediateSQL()); - assertTrue(metaData.supportsBatchUpdates()); - assertTrue(metaData.supportsCatalogsInDataManipulation()); - 
assertFalse(metaData.supportsCatalogsInIndexDefinitions()); - assertFalse(metaData.supportsCatalogsInPrivilegeDefinitions()); - assertFalse(metaData.supportsCatalogsInProcedureCalls()); - assertTrue(metaData.supportsCatalogsInTableDefinitions()); - assertTrue(metaData.supportsColumnAliasing()); - assertFalse(metaData.supportsConvert()); - assertFalse(metaData.supportsConvert(1, 2)); - assertFalse(metaData.supportsCoreSQLGrammar()); - assertTrue(metaData.supportsCorrelatedSubqueries()); - assertTrue(metaData.supportsDataDefinitionAndDataManipulationTransactions()); - assertFalse(metaData.supportsDataManipulationTransactionsOnly()); - assertFalse(metaData.supportsDifferentTableCorrelationNames()); - assertTrue(metaData.supportsExpressionsInOrderBy()); - assertFalse(metaData.supportsExtendedSQLGrammar()); - assertTrue(metaData.supportsFullOuterJoins()); - assertFalse(metaData.supportsGetGeneratedKeys()); - assertTrue(metaData.supportsGroupBy()); - assertTrue(metaData.supportsGroupByBeyondSelect()); - assertFalse(metaData.supportsGroupByUnrelated()); - assertFalse(metaData.supportsIntegrityEnhancementFacility()); - assertFalse(metaData.supportsLikeEscapeClause()); - assertTrue(metaData.supportsLimitedOuterJoins()); - assertFalse(metaData.supportsMinimumSQLGrammar()); - assertFalse(metaData.supportsMixedCaseIdentifiers()); - assertTrue(metaData.supportsMixedCaseQuotedIdentifiers()); - assertFalse(metaData.supportsMultipleOpenResults()); - assertFalse(metaData.supportsMultipleResultSets()); - assertTrue(metaData.supportsMultipleTransactions()); - assertFalse(metaData.supportsNamedParameters()); - assertTrue(metaData.supportsNonNullableColumns()); - assertFalse(metaData.supportsOpenCursorsAcrossCommit()); - assertFalse(metaData.supportsOpenCursorsAcrossRollback()); - assertFalse(metaData.supportsOpenStatementsAcrossCommit()); - assertFalse(metaData.supportsOpenStatementsAcrossRollback()); - assertTrue(metaData.supportsOrderByUnrelated()); - 
assertTrue(metaData.supportsOuterJoins()); - assertFalse(metaData.supportsPositionedDelete()); - assertFalse(metaData.supportsPositionedUpdate()); - assertTrue( - metaData.supportsResultSetConcurrency( - ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); - assertFalse( - metaData.supportsResultSetConcurrency( - ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); - assertTrue(metaData.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY)); - assertTrue(metaData.supportsResultSetHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); - assertFalse(metaData.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); - assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, metaData.getResultSetHoldability()); - assertFalse(metaData.supportsSavepoints()); - assertTrue(metaData.supportsSchemasInDataManipulation()); - assertFalse(metaData.supportsSchemasInIndexDefinitions()); - assertFalse(metaData.supportsSchemasInPrivilegeDefinitions()); - assertFalse(metaData.supportsSchemasInProcedureCalls()); - assertTrue(metaData.supportsSchemasInTableDefinitions()); - assertFalse(metaData.supportsSelectForUpdate()); - assertFalse(metaData.supportsStatementPooling()); - assertTrue(metaData.supportsStoredFunctionsUsingCallSyntax()); - assertTrue(metaData.supportsSubqueriesInComparisons()); - assertTrue(metaData.supportsSubqueriesInExists()); - assertTrue(metaData.supportsSubqueriesInIns()); - assertFalse(metaData.supportsSubqueriesInQuantifieds()); - assertTrue(metaData.supportsTableCorrelationNames()); - assertTrue(metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED)); - assertFalse( - metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ)); - assertFalse(metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_SERIALIZABLE)); - assertFalse( - metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_UNCOMMITTED)); - assertTrue(metaData.supportsUnion()); - 
assertTrue(metaData.supportsUnionAll()); - assertFalse(metaData.updatesAreDetected(1)); - assertFalse(metaData.usesLocalFilePerTable()); - assertFalse(metaData.usesLocalFiles()); - } + DatabaseMetaData metaData = connection.getMetaData(); + + String dbVersion = metaData.getDatabaseProductVersion(); + Matcher m = VERSION_PATTERN.matcher(dbVersion); + assertTrue(m.matches()); + int majorVersion = metaData.getDatabaseMajorVersion(); + int minorVersion = metaData.getDatabaseMinorVersion(); + assertEquals(m.group(1), String.valueOf(majorVersion)); + assertEquals(m.group(2), String.valueOf(minorVersion)); + + assertFalse(Strings.isNullOrEmpty(metaData.getSQLKeywords())); + assertFalse(Strings.isNullOrEmpty(metaData.getNumericFunctions())); + assertFalse(Strings.isNullOrEmpty(metaData.getStringFunctions())); + assertFalse(Strings.isNullOrEmpty(metaData.getSystemFunctions())); + assertFalse(Strings.isNullOrEmpty(metaData.getTimeDateFunctions())); + + assertEquals("\\", metaData.getSearchStringEscape()); + + assertTrue(metaData.getURL().startsWith("jdbc:snowflake://")); + assertFalse(metaData.allProceduresAreCallable()); + assertTrue(metaData.allTablesAreSelectable()); + assertTrue(metaData.dataDefinitionCausesTransactionCommit()); + assertFalse(metaData.dataDefinitionIgnoredInTransactions()); + assertFalse(metaData.deletesAreDetected(1)); + assertTrue(metaData.doesMaxRowSizeIncludeBlobs()); + assertTrue(metaData.supportsTransactions()); + assertEquals(Connection.TRANSACTION_READ_COMMITTED, metaData.getDefaultTransactionIsolation()); + assertEquals("$", metaData.getExtraNameCharacters()); + assertEquals("\"", metaData.getIdentifierQuoteString()); + assertEquals(0, getSizeOfResultSet(metaData.getIndexInfo(null, null, null, true, true))); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, metaData.getMaxBinaryLiteralLength()); + assertEquals(255, metaData.getMaxCatalogNameLength()); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, metaData.getMaxCharLiteralLength()); + assertEquals(255, 
metaData.getMaxColumnNameLength()); + assertEquals(0, metaData.getMaxColumnsInGroupBy()); + assertEquals(0, metaData.getMaxColumnsInIndex()); + assertEquals(0, metaData.getMaxColumnsInOrderBy()); + assertEquals(0, metaData.getMaxColumnsInSelect()); + assertEquals(0, metaData.getMaxColumnsInTable()); + assertEquals(0, metaData.getMaxConnections()); + assertEquals(0, metaData.getMaxCursorNameLength()); + assertEquals(0, metaData.getMaxIndexLength()); + assertEquals(0, metaData.getMaxProcedureNameLength()); + assertEquals(0, metaData.getMaxRowSize()); + assertEquals(255, metaData.getMaxSchemaNameLength()); + assertEquals(0, metaData.getMaxStatementLength()); + assertEquals(0, metaData.getMaxStatements()); + assertEquals(255, metaData.getMaxTableNameLength()); + assertEquals(0, metaData.getMaxTablesInSelect()); + assertEquals(255, metaData.getMaxUserNameLength()); + assertEquals(0, getSizeOfResultSet(metaData.getTablePrivileges(null, null, null))); + // assertEquals("", metaData.getTimeDateFunctions()); + assertEquals(TestUtil.systemGetEnv("SNOWFLAKE_TEST_USER"), metaData.getUserName()); + assertFalse(metaData.insertsAreDetected(1)); + assertTrue(metaData.isCatalogAtStart()); + assertFalse(metaData.isReadOnly()); + assertTrue(metaData.nullPlusNonNullIsNull()); + assertFalse(metaData.nullsAreSortedAtEnd()); + assertFalse(metaData.nullsAreSortedAtStart()); + assertTrue(metaData.nullsAreSortedHigh()); + assertFalse(metaData.nullsAreSortedLow()); + assertFalse(metaData.othersDeletesAreVisible(1)); + assertFalse(metaData.othersInsertsAreVisible(1)); + assertFalse(metaData.othersUpdatesAreVisible(1)); + assertFalse(metaData.ownDeletesAreVisible(1)); + assertFalse(metaData.ownInsertsAreVisible(1)); + assertFalse(metaData.ownUpdatesAreVisible(ResultSet.TYPE_SCROLL_INSENSITIVE)); + assertFalse(metaData.storesLowerCaseIdentifiers()); + assertFalse(metaData.storesLowerCaseQuotedIdentifiers()); + assertFalse(metaData.storesMixedCaseIdentifiers()); + 
assertTrue(metaData.storesMixedCaseQuotedIdentifiers()); + assertTrue(metaData.storesUpperCaseIdentifiers()); + assertFalse(metaData.storesUpperCaseQuotedIdentifiers()); + assertTrue(metaData.supportsAlterTableWithAddColumn()); + assertTrue(metaData.supportsAlterTableWithDropColumn()); + assertTrue(metaData.supportsANSI92EntryLevelSQL()); + assertFalse(metaData.supportsANSI92FullSQL()); + assertFalse(metaData.supportsANSI92IntermediateSQL()); + assertTrue(metaData.supportsBatchUpdates()); + assertTrue(metaData.supportsCatalogsInDataManipulation()); + assertFalse(metaData.supportsCatalogsInIndexDefinitions()); + assertFalse(metaData.supportsCatalogsInPrivilegeDefinitions()); + assertFalse(metaData.supportsCatalogsInProcedureCalls()); + assertTrue(metaData.supportsCatalogsInTableDefinitions()); + assertTrue(metaData.supportsColumnAliasing()); + assertFalse(metaData.supportsConvert()); + assertFalse(metaData.supportsConvert(1, 2)); + assertFalse(metaData.supportsCoreSQLGrammar()); + assertTrue(metaData.supportsCorrelatedSubqueries()); + assertTrue(metaData.supportsDataDefinitionAndDataManipulationTransactions()); + assertFalse(metaData.supportsDataManipulationTransactionsOnly()); + assertFalse(metaData.supportsDifferentTableCorrelationNames()); + assertTrue(metaData.supportsExpressionsInOrderBy()); + assertFalse(metaData.supportsExtendedSQLGrammar()); + assertTrue(metaData.supportsFullOuterJoins()); + assertFalse(metaData.supportsGetGeneratedKeys()); + assertTrue(metaData.supportsGroupBy()); + assertTrue(metaData.supportsGroupByBeyondSelect()); + assertFalse(metaData.supportsGroupByUnrelated()); + assertFalse(metaData.supportsIntegrityEnhancementFacility()); + assertFalse(metaData.supportsLikeEscapeClause()); + assertTrue(metaData.supportsLimitedOuterJoins()); + assertFalse(metaData.supportsMinimumSQLGrammar()); + assertFalse(metaData.supportsMixedCaseIdentifiers()); + assertTrue(metaData.supportsMixedCaseQuotedIdentifiers()); + 
assertFalse(metaData.supportsMultipleOpenResults()); + assertFalse(metaData.supportsMultipleResultSets()); + assertTrue(metaData.supportsMultipleTransactions()); + assertFalse(metaData.supportsNamedParameters()); + assertTrue(metaData.supportsNonNullableColumns()); + assertFalse(metaData.supportsOpenCursorsAcrossCommit()); + assertFalse(metaData.supportsOpenCursorsAcrossRollback()); + assertFalse(metaData.supportsOpenStatementsAcrossCommit()); + assertFalse(metaData.supportsOpenStatementsAcrossRollback()); + assertTrue(metaData.supportsOrderByUnrelated()); + assertTrue(metaData.supportsOuterJoins()); + assertFalse(metaData.supportsPositionedDelete()); + assertFalse(metaData.supportsPositionedUpdate()); + assertTrue( + metaData.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + metaData.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); + assertTrue(metaData.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY)); + assertTrue(metaData.supportsResultSetHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); + assertFalse(metaData.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); + assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, metaData.getResultSetHoldability()); + assertFalse(metaData.supportsSavepoints()); + assertTrue(metaData.supportsSchemasInDataManipulation()); + assertFalse(metaData.supportsSchemasInIndexDefinitions()); + assertFalse(metaData.supportsSchemasInPrivilegeDefinitions()); + assertFalse(metaData.supportsSchemasInProcedureCalls()); + assertTrue(metaData.supportsSchemasInTableDefinitions()); + assertFalse(metaData.supportsSelectForUpdate()); + assertFalse(metaData.supportsStatementPooling()); + assertTrue(metaData.supportsStoredFunctionsUsingCallSyntax()); + assertTrue(metaData.supportsSubqueriesInComparisons()); + assertTrue(metaData.supportsSubqueriesInExists()); + assertTrue(metaData.supportsSubqueriesInIns()); + 
assertFalse(metaData.supportsSubqueriesInQuantifieds()); + assertTrue(metaData.supportsTableCorrelationNames()); + assertTrue(metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED)); + assertFalse(metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ)); + assertFalse(metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_SERIALIZABLE)); + assertFalse( + metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_UNCOMMITTED)); + assertTrue(metaData.supportsUnion()); + assertTrue(metaData.supportsUnionAll()); + assertFalse(metaData.updatesAreDetected(1)); + assertFalse(metaData.usesLocalFilePerTable()); + assertFalse(metaData.usesLocalFiles()); } @Test public void testOtherEmptyTables() throws Throwable { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); - // index is not supported. - try (ResultSet resultSet = metaData.getIndexInfo(null, null, null, true, true)) { - assertEquals(0, getSizeOfResultSet(resultSet)); - } - // UDT is not supported. - try (ResultSet resultSet = metaData.getUDTs(null, null, null, new int[] {})) { - assertEquals(0, getSizeOfResultSet(resultSet)); - } + // index is not supported. + try (ResultSet resultSet = metaData.getIndexInfo(null, null, null, true, true)) { + assertEquals(0, getSizeOfResultSet(resultSet)); + } + // UDT is not supported. 
+ try (ResultSet resultSet = metaData.getUDTs(null, null, null, new int[] {})) { + assertEquals(0, getSizeOfResultSet(resultSet)); } } @Test public void testFeatureNotSupportedException() throws Throwable { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - expectFeatureNotSupportedException( - () -> metaData.getBestRowIdentifier(null, null, null, 0, true)); - expectFeatureNotSupportedException(() -> metaData.getVersionColumns(null, null, null)); - expectFeatureNotSupportedException(() -> metaData.getSuperTypes(null, null, null)); - expectFeatureNotSupportedException(() -> metaData.getSuperTables(null, null, null)); - expectFeatureNotSupportedException(() -> metaData.getAttributes(null, null, null, null)); - expectFeatureNotSupportedException(metaData::getRowIdLifetime); - expectFeatureNotSupportedException(metaData::autoCommitFailureClosesAllResultSets); - expectFeatureNotSupportedException(metaData::getClientInfoProperties); - expectFeatureNotSupportedException(() -> metaData.getPseudoColumns(null, null, null, null)); - expectFeatureNotSupportedException(metaData::generatedKeyAlwaysReturned); - expectFeatureNotSupportedException( - () -> metaData.isWrapperFor(SnowflakeDatabaseMetaData.class)); - } + DatabaseMetaData metaData = connection.getMetaData(); + expectFeatureNotSupportedException( + () -> metaData.getBestRowIdentifier(null, null, null, 0, true)); + expectFeatureNotSupportedException(() -> metaData.getVersionColumns(null, null, null)); + expectFeatureNotSupportedException(() -> metaData.getSuperTypes(null, null, null)); + expectFeatureNotSupportedException(() -> metaData.getSuperTables(null, null, null)); + expectFeatureNotSupportedException(() -> metaData.getAttributes(null, null, null, null)); + expectFeatureNotSupportedException(metaData::getRowIdLifetime); + expectFeatureNotSupportedException(metaData::autoCommitFailureClosesAllResultSets); + 
expectFeatureNotSupportedException(metaData::getClientInfoProperties); + expectFeatureNotSupportedException(() -> metaData.getPseudoColumns(null, null, null, null)); + expectFeatureNotSupportedException(metaData::generatedKeyAlwaysReturned); + expectFeatureNotSupportedException( + () -> metaData.isWrapperFor(SnowflakeDatabaseMetaData.class)); } } diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java index ec590b066..00838fd1b 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java @@ -5,9 +5,9 @@ import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -15,24 +15,23 @@ import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** 
Database Metadata IT */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataInternalIT extends BaseJDBCTest { private Connection connection; private Statement statement; private DatabaseMetaData databaseMetaData; private ResultSet resultSet; - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection con = getConnection()) { initMetaData(con); @@ -68,7 +67,7 @@ static void initMetaData(Connection con) throws SQLException { } } - @After + @AfterEach public void tearDown() throws SQLException { try (Connection con = getConnection()) { endMetaData(con); @@ -83,7 +82,8 @@ static void endMetaData(Connection con) throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: SNOW-1805299 + @DontRunOnGithubActions public void testGetColumn() throws SQLException { String getAllColumnsCount = "select count(*) from db.information_schema.columns"; connection = getConnection(); @@ -166,7 +166,7 @@ public void testGetColumn() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetFunctions() throws SQLException { connection = getConnection(); statement = connection.createStatement(); @@ -241,7 +241,8 @@ public void testGetFunctions() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: SNOW-1805299 + @DontRunOnGithubActions public void testGetSchema() throws SQLException { String getSchemaCount = "select count(*) from db.information_schema.schemata"; connection = getConnection(); @@ -290,9 +291,9 @@ public void testGetSchema() throws SQLException { * getTables() function Author: Andong Zhan Created on 09/28/2018 */ @Test - @Ignore // SNOW-85084 detected this is a flaky test, so ignore it here. 
+ @Disabled // SNOW-85084 detected this is a flaky test, so ignore it here. // We have other regression tests to cover it - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTablesReusingCachedResults() throws SQLException { Connection snowflakeConnection = getSnowflakeAdminConnection(); Statement snowflake = snowflakeConnection.createStatement(); @@ -449,7 +450,8 @@ private long getAccountId(Statement stmt, String accountName) throws SQLExceptio } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: SNOW-1805299 + @DontRunOnGithubActions public void testGetTables() throws SQLException { String getAllTable = "select count(*) from db.information_schema.tables"; String getAllBaseTable = @@ -579,7 +581,7 @@ public void testGetTables() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetMetaDataUseConnectionCtx() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java index 15701ca17..622f94a0a 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java @@ -2,8 +2,8 @@ import static net.snowflake.client.jdbc.DatabaseMetaDataInternalIT.endMetaData; import static net.snowflake.client.jdbc.DatabaseMetaDataInternalIT.initMetaData; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.sql.Connection; import 
java.sql.DatabaseMetaData; @@ -17,13 +17,12 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Database Metadata tests for the latest JDBC driver. This doesn't work for the oldest supported @@ -31,17 +30,17 @@ * tests still is not applicable. If it is applicable, move tests to DatabaseMetaDataIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataInternalLatestIT extends BaseJDBCTest { - @Before + @BeforeEach public void setUp() throws Exception { try (Connection con = getConnection()) { initMetaData(con); } } - @After + @AfterEach public void tearDown() throws Exception { try (Connection con = getConnection()) { endMetaData(con); @@ -49,7 +48,7 @@ public void tearDown() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetMetaDataUseConnectionCtx() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -79,7 +78,7 @@ public void testGetMetaDataUseConnectionCtx() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetFunctionColumns() throws SQLException { try 
(Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -253,7 +252,7 @@ public void testGetFunctionColumns() throws SQLException { /** Tests that calling getTables() concurrently doesn't cause data race condition. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTablesRaceCondition() throws SQLException, ExecutionException, InterruptedException { try (Connection connection = getConnection()) { diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index bebe3d8f4..c038be49e 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -9,13 +9,13 @@ import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.NumericFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.StringFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.SystemFunctionsSupported; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.lang.reflect.Field; import java.sql.Connection; @@ -30,15 +30,15 @@ import java.util.Map; import java.util.Properties; import java.util.Set; -import 
net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFSessionProperty; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * DatabaseMetaData test for the latest JDBC driver. This doesn't work for the oldest supported @@ -46,8 +46,8 @@ * tests still is not applicable. If it is applicable, move tests to DatabaseMetaDataIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) -public class DatabaseMetaDataLatestIT extends BaseJDBCTest { +@Tag(TestTags.OTHERS) +public class DatabaseMetaDataLatestIT extends BaseJDBCWithSharedConnectionIT { private static final String TEST_PROC = "create or replace procedure testproc(param1 float, param2 string)\n" + " returns varchar\n" @@ -80,12 +80,32 @@ public class DatabaseMetaDataLatestIT extends BaseJDBCTest { private static final String ENABLE_PATTERN_SEARCH = SFSessionProperty.ENABLE_PATTERN_SEARCH.getPropertyKey(); + private static final String startingSchema; + private static final String startingDatabase; + + static { + try { + startingSchema = connection.getSchema(); + startingDatabase = connection.getCatalog(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + /** Create catalog and schema for tests with double quotes */ public void createDoubleQuotedSchemaAndCatalog(Statement statement) throws SQLException { statement.execute("create or replace database \"dbwith\"\"quotes\""); statement.execute("create or replace 
schema \"dbwith\"\"quotes\".\"schemawith\"\"quotes\""); } + @BeforeEach + public void setUp() throws SQLException { + try (Statement stmt = connection.createStatement()) { + stmt.execute("USE DATABASE " + startingDatabase); + stmt.execute("USE SCHEMA " + startingSchema); + } + } + /** * Tests for getFunctions * @@ -199,9 +219,8 @@ public void testUseConnectionCtx() throws Exception { */ @Test public void testDoubleQuotedDatabaseAndSchema() throws Exception { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - String database = con.getCatalog(); + try (Statement statement = connection.createStatement()) { + String database = startingDatabase; // To query the schema and table, we can use a normal java escaped quote. Wildcards are also // escaped here String schemaRandomPart = SnowflakeUtil.randomAlphaNumeric(5); @@ -226,7 +245,7 @@ public void testDoubleQuotedDatabaseAndSchema() throws Exception { statement.execute( "create or replace table \"TESTTABLE_\"\"WITH_QUOTES\"\"\" (AMOUNT number," + " \"COL_\"\"QUOTED\"\"\" string)"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getTables(database, querySchema, queryTable, null)) { // Assert 1 row returned for the testtable_"with_quotes" assertEquals(1, getSizeOfResultSet(rs)); @@ -252,15 +271,14 @@ public void testDoubleQuotedDatabaseAndSchema() throws Exception { * This tests the ability to have quotes inside a database or schema within getSchemas() function. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetSchemas() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { // Create a database with double quotes inside the database name statement.execute("create or replace database \"\"\"quoteddb\"\"\""); // Create a database, lowercase, with no double quotes inside the database name statement.execute("create or replace database \"unquoteddb\""); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); // Assert 2 rows returned for the PUBLIC and INFORMATION_SCHEMA schemas inside database try (ResultSet rs = metaData.getSchemas("\"quoteddb\"", null)) { assertEquals(2, getSizeOfResultSet(rs)); @@ -281,16 +299,15 @@ public void testDoubleQuotedDatabaseInGetSchemas() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetTables() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { // Create a database with double quotes inside the database name createDoubleQuotedSchemaAndCatalog(statement); // Create a table with two columns statement.execute( "create or replace table \"dbwith\"\"quotes\".\"schemawith\"\"quotes\".\"testtable\" (col1 string, col2 string)"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getTables("dbwith\"quotes", "schemawith\"quotes", null, null)) { assertEquals(1, getSizeOfResultSet(rs)); } @@ -298,16 +315,15 @@ public void testDoubleQuotedDatabaseInGetTables() throws SQLException { } @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetColumns() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name createDoubleQuotedSchemaAndCatalog(statement); // Create a table with two columns statement.execute( "create or replace table \"dbwith\"\"quotes\".\"schemawith\"\"quotes\".\"testtable\" (col1 string, col2 string)"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getColumns("dbwith\"quotes", "schemawith\"quotes", null, null)) { assertEquals(2, getSizeOfResultSet(rs)); } @@ -315,10 +331,9 @@ public void testDoubleQuotedDatabaseInGetColumns() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeys() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name createDoubleQuotedSchemaAndCatalog(statement); // Create a table with a primary key constraint @@ -328,7 +343,7 @@ public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeys() throws SQL // key constraint statement.execute( "create or replace table \"dbwith\"\"quotes\".\"schemawith\"\"quotes\".\"test2\" (col_a integer not null, col_b integer not null, constraint fkey_1 foreign key (col_a, col_b) references \"test1\" (col1, col2) not enforced)"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = 
metaData.getPrimaryKeys("dbwith\"quotes", "schemawith\"quotes", null)) { // Assert 2 rows are returned for primary key constraint for table and schema with quotes assertEquals(2, getSizeOfResultSet(rs)); @@ -345,7 +360,7 @@ public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeys() throws SQL * getPrimaryKeys and getImportedKeys functions by setting enablePatternSearch = false. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeysWithPatternSearchDisabled() throws SQLException { Properties properties = new Properties(); @@ -374,17 +389,16 @@ public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeysWithPatternSe } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetProcedures() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name createDoubleQuotedSchemaAndCatalog(statement); // Create a procedure statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); statement.execute( "USE DATABASE \"dbwith\"\"quotes\"; USE SCHEMA \"schemawith\"\"quotes\"; " + TEST_PROC); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getProcedures("dbwith\"quotes", null, "TESTPROC")) { assertEquals(1, getSizeOfResultSet(rs)); } @@ -392,16 +406,15 @@ public void testDoubleQuotedDatabaseInGetProcedures() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetTablePrivileges() throws SQLException { - try 
(Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name createDoubleQuotedSchemaAndCatalog(statement); // Create a table under the current user and role statement.execute( "create or replace table \"dbwith\"\"quotes\".\"schemawith\"\"quotes\".\"testtable\" (col1 string, col2 string)"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getTablePrivileges("dbwith\"quotes", null, "%")) { assertEquals(1, getSizeOfResultSet(rs)); } @@ -455,9 +468,8 @@ public void testGetFunctionSqlInjectionProtection() throws Throwable { */ @Test public void testGetProcedureColumnsWildcards() throws Exception { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - String database = con.getCatalog(); + try (Statement statement = connection.createStatement()) { + String database = startingDatabase; String schemaPrefix = TestUtil.GENERATED_SCHEMA_PREFIX + SnowflakeUtil.randomAlphaNumeric(5).toUpperCase(); String schema1 = schemaPrefix + "SCH1"; @@ -473,7 +485,7 @@ public void testGetProcedureColumnsWildcards() throws Exception { () -> { statement.execute(TEST_PROC); // Create 2 schemas, each with the same stored procedure declared in them - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getProcedureColumns( database, schemaPrefix + "SCH_", "TESTPROC", "PARAM1")) { @@ -489,17 +501,15 @@ public void testGetProcedureColumnsWildcards() throws Exception { @Test public void testGetFunctions() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metadata = connection.getMetaData(); - String supportedStringFuncs = metadata.getStringFunctions(); - assertEquals(StringFunctionsSupported, 
supportedStringFuncs); + DatabaseMetaData metadata = connection.getMetaData(); + String supportedStringFuncs = metadata.getStringFunctions(); + assertEquals(StringFunctionsSupported, supportedStringFuncs); - String supportedNumberFuncs = metadata.getNumericFunctions(); - assertEquals(NumericFunctionsSupported, supportedNumberFuncs); + String supportedNumberFuncs = metadata.getNumericFunctions(); + assertEquals(NumericFunctionsSupported, supportedNumberFuncs); - String supportedSystemFuncs = metadata.getSystemFunctions(); - assertEquals(SystemFunctionsSupported, supportedSystemFuncs); - } + String supportedSystemFuncs = metadata.getSystemFunctions(); + assertEquals(SystemFunctionsSupported, supportedSystemFuncs); } @Test @@ -553,10 +563,9 @@ public void testGetStringValueFromColumnDef() throws SQLException { @Test public void testGetColumnsNullable() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { - String database = connection.getCatalog(); - String schema = connection.getSchema(); + try (Statement statement = connection.createStatement()) { + String database = startingDatabase; + String schema = startingSchema; final String targetTable = "T0"; statement.execute( @@ -579,7 +588,7 @@ public void testGetColumnsNullable() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSessionDatabaseParameter() throws Throwable { String altdb = "ALTERNATEDB"; String altschema1 = "ALTERNATESCHEMA1"; @@ -746,12 +755,11 @@ public void testSessionDatabaseParameter() throws Throwable { * returns 1 row per return value and 1 row per input parameter. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetFunctionColumns() throws Exception { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { - String database = connection.getCatalog(); - String schema = connection.getSchema(); + try (Statement statement = connection.createStatement()) { + String database = startingDatabase; + String schema = startingSchema; /* Create a table and put values into it */ statement.execute( @@ -1010,8 +1018,7 @@ public void testGetFunctionColumns() throws Exception { @Test public void testHandlingSpecialChars() throws Exception { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); DatabaseMetaData metaData = connection.getMetaData(); @@ -1120,9 +1127,8 @@ public void testHandlingSpecialChars() throws Exception { @Test public void testUnderscoreInSchemaNamePatternForPrimaryAndForeignKeys() throws Exception { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - String database = con.getCatalog(); + try (Statement statement = connection.createStatement()) { + String database = startingDatabase; TestUtil.withRandomSchema( statement, customSchema -> { @@ -1132,7 +1138,7 @@ public void testUnderscoreInSchemaNamePatternForPrimaryAndForeignKeys() throws E "create or replace table PK_TEST (c1 int PRIMARY KEY, c2 VARCHAR(10))"); statement.execute( "create or replace table FK_TEST (c1 int REFERENCES PK_TEST(c1), c2 VARCHAR(10))"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet rs = metaData.getPrimaryKeys(database, escapedSchema, null)) { assertEquals(1, getSizeOfResultSet(rs)); } @@ -1215,8 +1221,7 @@ public 
void testTimestampWithTimezoneDataType() throws Exception { @Test public void testGetColumns() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); String schema = connection.getSchema(); final String targetTable = "T0"; @@ -1610,18 +1615,17 @@ public void testGetColumns() throws Throwable { public void testGetStreams() throws SQLException { final String targetStream = "S0"; final String targetTable = "T0"; - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - String database = con.getCatalog(); - String schema = con.getSchema(); - String owner = con.unwrap(SnowflakeConnectionV1.class).getSFBaseSession().getRole(); + try (Statement statement = connection.createStatement()) { + String database = connection.getCatalog(); + String schema = connection.getSchema(); + String owner = connection.unwrap(SnowflakeConnectionV1.class).getSFBaseSession().getRole(); String tableName = database + "." + schema + "." + targetTable; try { statement.execute("create or replace table " + targetTable + "(C1 int)"); statement.execute("create or replace stream " + targetStream + " on table " + targetTable); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); // match stream try (ResultSet resultSet = @@ -1662,23 +1666,20 @@ public void testGetStreams() throws SQLException { * This tests that an empty resultset will be returned for getProcedures when using a reader account. 
*/ @Test - @Ignore + @Disabled public void testGetProceduresWithReaderAccount() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metadata = connection.getMetaData(); - try (ResultSet rs = metadata.getProcedures(null, null, null)) { - assertEquals(0, getSizeOfResultSet(rs)); - } + DatabaseMetaData metadata = connection.getMetaData(); + try (ResultSet rs = metadata.getProcedures(null, null, null)) { + assertEquals(0, getSizeOfResultSet(rs)); } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetProcedureColumns() throws Exception { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { - String database = connection.getCatalog(); - String schema = connection.getSchema(); + try (Statement statement = connection.createStatement()) { + String database = startingDatabase; + String schema = startingSchema; try { statement.execute(PI_PROCEDURE); DatabaseMetaData metaData = connection.getMetaData(); @@ -1724,8 +1725,7 @@ in the current database and schema. 
It will return all rows as well (1 row per r @Test public void testGetProcedureColumnsReturnsResultSet() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { + try (Statement statement = connection.createStatement()) { try { statement.execute( "create or replace table testtable (id int, name varchar(20), address varchar(20));"); @@ -1739,9 +1739,9 @@ public void testGetProcedureColumnsReturnsResultSet() throws SQLException { + " begin\n" + " return table(res);\n" + " end';"); - DatabaseMetaData metaData = con.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); try (ResultSet res = - metaData.getProcedureColumns(con.getCatalog(), null, "PROCTEST", "%")) { + metaData.getProcedureColumns(connection.getCatalog(), null, "PROCTEST", "%")) { res.next(); assertEquals("PROCTEST", res.getString("PROCEDURE_NAME")); assertEquals("id", res.getString("COLUMN_NAME")); @@ -1772,12 +1772,12 @@ public void testGetProcedureColumnsReturnsResultSet() throws SQLException { @Test public void testGetProcedureColumnsReturnsValue() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement(); ) { - DatabaseMetaData metaData = con.getMetaData(); + try (Statement statement = connection.createStatement(); ) { + DatabaseMetaData metaData = connection.getMetaData(); // create a procedure with no parameters that has a return value statement.execute(PI_PROCEDURE); - try (ResultSet res = metaData.getProcedureColumns(con.getCatalog(), null, "GETPI", "%")) { + try (ResultSet res = + metaData.getProcedureColumns(connection.getCatalog(), null, "GETPI", "%")) { res.next(); assertEquals("GETPI", res.getString("PROCEDURE_NAME")); assertEquals("", res.getString("COLUMN_NAME")); @@ -1790,7 +1790,7 @@ public void testGetProcedureColumnsReturnsValue() throws SQLException { // create a procedure that returns the value of the argument that is passed in 
statement.execute(MESSAGE_PROCEDURE); try (ResultSet res = - metaData.getProcedureColumns(con.getCatalog(), null, "MESSAGE_PROC", "%")) { + metaData.getProcedureColumns(connection.getCatalog(), null, "MESSAGE_PROC", "%")) { res.next(); assertEquals("MESSAGE_PROC", res.getString("PROCEDURE_NAME")); assertEquals("", res.getString("COLUMN_NAME")); @@ -1813,9 +1813,8 @@ public void testGetProcedureColumnsReturnsValue() throws SQLException { @Test public void testGetProcedureColumnsReturnsNull() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement(); ) { - DatabaseMetaData metaData = con.getMetaData(); + try (Statement statement = connection.createStatement(); ) { + DatabaseMetaData metaData = connection.getMetaData(); // The CREATE PROCEDURE statement must include a RETURNS clause that defines a return type, // even // if the procedure does not explicitly return anything. @@ -1830,7 +1829,7 @@ public void testGetProcedureColumnsReturnsNull() throws SQLException { + "snowflake.execute({sqlText: sqlcommand}); \n" + "';"); try (ResultSet res = - metaData.getProcedureColumns(con.getCatalog(), null, "INSERTPROC", "%")) { + metaData.getProcedureColumns(connection.getCatalog(), null, "INSERTPROC", "%")) { res.next(); // the procedure will return null as the value but column type will be varchar. 
assertEquals("INSERTPROC", res.getString("PROCEDURE_NAME")); @@ -1847,10 +1846,8 @@ public void testGetProcedureColumnsReturnsNull() throws SQLException { @Test public void testUpdateLocatorsCopyUnsupported() throws SQLException { - try (Connection con = getConnection()) { - DatabaseMetaData metaData = con.getMetaData(); - assertFalse(metaData.locatorsUpdateCopy()); - } + DatabaseMetaData metaData = connection.getMetaData(); + assertFalse(metaData.locatorsUpdateCopy()); } /** @@ -2106,8 +2103,7 @@ public void testPatternSearchAllowedForPrimaryAndForeignKeys() throws Exception final String table1 = "PATTERN_SEARCH_TABLE1"; final String table2 = "PATTERN_SEARCH_TABLE2"; - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String schemaRandomPart = SnowflakeUtil.randomAlphaNumeric(5); String schemaName = "\"" @@ -2327,28 +2323,23 @@ public void testPatternSearchAllowedForPrimaryAndForeignKeys() throws Exception */ @Test public void testGetJDBCVersion() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); + DatabaseMetaData metaData = connection.getMetaData(); - // JDBC x.x compatible - assertEquals(4, metaData.getJDBCMajorVersion()); - assertEquals(2, metaData.getJDBCMinorVersion()); - } + // JDBC x.x compatible + assertEquals(4, metaData.getJDBCMajorVersion()); + assertEquals(2, metaData.getJDBCMinorVersion()); } /** Added in > 3.15.1 */ @Test public void testKeywordsCount() throws SQLException { - try (Connection connection = getConnection()) { - DatabaseMetaData metaData = connection.getMetaData(); - assertEquals(43, metaData.getSQLKeywords().split(",").length); - } + DatabaseMetaData metaData = connection.getMetaData(); + assertEquals(43, metaData.getSQLKeywords().split(",").length); } /** Added in > 3.16.1 */ @Test public void testVectorDimension() throws SQLException { - try 
(Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { statement.execute( "create or replace table JDBC_VECTOR(text_col varchar(32), float_vec VECTOR(FLOAT, 256), int_vec VECTOR(INT, 16))"); DatabaseMetaData metaData = connection.getMetaData(); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java index 0549a087d..2a62be5a2 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java @@ -3,9 +3,10 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -15,11 +16,14 @@ import java.sql.Types; import java.util.Arrays; import java.util.List; -import org.junit.Test; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +@Tag(TestTags.RESULT_SET) public class DatabaseMetaDataResultSetLatestIT extends BaseJDBCTest { - @Test(expected = SnowflakeLoggedFeatureNotSupportedException.class) + @Test public void testGetObjectNotSupported() throws SQLException { try (Connection con = getConnection(); Statement st = con.createStatement()) { @@ -31,7 +35,9 @@ public void testGetObjectNotSupported() throws SQLException { new SnowflakeDatabaseMetaDataResultSet( columnNames, columnTypeNames, columnTypes, rows, st)) { resultSet.next(); - assertEquals(1.2F, 
resultSet.getObject(1)); + assertThrows( + SnowflakeLoggedFeatureNotSupportedException.class, + () -> assertEquals(1.2F, resultSet.getObject(1))); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java index febcd4501..605ca3698 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java @@ -3,12 +3,11 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; -import java.sql.Connection; import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; @@ -18,12 +17,12 @@ import java.sql.Types; import java.util.Arrays; import java.util.List; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) -public class DatabaseMetaDataResultsetIT extends BaseJDBCTest { +@Tag(TestTags.OTHERS) +public class DatabaseMetaDataResultsetIT extends BaseJDBCWithSharedConnectionIT { private static final int columnCount = 9; private static final int INT_DATA = 1; private static final String TEXT_DATA = "TEST"; @@ -98,8 +97,7 @@ public void testRowIndex() throws SQLException { } private ResultSet getResultSet(boolean doNext) throws SQLException { - Connection con = getConnection(); - Statement st = con.createStatement(); + Statement st = connection.createStatement(); ResultSet resultSet = new 
SnowflakeDatabaseMetaDataResultSet(columnNames, columnTypeNames, columnTypes, rows, st); if (doNext) { diff --git a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java index 794af78df..bd2680be1 100644 --- a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java @@ -7,15 +7,16 @@ import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** A simple run on fetch result under boomi cloud environment's policy file */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DellBoomiCloudIT extends AbstractDriverIT { - @Before + @BeforeEach public void setup() { File file = new File(DellBoomiCloudIT.class.getResource("boomi.policy").getFile()); @@ -25,6 +26,7 @@ public void setup() { } @Test + @Disabled // TODO: SNOW-1805239 public void testSelectLargeResultSet() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement(); diff --git a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java index 734446c92..5474488b3 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java @@ -4,22 +4,23 @@ package net.snowflake.client.jdbc; import static 
net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import org.junit.After; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; /** This test could be run only on environment where file connection.toml is configured */ -@Ignore +@Disabled public class FileConnectionConfigurationLatestIT { - @After + @AfterEach public void cleanUp() { SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); } @@ -27,7 +28,7 @@ public void cleanUp() { @Test public void testThrowExceptionIfConfigurationDoesNotExist() { SnowflakeUtil.systemSetEnv("SNOWFLAKE_DEFAULT_CONNECTION_NAME", "non-existent"); - Assert.assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); + assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); } @Test @@ -46,7 +47,7 @@ private static void verifyConnetionToSnowflake(String connectionName) throws SQL DriverManager.getConnection(SnowflakeDriver.AUTO_CONNECTION_STRING_PREFIX, null); Statement statement = con.createStatement(); ResultSet resultSet = statement.executeQuery("show parameters")) { - Assert.assertTrue(resultSet.next()); + assertTrue(resultSet.next()); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java index a4426d449..8545ca998 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java @@ -4,9 +4,9 @@ package 
net.snowflake.client.jdbc; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -23,23 +23,22 @@ import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; import net.snowflake.client.core.OCSPMode; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** Tests for SnowflakeFileTransferAgent.expandFileNames */ public class FileUploaderExpandFileNamesTest { - @Rule public TemporaryFolder folder = new TemporaryFolder(); - @Rule public TemporaryFolder secondFolder = new TemporaryFolder(); + @TempDir File folder; private String localFSFileSep = systemGetProperty("file.separator"); @Test public void testProcessFileNames() throws Exception { - folder.newFile("TestFileA"); - folder.newFile("TestFileB"); + new File(folder, "TestFileA").createNewFile(); + new File(folder, "TestFileB").createNewFile(); - String folderName = folder.getRoot().getCanonicalPath(); + String folderName = folder.getCanonicalPath(); + String originalUserDir = System.getProperty("user.dir"); + String originalUserHome = System.getProperty("user.home"); System.setProperty("user.dir", folderName); System.setProperty("user.home", folderName); @@ -58,6 +57,17 @@ public void testProcessFileNames() throws Exception { assertTrue(files.contains(folderName + File.separator + "TestFileC")); assertTrue(files.contains(folderName + File.separator + "TestFileD")); assertTrue(files.contains(folderName + File.separator + "TestFileE~")); + + if (originalUserHome != null) { + 
System.setProperty("user.home", originalUserHome); + } else { + System.clearProperty("user.home"); + } + if (originalUserDir != null) { + System.setProperty("user.dir", originalUserDir); + } else { + System.clearProperty("user.dir"); + } } @Test @@ -69,8 +79,8 @@ public void testProcessFileNamesException() { try { SnowflakeFileTransferAgent.expandFileNames(locations, null); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200007, err.getErrorCode()); - Assert.assertEquals("22000", err.getSQLState()); + assertEquals(200007, err.getErrorCode()); + assertEquals("22000", err.getSQLState()); } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); } @@ -150,8 +160,8 @@ public int read() throws IOException { */ @Test public void testFileListingDoesNotFailOnMissingFilesOfAnotherPattern() throws Exception { - folder.newFolder("TestFiles"); - String folderName = folder.getRoot().getCanonicalPath(); + new File(folder, "TestFiles").mkdirs(); + String folderName = folder.getCanonicalPath(); int filePatterns = 10; int filesPerPattern = 100; @@ -211,8 +221,8 @@ public void testFileListingDoesNotFailOnMissingFilesOfAnotherPattern() throws Ex @Test public void testFileListingDoesNotFailOnNotExistingDirectory() throws Exception { - folder.newFolder("TestFiles"); - String folderName = folder.getRoot().getCanonicalPath(); + new File(folder, "TestFiles").mkdirs(); + String folderName = folder.getCanonicalPath(); String[] locations = { folderName + localFSFileSep + "foo*", }; diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java index 378234715..a116a794b 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java @@ -5,10 +5,10 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.hamcrest.CoreMatchers.instanceOf; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.amazonaws.services.s3.model.ObjectMetadata; import com.fasterxml.jackson.databind.ObjectMapper; @@ -32,9 +32,8 @@ import java.util.List; import java.util.Map; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; @@ -47,13 +46,12 @@ import net.snowflake.client.jdbc.cloud.storage.StorageProviderException; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.apache.commons.io.FileUtils; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Tests for SnowflakeFileTransferAgent that require an active connection */ -@Category(TestCategoryOthers.class) -public class FileUploaderLatestIT extends FileUploaderPrepIT { +@Tag(TestTags.OTHERS) +public class FileUploaderLatestIT extends FileUploaderPrep { private static final String OBJ_META_STAGE = "testObjMeta"; private ObjectMapper mapper = new ObjectMapper(); private static final String PUT_COMMAND = "put file:///dummy/path/file2.gz @testStage"; @@ -65,7 +63,7 @@ public class FileUploaderLatestIT extends FileUploaderPrepIT { * @throws SQLException */ @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetS3StageDataWithS3Session() throws SQLException { try (Connection con = getConnection("s3testaccount")) { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -74,16 +72,16 @@ public void testGetS3StageDataWithS3Session() throws SQLException { // Get sample stage info with session StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); // Assert that true value from session is reflected in StageInfo - Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); + assertEquals(true, stageInfo.getUseS3RegionalUrl()); // Set UseRegionalS3EndpointsForPresignedURL to false in session sfSession.setUseRegionalS3EndpointsForPresignedURL(false); stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); // Assert that false value from session is reflected in StageInfo - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + assertEquals(false, stageInfo.getUseS3RegionalUrl()); } } @@ -94,7 +92,7 @@ public void testGetS3StageDataWithS3Session() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetS3StageDataWithAzureSession() throws SQLException { try (Connection con = getConnection("azureaccount")) { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -106,18 +104,18 @@ public void testGetS3StageDataWithAzureSession() throws SQLException { // Get sample stage info with session StageInfo stageInfo = 
SnowflakeFileTransferAgent.getStageInfo(exampleAzureJsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); // Assert that UseRegionalS3EndpointsForPresignedURL is false in StageInfo even if it was set // to // true. // The value should always be false for non-S3 accounts - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + assertEquals(false, stageInfo.getUseS3RegionalUrl()); } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetObjectMetadataWithGCS() throws Exception { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -143,7 +141,7 @@ public void testGetObjectMetadataWithGCS() throws Exception { String remoteStageLocation = location.substring(0, idx); String path = location.substring(idx + 1) + TEST_DATA_FILE + ".gz"; StorageObjectMetadata metadata = client.getObjectMetadata(remoteStageLocation, path); - Assert.assertEquals("gzip", metadata.getContentEncoding()); + assertEquals("gzip", metadata.getContentEncoding()); } finally { statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } @@ -151,7 +149,7 @@ public void testGetObjectMetadataWithGCS() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -180,8 +178,8 @@ public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { fail("should raise exception"); } catch (Exception ex) { assertTrue( - "Wrong type of exception. 
Message: " + ex.getMessage(), - ex instanceof StorageProviderException); + ex instanceof StorageProviderException, + "Wrong type of exception. Message: " + ex.getMessage()); assertTrue(ex.getMessage().matches(".*Blob.*not found in bucket.*")); } finally { statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); @@ -190,7 +188,7 @@ public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetObjectMetadataStorageExceptionWithGCS() throws Exception { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -218,8 +216,8 @@ public void testGetObjectMetadataStorageExceptionWithGCS() throws Exception { fail("should raise exception"); } catch (Exception ex) { assertTrue( - "Wrong type of exception. Message: " + ex.getMessage(), - ex instanceof StorageProviderException); + ex instanceof StorageProviderException, + "Wrong type of exception. 
Message: " + ex.getMessage()); assertTrue(ex.getMessage().matches(".*Permission.*denied.*")); } finally { statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); @@ -253,8 +251,8 @@ public void testNullCommand() throws SQLException { SnowflakeFileTransferAgent sfAgent = new SnowflakeFileTransferAgent(null, sfSession, new SFStatement(sfSession)); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage() .contains("JDBC driver internal error: Missing sql for statement execution")); } finally { @@ -294,8 +292,8 @@ public void testCompressStreamWithGzipException() throws Exception { .setCommand(PUT_COMMAND) .build()); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage() .contains("JDBC driver internal error: error encountered for compression")); } finally { @@ -338,8 +336,8 @@ public void testCompressStreamWithGzipNoDigestException() throws Exception { .setCommand(PUT_COMMAND) .build()); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage() .contains("JDBC driver internal error: error encountered for compression")); } finally { @@ -382,7 +380,7 @@ public void testUploadWithoutConnectionException() throws Exception { .setCommand(PUT_COMMAND) .build()); } catch (Exception err) { - Assert.assertTrue( + assertTrue( err.getMessage() .contains( "Exception encountered during file upload: failed to push to remote store")); @@ -405,7 +403,7 @@ 
public void testInitFileMetadataFileNotFound() throws Exception { sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200008, err.getErrorCode()); + assertEquals(200008, err.getErrorCode()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -426,7 +424,7 @@ public void testInitFileMetadataFileIsDirectory() throws Exception { new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200009, err.getErrorCode()); + assertEquals(200009, err.getErrorCode()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -449,8 +447,8 @@ public void testCompareAndSkipFilesException() throws Exception { sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Error reading:")); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue(err.getMessage().contains("Error reading:")); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -472,8 +470,8 @@ public void testParseCommandException() throws SQLException { new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Failed to parse the locations")); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue(err.getMessage().contains("Failed to parse the locations")); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -534,8 +532,8 @@ public void testListObjectsStorageException() throws Exception { sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200016, err.getErrorCode()); - 
Assert.assertTrue(err.getMessage().contains("Encountered exception during listObjects")); + assertEquals(200016, err.getErrorCode()); + assertTrue(err.getMessage().contains("Encountered exception during listObjects")); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -563,7 +561,7 @@ public void testUploadStreamInterruptedException() throws IOException, SQLExcept "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); } catch (SnowflakeSQLLoggedException err) { - Assert.assertEquals(200003, err.getErrorCode()); + assertEquals(200003, err.getErrorCode()); } finally { statement.execute("rm @~/" + DEST_PREFIX); } @@ -666,7 +664,7 @@ public void testUploadFileStreamWithNoOverwrite() throws Exception { assertEquals(expectedValue, actualValue); } } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -696,7 +694,7 @@ public void testUploadFileStreamWithOverwrite() throws Exception { assertFalse(expectedValue.equals(actualValue)); } } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -704,7 +702,7 @@ public void testUploadFileStreamWithOverwrite() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetS3StorageObjectMetadata() throws Throwable { try (Connection connection = getConnection("s3testaccount"); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java index 
d9ad6d2c1..418c70a05 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java @@ -3,55 +3,49 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import java.util.Arrays; -import java.util.Collection; import java.util.Optional; +import java.util.stream.Stream; import net.snowflake.common.core.FileCompressionType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * Tests for SnowflakeFileTransferAgent.mimeTypeToCompressionType See * https://github.com/apache/tika/blob/master/tika-core/src/main/resources/org/apache/tika/mime/tika-mimetypes.xml * for test cases */ -@RunWith(Parameterized.class) public class FileUploaderMimeTypeToCompressionTypeTest { - private final String mimeType; - private final FileCompressionType mimeSubType; - public FileUploaderMimeTypeToCompressionTypeTest( - String mimeType, FileCompressionType mimeSubType) { - this.mimeType = mimeType; - this.mimeSubType = mimeSubType; - } - - @Parameterized.Parameters(name = "mimeType={0}, mimeSubType={1}") - public static Collection primeNumbers() { - return Arrays.asList( - new Object[][] { - {"text/", null}, - {"text/csv", null}, - {"snowflake/orc", FileCompressionType.ORC}, - {"snowflake/orc;p=1", FileCompressionType.ORC}, - {"snowflake/parquet", FileCompressionType.PARQUET}, - {"application/zlib", FileCompressionType.DEFLATE}, - {"application/x-bzip2", FileCompressionType.BZIP2}, - {"application/zstd", FileCompressionType.ZSTD}, - 
{"application/x-brotli", FileCompressionType.BROTLI}, - {"application/x-lzip", FileCompressionType.LZIP}, - {"application/x-lzma", FileCompressionType.LZMA}, - {"application/x-xz", FileCompressionType.XZ}, - {"application/x-compress", FileCompressionType.COMPRESS}, - {"application/x-gzip", FileCompressionType.GZIP} - }); + static class MimeTypesProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of("text/", null), + Arguments.of("text/csv", null), + Arguments.of("snowflake/orc", FileCompressionType.ORC), + Arguments.of("snowflake/orc;p=1", FileCompressionType.ORC), + Arguments.of("snowflake/parquet", FileCompressionType.PARQUET), + Arguments.of("application/zlib", FileCompressionType.DEFLATE), + Arguments.of("application/x-bzip2", FileCompressionType.BZIP2), + Arguments.of("application/zstd", FileCompressionType.ZSTD), + Arguments.of("application/x-brotli", FileCompressionType.BROTLI), + Arguments.of("application/x-lzip", FileCompressionType.LZIP), + Arguments.of("application/x-lzma", FileCompressionType.LZMA), + Arguments.of("application/x-xz", FileCompressionType.XZ), + Arguments.of("application/x-compress", FileCompressionType.COMPRESS), + Arguments.of("application/x-gzip", FileCompressionType.GZIP)); + } } - @Test - public void testMimeTypeToCompressionType() throws Throwable { + @ParameterizedTest + @ArgumentsSource(MimeTypesProvider.class) + public void testMimeTypeToCompressionType(String mimeType, FileCompressionType mimeSubType) + throws Throwable { Optional foundCompType = SnowflakeFileTransferAgent.mimeTypeToCompressionType(mimeType); if (foundCompType.isPresent()) { diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java new file mode 100644 index 000000000..276eb0234 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java @@ -0,0 +1,51 
@@ +/* + * Copyright (c) 2012-2023 Snowflake Computing Inc. All right reserved. + */ + +package net.snowflake.client.jdbc; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.BeforeAll; + +/** File uploader test prep reused by IT/connection tests and sessionless tests */ +abstract class FileUploaderPrep extends BaseJDBCTest { + + private static final ObjectMapper mapper = new ObjectMapper(); + + static JsonNode exampleS3JsonNode; + static JsonNode exampleS3StageEndpointJsonNode; + static JsonNode exampleAzureJsonNode; + static JsonNode exampleGCSJsonNode; + static JsonNode exampleGCSJsonNodeWithUseRegionalUrl; + static JsonNode exampleGCSJsonNodeWithEndPoint; + static List exampleNodes; + + private static JsonNode readJsonFromFile(String name) throws IOException { + try (InputStream is = + FileUploaderPrep.class.getResourceAsStream("/FileUploaderPrep/" + name + ".json")) { + return mapper.readTree(is); + } + } + + @BeforeAll + public static void setup() throws Exception { + exampleS3JsonNode = readJsonFromFile("exampleS3"); + exampleS3StageEndpointJsonNode = readJsonFromFile("exampleS3WithStageEndpoint"); + exampleAzureJsonNode = readJsonFromFile("exampleAzure"); + exampleGCSJsonNode = readJsonFromFile("exampleGCS"); + exampleGCSJsonNodeWithUseRegionalUrl = readJsonFromFile("exampleGCSWithUseRegionalUrl"); + exampleGCSJsonNodeWithEndPoint = readJsonFromFile("exampleGCSWithEndpoint"); + exampleNodes = + Arrays.asList( + exampleS3JsonNode, + exampleAzureJsonNode, + exampleGCSJsonNode, + exampleGCSJsonNodeWithUseRegionalUrl, + exampleGCSJsonNodeWithEndPoint); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrepIT.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrepIT.java deleted file mode 100644 index 73ebec2bf..000000000 --- 
a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrepIT.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright (c) 2012-2023 Snowflake Computing Inc. All right reserved. - */ - -package net.snowflake.client.jdbc; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.util.Arrays; -import java.util.List; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TemporaryFolder; - -/** File uploader test prep reused by IT/connection tests and sessionless tests */ -abstract class FileUploaderPrepIT extends BaseJDBCTest { - @Rule public TemporaryFolder folder = new TemporaryFolder(); - private ObjectMapper mapper = new ObjectMapper(); - - private final String exampleS3JsonStringWithStageEndpoint = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"example/location\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": null\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/tmp/files/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": true,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " 
\"locationType\": \"S3\",\n" - + " \"location\": \"stage/location/foo/\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": \"s3-fips.us-east-1.amazonaws.com\"\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - private final String exampleS3JsonString = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"example/location\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": null\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/tmp/files/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": true,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 
123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"stage/location/foo/\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"useS3RegionalUrl\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": null\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - private final String exampleAzureJsonString = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"AZURE\",\n" - + " \"location\": \"EXAMPLE_LOCATION/\",\n" - + " \"path\": \"EXAMPLE_PATH/\",\n" - + " \"region\": \"westus\",\n" - + " \"storageAccount\": \"sfcdev2stage\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AZURE_SAS_TOKEN\": \"EXAMPLE_AZURE_SAS_TOKEN\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": \"blob.core.windows.net\"\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/foo/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": false,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"AZURE\",\n" - + " 
\"location\": \"EXAMPLE_LOCATION/\",\n" - + " \"path\": \"EXAMPLE_PATH/\",\n" - + " \"region\": \"westus\",\n" - + " \"storageAccount\": \"EXAMPLE_STORAGE_ACCOUNT\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AZURE_SAS_TOKEN\": \"EXAMPLE_AZURE_SAS_TOKEN\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": \"blob.core.windows.net\"\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - private final String exampleGCSJsonString = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"GCS\",\n" - + " \"location\": \"foo/tables/9224/\",\n" - + " \"path\": \"tables/9224/\",\n" - + " \"region\": \"US-WEST1\",\n" - + " \"storageAccount\": \"\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {},\n" - + " \"presignedUrl\": \"EXAMPLE_PRESIGNED_URL\",\n" - + " \"endPoint\": \"\"\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/foo/bart/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": false,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"GCS\",\n" - + " \"location\": \"foo/tables/9224/\",\n" - + " \"path\": \"tables/9224/\",\n" - + " \"region\": \"US-WEST1\",\n" - + " \"storageAccount\": \"\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {},\n" - + " \"presignedUrl\": \"EXAMPLE_PRESIGNED_URL\",\n" - + " \"endPoint\": \"\"\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": 
null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - protected JsonNode exampleS3JsonNode; - protected JsonNode exampleS3StageEndpointJsonNode; - protected JsonNode exampleAzureJsonNode; - protected JsonNode exampleGCSJsonNode; - protected List exampleNodes; - - @Before - public void setup() throws Exception { - exampleS3JsonNode = mapper.readTree(exampleS3JsonString); - exampleS3StageEndpointJsonNode = mapper.readTree(exampleS3JsonStringWithStageEndpoint); - exampleAzureJsonNode = mapper.readTree(exampleAzureJsonString); - exampleGCSJsonNode = mapper.readTree(exampleGCSJsonString); - exampleNodes = Arrays.asList(exampleS3JsonNode, exampleAzureJsonNode, exampleGCSJsonNode); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java index 822ce8e92..1c74869f0 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java @@ -3,6 +3,12 @@ */ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -11,13 +17,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import net.snowflake.client.jdbc.cloud.storage.StageInfo; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Tests for 
SnowflakeFileTransferAgent.expandFileNames. */ -public class FileUploaderSessionlessTest extends FileUploaderPrepIT { +public class FileUploaderSessionlessTest extends FileUploaderPrep { private ObjectMapper mapper = new ObjectMapper(); @@ -31,8 +37,8 @@ public void testGetEncryptionMaterialMissing() throws Exception { SnowflakeFileTransferAgent.getEncryptionMaterial( SFBaseFileTransferAgent.CommandType.UPLOAD, modifiedNode); - Assert.assertEquals(1, encryptionMaterials.size()); - Assert.assertNull(encryptionMaterials.get(0)); + assertEquals(1, encryptionMaterials.size()); + assertNull(encryptionMaterials.get(0)); } @Test @@ -48,12 +54,12 @@ public void testGetEncryptionMaterial() throws Exception { SnowflakeFileTransferAgent.getEncryptionMaterial( SFBaseFileTransferAgent.CommandType.UPLOAD, exampleNode); - Assert.assertEquals(1, encryptionMaterials.size()); - Assert.assertEquals( + assertEquals(1, encryptionMaterials.size()); + assertEquals( expected.get(0).getQueryStageMasterKey(), encryptionMaterials.get(0).getQueryStageMasterKey()); - Assert.assertEquals(expected.get(0).getQueryId(), encryptionMaterials.get(0).getQueryId()); - Assert.assertEquals(expected.get(0).getSmkId(), encryptionMaterials.get(0).getSmkId()); + assertEquals(expected.get(0).getQueryId(), encryptionMaterials.get(0).getQueryId()); + assertEquals(expected.get(0).getSmkId(), encryptionMaterials.get(0).getSmkId()); } } @@ -67,14 +73,14 @@ public void testGetS3StageData() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("null", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - 
Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); - Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("null", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(true, stageInfo.getUseS3RegionalUrl()); } @Test @@ -88,13 +94,13 @@ public void testGetS3StageDataWithStageEndpoint() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("s3-fips.us-east-1.amazonaws.com", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("s3-fips.us-east-1.amazonaws.com", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); } @Test @@ -103,13 +109,13 @@ public void testGetAzureStageData() throws Exception { Map expectedCreds = new HashMap<>(); expectedCreds.put("AZURE_SAS_TOKEN", "EXAMPLE_AZURE_SAS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - 
Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("westus", stageInfo.getRegion()); - Assert.assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); - Assert.assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("westus", stageInfo.getRegion()); + assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); + assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); } @Test @@ -117,20 +123,20 @@ public void testGetGCSStageData() throws Exception { StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleGCSJsonNode, null); Map expectedCreds = new HashMap<>(); - Assert.assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); - Assert.assertEquals("foo/tables/9224/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("US-WEST1", stageInfo.getRegion()); - Assert.assertEquals(null, stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); + assertEquals("foo/tables/9224/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("US-WEST1", stageInfo.getRegion()); + assertEquals(null, stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); } @Test public void testGetFileTransferMetadatasS3() throws Exception { List metadataList = 
SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleS3JsonNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -145,25 +151,25 @@ public void testGetFileTransferMetadatasS3() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("null", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("null", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); // EncryptionMaterial check - Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); - Assert.assertEquals( + assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); + assertEquals( "EXAMPLE_QUERY_STAGE_MASTER_KEY", metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); + assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertNull(metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", 
metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertNull(metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test @@ -174,7 +180,7 @@ public void testGetFileTransferMetadatasS3MissingEncryption() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -189,30 +195,30 @@ public void testGetFileTransferMetadatasS3MissingEncryption() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("null", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("null", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); // EncryptionMaterial check - Assert.assertNull(metadata.getEncryptionMaterial().getQueryId()); - Assert.assertNull(metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertNull(metadata.getEncryptionMaterial().getSmkId()); + assertNull(metadata.getEncryptionMaterial().getQueryId()); + 
assertNull(metadata.getEncryptionMaterial().getQueryStageMasterKey()); + assertNull(metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertNull(metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertNull(metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test public void testGetFileTransferMetadatasAzure() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleAzureJsonNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -223,32 +229,32 @@ public void testGetFileTransferMetadatasAzure() throws Exception { Map expectedCreds = new HashMap<>(); expectedCreds.put("AZURE_SAS_TOKEN", "EXAMPLE_AZURE_SAS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("westus", stageInfo.getRegion()); - Assert.assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); - Assert.assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("westus", stageInfo.getRegion()); + assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); + assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); + assertEquals(true, 
stageInfo.getIsClientSideEncrypted()); // EncryptionMaterial check - Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); - Assert.assertEquals( + assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); + assertEquals( "EXAMPLE_QUERY_STAGE_MASTER_KEY", metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); + assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertNull(metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertNull(metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test public void testGetFileTransferMetadatasGCS() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -258,25 +264,56 @@ public void testGetFileTransferMetadatasGCS() throws Exception { Map expectedCreds = new HashMap<>(); - Assert.assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); - Assert.assertEquals("foo/tables/9224/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("US-WEST1", stageInfo.getRegion()); - Assert.assertEquals(null, stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); + assertEquals("foo/tables/9224/", 
stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("US-WEST1", stageInfo.getRegion()); + assertEquals(null, stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(Optional.empty(), stageInfo.gcsCustomEndpoint()); // EncryptionMaterial check - Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); - Assert.assertEquals( + assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); + assertEquals( "EXAMPLE_QUERY_STAGE_MASTER_KEY", metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); + assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertEquals("EXAMPLE_PRESIGNED_URL", metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertEquals("EXAMPLE_PRESIGNED_URL", metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + } + + @Test + public void testGetFileTransferMetadataGCSWithUseRegionalUrl() throws Exception { + List metadataList = + SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNodeWithUseRegionalUrl); + assertEquals(1, metadataList.size()); + + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + StageInfo stageInfo = metadata.getStageInfo(); + + assertTrue(stageInfo.getUseRegionalUrl()); + assertEquals(Optional.of("storage.us-west1.rep.googleapis.com"), stageInfo.gcsCustomEndpoint()); + } + + @Test + public void testGetFileTransferMetadataGCSWithEndPoint() throws Exception { + List 
metadataList = + SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNodeWithEndPoint); + assertEquals(1, metadataList.size()); + + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + StageInfo stageInfo = metadata.getStageInfo(); + + assertFalse(stageInfo.getUseRegionalUrl()); + assertEquals(Optional.of("example.com"), stageInfo.gcsCustomEndpoint()); } @Test @@ -284,10 +321,10 @@ public void testGetFileTransferMetadatasUploadError() throws Exception { JsonNode downloadNode = mapper.readTree("{\"data\": {\"command\": \"DOWNLOAD\"}}"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(downloadNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertEquals( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertEquals( "JDBC driver internal error: This API only supports PUT commands.", err.getMessage()); } } @@ -297,10 +334,10 @@ public void testGetFileTransferMetadatasEncryptionMaterialError() throws Excepti JsonNode garbageNode = mapper.readTree("{\"data\": {\"src_locations\": [1, 2]}}"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(garbageNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage().contains("JDBC driver internal error: Failed to parse the credentials")); } } @@ -312,11 +349,10 @@ public void testGetFileTransferMetadatasUnsupportedLocationError() throws Except foo.put("locationType", "LOCAL_FS"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { - 
Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage().contains("JDBC driver internal error: This API only supports")); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue(err.getMessage().contains("JDBC driver internal error: This API only supports")); } } @@ -325,10 +361,10 @@ public void testGetFileTransferMetadatasSrcLocationsArrayError() throws JsonProc JsonNode garbageNode = mapper.readTree("{\"data\": {\"src_locations\": \"abc\"}}"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(garbageNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage().contains("JDBC driver internal error: src_locations must be an array")); } } @@ -340,10 +376,10 @@ public void testGetFileMetadatasEncryptionMaterialsException() { foo.put("encryptionMaterial", "[1, 2, 3]]"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Failed to parse encryptionMaterial")); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue(err.getMessage().contains("Failed to parse encryptionMaterial")); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java index b2c316d50..44f9b7a48 100644 --- a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java +++ b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java @@ -3,41 +3,37 @@ */ package 
net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) +@Tag(TestTags.RESULT_SET) public class GCPLargeResult extends BaseJDBCTest { - private final String queryResultFormat; - @Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - return new Object[][] {{"JSON"}, {"ARROW"}}; - } - - public GCPLargeResult(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - Connection init() throws SQLException { + Connection init(String queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection("gcpaccount"); + System.out.println("Connected"); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } return conn; } - @Test - public void testLargeResultSetGCP() throws Throwable { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testLargeResultSetGCP(String queryResultFormat) throws Throwable { + try (Connection con = init(queryResultFormat); PreparedStatement stmt = con.prepareStatement( "select seq8(), randstr(1000, random()) from table(generator(rowcount=>1000))")) { diff --git 
a/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java b/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java new file mode 100644 index 000000000..975f8ebb7 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.jdbc; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.List; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(TestTags.OTHERS) +public class GitRepositoryDownloadLatestIT extends BaseJDBCTest { + + /** + * Test needs to set up git integration which is not available in GH Action tests and needs + * accountadmin role. 
Added in > 3.19.0 + */ + @Test + @DontRunOnGithubActions + public void shouldDownloadFileAndStreamFromGitRepository() throws Exception { + try (Connection connection = getConnection()) { + prepareJdbcRepoInSnowflake(connection); + + String stageName = + String.format("@%s.%s.JDBC", connection.getCatalog(), connection.getSchema()); + String fileName = ".pre-commit-config.yaml"; + String filePathInGitRepo = "branches/master/" + fileName; + + List fetchedFileContent = + getContentFromFile(connection, stageName, filePathInGitRepo, fileName); + + List fetchedStreamContent = + getContentFromStream(connection, stageName, filePathInGitRepo); + + assertFalse(fetchedFileContent.isEmpty(), "File content cannot be empty"); + assertFalse(fetchedStreamContent.isEmpty(), "Stream content cannot be empty"); + assertEquals(fetchedFileContent, fetchedStreamContent); + } + } + + private static void prepareJdbcRepoInSnowflake(Connection connection) throws SQLException { + try (Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute( + "CREATE OR REPLACE API INTEGRATION gh_integration\n" + + " API_PROVIDER = git_https_api\n" + + " API_ALLOWED_PREFIXES = ('https://github.com/snowflakedb/snowflake-jdbc.git')\n" + + " ENABLED = TRUE;"); + statement.execute( + "CREATE OR REPLACE GIT REPOSITORY jdbc\n" + + "ORIGIN = 'https://github.com/snowflakedb/snowflake-jdbc.git'\n" + + "API_INTEGRATION = gh_integration;"); + } + } + + private static List getContentFromFile( + Connection connection, String stageName, String filePathInGitRepo, String fileName) + throws IOException, SQLException { + Path tempDir = Files.createTempDirectory("git"); + String stagePath = stageName + "/" + filePathInGitRepo; + Path downloadedFile = tempDir.resolve(fileName); + String command = String.format("GET '%s' '%s'", stagePath, tempDir.toUri()); + + try (Statement statement = connection.createStatement(); + ResultSet rs = statement.executeQuery(command); ) 
{ + // then + assertTrue(rs.next(), "has result"); + return Files.readAllLines(downloadedFile); + } finally { + Files.delete(downloadedFile); + Files.delete(tempDir); + } + } + + private static List getContentFromStream( + Connection connection, String stageName, String filePathInGitRepo) + throws SQLException, IOException { + SnowflakeConnection unwrap = connection.unwrap(SnowflakeConnection.class); + try (InputStream inputStream = unwrap.downloadStream(stageName, filePathInGitRepo, false)) { + return IOUtils.readLines(inputStream, StandardCharsets.UTF_8); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java index e7217f695..5dda21d55 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc; import static org.awaitility.Awaitility.await; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -12,18 +12,17 @@ import java.time.Duration; import java.util.Properties; import java.util.logging.Logger; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.QueryStatus; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test class for using heartbeat with 
asynchronous querying. This is a "Latest" class because old * driver versions do not contain the asynchronous querying API. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class HeartbeatAsyncLatestIT extends HeartbeatIT { private static Logger logger = Logger.getLogger(HeartbeatAsyncLatestIT.class.getName()); @@ -69,20 +68,20 @@ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAsynchronousQuerySuccess() throws Exception { testSuccess(); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAsynchronousQueryFailure() throws Exception { testFailure(); } /** Test that isValid() function returns false when session is expired */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testIsValidWithInvalidSession() throws Exception { try (Connection connection = getConnection()) { // assert that connection starts out valid diff --git a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java index eb41ce76f..5f6d7867b 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java @@ -3,12 +3,13 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.AssumptionUtils.isRunningOnGithubActions; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.ResultSet; @@ -24,16 +25,15 @@ import java.util.concurrent.Future; import java.util.logging.Logger; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** This test assumes that GS has been set up */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class HeartbeatIT extends AbstractDriverIT { private static Logger logger = Logger.getLogger(HeartbeatIT.class.getName()); @@ -43,9 +43,9 @@ public class HeartbeatIT extends AbstractDriverIT { *

change the master token validity to 10 seconds change the session token validity to 5 * seconds change the SESSION_RECORD_ACCESS_INTERVAL_SECS to 1 second */ - @BeforeClass + @BeforeAll public static void setUpClass() throws Exception { - if (!RunningOnGithubAction.isRunningOnGithubAction()) { + if (!isRunningOnGithubActions()) { try (Connection connection = getSnowflakeAdminConnection(); Statement statement = connection.createStatement()) { statement.execute( @@ -61,9 +61,9 @@ public static void setUpClass() throws Exception { * Reset master_token_validity, session_token_validity, SESSION_RECORD_ACCESS_INTERVAL_SECS to * default. */ - @AfterClass + @AfterAll public static void tearDownClass() throws Exception { - if (!RunningOnGithubAction.isRunningOnGithubAction()) { + if (!isRunningOnGithubActions()) { try (Connection connection = getSnowflakeAdminConnection(); Statement statement = connection.createStatement()) { statement.execute( @@ -115,7 +115,7 @@ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) * master token validity and issue a query to make sure the query succeeds. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSuccess() throws Exception { int concurrency = 10; ExecutorService executorService = Executors.newFixedThreadPool(10); @@ -146,7 +146,7 @@ public void testSuccess() throws Exception { * master token validity and issue a query to make sure the query fails. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testFailure() throws Exception { ExecutorService executorService = Executors.newFixedThreadPool(1); try { diff --git a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java index 56f02c6d5..eb2bdfeb1 100644 --- a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -17,25 +17,26 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import net.snowflake.client.category.TestCategoryStatement; +import java.util.stream.Stream; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.UUIDUtils; import org.apache.commons.text.RandomStringGenerator; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import 
org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) public class LobSizeLatestIT extends BaseJDBCTest { private static final Logger logger = Logger.getLogger(SnowflakeDriverIT.class.getName()); @@ -48,15 +49,16 @@ public class LobSizeLatestIT extends BaseJDBCTest { private static int smallLobSize = 16; private static int originLobSize = 16 * 1024 * 1024; - @BeforeClass + @BeforeAll public static void setUp() throws SQLException { - System.setProperty( - // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure - ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); try (Connection con = BaseJDBCTest.getConnection()) { // get max LOB size from session maxLobSize = con.getMetaData().getMaxCharLiteralLength(); logger.log(Level.INFO, "Using max lob size: " + maxLobSize); + System.setProperty( + // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, + Integer.toString((int) (maxLobSize * 1.5))); LobSizeStringValues.put(smallLobSize, generateRandomString(smallLobSize)); LobSizeStringValues.put(originLobSize, generateRandomString(originLobSize)); LobSizeStringValues.put(mediumLobSize, generateRandomString(mediumLobSize)); @@ -65,31 +67,20 @@ public static void setUp() throws SQLException { } } - @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") - public static Collection data() { - int[] lobSizes = - new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; - String[] resultFormats = new String[] {"Arrow", "JSON"}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizes.length; i++) { - for (int j = 0; j < resultFormats.length; j++) { - ret.add(new Object[] {lobSizes[i], resultFormats[j]}); + static class DataProvider implements ArgumentsProvider { + + @Override + public Stream 
provideArguments(ExtensionContext context) throws Exception { + int[] lobSizes = + new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; + String[] resultFormats = new String[] {"Arrow", "JSON"}; + List ret = new ArrayList<>(); + for (int size : lobSizes) { + for (String format : resultFormats) { + ret.add(Arguments.of(size, format)); + } } - } - return ret; - } - - private final int lobSize; - - private final String resultFormat; - - public LobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { - this.lobSize = lobSize; - this.resultFormat = resultFormat; - - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - createTable(lobSize, stmt); + return ret.stream(); } } @@ -134,7 +125,7 @@ private void preparedInsertQuery(String varCharValue, String uuidValue, Connecti } } - @AfterClass + @AfterAll public static void tearDown() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { @@ -142,10 +133,13 @@ public static void tearDown() throws SQLException { } } - @Test - public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testStandardInsertAndSelectWithMaxLobSizeEnabled(int lobSize, String resultFormat) + throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); setResultFormat(stmt, resultFormat); String varCharValue = LobSizeStringValues.get(lobSize); @@ -161,10 +155,13 @@ public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLExcepti } } - @Test - public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testPreparedInsertWithMaxLobSizeEnabled(int lobSize, String resultFormat) + throws SQLException { try (Connection con = 
BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); setResultFormat(stmt, resultFormat); String maxVarCharValue = LobSizeStringValues.get(lobSize); @@ -180,8 +177,9 @@ public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { } } - @Test - public void testPutAndGet() throws IOException, SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testPutAndGet(int lobSize, String resultFormat) throws IOException, SQLException { File tempFile = File.createTempFile("LobSizeTest", ".csv"); // Delete file when JVM shuts down tempFile.deleteOnExit(); @@ -201,6 +199,7 @@ public void testPutAndGet() throws IOException, SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); setResultFormat(stmt, resultFormat); if (lobSize > originLobSize) { // for increased LOB size (16MB < lobSize < 128MB) stmt.execute("alter session set ALLOW_LARGE_LOBS_IN_EXTERNAL_SCAN = true"); diff --git a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java index 8962b8141..17afeeb53 100644 --- a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java @@ -4,17 +4,19 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.text.IsEmptyString.emptyOrNullString; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import 
org.junit.Test; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +@Tag(TestTags.STATEMENT) public class MaxLobSizeLatestIT extends BaseJDBCTest { /** @@ -23,7 +25,7 @@ public class MaxLobSizeLatestIT extends BaseJDBCTest { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testIncreasedMaxLobSize() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { @@ -38,7 +40,7 @@ public void testIncreasedMaxLobSize() throws SQLException { stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=true"); try (ResultSet resultSet = stmt.executeQuery("select randstr(20000000, random()) as large_str")) { - Assert.assertTrue(resultSet.next()); + assertTrue(resultSet.next()); assertThat(resultSet.getString(1), is(not(emptyOrNullString()))); } finally { stmt.execute("alter session unset ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT"); diff --git a/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java b/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java index c763606fe..65118cec6 100644 --- a/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java +++ b/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -31,7 +31,6 @@ import java.util.concurrent.Future; import java.util.stream.Collectors; import java.util.stream.IntStream; -import 
net.snowflake.client.category.TestCategoryConnection; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.QueryContextDTO; @@ -52,15 +51,15 @@ import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Test; /** * IT test for testing the "pluggable" implementation of SnowflakeConnection, SnowflakeStatement, * and ResultSet. These tests will query Snowflake normally, retrieve the JSON result, and replay it * back using a custom implementation of these objects that simply echoes a given JSON response. */ -@Category(TestCategoryConnection.class) +// TODO: SNOW-1821554 +// @Tag(TestTags.CONNECTION) public class MockConnectionTest extends BaseJDBCTest { // Simple pair class container for the error test. 
@@ -277,7 +276,7 @@ public void testMockResponse() throws SQLException, JsonProcessingException { mockConnection.prepareStatement("select count(*) from " + testTableName).executeQuery(); fakeResultSet.next(); String val = fakeResultSet.getString(1); - assertEquals("colA value from the mock connection was not what was expected", "rowOne", val); + assertEquals("rowOne", val, "colA value from the mock connection was not what was expected"); mockConnection.close(); } @@ -411,7 +410,7 @@ public void testMockTransferAgent() throws SQLException, IOException { InputStream downloadStream1 = mockConnection.downloadStream("@fakeStage", "file1", false); byte[] outputBytes1 = new byte[downloadStream1.available()]; downloadStream1.read(outputBytes1); - assertArrayEquals("downloaded bytes not what was expected", outputBytes1, inputBytes1); + assertArrayEquals(outputBytes1, inputBytes1, "downloaded bytes not what was expected"); } private JsonNode createDummyResponseWithRows(List> rows, List dataTypes) { @@ -540,7 +539,7 @@ private void compareResultSets( resultSetRows++; } - assertEquals("row-count was not what was expected", numRows, resultSetRows); + assertEquals(numRows, resultSetRows, "row-count was not what was expected"); } // DataTypes supported with mock responses in test: diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java index 0a1fb9ce2..5ea3b3f27 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc; -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class MultiStatementArrowIT extends MultiStatementIT { public 
MultiStatementArrowIT() { diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java index 06ccc4196..f4e9da56d 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java @@ -3,33 +3,31 @@ */ package net.snowflake.client.jdbc; -import static net.snowflake.client.ConditionalIgnoreRule.ConditionalIgnore; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.common.core.SqlState; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Multi Statement tests */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class MultiStatementIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; - @Before + @BeforeEach public void setQueryResultFormat() 
throws SQLException { try (Statement stmt = connection.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -418,7 +416,7 @@ public void testMultiStmtCountNotMatch() throws SQLException { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testInvalidParameterCount() throws SQLException { String userName = null; String accountName = null; @@ -455,7 +453,7 @@ public void testInvalidParameterCount() throws SQLException { for (int i = 0; i < testSuites.length; i++) { try { statement.execute(testSuites[i]); - Assert.fail(); + fail(); } catch (SQLException e) { assertThat(e.getErrorCode(), is(expectedErrorCodes[i])); } diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java index 59f5ba795..849742ef9 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java @@ -3,19 +3,19 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; 
+import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * MultiStatement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -23,22 +23,20 @@ * if the tests still is not applicable. If it is applicable, move tests to MultiStatementIT so that * both the latest and oldest supported driver run the tests. */ -@Category(TestCategoryStatement.class) -public class MultiStatementLatestIT extends BaseJDBCTest { +@Tag(TestTags.STATEMENT) +public class MultiStatementLatestIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; - public static Connection getConnection() throws SQLException { - Connection conn = BaseJDBCTest.getConnection(); - try (Statement stmt = conn.createStatement()) { + @BeforeEach + public void setQueryResultFormat() throws SQLException { + try (Statement stmt = connection.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } - return conn; } @Test public void testMultiStmtExecute() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); String multiStmtQuery = "create or replace temporary table test_multi (cola int);\n" @@ -74,8 +72,7 @@ public void testMultiStmtExecute() throws SQLException { @Test public void testMultiStmtTransaction() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { try { statement.execute( "create or replace table test_multi_txn(c1 number, c2 string)" + " as select 10, 'z'"); @@ -120,8 +117,7 @@ public void testMultiStmtTransaction() throws SQLException { @Test public void testMultiStmtExecuteUpdate() throws SQLException { - try (Connection 
connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String multiStmtQuery = "create or replace temporary table test_multi (cola int);\n" + "insert into test_multi VALUES (1), (2);\n" @@ -157,8 +153,7 @@ public void testMultiStmtExecuteUpdate() throws SQLException { @Test public void testMultiStmtTransactionRollback() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { try { statement.execute( "create or replace table test_multi_txn_rb(c1 number, c2 string)" @@ -207,8 +202,7 @@ public void testMultiStmtTransactionRollback() throws SQLException { @Test public void testMultiStmtExecuteQuery() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { String multiStmtQuery = "select 1;\n" + "create or replace temporary table test_multi (cola int);\n" @@ -254,8 +248,7 @@ public void testMultiStmtExecuteQuery() throws SQLException { @Test public void testMultiStmtUpdateCount() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); boolean isResultSet = statement.execute( @@ -280,8 +273,7 @@ public void testMultiStmtUpdateCount() throws SQLException { /** Test use of anonymous blocks (SNOW-758262) */ @Test public void testAnonymousBlocksUse() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + try (Statement statement = connection.createStatement()) { statement.execute("create or replace table tab758262(c1 number)"); // Test 
anonymous block with multistatement int multistatementcount = 2; diff --git a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java index d767456a2..f8ca2f48a 100644 --- a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java @@ -3,24 +3,25 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.AbstractDriverIT; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Test OpenGroup CLI */ -@Category(TestCategoryOthers.class) -public class OpenGroupCLIFuncIT extends BaseJDBCTest { - public static Connection getConnection() throws SQLException { - Connection connection = AbstractDriverIT.getConnection(); +@Tag(TestTags.OTHERS) +public class OpenGroupCLIFuncIT extends BaseJDBCWithSharedConnectionIT { + + @BeforeAll + public static void setSessionTimezone() throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute( "alter session set " @@ -31,114 +32,101 @@ public static Connection getConnection() throws SQLException { + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); } - return connection; } @Test public void testStringFunction() throws SQLException { - try (Connection connection 
= getConnection()) { - testFunction(connection, "select {fn ASCII('snowflake')}", "115"); - testFunction(connection, "select {fn CHAR(115)}", "s"); - testFunction(connection, "select {fn CONCAT('snow', 'flake')}", "snowflake"); - // DIFFERENCE is not supported - // testFunction(connection, "select {fn DIFFERENCE('snow', 'flake')}", "snowflake"); - testFunction(connection, "select {fn INSERT('snowflake', 2, 3, 'insert')}", "sinsertflake"); - testFunction(connection, "select {fn LCASE('SNOWflake')}", "snowflake"); - testFunction(connection, "select {fn LEFT('snowflake', 4)}", "snow"); - testFunction(connection, "select {fn LENGTH(' snowflake ')}", "11"); - testFunction(connection, "select {fn LOCATE('str', 'strstrstr', 2)}", "4"); - testFunction(connection, "select {fn LTRIM(' snowflake ')}", "snowflake "); - testFunction(connection, "select {fn REPEAT('snow', 3)}", "snowsnowsnow"); - testFunction(connection, "select {fn REPLACE('snowssnowsn', 'sn', 'aa')}", "aaowsaaowaa"); - testFunction(connection, "select {fn RIGHT('snowflake', 5)}", "flake"); - testFunction(connection, "select {fn RTRIM(' snowflake ')}", " snowflake"); - // SOUNDEX is not supported - // testFunction(connection, "select {fn SOUNDEX('snowflake')}", " snowflake"); - testFunction(connection, "select {fn SPACE(4)}", " "); - testFunction(connection, "select {fn SUBSTRING('snowflake', 2, 3)}", "now"); - testFunction(connection, "select {fn UCASE('snowflake')}", "SNOWFLAKE"); - } + testFunction(connection, "select {fn ASCII('snowflake')}", "115"); + testFunction(connection, "select {fn CHAR(115)}", "s"); + testFunction(connection, "select {fn CONCAT('snow', 'flake')}", "snowflake"); + // DIFFERENCE is not supported + // testFunction(connection, "select {fn DIFFERENCE('snow', 'flake')}", "snowflake"); + testFunction(connection, "select {fn INSERT('snowflake', 2, 3, 'insert')}", "sinsertflake"); + testFunction(connection, "select {fn LCASE('SNOWflake')}", "snowflake"); + testFunction(connection, "select 
{fn LEFT('snowflake', 4)}", "snow"); + testFunction(connection, "select {fn LENGTH(' snowflake ')}", "11"); + testFunction(connection, "select {fn LOCATE('str', 'strstrstr', 2)}", "4"); + testFunction(connection, "select {fn LTRIM(' snowflake ')}", "snowflake "); + testFunction(connection, "select {fn REPEAT('snow', 3)}", "snowsnowsnow"); + testFunction(connection, "select {fn REPLACE('snowssnowsn', 'sn', 'aa')}", "aaowsaaowaa"); + testFunction(connection, "select {fn RIGHT('snowflake', 5)}", "flake"); + testFunction(connection, "select {fn RTRIM(' snowflake ')}", " snowflake"); + // SOUNDEX is not supported + // testFunction(connection, "select {fn SOUNDEX('snowflake')}", " snowflake"); + testFunction(connection, "select {fn SPACE(4)}", " "); + testFunction(connection, "select {fn SUBSTRING('snowflake', 2, 3)}", "now"); + testFunction(connection, "select {fn UCASE('snowflake')}", "SNOWFLAKE"); } @Test public void testDateTimeFunction() throws SQLException { - try (Connection connection = getConnection()) { - // testFunction(connection, "select {fn CURDATE()}",""); - // testFunction(connection, "select {fn CURTIME()}",""); - testFunction(connection, "select {fn DAYNAME('2016-5-25')}", "Wed"); - testFunction(connection, "select {fn DAYOFMONTH(to_date('2016-5-25'))}", "25"); - testFunction(connection, "select {fn DAYOFWEEK(to_date('2016-5-25'))}", "3"); - testFunction(connection, "select {fn DAYOFYEAR(to_date('2016-5-25'))}", "146"); - testFunction(connection, "select {fn HOUR(to_timestamp('2016-5-25 12:34:56.789789'))}", "12"); - testFunction( - connection, "select {fn MINUTE(to_timestamp('2016-5-25 12:34:56.789789'))}", "34"); - testFunction(connection, "select {fn MONTH(to_date('2016-5-25'))}", "5"); - testFunction(connection, "select {fn MONTHNAME(to_date('2016-5-25'))}", "May"); - // testFunction(connection, "select {fn NOW()}", "May"); - testFunction(connection, "select {fn QUARTER(to_date('2016-5-25'))}", "2"); - testFunction( - connection, "select {fn 
SECOND(to_timestamp('2016-5-25 12:34:56.789789'))}", "56"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_FRAC_SECOND, 1000, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Wed, 25 May 2016 12:34:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_SECOND, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Wed, 25 May 2016 12:34:57 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_MINUTE, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Wed, 25 May 2016 12:35:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_HOUR, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Wed, 25 May 2016 13:34:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_DAY, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Thu, 26 May 2016 12:34:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_MONTH, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Sat, 25 Jun 2016 12:34:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_QUARTER, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Thu, 25 Aug 2016 12:34:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPADD(SQL_TSI_YEAR, 1, " - + "to_timestamp('2016-5-25 12:34:56.789789'))}", - "Thu, 25 May 2017 12:34:56 -0700"); - testFunction( - connection, - "select {fn TIMESTAMPDIFF(SQL_TSI_SECOND, " - + "to_timestamp('2016-5-25 12:34:56.789789'), to_timestamp('2016-5-25 12:34:57.789789'))}", - "1"); - testFunction(connection, "select {fn WEEK(to_timestamp('2016-5-25 12:34:56.789789'))}", "21"); - testFunction( - connection, "select {fn YEAR(to_timestamp('2016-5-25 12:34:56.789789'))}", "2016"); - } + // testFunction(connection, "select {fn CURDATE()}",""); + // testFunction(connection, "select {fn CURTIME()}",""); + testFunction(connection, "select {fn DAYNAME('2016-5-25')}", "Wed"); + testFunction(connection, 
"select {fn DAYOFMONTH(to_date('2016-5-25'))}", "25"); + testFunction(connection, "select {fn DAYOFWEEK(to_date('2016-5-25'))}", "3"); + testFunction(connection, "select {fn DAYOFYEAR(to_date('2016-5-25'))}", "146"); + testFunction(connection, "select {fn HOUR(to_timestamp('2016-5-25 12:34:56.789789'))}", "12"); + testFunction(connection, "select {fn MINUTE(to_timestamp('2016-5-25 12:34:56.789789'))}", "34"); + testFunction(connection, "select {fn MONTH(to_date('2016-5-25'))}", "5"); + testFunction(connection, "select {fn MONTHNAME(to_date('2016-5-25'))}", "May"); + // testFunction(connection, "select {fn NOW()}", "May"); + testFunction(connection, "select {fn QUARTER(to_date('2016-5-25'))}", "2"); + testFunction(connection, "select {fn SECOND(to_timestamp('2016-5-25 12:34:56.789789'))}", "56"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_FRAC_SECOND, 1000, " + + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Wed, 25 May 2016 12:34:56 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_SECOND, 1, " + + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Wed, 25 May 2016 12:34:57 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_MINUTE, 1, " + + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Wed, 25 May 2016 12:35:56 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_HOUR, 1, " + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Wed, 25 May 2016 13:34:56 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_DAY, 1, " + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Thu, 26 May 2016 12:34:56 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_MONTH, 1, " + + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Sat, 25 Jun 2016 12:34:56 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPADD(SQL_TSI_QUARTER, 1, " + + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Thu, 25 Aug 2016 12:34:56 -0700"); + testFunction( + 
connection, + "select {fn TIMESTAMPADD(SQL_TSI_YEAR, 1, " + "to_timestamp('2016-5-25 12:34:56.789789'))}", + "Thu, 25 May 2017 12:34:56 -0700"); + testFunction( + connection, + "select {fn TIMESTAMPDIFF(SQL_TSI_SECOND, " + + "to_timestamp('2016-5-25 12:34:56.789789'), to_timestamp('2016-5-25 12:34:57.789789'))}", + "1"); + testFunction(connection, "select {fn WEEK(to_timestamp('2016-5-25 12:34:56.789789'))}", "21"); + testFunction(connection, "select {fn YEAR(to_timestamp('2016-5-25 12:34:56.789789'))}", "2016"); } @Test public void testSystemFunctions() throws SQLException { - try (Connection connection = getConnection()) { - testFunction(connection, "select {fn DATABASE()}", connection.getCatalog()); - testFunction(connection, "select {fn IFNULL(NULL, 1)}", "1"); - testFunction( - connection, - "select {fn USER()}", - TestUtil.systemGetEnv("SNOWFLAKE_TEST_USER").toUpperCase()); - } + testFunction(connection, "select {fn DATABASE()}", connection.getCatalog()); + testFunction(connection, "select {fn IFNULL(NULL, 1)}", "1"); + testFunction( + connection, + "select {fn USER()}", + TestUtil.systemGetEnv("SNOWFLAKE_TEST_USER").toUpperCase()); } static void testFunction(Connection connection, String sql, String expected) throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java index 4f7004004..3b79dc616 100644 --- a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java @@ -7,9 +7,9 @@ import java.sql.Connection; import java.sql.SQLException; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Open Group CLI function integration tests for the latest JDBC 
driver. This doesn't work for the @@ -17,7 +17,7 @@ * examine if the tests still are not applicable. If it is applicable, move tests to * OpenGroupCLIFuncIT so that both the latest and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class OpenGroupCLIFuncLatestIT extends BaseJDBCTest { /** * Numeric function tests diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtArrowIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtArrowIT.java deleted file mode 100644 index 54a3e8ec1..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtArrowIT.java +++ /dev/null @@ -1,12 +0,0 @@ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryArrow.class) -public class PreparedMultiStmtArrowIT extends PreparedMultiStmtIT { - public PreparedMultiStmtArrowIT() { - super(); - queryResultFormat = "arrow"; - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java index 3d1997193..adb92036d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java @@ -3,41 +3,44 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; - 
-@Category(TestCategoryStatement.class) -public class PreparedMultiStmtIT extends BaseJDBCTest { - - protected static String queryResultFormat = "json"; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) +public class PreparedMultiStmtIT extends BaseJDBCWithSharedConnectionIT { + private static SnowflakeConnectionV1 sfConnectionV1; + + public PreparedMultiStmtIT() { + this.sfConnectionV1 = (SnowflakeConnectionV1) connection; + } - public static Connection getConnection() throws SQLException { - Connection conn = BaseJDBCTest.getConnection(); - try (Statement stmt = conn.createStatement()) { + public void setSessionResultFormat(String queryResultFormat) throws SQLException { + try (Statement stmt = connection.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } - return conn; } - @Test - public void testExecuteUpdateCount() throws Exception { - try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteUpdateCount(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); + try (Statement statement = sfConnectionV1.createStatement()) { try { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); statement.execute("create or replace table test_multi_bind(c1 number)"); try (PreparedStatement preparedStatement = - connection.prepareStatement( + sfConnectionV1.prepareStatement( "insert into test_multi_bind(c1) values(?); insert into " + "test_multi_bind values (?), (?)")) { @@ -74,16 +77,17 @@ public void testExecuteUpdateCount() throws Exception { } /** Less 
bindings than expected in statement */ - @Test - public void testExecuteLessBindings() throws Exception { - try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteLessBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); + try (Statement statement = sfConnectionV1.createStatement()) { try { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); statement.execute("create or replace table test_multi_bind(c1 number)"); try (PreparedStatement preparedStatement = - connection.prepareStatement( + sfConnectionV1.prepareStatement( "insert into test_multi_bind(c1) values(?); insert into " + "test_multi_bind values (?), (?)")) { @@ -95,7 +99,7 @@ public void testExecuteLessBindings() throws Exception { // first statement try { preparedStatement.executeUpdate(); - Assert.fail(); + fail(); } catch (SQLException e) { // error code comes from xp, which is js execution failed. 
assertThat(e.getErrorCode(), is(100132)); @@ -107,16 +111,17 @@ public void testExecuteLessBindings() throws Exception { } } - @Test - public void testExecuteMoreBindings() throws Exception { - try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteMoreBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); + try (Statement statement = sfConnectionV1.createStatement()) { try { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); statement.execute("create or replace table test_multi_bind(c1 number)"); try (PreparedStatement preparedStatement = - connection.prepareStatement( + sfConnectionV1.prepareStatement( "insert into test_multi_bind(c1) values(?); insert into " + "test_multi_bind values (?), (?)")) { @@ -154,14 +159,15 @@ public void testExecuteMoreBindings() throws Exception { } } - @Test - public void testExecuteQueryBindings() throws Exception { - try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteQueryBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); + try (Statement statement = sfConnectionV1.createStatement()) { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); try (PreparedStatement preparedStatement = - connection.prepareStatement("select ?; select ?, ?; select ?, ?, ?")) { + sfConnectionV1.prepareStatement("select ?; select ?, ?; select ?, ?, ?")) { assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(6)); @@ -197,14 +203,15 @@ public void testExecuteQueryBindings() throws Exception { } } - @Test - public void testExecuteQueryNoBindings() throws Exception { 
- try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteQueryNoBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); + try (Statement statement = sfConnectionV1.createStatement()) { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); try (PreparedStatement preparedStatement = - connection.prepareStatement("select 10; select 20, 30; select 40, 50, 60")) { + sfConnectionV1.prepareStatement("select 10; select 20, 30; select 40, 50, 60")) { assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(0)); diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java index 7c05163dc..aa9a90859 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java @@ -6,14 +6,16 @@ import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; /** Prepared statement integration tests */ abstract class PreparedStatement0IT extends BaseJDBCTest { - private final String queryResultFormat; - Connection init() throws SQLException { + return BaseJDBCTest.getConnection(); + } + + protected Connection getConn(String queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection(); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -34,21 +36,17 @@ Connection init() throws SQLException { final String enableCacheReuse = "alter session set USE_CACHED_RESULT=true"; final String tableFuncSQL = "select 1 from 
table(generator(rowCount => ?))"; - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection con = init()) { con.createStatement().execute(createTableSQL); } } - @After + @AfterEach public void tearDown() throws SQLException { try (Connection con = init()) { con.createStatement().execute(deleteTableSQL); } } - - PreparedStatement0IT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java index 56bef419f..d0074230d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java @@ -6,12 +6,12 @@ import static net.snowflake.client.jdbc.ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.DriverManager; @@ -25,26 +25,22 @@ import java.sql.Types; import java.util.Map; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Ignore; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; - -@Category(TestCategoryStatement.class) +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) public class PreparedStatement1IT extends PreparedStatement0IT { - public PreparedStatement1IT() { - super("json"); - } - - PreparedStatement1IT(String queryFormat) { - super(queryFormat); - } - @Test - public void testGetParameterMetaData() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetParameterMetaData(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement preparedStatement = connection.prepareStatement(updateSQL)) { /* All binding parameters are of type text and have null precision and scale and are not nullable. Since every binding parameter currently has identical properties, testing is minimal until this changes. 
@@ -83,9 +79,10 @@ public void testGetParameterMetaData() throws SQLException { } /** Trigger default stage array binding threshold so that it can be run on travis */ - @Test - public void testInsertStageArrayBind() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInsertStageArrayBind(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { connection .createStatement() @@ -122,9 +119,10 @@ static void bindOneParamSet( prepst.setShort(6, colE); } - @Test - public void testPrepareStatementWithKeys() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareStatementWithKeys(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute(createTableSQL); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL, Statement.NO_GENERATED_KEYS)) { @@ -138,11 +136,12 @@ public void testPrepareStatementWithKeys() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertBatch() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertBatch(String queryResultFormat) throws SQLException { int[] countResult; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { connection .createStatement() .execute( @@ -164,11 +163,12 @@ public void testInsertBatch() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertBatchStage() throws SQLException { + @ParameterizedTest + 
@ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertBatchStage(String queryResultFormat) throws SQLException { int[] countResult; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { connection .createStatement() .execute( @@ -188,12 +188,13 @@ public void testInsertBatchStage() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertBatchStageMultipleTimes() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertBatchStageMultipleTimes(String queryResultFormat) throws SQLException { // using the same statement to run a query multiple times shouldn't result in duplicates int[] countResult; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { connection .createStatement() .execute( @@ -223,10 +224,11 @@ public void testInsertBatchStageMultipleTimes() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchNull() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchNull(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled @@ -253,26 +255,27 @@ public void testStageBatchNull() throws SQLException { String errorMessage = "Column should be null (" + (threshold > 0 ? 
"stage" : "non-stage") + ")"; resultSet.getInt(1); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getDouble(2); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getFloat(3); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getString(4); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getLong(5); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getShort(6); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); } } } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageString() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageString(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled String[] rows = { @@ -297,7 +300,7 @@ public void testStageString() throws SQLException { "Strings should match (" + (threshold > 0 ? 
"stage" : "non-stage") + ")"; for (String row : rows) { assertTrue(resultSet.next()); - assertEquals(errorMessage, row, resultSet.getString(1)); + assertEquals(row, resultSet.getString(1), errorMessage); } } } @@ -305,10 +308,11 @@ public void testStageString() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testIncorrectTypes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testIncorrectTypes(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled @@ -338,10 +342,11 @@ public void testIncorrectTypes() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchTimestamps() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchTimestamps(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { Timestamp tsEpoch = new Timestamp(0L); Timestamp tsEpochMinusOneSec = new Timestamp(-1000L); // negative epoch no fraction of seconds @@ -409,11 +414,11 @@ public void testStageBatchTimestamps() throws SQLException { for (int i = 0; i < timestamps.length; i++) { assertEquals( + nonStageResult[i], + stageResult[i], "Stage binding timestamp should match non-stage binding timestamp (" + tsType - + ")", - nonStageResult[i], - stageResult[i]); + + ")"); } } } @@ -424,10 +429,11 @@ public void testStageBatchTimestamps() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition 
= RunningOnGithubAction.class) - public void testStageBatchTimes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchTimes(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { Time tMidnight = new Time(0); Time tNeg = new Time(-1); @@ -487,9 +493,9 @@ public void testStageBatchTimes() throws SQLException { for (int i = 0; i < times.length; i++) { assertEquals( - "Stage binding time should match non-stage binding time", nonStageResult[i], - stageResult[i]); + stageResult[i], + "Stage binding time should match non-stage binding time"); } } } @@ -499,9 +505,10 @@ public void testStageBatchTimes() throws SQLException { } } - @Test - public void testClearParameters() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testClearParameters(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.clearParameters(); @@ -522,9 +529,10 @@ public void testClearParameters() throws SQLException { } } - @Test - public void testClearBatch() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testClearBatch(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ 
-555,9 +563,10 @@ public void testClearBatch() throws SQLException { } } - @Test - public void testInsertOneRow() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInsertOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -576,9 +585,10 @@ public void testInsertOneRow() throws SQLException { } } - @Test - public void testUpdateOneRow() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testUpdateOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -611,9 +621,10 @@ public void testUpdateOneRow() throws SQLException { } } - @Test - public void testDeleteOneRow() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDeleteOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -654,9 +665,10 @@ public void testDeleteOneRow() throws SQLException { } } - @Test - public void testSelectOneRow() throws SQLException { - try (Connection connection = 
init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSelectOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -680,9 +692,10 @@ public void testSelectOneRow() throws SQLException { } } - @Test - public void testUpdateBatch() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testUpdateBatch(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -715,10 +728,11 @@ public void testUpdateBatch() throws SQLException { } } - @Test - public void testBatchInsertWithCacheEnabled() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBatchInsertWithCacheEnabled(String queryResultFormat) throws SQLException { int[] countResult; - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // ensure enable the cache result use statement.execute(enableCacheReuse); @@ -764,7 +778,7 @@ public void testBatchInsertWithCacheEnabled() throws SQLException { * @throws SQLException arises if any exception occurs */ @Test - @Ignore + @Disabled public void manualTestForPreparedStatementLogging() throws SQLException { Map params = getConnectionParameters(); Properties props = new Properties(); diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java index 872c8aab6..9c316edba 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java @@ -4,10 +4,10 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.jdbc.PreparedStatement1IT.bindOneParamSet; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.math.BigInteger; import java.sql.Connection; @@ -17,12 +17,14 @@ import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import java.util.TimeZone; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -30,19 +32,13 @@ * if the tests still are not applicable. If it is applicable, move tests to PreparedStatement1IT so * that both the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatement1LatestIT extends PreparedStatement0IT { - public PreparedStatement1LatestIT() { - super("json"); - } - - PreparedStatement1LatestIT(String queryResultFormat) { - super(queryResultFormat); - } - @Test - public void testPrepStWithCacheEnabled() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepStWithCacheEnabled(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // ensure enable the cache result use statement.execute(enableCacheReuse); @@ -107,10 +103,13 @@ public void testPrepStWithCacheEnabled() throws SQLException { * * @throws SQLException arises if any exception occurs */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertStageArrayBindWithTime() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertStageArrayBindWithTime(String queryResultFormat) throws SQLException { + TimeZone originalTimeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("alter session set CLIENT_STAGE_ARRAY_BINDING_THRESHOLD=2"); @@ -140,6 +139,7 @@ public void testInsertStageArrayBindWithTime() throws SQLException { } finally { statement.execute("drop table if exists testStageBindTime"); statement.execute("alter session unset CLIENT_STAGE_ARRAY_BINDING_THRESHOLD"); + TimeZone.setDefault(originalTimeZone); } } } @@ -154,10 +154,11 @@ public void testInsertStageArrayBindWithTime() throws SQLException { * * @throws 
SQLException */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSetObjectForTimestampTypes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testSetObjectForTimestampTypes(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // set timestamp mapping to default value try { @@ -210,14 +211,15 @@ public void testSetObjectForTimestampTypes() throws SQLException { * * @throws SQLException arises if any exception occurs */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testExecuteEmptyBatch() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testExecuteEmptyBatch(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { // executeBatch shouldn't throw exceptions assertEquals( - "For empty batch, we should return int[0].", 0, prepStatement.executeBatch().length); + 0, prepStatement.executeBatch().length, "For empty batch, we should return int[0]."); } connection @@ -228,7 +230,7 @@ public void testExecuteEmptyBatch() throws SQLException { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { // executeBatch shouldn't throw exceptions assertEquals( - "For empty batch, we should return int[0].", 0, prepStatement.executeBatch().length); + 0, prepStatement.executeBatch().length, "For empty batch, we should return int[0]."); } } } @@ -238,9 +240,10 @@ public void testExecuteEmptyBatch() throws SQLException { * * @throws SQLException */ - @Test - 
public void testSetObjectMethodWithVarbinaryColumn() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetObjectMethodWithVarbinaryColumn(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute("create or replace table test_binary(b VARBINARY)"); try (PreparedStatement prepStatement = @@ -251,16 +254,17 @@ public void testSetObjectMethodWithVarbinaryColumn() throws SQLException { } } - @Test - public void testSetObjectMethodWithBigIntegerColumn() { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetObjectMethodWithBigIntegerColumn(String queryResultFormat) { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute("create or replace table test_bigint(id NUMBER)"); try (PreparedStatement prepStatement = connection.prepareStatement("insert into test_bigint(id) values(?)")) { prepStatement.setObject(1, BigInteger.valueOf(9999)); int rows = prepStatement.executeUpdate(); - assertTrue("Row count doesn't match", rows == 1); + assertTrue(rows == 1, "Row count doesn't match"); } } catch (SQLException e) { e.printStackTrace(); @@ -270,9 +274,10 @@ public void testSetObjectMethodWithBigIntegerColumn() { } } - @Test - public void testSetObjectMethodWithLargeBigIntegerColumn() { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetObjectMethodWithLargeBigIntegerColumn(String queryResultFormat) { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute("create or replace table test_bigint(id NUMBER)"); try (PreparedStatement prepStatement = @@ -280,7 +285,7 @@ public void testSetObjectMethodWithLargeBigIntegerColumn() { BigInteger 
largeBigInt = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TEN); prepStatement.setObject(1, largeBigInt); int rows = prepStatement.executeUpdate(); - assertTrue("Row count doesn't match", rows == 1); + assertTrue(rows == 1, "Row count doesn't match"); } } catch (SQLException e) { e.printStackTrace(); @@ -290,9 +295,13 @@ public void testSetObjectMethodWithLargeBigIntegerColumn() { } } - @Test - public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBatchInsertWithTimestampInputFormatSet(String queryResultFormat) + throws SQLException { + TimeZone originalTimeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("alter session set TIMESTAMP_INPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FFTZH'"); @@ -315,6 +324,8 @@ public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { statement.execute("drop table if exists testStageBindTypes"); statement.execute("alter session unset TIMESTAMP_INPUT_FORMAT"); } + } finally { + TimeZone.setDefault(originalTimeZone); } } @@ -324,10 +335,11 @@ public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { * * @throws SQLException */ - @Test - @Ignore - public void testCallStatement() throws SQLException { - try (Connection connection = getConnection(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @Disabled + public void testCallStatement(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.executeQuery( diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java 
b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java index efb8ef944..96765131a 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java @@ -7,12 +7,12 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.google.common.collect.Sets; import java.math.BigDecimal; @@ -28,27 +28,21 @@ import java.sql.Timestamp; import java.util.Calendar; import java.util.Set; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryStatement.class) +import java.util.TimeZone; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) public class PreparedStatement2IT extends PreparedStatement0IT { - public PreparedStatement2IT() { - 
super("json"); - } - - PreparedStatement2IT(String queryFormat) { - super(queryFormat); - } - - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchDates() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchDates(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { Date dEpoch = new Date(0); Date dAfterEpoch = new Date(24 * 60 * 60 * 1000); @@ -111,9 +105,9 @@ public void testStageBatchDates() throws SQLException { for (int i = 0; i < dates.length; i++) { assertEquals( - "Stage binding date should match non-stage binding date", nonStageResult[i], - stageResult[i]); + stageResult[i], + "Stage binding date should match non-stage binding date"); } } } @@ -123,9 +117,10 @@ public void testStageBatchDates() throws SQLException { } } - @Test - public void testBindWithNullValue() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBindWithNullValue(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( "create or replace table testBindNull(cola date, colb time, colc timestamp, cold number)"); @@ -183,9 +178,10 @@ public void testBindWithNullValue() throws SQLException { } } - @Test - public void testPrepareDDL() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareDDL(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { try 
(PreparedStatement prepStatement = @@ -203,9 +199,10 @@ public void testPrepareDDL() throws SQLException { } } - @Test - public void testPrepareSCL() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareSCL(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement("use SCHEMA PUBLIC")) { prepStatement.execute(); } @@ -217,9 +214,10 @@ public void testPrepareSCL() throws SQLException { } } - @Test - public void testPrepareTCL() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareTCL(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { connection.setAutoCommit(false); String[] testCases = {"BEGIN", "COMMIT"}; @@ -234,9 +232,10 @@ public void testPrepareTCL() throws SQLException { } } - @Test - public void testPrepareShowCommand() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareShowCommand(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement("show databases")) { try (ResultSet resultSet = prepStatement.executeQuery()) { assertTrue(resultSet.next()); @@ -253,14 +252,16 @@ public void testPrepareShowCommand() throws SQLException { * @throws SQLException Will be thrown if any of driver calls fail * @throws InterruptedException Will be thrown if the sleep is interrupted */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testPrepareTimeout() throws SQLException, InterruptedException { + 
@ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testPrepareTimeout(String queryResultFormat) + throws SQLException, InterruptedException { try (Connection adminCon = getSnowflakeAdminConnection(); Statement adminStatement = adminCon.createStatement()) { adminStatement.execute("alter system set enable_combined_describe=true"); try { - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("create or replace table t(c1 string) as select 1"); statement.execute("alter session set jdbc_enable_combined_describe=true"); @@ -281,11 +282,12 @@ public void testPrepareTimeout() throws SQLException, InterruptedException { } /** Test case to make sure 2 non null bind refs was not constant folded into one */ - @Test - public void testSnow36284() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow36284(String queryResultFormat) throws Exception { String query = "select * from (values ('a'), ('b')) x where x.COLUMN1 in (?,?);"; - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); PreparedStatement preparedStatement = connection.prepareStatement(query)) { preparedStatement.setString(1, "a"); preparedStatement.setString(2, "b"); @@ -296,17 +298,18 @@ public void testSnow36284() throws Exception { rowcount++; valuesReturned.add(rs.getString(1)); } - assertEquals("Should get back 2 rows", 2, rowcount); - assertEquals("", valuesReturned, Sets.newHashSet("a", "b")); + assertEquals(2, rowcount, "Should get back 2 rows"); + assertEquals(valuesReturned, Sets.newHashSet("a", "b"), ""); } } } /** Test for coalesce with bind and null arguments in a prepared statement */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSnow35923() throws Exception { - 
try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testSnow35923(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( "alter session set " + "optimizer_eliminate_scans_for_constant_select=false"); @@ -325,14 +328,15 @@ public void testSnow35923() throws Exception { * Tests binding of object literals, including binding with object names as well as binding with * object IDs */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBindObjectLiteral() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBindObjectLiteral(String queryResultFormat) throws Exception { long t1Id = 0; long t2Id = 0; String t1 = null; - try (Connection conn = init(); + try (Connection conn = getConn(queryResultFormat); Statement stmt = conn.createStatement()) { String sqlText = "create or replace table identifier(?) (c1 number)"; @@ -480,9 +484,10 @@ public void testBindObjectLiteral() throws Exception { } } - @Test - public void testBindTimestampTZViaString() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBindTimestampTZViaString(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -509,16 +514,23 @@ public void testBindTimestampTZViaString() throws SQLException { * Ensures binding a string type with TIMESTAMP_TZ works. 
The customer has to use the specific * timestamp format: YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBindTimestampTZViaStringBatch() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBindTimestampTZViaStringBatch(String queryResultFormat) throws SQLException { + TimeZone originalTimeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind statement.execute( "create or replace table testbindtstz(cola timestamp_tz, colb timestamp_ntz)"); + statement.execute( + "ALTER SESSION SET TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + statement.execute( + "ALTER SESSION SET TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into testbindtstz values(?,?)")) { @@ -546,6 +558,8 @@ public void testBindTimestampTZViaStringBatch() throws SQLException { } finally { statement.execute("drop table if exists testbindtstz"); } + } finally { + TimeZone.setDefault(originalTimeZone); } } @@ -555,9 +569,10 @@ public void testBindTimestampTZViaStringBatch() throws SQLException { * * @throws Exception raises if any error occurs */ - @Test - public void testSnow41620() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow41620(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // Create a table and insert 3 
records statement.execute("CREATE or REPLACE TABLE SNOW41620 (c1 varchar(20)," + "c2 int" + " )"); @@ -592,9 +607,10 @@ public void testSnow41620() throws Exception { } } - @Test - public void testSnow50141() throws Exception { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow50141(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement("select 1 where true=?")) { prepStatement.setObject(1, true); try (ResultSet resultSet = prepStatement.executeQuery()) { @@ -617,9 +633,9 @@ public void testSnow50141() throws Exception { private void checkResultSetEqual(ResultSet rs1, ResultSet rs2) throws SQLException { int columns = rs1.getMetaData().getColumnCount(); assertEquals( - "Resultsets do not match in the number of columns returned", columns, - rs2.getMetaData().getColumnCount()); + rs2.getMetaData().getColumnCount(), + "Resultsets do not match in the number of columns returned"); while (rs1.next() && rs2.next()) { for (int columnIndex = 1; columnIndex <= columns; columnIndex++) { @@ -627,19 +643,20 @@ private void checkResultSetEqual(ResultSet rs1, ResultSet rs2) throws SQLExcepti final Object res2 = rs2.getObject(columnIndex); assertEquals( - String.format("%s and %s are not equal values at column %d", res1, res2, columnIndex), res1, - res2); + res2, + String.format("%s and %s are not equal values at column %d", res1, res2, columnIndex)); } assertEquals( - "Number of records returned by the results does not match", rs1.isLast(), rs2.isLast()); + rs1.isLast(), rs2.isLast(), "Number of records returned by the results does not match"); } } - @Test - public void testPreparedStatementWithSkipParsing() throws Exception { - try (Connection con = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void 
testPreparedStatementWithSkipParsing(String queryResultFormat) throws Exception { + try (Connection con = getConn(queryResultFormat)) { PreparedStatement stmt = con.unwrap(SnowflakeConnectionV1.class).prepareStatement("select 1", true); try (ResultSet rs = stmt.executeQuery()) { @@ -649,9 +666,11 @@ public void testPreparedStatementWithSkipParsing() throws Exception { } } - @Test - public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPreparedStatementWithSkipParsingAndBinding(String queryResultFormat) + throws Exception { + try (Connection con = getConn(queryResultFormat); Statement statement = con.createStatement()) { statement.execute("create or replace table t(c1 int)"); try { @@ -679,9 +698,10 @@ public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { * workaround is added. More specifically, ErrorCode returned for this statement is caught in * SnowflakePreparedStatementV1 so that execution can continue */ - @Test - public void testSnow44393() throws Exception { - try (Connection con = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow44393(String queryResultFormat) throws Exception { + try (Connection con = getConn(queryResultFormat)) { assertFalse( con.createStatement() .execute("alter session set timestamp_ntz_output_format='YYYY-MM-DD HH24:MI:SS'")); @@ -697,10 +717,11 @@ public void testSnow44393() throws Exception { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAddBatchNumericNullFloatMixed() throws Exception { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testAddBatchNumericNullFloatMixed(String queryResultFormat) throws Exception { + try (Connection 
connection = getConn(queryResultFormat)) { for (int threshold = 0; threshold < 2; ++threshold) { connection .createStatement() @@ -776,9 +797,10 @@ public void testAddBatchNumericNullFloatMixed() throws Exception { } } - @Test - public void testInvalidUsageOfApi() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInvalidUsageOfApi(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); PreparedStatement preparedStatement = connection.prepareStatement("select 1")) { final int expectedCode = ErrorCode.UNSUPPORTED_STATEMENT_TYPE_IN_EXECUTION_API.getMessageCode(); @@ -815,7 +837,7 @@ public void run() throws SQLException { private void assertException(RunnableWithSQLException runnable, int expectedCode) { try { runnable.run(); - Assert.fail(); + fail(); } catch (SQLException e) { assertThat(e.getErrorCode(), is(expectedCode)); } @@ -825,9 +847,10 @@ private interface RunnableWithSQLException { void run() throws SQLException; } - @Test - public void testCreatePreparedStatementWithParameters() throws Throwable { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testCreatePreparedStatementWithParameters(String queryResultFormat) throws Throwable { + try (Connection connection = getConn(queryResultFormat)) { connection.prepareStatement( "select 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); try { @@ -855,9 +878,10 @@ public void testCreatePreparedStatementWithParameters() throws Throwable { } } - @Test - public void testPrepareAndGetMeta() throws SQLException { - try (Connection con = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareAndGetMeta(String queryResultFormat) throws SQLException { + try (Connection con = getConn(queryResultFormat)) { try (PreparedStatement 
prepStatement = con.prepareStatement("select 1 where 1 > ?")) { ResultSetMetaData meta = prepStatement.getMetaData(); assertThat(meta.getColumnCount(), is(1)); diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java index f7ca395de..563406d23 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java @@ -6,22 +6,23 @@ import static net.snowflake.client.jdbc.PreparedStatement1IT.bindOneParamSet; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * PreparedStatement integration tests for the latest JDBC driver. 
This doesn't work for the oldest @@ -29,19 +30,13 @@ * if the tests still are not applicable. If it is applicable, move tests to PreparedStatement2IT so * that both the latest and oldest supported driver run the tests. */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatement2LatestIT extends PreparedStatement0IT { - public PreparedStatement2LatestIT() { - super("json"); - } - - PreparedStatement2LatestIT(String queryFormat) { - super(queryFormat); - } - @Test - public void testPrepareUDTF() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareUDTF(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table employee(id number, address text)"); @@ -77,10 +72,10 @@ public void testPrepareUDTF() throws Exception { // second argument is invalid prepStatement.setInt(1, 1); prepStatement.execute(); - Assert.fail(); + fail(); } catch (SQLException e) { // failed because argument type did not match - Assert.assertThat(e.getErrorCode(), is(1044)); + assertThat(e.getErrorCode(), is(1044)); } // create a udf with same name but different arguments and return type @@ -110,9 +105,10 @@ public void testPrepareUDTF() throws Exception { * SNOW-88426: skip bind parameter index check if prepare fails and defer the error checks to * execute */ - @Test - public void testSelectWithBinding() throws Throwable { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSelectWithBinding(String queryResultFormat) throws Throwable { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table TESTNULL(created_time timestamp_ntz, mid 
int)"); @@ -144,9 +140,10 @@ public void testSelectWithBinding() throws Throwable { } } - @Test - public void testLimitBind() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testLimitBind(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { String stmtStr = "select seq4() from table(generator(rowcount=>100)) limit ?"; try (PreparedStatement prepStatement = connection.prepareStatement(stmtStr)) { prepStatement.setInt(1, 10); @@ -156,9 +153,10 @@ public void testLimitBind() throws SQLException { } /** SNOW-31746 */ - @Test - public void testConstOptLimitBind() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testConstOptLimitBind(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { String stmtStr = "select 1 limit ? 
offset ?"; try (PreparedStatement prepStatement = connection.prepareStatement(stmtStr)) { prepStatement.setInt(1, 10); @@ -172,10 +170,11 @@ public void testConstOptLimitBind() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testTableFuncBindInput() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testTableFuncBindInput(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(tableFuncSQL)) { prepStatement.setInt(1, 2); try (ResultSet resultSet = prepStatement.executeQuery()) { @@ -185,9 +184,10 @@ public void testTableFuncBindInput() throws SQLException { } } - @Test - public void testExecuteLargeBatch() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteLargeBatch(String queryResultFormat) throws SQLException { + try (Connection con = getConn(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table mytab(id int)"); @@ -212,11 +212,12 @@ public void testExecuteLargeBatch() throws SQLException { } } - @Test - public void testRemoveExtraDescribeCalls() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testRemoveExtraDescribeCalls(String queryResultFormat) throws SQLException { String queryId1 = null; String queryId2 = null; - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table test_uuid_with_bind(c1 number)"); @@ -264,10 +265,12 @@ public void testRemoveExtraDescribeCalls() throws 
SQLException { } } - @Test - public void testRemoveExtraDescribeCallsSanityCheck() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testRemoveExtraDescribeCallsSanityCheck(String queryResultFormat) + throws SQLException { String queryId1; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement preparedStatement = connection.prepareStatement( "create or replace table test_uuid_with_bind(c1 number, c2 string)")) { @@ -307,9 +310,10 @@ public void testRemoveExtraDescribeCallsSanityCheck() throws SQLException { } } - @Test - public void testAlreadyDescribedMultipleResults() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testAlreadyDescribedMultipleResults(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.execute(); @@ -342,9 +346,10 @@ public void testAlreadyDescribedMultipleResults() throws SQLException { * * @throws Exception */ - @Test - public void testConsecutiveBatchInsertError() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testConsecutiveBatchInsertError(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table testStageArrayBind(c1 integer, c2 string)"); @@ -381,9 +386,10 @@ public void testConsecutiveBatchInsertError() throws SQLException { } } - @Test - public void testToString() throws SQLException { - try (Connection connection = 
init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testToString(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); PreparedStatement prepStatement = connection.prepareStatement("select current_version() --testing toString()")) { diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java deleted file mode 100644 index 379a471dd..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** Test PreparedStatement in ARROW format 2/2 */ -@Category(TestCategoryArrow.class) -public class PreparedStatementArrow1IT extends PreparedStatement1IT { - public PreparedStatementArrow1IT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java deleted file mode 100644 index 5c68c198b..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.experimental.categories.Category; - -/** - * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest - * supported driver. Drop this file when PrepareStatement1IT is dropped. 
- */ -@Category(TestCategoryStatement.class) -public class PreparedStatementArrow1LatestIT extends PreparedStatement1LatestIT { - public PreparedStatementArrow1LatestIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java deleted file mode 100644 index d2b7b9f85..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** Test PreparedStatement in ARROW format 2/2 */ -@Category(TestCategoryArrow.class) -public class PreparedStatementArrow2IT extends PreparedStatement2IT { - public PreparedStatementArrow2IT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java deleted file mode 100644 index 9c3922de4..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** - * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest - * supported driver. Drop this file when PrepareStatement2IT is dropped. 
- */ -@Category(TestCategoryArrow.class) -public class PreparedStatementArrow2LatestIT extends PreparedStatement2LatestIT { - public PreparedStatementArrow2LatestIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java index f80a00528..be20395a5 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java @@ -6,11 +6,11 @@ import java.net.URL; import java.sql.Connection; import java.sql.PreparedStatement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatementFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java index 883fe0c4d..c12242af8 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.spy; import java.sql.Connection; @@ -11,15 +11,14 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import 
net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ExecTimeTelemetryData; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatementLargeUpdateLatestIT extends BaseJDBCTest { /** @@ -28,7 +27,7 @@ public class PreparedStatementLargeUpdateLatestIT extends BaseJDBCTest { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testLargeUpdate() throws Throwable { try (Connection con = getConnection(); Statement statement = con.createStatement()) { @@ -64,7 +63,7 @@ public void testLargeUpdate() throws Throwable { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testExecuteLargeBatchOverIntMax() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java index 4fca52d1c..85f1e1a28 100644 --- a/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java @@ -1,7 +1,7 @@ package net.snowflake.client.jdbc; -import static junit.framework.TestCase.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -13,20 +13,20 @@ import java.sql.Statement; 
import java.util.Objects; import java.util.Properties; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -import org.junit.After; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class ProxyLatestIT extends BaseWiremockTest { - @After + @AfterEach public void tearDown() { super.tearDown(); unsetJvmProperties(); @@ -122,9 +122,9 @@ private void verifyRequestToProxy(String pathPattern, int expectedCount) { ObjectMapper mapper = new ObjectMapper(); JsonNode json = mapper.readTree(responseString); assertEquals( - "expected request count not matched for pattern: " + pathPattern, expectedCount, - json.get("count").asInt()); + json.get("count").asInt(), + "expected request count not matched for pattern: " + pathPattern); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java index 5cd03355c..940ab44e2 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.File; import java.io.FileInputStream; @@ -13,23 +13,22 @@ import 
java.sql.ResultSet; import java.sql.Statement; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class PutFileWithSpaceIncludedIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /** Test PUT command to send a data file, which file name contains a space. */ @Test - @Ignore + @Disabled public void putFileWithSpaceIncluded() throws Exception { String AWS_SECRET_KEY = TestUtil.systemGetEnv("AWS_SECRET_ACCESS_KEY"); String AWS_KEY_ID = TestUtil.systemGetEnv("AWS_ACCESS_KEY_ID"); @@ -43,7 +42,8 @@ public void putFileWithSpaceIncluded() throws Exception { assertNotNull(AWS_SECRET_KEY); assertNotNull(AWS_KEY_ID); - File dataFolder = tmpFolder.newFolder(); + File dataFolder = new File(tmpFolder, "data"); + dataFolder.mkdirs(); String tarFile = getFullPathFileInResource("snow-13400.tar"); FileInputStream fis = new FileInputStream(tarFile); TarArchiveInputStream tis = new TarArchiveInputStream(fis); diff --git a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java index f9579636d..5de4ec5bf 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java @@ -18,15 +18,15 @@ import 
java.sql.ResultSet; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.FileUtils; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class PutUnescapeBackslashIT extends AbstractDriverIT { - @BeforeClass + @BeforeAll public static void setUpClass() throws Exception {} /** diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java index 6e1a26428..7fdaab9bb 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java @@ -3,14 +3,14 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.AssumptionUtils.assumeRunningOnLinuxMac; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -21,7 +21,6 @@ import java.util.ArrayList; import java.util.List; import 
java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.RunningNotOnLinuxMac; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; @@ -32,7 +31,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.impl.client.CloseableHttpClient; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -337,18 +336,16 @@ class TestCase { for (TestCase t : testCases) { if (t.result) { assertTrue( + RestRequest.isNonRetryableHTTPCode(anyStatusCodeResponse(t.statusCode), t.retryHTTP403), String.format( "Result must be true but false: HTTP Code: %d, RetryHTTP403: %s", - t.statusCode, t.retryHTTP403), - RestRequest.isNonRetryableHTTPCode( - anyStatusCodeResponse(t.statusCode), t.retryHTTP403)); + t.statusCode, t.retryHTTP403)); } else { assertFalse( + RestRequest.isNonRetryableHTTPCode(anyStatusCodeResponse(t.statusCode), t.retryHTTP403), String.format( "Result must be false but true: HTTP Code: %d, RetryHTTP403: %s", - t.statusCode, t.retryHTTP403), - RestRequest.isNonRetryableHTTPCode( - anyStatusCodeResponse(t.statusCode), t.retryHTTP403)); + t.statusCode, t.retryHTTP403)); } } } @@ -459,8 +456,8 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false); } - @Test(expected = SnowflakeSQLException.class) - public void testMaxRetriesExceeded() throws IOException, SnowflakeSQLException { + @Test + public void testMaxRetriesExceeded() throws IOException { boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -482,8 +479,9 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl try { 
TelemetryService.disable(); - execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false, 1); - fail("testMaxRetries"); + assertThrows( + SnowflakeSQLException.class, + () -> execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false, 1)); } finally { if (telemetryEnabled) { TelemetryService.enable(); @@ -516,8 +514,8 @@ public CloseableHttpResponse answer(InvocationOnMock invocationOnMock) execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false, 1); } - @Test(expected = SnowflakeSQLException.class) - public void testLoginMaxRetries() throws IOException, SnowflakeSQLException { + @Test + public void testLoginMaxRetries() throws IOException { boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -539,8 +537,9 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl try { TelemetryService.disable(); - execute(client, "/session/v1/login-request", 0, 0, 0, true, false, 1); - fail("testMaxRetries"); + assertThrows( + SnowflakeSQLException.class, + () -> execute(client, "/session/v1/login-request", 0, 0, 0, true, false, 1)); } finally { if (telemetryEnabled) { TelemetryService.enable(); @@ -552,7 +551,7 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl @Test public void testLoginTimeout() throws IOException { - assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assumeRunningOnLinuxMac(); boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -643,18 +642,18 @@ public void shouldGenerateBackoffInRangeExceptTheLastBackoff() { elapsedMilliForTransientIssues); assertTrue( - "Backoff should be lower or equal to max backoff limit", - backoffInMilli <= maxBackoffInMilli); + backoffInMilli <= maxBackoffInMilli, + "Backoff should be lower or equal to max backoff limit"); if (elapsedMilliForTransientIssues 
+ backoffInMilli >= retryTimeoutInMilli) { assertEquals( - "Backoff should fill time till retry timeout", retryTimeoutInMilli - elapsedMilliForTransientIssues, - backoffInMilli); + backoffInMilli, + "Backoff should fill time till retry timeout"); break; } else { assertTrue( - "Backoff should be higher or equal to min backoff limit", - backoffInMilli >= minBackoffInMilli); + backoffInMilli >= minBackoffInMilli, + "Backoff should be higher or equal to min backoff limit"); } elapsedMilliForTransientIssues += backoffInMilli; } diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java b/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java index 76856b985..505af0b53 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java @@ -1,16 +1,16 @@ package net.snowflake.client.jdbc; import java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ExecTimeTelemetryData; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class RestRequestWiremockLatestIT extends BaseWiremockTest { String connectionResetByPeerScenario = diff --git a/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java b/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java index 7349a26f2..0675b9758 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java +++ 
b/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java @@ -3,14 +3,14 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import net.snowflake.client.core.SFSession; import org.apache.commons.text.StringEscapeUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** This is the unit tests for ResultJsonParserV2 */ public class ResultJsonParserV2Test { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java index 90cc98aa6..bebe0a54e 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java @@ -3,24 +3,22 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Before; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; /** Result set test base class. 
*/ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSet0IT extends BaseJDBCWithSharedConnectionIT { - private final String queryResultFormat; - - public Connection init(Properties paramProperties) throws SQLException { + public Connection init(Properties paramProperties, String queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); try (Statement stmt = conn.createStatement()) { @@ -29,11 +27,9 @@ public Connection init(Properties paramProperties) throws SQLException { return conn; } - @Before + @BeforeEach public void setUp() throws SQLException { try (Statement statement = connection.createStatement()) { - - statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); // TEST_RS statement.execute("create or replace table test_rs (colA string)"); statement.execute("insert into test_rs values('rowOne')"); @@ -50,22 +46,22 @@ public void setUp() throws SQLException { + "error_on_column_count_mismatch=false)"); // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc"), + "Failed to put a file"); assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc"), + "Failed to put a file"); int numRows = statement.executeUpdate("copy into orders_jdbc"); - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + assertEquals(73, numRows, "Unexpected number of rows copied: " + numRows); } } - ResultSet numberCrossTesting() throws SQLException { - Statement statement = connection.createStatement(); + ResultSet numberCrossTesting(String queryResultFormat) throws 
SQLException { + Statement statement = createStatement(queryResultFormat); statement.execute( "create or replace table test_types(c1 number, c2 integer, c3 float, c4 boolean," + "c5 char, c6 varchar, c7 date, c8 datetime, c9 time, c10 timestamp_ltz, " @@ -80,8 +76,4 @@ ResultSet numberCrossTesting() throws SQLException { statement.execute("insert into test_types (c5, c6) values('h', 'hello')"); return statement.executeQuery("select * from test_types"); } - - ResultSet0IT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java index 82f3c3244..091c7928e 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.DatabaseMetaData; @@ -12,11 +12,11 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Calendar; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetAlreadyClosedIT extends BaseJDBCWithSharedConnectionIT { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java index c6edc67fb..69c6031f3 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java @@ -6,46 +6,52 @@ import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; import java.util.List; import java.util.TimeZone; -import org.junit.After; -import org.junit.Before; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.ScaleProvider; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; abstract class ResultSetArrowForce0MultiTimeZone extends BaseJDBCTest { - static List testData() { - String[] timeZones = new String[] {"UTC", "America/New_York", "MEZ"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); - } + protected static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new SimpleResultFormatProvider(), new TimezoneProvider(3)); } - return ret; } - protected final String queryResultFormat; - protected final String tz; - private TimeZone origTz; - - ResultSetArrowForce0MultiTimeZone(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; - this.tz = timeZone; + protected static class DataWithScaleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return 
ProvidersUtil.cartesianProduct(context, new DataProvider(), new ScaleProvider()); + } } - @Before - public void setUp() { + private static TimeZone origTz; + + @BeforeAll + public static void setUp() { origTz = TimeZone.getDefault(); - TimeZone.setDefault(TimeZone.getTimeZone(this.tz)); } - @After - public void tearDown() { + @AfterAll + public static void tearDown() { TimeZone.setDefault(origTz); } - Connection init(String table, String column, String values) throws SQLException { + protected static void setTimezone(String tz) { + TimeZone.setDefault(TimeZone.getTimeZone(tz)); + } + + Connection init(String table, String column, String values, String queryResultFormat) + throws SQLException { Connection con = BaseJDBCTest.getConnection(); try (Statement statement = con.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java index f998fb5d4..a612870a5 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java @@ -3,45 +3,31 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.SimpleDateFormat; -import java.util.Collection; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.category.TestTags; import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; 
-import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Compare json and arrow resultSet behaviors 1/2 */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetArrowForceLTZMultiTimeZoneIT extends ResultSetArrowForce0MultiTimeZone { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - return ResultSetArrowForce0MultiTimeZone.testData(); - } - - public ResultSetArrowForceLTZMultiTimeZoneIT(String queryResultFormat, String timeZone) { - super(queryResultFormat, timeZone); - } - - @Test - public void testTimestampLTZ() throws SQLException { - for (int scale = 0; scale <= 9; scale++) { - testTimestampLTZWithScale(scale); - } - } - private void testTimestampLTZWithScale(int scale) throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTimestampLTZWithScale(String queryResultFormat, String tz, int scale) + throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", @@ -72,7 +58,7 @@ private void testTimestampLTZWithScale(int scale) throws SQLException { String column = "(a timestamp_ltz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); + Connection con = init(table, column, values, queryResultFormat); ResultSet rs = con.createStatement().executeQuery("select * from " + table); int i = 0; while (i < cases.length) { @@ -85,8 +71,11 @@ private void testTimestampLTZWithScale(int scale) throws SQLException { finish(table, con); } - @Test - public void testTimestampLTZOutputFormat() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZOutputFormat(String 
queryResultFormat, String tz) + throws SQLException { + setTimezone(tz); String[] cases = {"2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", "2014-01-02 12:34:56 Z"}; long[] times = {1483272000000L, 1388678400000L, 1388666096000L}; @@ -99,7 +88,7 @@ public void testTimestampLTZOutputFormat() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'),('") + "')"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement()) { try { // use initialized ltz output format @@ -146,13 +135,14 @@ public void testTimestampLTZOutputFormat() throws SQLException { } } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampLTZWithNulls() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZWithNulls(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", @@ -183,7 +173,7 @@ public void testTimestampLTZWithNulls() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'), (null),('") + "')"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -200,13 +190,14 @@ public void testTimestampLTZWithNulls() throws SQLException { } } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampLTZWithNanos() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZWithNanos(String queryResultFormat, String tz) throws SQLException { + 
setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.123456789", "2014-01-02 16:00:00.000000001", @@ -229,7 +220,7 @@ public void testTimestampLTZWithNanos() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -243,7 +234,6 @@ public void testTimestampLTZWithNanos() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java index e073bfccf..db0984081 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java @@ -3,43 +3,29 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.Collection; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.category.TestTags; import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Compare json and arrow resultSet behaviors 2/2 */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetArrowForceTZMultiTimeZoneIT extends ResultSetArrowForce0MultiTimeZone { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - return ResultSetArrowForce0MultiTimeZone.testData(); - } - - public ResultSetArrowForceTZMultiTimeZoneIT(String queryResultFormat, String timeZone) { - super(queryResultFormat, timeZone); - } - - @Test - public void testTimestampTZ() throws SQLException { - for (int scale = 0; scale <= 9; scale++) { - testTimestampTZWithScale(scale); - } - } - private void testTimestampTZWithScale(int scale) throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTimestampTZWithScale(String queryResultFormat, String tz, int scale) + throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", @@ -67,7 +53,7 @@ private void testTimestampTZWithScale(int scale) throws SQLException { String column = "(a timestamp_tz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -81,13 +67,14 @@ private void testTimestampTZWithScale(int scale) throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampTZWithNanos() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampTZWithNanos(String queryResultFormat, String 
tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.1", "2014-01-02 16:00:00.123456789", @@ -119,7 +106,7 @@ public void testTimestampTZWithNanos() throws SQLException { String column = "(a timestamp_tz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -138,13 +125,14 @@ public void testTimestampTZWithNanos() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampTZWithMicros() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampTZWithMicros(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.1", "2014-01-02 16:00:00.123456", @@ -178,7 +166,7 @@ public void testTimestampTZWithMicros() throws SQLException { String column = "(a timestamp_tz(6))"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -197,7 +185,6 @@ public void testTimestampTZWithMicros() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java deleted file mode 100644 index a7e982024..000000000 --- 
a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryArrow.class) -public class ResultSetArrowIT extends ResultSetIT { - public ResultSetArrowIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java deleted file mode 100644 index 4ea7f7d8f..000000000 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** - * ResultSet integration tests for the latest JDBC driver. This doesn't work for the oldest - * supported driver. Drop this file when ResultSetLatestIT is dropped. 
- */ -@Category(TestCategoryArrow.class) -public class ResultSetArrowLatestIT extends ResultSetLatestIT { - public ResultSetArrowLatestIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java index a6a63a65d..b86a65c95 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java @@ -4,11 +4,11 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.Reader; import java.math.BigDecimal; @@ -28,13 +28,13 @@ import java.util.List; import java.util.Map; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.category.TestTags; import net.snowflake.common.core.SqlState; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Test AsyncResultSet */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetAsyncIT extends BaseJDBCWithSharedConnectionIT { @Test @@ -155,11 +155,11 @@ public void testOrderAndClosureFunctions() throws SQLException { statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); // test isFirst, isBeforeFirst - assertTrue("should be before the first", resultSet.isBeforeFirst()); - assertFalse("should not be the first", 
resultSet.isFirst()); + assertTrue(resultSet.isBeforeFirst(), "should be before the first"); + assertFalse(resultSet.isFirst(), "should not be the first"); resultSet.next(); - assertFalse("should not be before the first", resultSet.isBeforeFirst()); - assertTrue("should be the first", resultSet.isFirst()); + assertFalse(resultSet.isBeforeFirst(), "should not be before the first"); + assertTrue(resultSet.isFirst(), "should be the first"); // test isClosed functions queryID = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java index dd534d469..9bcbd83b4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java @@ -4,19 +4,19 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Test AsyncResultSet */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetAsyncLatestIT extends BaseJDBCTest { @Test public void testAsyncResultSet() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java index 423661c77..8f9da34e1 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java @@ -9,11 +9,11 @@ import java.sql.Time; import java.sql.Timestamp; import java.util.Collections; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetFeatureNotSupportedIT extends BaseJDBCWithSharedConnectionIT { @Test public void testQueryResultSetNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java index 193246368..760d83a75 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java @@ -7,13 +7,13 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.InputStream; import java.io.InputStreamReader; @@ -30,14 +30,15 @@ import java.sql.Statement; import java.sql.Types; import 
java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Test ResultSet */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetIT extends ResultSet0IT { private final String selectAllSQL = "select * from test_rs"; @@ -47,25 +48,19 @@ public class ResultSetIT extends ResultSet0IT { (byte) 0x00, (byte) 0xFF, (byte) 0x42, (byte) 0x01 }; - public ResultSetIT() { - this("json"); - } - - ResultSetIT(String queryResultFormat) { - super(queryResultFormat); - } - - @Test - public void testFindColumn() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testFindColumn(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(1, resultSet.findColumn("COLA")); } } - @Test - public void testGetColumnClassNameForBinary() throws Throwable { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetColumnClassNameForBinary(String queryResultFormat) throws Throwable { + try (Statement statement = createStatement(queryResultFormat); ) { try { statement.execute("create or replace table bintable (b binary)"); statement.execute("insert into bintable values ('00f1f2')"); @@ -88,8 +83,9 
@@ public void testGetColumnClassNameForBinary() throws Throwable { } } - @Test - public void testGetMethod() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetMethod(String queryResultFormat) throws Throwable { String prepInsertString = "insert into test_get values(?, ?, ?, ?, ?, ?, ?, ?)"; int bigInt = Integer.MAX_VALUE; long bigLong = Long.MAX_VALUE; @@ -100,7 +96,7 @@ public void testGetMethod() throws Throwable { Clob clob = connection.createClob(); clob.setString(1, "hello world"); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute( "create or replace table test_get(colA integer, colB number, colC number, " @@ -153,8 +149,11 @@ public void testGetMethod() throws Throwable { } } - @Test - public void testGetObjectOnDatabaseMetadataResultSet() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetObjectOnDatabaseMetadataResultSet(String queryResultFormat) + throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) {} DatabaseMetaData databaseMetaData = connection.getMetaData(); try (ResultSet resultSet = databaseMetaData.getTypeInfo()) { assertTrue(resultSet.next()); @@ -163,9 +162,10 @@ public void testGetObjectOnDatabaseMetadataResultSet() throws SQLException { } } - @Test - public void testGetShort() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetShort(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -205,9 +205,10 @@ public void testGetShort() throws SQLException { } } - @Test - public 
void testGetInt() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetInt(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -246,9 +247,10 @@ public void testGetInt() throws SQLException { } } - @Test - public void testGetLong() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetLong(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -287,9 +289,10 @@ public void testGetLong() throws SQLException { } } - @Test - public void testGetFloat() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetFloat(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -328,9 +331,10 @@ public void testGetFloat() throws SQLException { } } - @Test - public void testGetDouble() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetDouble(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every 
type of value for (int i = 1; i < 13; i++) { @@ -369,9 +373,10 @@ public void testGetDouble() throws SQLException { } } - @Test - public void testGetBigDecimal() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBigDecimal(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table test_get(colA number(38,9))"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into test_get values(?)")) { @@ -393,7 +398,7 @@ public void testGetBigDecimal() throws SQLException { statement.execute("drop table if exists test_get"); } - try (ResultSet resultSet = numberCrossTesting()) { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); for (int i = 1; i < 13; i++) { assertNull(resultSet.getBigDecimal(i)); @@ -426,9 +431,10 @@ public void testGetBigDecimal() throws SQLException { } } - @Test - public void testGetBigDecimalNegative() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBigDecimalNegative(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table test_dec(colA time)"); try (PreparedStatement preparedStatement = @@ -454,9 +460,10 @@ public void testGetBigDecimalNegative() throws SQLException { } } - @Test - public void testCursorPosition() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testCursorPosition(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { 
statement.execute(selectAllSQL); try (ResultSet resultSet = statement.getResultSet()) { assertTrue(resultSet.next()); @@ -480,10 +487,11 @@ public void testCursorPosition() throws SQLException { * * @throws SQLException arises if any exception occurs. */ - @Test - public void testGetBytes() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBytes(String queryResultFormat) throws SQLException { Properties props = new Properties(); - try (Connection connection = init(props); + try (Connection connection = init(props, queryResultFormat); Statement statement = connection.createStatement()) { try { ingestBinaryTestData(connection); @@ -530,11 +538,12 @@ private void ingestBinaryTestData(Connection connection) throws SQLException { * * @throws Exception arises if any error occurs */ - @Test - public void testGetBytesInBase64() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBytesInBase64(String queryResultFormat) throws Exception { Properties props = new Properties(); props.setProperty("binary_output_format", "BAse64"); - try (Connection connection = init(props); + try (Connection connection = init(props, queryResultFormat); Statement statement = connection.createStatement()) { try { ingestBinaryTestData(connection); @@ -557,9 +566,10 @@ public void testGetBytesInBase64() throws Exception { } // SNOW-31647 - @Test - public void testColumnMetaWithZeroPrecision() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testColumnMetaWithZeroPrecision(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute( "create or replace table testColDecimal(cola number(38, 0), " + "colb number(17, 5))"); @@ -578,9 +588,10 @@ public void 
testColumnMetaWithZeroPrecision() throws SQLException { } } - @Test - public void testGetObjectOnFixedView() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetObjectOnFixedView(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute( "create or replace table testFixedView" @@ -592,9 +603,9 @@ public void testGetObjectOnFixedView() throws Exception { // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView"), + "Failed to put a file"); try (ResultSet resultSet = statement.executeQuery( @@ -613,11 +624,12 @@ public void testGetObjectOnFixedView() throws Exception { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetColumnDisplaySizeAndPrecision() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testGetColumnDisplaySizeAndPrecision(String queryResultFormat) throws SQLException { ResultSetMetaData resultSetMetaData = null; - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet resultSet = statement.executeQuery("select cast(1 as char)")) { resultSetMetaData = resultSet.getMetaData(); @@ -665,9 +677,10 @@ public void testGetColumnDisplaySizeAndPrecision() throws SQLException { } } - @Test - public void testGetBoolean() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBoolean(String queryResultFormat) throws SQLException { + try 
(Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table testBoolean(cola boolean)"); statement.execute("insert into testBoolean values(false)"); try (ResultSet resultSet = statement.executeQuery("select * from testBoolean")) { @@ -733,9 +746,10 @@ public void testGetBoolean() throws SQLException { } } - @Test - public void testGetClob() throws Throwable { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetClob(String queryResultFormat) throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table testClob(cola text)"); statement.execute("insert into testClob values('hello world')"); statement.execute("insert into testClob values('hello world1')"); @@ -772,9 +786,10 @@ public void testGetClob() throws Throwable { } } - @Test - public void testFetchOnClosedResultSet() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testFetchOnClosedResultSet(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { ResultSet resultSet = statement.executeQuery(selectAllSQL); assertFalse(resultSet.isClosed()); resultSet.close(); @@ -783,11 +798,13 @@ public void testFetchOnClosedResultSet() throws SQLException { } } - @Test - public void testReleaseDownloaderCurrentMemoryUsage() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testReleaseDownloaderCurrentMemoryUsage(String queryResultFormat) + throws SQLException { final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { 
statement.executeQuery( "select current_date(), true,2345234, 2343.0, 'testrgint\\n\\t' from table(generator(rowcount=>1000000))"); @@ -802,21 +819,25 @@ public void testReleaseDownloaderCurrentMemoryUsage() throws SQLException { equalTo(initialMemoryUsage)); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testResultColumnSearchCaseSensitiveOld() throws Exception { - subTestResultColumnSearchCaseSensitive("JDBC_RS_COLUMN_CASE_INSENSITIVE"); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testResultColumnSearchCaseSensitiveOld(String queryResultFormat) throws Exception { + subTestResultColumnSearchCaseSensitive("JDBC_RS_COLUMN_CASE_INSENSITIVE", queryResultFormat); } - @Test - public void testResultColumnSearchCaseSensitive() throws Exception { - subTestResultColumnSearchCaseSensitive("CLIENT_RESULT_COLUMN_CASE_INSENSITIVE"); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testResultColumnSearchCaseSensitive(String queryResultFormat) throws Exception { + subTestResultColumnSearchCaseSensitive( + "CLIENT_RESULT_COLUMN_CASE_INSENSITIVE", queryResultFormat); } - private void subTestResultColumnSearchCaseSensitive(String parameterName) throws Exception { + private void subTestResultColumnSearchCaseSensitive( + String parameterName, String queryResultFormat) throws Exception { Properties prop = new Properties(); prop.put("tracing", "FINEST"); - try (Connection connection = init(prop); + try (Connection connection = init(prop, queryResultFormat); Statement statement = connection.createStatement()) { try (ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL")) { @@ -847,9 +868,10 @@ private void subTestResultColumnSearchCaseSensitive(String parameterName) throws } } - @Test - public void testInvalidColumnIndex() throws SQLException { - try (Statement statement = connection.createStatement(); + 
@ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInvalidColumnIndex(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertTrue(resultSet.next()); @@ -869,11 +891,11 @@ public void testInvalidColumnIndex() throws SQLException { } /** SNOW-28882: wasNull was not set properly */ - @Test - public void testWasNull() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testWasNull(String queryResultFormat) throws Exception { try (ResultSet ret = - connection - .createStatement() + createStatement(queryResultFormat) .executeQuery( "select cast(1/nullif(0,0) as double)," + "cast(1/nullif(0,0) as int), 100, " @@ -891,9 +913,10 @@ public void testWasNull() throws Exception { } /** SNOW-28390 */ - @Test - public void testParseInfAndNaNNumber() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testParseInfAndNaNNumber(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet ret = statement.executeQuery("select to_double('inf'), to_double('-inf')")) { assertTrue(ret.next()); assertThat("Positive Infinite Number", ret.getDouble(1), equalTo(Double.POSITIVE_INFINITY)); @@ -910,10 +933,11 @@ public void testParseInfAndNaNNumber() throws Exception { } /** SNOW-33227 */ - @Test - public void testTreatDecimalAsInt() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testTreatDecimalAsInt(String queryResultFormat) throws Exception { ResultSetMetaData metaData; - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet ret = 
statement.executeQuery("select 1")) { metaData = ret.getMetaData(); @@ -929,60 +953,62 @@ public void testTreatDecimalAsInt() throws Exception { } } - @Test - public void testIsLast() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testIsLast(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet ret = statement.executeQuery("select * from orders_jdbc")) { - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); + assertTrue(ret.isBeforeFirst(), "should be before the first"); + assertFalse(ret.isFirst(), "should not be the first"); assertTrue(ret.next()); - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); + assertFalse(ret.isBeforeFirst(), "should not be before the first"); + assertTrue(ret.isFirst(), "should be the first"); int cnt = 0; while (ret.next()) { cnt++; if (cnt == 72) { - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + assertTrue(ret.isLast(), "should be the last"); + assertFalse(ret.isAfterLast(), "should not be after the last"); } } assertEquals(72, cnt); assertFalse(ret.next()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be afterthe last", ret.isAfterLast()); + assertFalse(ret.isLast(), "should not be the last"); + assertTrue(ret.isAfterLast(), "should be afterthe last"); } // PUT one file try (ResultSet ret = statement.executeQuery( "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~")) { - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); + assertTrue(ret.isBeforeFirst(), "should be before the first"); + assertFalse(ret.isFirst(), "should not be 
the first"); assertTrue(ret.next()); - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); + assertFalse(ret.isBeforeFirst(), "should not be before the first"); + assertTrue(ret.isFirst(), "should be the first"); - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + assertTrue(ret.isLast(), "should be the last"); + assertFalse(ret.isAfterLast(), "should not be after the last"); assertFalse(ret.next()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be after the last", ret.isAfterLast()); + assertFalse(ret.isLast(), "should not be the last"); + assertTrue(ret.isAfterLast(), "should be after the last"); } } } - @Test - public void testUpdateCountOnCopyCmd() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testUpdateCountOnCopyCmd(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table testcopy(cola string)"); @@ -1001,16 +1027,18 @@ public void testUpdateCountOnCopyCmd() throws Exception { } } - @Test - public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetTimeNullTimestampAndTimestampNullTime(String queryResultFormat) + throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table testnullts(c1 timestamp, c2 time)"); statement.execute("insert into testnullts(c1, c2) values(null, null)"); try (ResultSet rs = statement.executeQuery("select * from testnullts")) { - assertTrue("should return result", rs.next()); - assertNull("return value must 
be null", rs.getTime(1)); - assertNull("return value must be null", rs.getTimestamp(2)); + assertTrue(rs.next(), "should return result"); + assertNull(rs.getTime(1), "return value must be null"); + assertNull(rs.getTimestamp(2), "return value must be null"); } } finally { statement.execute("drop table if exists testnullts"); @@ -1018,9 +1046,10 @@ public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { } } - @Test - public void testNextNegative() throws SQLException { - try (ResultSet rs = connection.createStatement().executeQuery("select 1")) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testNextNegative(String queryResultFormat) throws SQLException { + try (ResultSet rs = createStatement(queryResultFormat).executeQuery("select 1")) { assertTrue(rs.next()); System.setProperty("snowflake.enable_incident_test2", "true"); try { @@ -1034,9 +1063,11 @@ public void testNextNegative() throws SQLException { } /** SNOW-1416051; Added in > 3.16.0 */ - @Test - public void shouldSerializeArrayAndObjectAsStringOnGetObject() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void shouldSerializeArrayAndObjectAsStringOnGetObject(String queryResultFormat) + throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet resultSet = statement.executeQuery( "select ARRAY_CONSTRUCT(1,2,3), OBJECT_CONSTRUCT('a', 4, 'b', 'test')")) { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java index 65cc27242..d8e3d111a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java @@ -3,15 +3,15 @@ */ package net.snowflake.client.jdbc; +import static org.hamcrest.MatcherAssert.assertThat; import 
static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -26,34 +26,21 @@ import java.util.List; import java.util.TimeZone; import java.util.stream.Collectors; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; import org.apache.arrow.vector.BigIntVector; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Completely compare json and arrow resultSet behaviors */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetJsonVsArrowIT extends BaseJDBCTest { - 
@Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - // all tests in this class need to run for both query result formats json and arrow - return new Object[][] {{"JSON"}, {"Arrow"}}; - } - - protected String queryResultFormat; - - public ResultSetJsonVsArrowIT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - public Connection init() throws SQLException { + public Connection init(String queryResultFormat) throws SQLException { Connection conn = getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -61,9 +48,10 @@ public Connection init() throws SQLException { return conn; } - @Test - public void testGSResult() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResult(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery( @@ -89,9 +77,10 @@ public void testGSResult() throws SQLException { } } - @Test - public void testGSResultReal() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResultReal(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table t (a real)"); @@ -106,10 +95,11 @@ public void testGSResultReal() throws SQLException { } } - @Test - public void testGSResultScan() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResultScan(String queryResultFormat) throws SQLException { String queryId = null; - try (Connection con = init(); + try 
(Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table t (a text)"); @@ -130,9 +120,10 @@ public void testGSResultScan() throws SQLException { } } - @Test - public void testGSResultForEmptyAndSmallTable() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResultForEmptyAndSmallTable(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table t (a int)"); @@ -150,9 +141,10 @@ public void testGSResultForEmptyAndSmallTable() throws SQLException { } } - @Test - public void testSNOW89737() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSNOW89737(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute( @@ -203,9 +195,10 @@ public void testSNOW89737() throws SQLException { * * @throws SQLException */ - @Test - public void testSemiStructuredData() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSemiStructuredData(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery( @@ -240,10 +233,11 @@ public void testSemiStructuredData() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStructuredTypes() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public 
void testStructuredTypes(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement stmt = con.createStatement()) { stmt.execute("alter session set feature_structured_types = 'ENABLED';"); @@ -263,8 +257,9 @@ public void testStructuredTypes() throws SQLException { } } - private Connection init(String table, String column, String values) throws SQLException { - Connection con = init(); + private Connection init(String queryResultFormat, String table, String column, String values) + throws SQLException { + Connection con = init(queryResultFormat); try (Statement statement = con.createStatement()) { statement.execute("create or replace table " + table + " " + column); statement.execute("insert into " + table + " values " + values); @@ -272,7 +267,7 @@ private Connection init(String table, String column, String values) throws SQLEx return con; } - private boolean isJSON() { + private boolean isJSON(String queryResultFormat) { return queryResultFormat.equalsIgnoreCase("json"); } @@ -287,13 +282,14 @@ private boolean isJSON() { * * @throws SQLException */ - @Test - public void testTinyInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testTinyInt(String queryResultFormat) throws SQLException { int[] cases = {0, 1, -1, 127, -128}; String table = "test_arrow_tiny_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -349,13 +345,14 @@ public void testTinyInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledTinyInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public 
void testScaledTinyInt(String queryResultFormat) throws SQLException { float[] cases = {0.0f, 0.11f, -0.11f, 1.27f, -1.28f}; String table = "test_arrow_tiny_int"; String column = "(a number(3,2))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_tiny_int")) { try { @@ -396,7 +393,7 @@ public void testScaledTinyInt() throws SQLException { assertEquals(val, rs.getDouble(1), delta); assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - if (isJSON()) { + if (isJSON(queryResultFormat)) { try { rs.getByte(1); fail(); @@ -408,7 +405,7 @@ public void testScaledTinyInt() throws SQLException { assertEquals(((byte) (cases[i] * 100)), rs.getByte(1)); } - if (!isJSON()) { + if (!isJSON(queryResultFormat)) { byte[] bytes = new byte[1]; bytes[0] = rs.getByte(1); assertArrayEquals(bytes, rs.getBytes(1)); @@ -446,13 +443,14 @@ public void testScaledTinyInt() throws SQLException { * * @throws SQLException */ - @Test - public void testSmallInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSmallInt(String queryResultFormat) throws SQLException { short[] cases = {0, 1, -1, 127, -128, 128, -129, 32767, -32768}; String table = "test_arrow_small_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -477,7 +475,7 @@ public void testSmallInt() throws SQLException { 
rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -490,7 +488,7 @@ public void testSmallInt() throws SQLException { } ByteBuffer bb = ByteBuffer.allocate(2); bb.putShort(cases[i]); - if (isJSON()) { + if (isJSON(queryResultFormat)) { byte[] res = rs.getBytes(1); for (int j = res.length - 1; j >= 0; j--) { assertEquals(bb.array()[2 - res.length + j], res[j]); @@ -531,14 +529,15 @@ public void testSmallInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledSmallInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledSmallInt(String queryResultFormat) throws SQLException { float[] cases = {0, 2.0f, -2.0f, 32.767f, -32.768f}; short[] shortCompact = {0, 2000, -2000, 32767, -32768}; String table = "test_arrow_small_int"; String column = "(a number(5,3))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_small_int")) { try { @@ -583,7 +582,7 @@ public void testScaledSmallInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -598,7 +597,7 @@ public void testScaledSmallInt() throws SQLException { byteBuffer.putShort(shortCompact[i]); assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -639,15 +638,16 @@ public void testScaledSmallInt() throws SQLException { * * @throws SQLException */ - @Test - public void testInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInt(String queryResultFormat) throws SQLException { int[] cases = { 0, 1, -1, 127, -128, 128, -129, 32767, -32768, 32768, -32769, 2147483647, -2147483648 }; String table = "test_arrow_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { try { @@ -686,7 +686,7 @@ public void testInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -699,7 +699,7 @@ public void testInt() throws SQLException { } ByteBuffer bb = ByteBuffer.allocate(4); bb.putInt(cases[i]); - if (isJSON()) { + if (isJSON(queryResultFormat)) { byte[] res = rs.getBytes(1); for (int j = res.length - 1; j >= 0; j--) { assertEquals(bb.array()[4 - res.length + j], res[j]); @@ -740,8 +740,9 @@ public void testInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledInt(String queryResultFormat) throws SQLException { int scale = 9; int[] intCompacts = {0, 123456789, -123456789, 2147483647, -2147483647}; List caseList = @@ -755,7 +756,7 @@ public void testScaledInt() throws SQLException { String column = String.format("(a number(10,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_int")) { try { @@ -800,7 +801,7 @@ public void testScaledInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -815,7 +816,7 @@ public void testScaledInt() throws SQLException { byteBuffer.putInt(intCompacts[i]); assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -856,8 +857,9 @@ public void testScaledInt() throws SQLException { * * @throws SQLException */ - @Test - public void testBigInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBigInt(String queryResultFormat) throws SQLException { long[] cases = { 0, 1, @@ -880,7 +882,7 @@ public void testBigInt() throws SQLException { String table = "test_arrow_big_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -934,7 +936,7 @@ public void testBigInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -984,8 +986,9 @@ public void testBigInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledBigInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledBigInt(String queryResultFormat) throws SQLException { int scale = 18; long[] longCompacts = { 0, 123456789, -123456789, 2147483647, -2147483647, Long.MIN_VALUE, Long.MAX_VALUE @@ -1001,7 +1004,7 @@ public void testScaledBigInt() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -1046,7 +1049,7 @@ public void testScaledBigInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1061,7 +1064,7 @@ public void testScaledBigInt() throws SQLException { byteBuffer.putLong(longCompacts[i]); assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -1103,8 +1106,9 @@ public void testScaledBigInt() throws SQLException { * * @throws SQLException */ - @Test - public void testDecimalNoScale() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDecimalNoScale(String queryResultFormat) throws SQLException { int scale = 0; String[] longCompacts = { "10000000000000000000000000000000000000", @@ -1120,7 +1124,7 @@ public void testDecimalNoScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -1166,7 +1170,7 @@ public void testDecimalNoScale() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1212,8 +1216,9 @@ public void testDecimalNoScale() throws SQLException { * * @throws SQLException */ - @Test - public void testDecimalWithLargeScale() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDecimalWithLargeScale(String queryResultFormat) throws SQLException { int scale = 37; String[] longCompacts = { "1.0000000000000000000000000000000000000", @@ -1229,7 +1234,7 @@ public void testDecimalWithLargeScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -1274,7 +1279,7 @@ public void testDecimalWithLargeScale() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1287,7 +1292,7 @@ public void testDecimalWithLargeScale() throws SQLException { try { assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -1329,9 +1334,10 @@ public void testDecimalWithLargeScale() throws SQLException { * * @throws SQLException */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testDecimal() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testDecimal(String queryResultFormat) throws SQLException { int scale = 37; long[] longCompacts = { 0, 123456789, -123456789, 2147483647, -2147483647, Long.MIN_VALUE, Long.MAX_VALUE @@ -1347,7 +1353,7 @@ public void testDecimal() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { try { @@ -1393,7 +1399,7 @@ public void testDecimal() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1406,7 +1412,7 @@ public void testDecimal() throws SQLException { try { assertArrayEquals(byteBuf.putLong(0, longCompacts[i]).array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -1440,8 +1446,9 @@ public void testDecimal() throws SQLException { * * @throws SQLException */ - @Test - public void testDoublePrecision() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDoublePrecision(String queryResultFormat) throws SQLException { String[] cases = { // SNOW-31249 "-86.6426540296895", @@ -1470,12 +1477,12 @@ public void testDoublePrecision() throws SQLException { String column = "(a double)"; String values = "(" + StringUtils.join(cases, "),(") + ")"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { int i = 0; - if (isJSON()) { + if (isJSON(queryResultFormat)) { while (rs.next()) { assertEquals(json_results[i++], Double.toString(rs.getDouble(1))); } @@ -1491,12 +1498,13 @@ public void testDoublePrecision() throws SQLException { } } - @Test - public void testBoolean() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBoolean(String queryResultFormat) throws SQLException { String table = "test_arrow_boolean"; String column = "(a boolean)"; String values = "(true),(null),(false)"; - try (Connection conn = init(table, column, values); + try (Connection conn = init(queryResultFormat, table, column, values); Statement statement = conn.createStatement(); ResultSet rs = statement.executeQuery("select * from " + 
table)) { assertTrue(rs.next()); @@ -1512,12 +1520,13 @@ public void testBoolean() throws SQLException { } } - @Test - public void testClientSideSorting() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testClientSideSorting(String queryResultFormat) throws SQLException { String table = "test_arrow_sort_on"; String column = "( a int, b double, c string)"; String values = "(1,2.0,'test'),(0,2.0, 'test'),(1,2.0,'abc')"; - try (Connection conn = init(table, column, values); + try (Connection conn = init(queryResultFormat, table, column, values); Statement statement = conn.createStatement()) { try { // turn on sorting mode @@ -1537,9 +1546,10 @@ public void testClientSideSorting() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testClientSideSortingOnBatchedChunk() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testClientSideSortingOnBatchedChunk(String queryResultFormat) throws SQLException { // in this test, the first chunk contains multiple batches when the format is Arrow String[] queries = { "set-sf-property sort on", @@ -1557,7 +1567,7 @@ public void testClientSideSortingOnBatchedChunk() throws SQLException { "insert into T values (3);", }; - try (Connection conn = init(); + try (Connection conn = init(queryResultFormat); Statement stat = conn.createStatement()) { try { for (String q : queries) { @@ -1580,9 +1590,10 @@ public void testClientSideSortingOnBatchedChunk() throws SQLException { } } - @Test - public void testTimestampNTZAreAllNulls() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testTimestampNTZAreAllNulls(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = 
con.createStatement()) { try { statement.executeQuery( @@ -1600,10 +1611,11 @@ public void testTimestampNTZAreAllNulls() throws SQLException { } } - @Test - public void TestArrowStringRoundTrip() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void TestArrowStringRoundTrip(String queryResultFormat) throws SQLException { String big_number = "11111111112222222222333333333344444444"; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement st = con.createStatement()) { try { for (int i = 0; i < 38; i++) { @@ -1625,10 +1637,11 @@ public void TestArrowStringRoundTrip() throws SQLException { } } - @Test - public void TestArrowFloatRoundTrip() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void TestArrowFloatRoundTrip(String queryResultFormat) throws SQLException { float[] cases = {Float.MAX_VALUE, Float.MIN_VALUE}; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement st = con.createStatement()) { try { for (float f : cases) { @@ -1645,12 +1658,13 @@ public void TestArrowFloatRoundTrip() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void TestTimestampNTZWithDLS() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void TestTimestampNTZWithDLS(String queryResultFormat) throws SQLException { TimeZone origTz = TimeZone.getDefault(); String[] timeZones = new String[] {"America/New_York", "America/Los_Angeles"}; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement st = con.createStatement()) { for (String timeZone : timeZones) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); @@ -1751,10 +1765,11 @@ public void TestTimestampNTZWithDLS() throws SQLException { } } - @Test - public void 
TestTimestampNTZBinding() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void TestTimestampNTZBinding(String queryResultFormat) throws SQLException { TimeZone origTz = TimeZone.getDefault(); - try (Connection con = init()) { + try (Connection con = init(queryResultFormat)) { TimeZone.setDefault(TimeZone.getTimeZone("PST")); try (Statement st = con.createStatement()) { st.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java index f62e7701c..d89cebfc6 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java @@ -3,46 +3,51 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.ScaleProvider; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.commons.lang3.StringUtils; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import 
org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Completely compare json and arrow resultSet behaviors */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetJsonVsArrowMultiTZIT extends BaseJDBCWithSharedConnectionIT { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - // all tests in this class need to run for both query result formats json and arrow - String[] timeZones = new String[] {"UTC", "America/New_York", "Asia/Singapore"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); - } + static String originalTz; + + static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new SimpleResultFormatProvider(), new TimezoneProvider(3)); } - return ret; } - private final String queryResultFormat; - private final String tz; + static class DataWithScaleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct(context, new DataProvider(), new ScaleProvider()); + } + } - @Before + @BeforeEach public void setSessionTimezone() throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute( @@ -56,13 +61,26 @@ public void setSessionTimezone() throws SQLException { } } - public 
ResultSetJsonVsArrowMultiTZIT(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; - System.setProperty("user.timezone", timeZone); - tz = timeZone; + private static void setTimezone(String tz) { + System.setProperty("user.timezone", tz); + } + + @BeforeAll + public static void saveTimezone() { + originalTz = System.getProperty("user.timezone"); + } + + @AfterAll + public static void restoreTimezone() { + if (originalTz != null) { + System.setProperty("user.timezone", originalTz); + } else { + System.clearProperty("user.timezone"); + } } - private void init(String table, String column, String values) throws SQLException { + private void init(String table, String column, String values, String queryResultFormat) + throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); statement.execute("create or replace table " + table + " " + column); @@ -70,8 +88,10 @@ private void init(String table, String column, String values) throws SQLExceptio } } - @Test - public void testTime() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTime(String queryResultFormat, String tz, int scale) throws SQLException { + setTimezone(tz); String[] times = { "00:01:23", "00:01:23.1", @@ -84,13 +104,13 @@ public void testTime() throws SQLException { "00:01:23.12345678", "00:01:23.123456789" }; - for (int scale = 0; scale <= 9; scale++) { - testTimeWithScale(times, scale); - } + testTimeWithScale(times, scale, queryResultFormat); } - @Test - public void testDate() throws Exception { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testDate(String queryResultFormat, String tz) throws Exception { + setTimezone(tz); String[] cases = { "2017-01-01", "2014-01-02", @@ -108,8 +128,8 @@ public void testDate() throws Exception { String column = "(a date)"; String 
values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement()) { + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet rs = statement.executeQuery("select * from " + table)) { int i = 0; while (i < cases.length) { @@ -129,12 +149,13 @@ public void testDate() throws Exception { } } - public void testTimeWithScale(String[] times, int scale) throws SQLException { + public void testTimeWithScale(String[] times, int scale, String queryResultFormat) + throws SQLException { String table = "test_arrow_time"; String column = "(a time(" + scale + "))"; String values = "('" + StringUtils.join(times, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement(); + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat); ResultSet rs = statement.executeQuery("select * from " + table)) { for (int i = 0; i < times.length; i++) { assertTrue(rs.next()); @@ -146,14 +167,11 @@ public void testTimeWithScale(String[] times, int scale) throws SQLException { } } - @Test - public void testTimestampNTZ() throws SQLException { - for (int scale = 0; scale <= 9; scale++) { - testTimestampNTZWithScale(scale); - } - } - - public void testTimestampNTZWithScale(int scale) throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTimestampNTZWithScale(String queryResultFormat, String tz, int scale) + throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00", "2014-01-02 16:00:00", @@ -181,8 +199,8 @@ public void testTimestampNTZWithScale(int scale) throws SQLException { String column = "(a timestamp_ntz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = 
connection.createStatement()) { + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet rs = statement.executeQuery("select * from " + table)) { int i = 0; while (i < cases.length) { @@ -193,12 +211,13 @@ public void testTimestampNTZWithScale(int scale) throws SQLException { assertNull(rs.getString(1)); } statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } - @Test - public void testTimestampNTZWithNanos() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZWithNanos(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.123456789", "2014-01-02 16:00:00.0123", @@ -215,8 +234,8 @@ public void testTimestampNTZWithNanos() throws SQLException { String column = "(a timestamp_ntz)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement()) { + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet rs = statement.executeQuery("select * from " + table)) { int i = 0; while (i < cases.length) { @@ -227,7 +246,6 @@ public void testTimestampNTZWithNanos() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index d82cd9ff2..ab7a6e081 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -6,12 +6,12 @@ import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; import static org.hamcrest.CoreMatchers.equalTo; 
import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import java.lang.reflect.Field; @@ -43,10 +43,9 @@ import java.util.TimeZone; import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SessionUtil; @@ -55,10 +54,14 @@ import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.client.jdbc.telemetry.TelemetryField; import net.snowflake.client.jdbc.telemetry.TelemetryUtil; +import net.snowflake.client.providers.SimpleResultFormatProvider; import net.snowflake.common.core.SFBinary; import org.apache.arrow.vector.Float8Vector; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import 
org.junit.jupiter.params.provider.ArgumentsSource; /** * ResultSet integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -66,15 +69,11 @@ * if the tests still is not applicable. If it is applicable, move tests to ResultSetIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetLatestIT extends ResultSet0IT { - - public ResultSetLatestIT() { - this("json"); - } - - ResultSetLatestIT(String queryResultFormat) { - super(queryResultFormat); + private static void setQueryResultFormat(Statement stmt, String queryResultFormat) + throws SQLException { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } private String createTableSql = @@ -91,10 +90,10 @@ public ResultSetLatestIT() { * * @throws Throwable */ - @Test - public void testMemoryClearingAfterInterrupt() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMemoryClearingAfterInterrupt(String queryResultFormat) throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); try { // Inject an InterruptedException into the SnowflakeChunkDownloader.terminate() function @@ -128,12 +127,12 @@ public void testMemoryClearingAfterInterrupt() throws Throwable { * multiple statements concurrently uses a lot of memory. This checks that chunks download even * when there is not enough memory available for concurrent prefetching. 
*/ - @Test - public void testChunkDownloaderNoHang() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testChunkDownloaderNoHang(String queryResultFormat) throws SQLException { int stmtCount = 30; int rowCount = 170000; - try (Connection connection = getConnection(); - Statement stmt = connection.createStatement()) { + try (Statement stmt = createStatement(queryResultFormat)) { List rsList = new ArrayList<>(); // Set memory limit to low number connection @@ -165,12 +164,12 @@ public void testChunkDownloaderNoHang() throws SQLException { } /** This tests that the SnowflakeChunkDownloader doesn't hang when memory limits are low. */ - @Test - public void testChunkDownloaderSetRetry() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testChunkDownloaderSetRetry(String queryResultFormat) throws SQLException { int stmtCount = 3; int rowCount = 170000; - try (Connection connection = getConnection(); - Statement stmt = connection.createStatement()) { + try (Statement stmt = createStatement(queryResultFormat)) { connection .unwrap(SnowflakeConnectionV1.class) .getSFBaseSession() @@ -214,9 +213,12 @@ public void testChunkDownloaderSetRetry() throws SQLException { * @throws ExecutionException arises if error occurred when sending telemetry events * @throws InterruptedException arises if error occurred when sending telemetry events */ - @Test - public void testMetadataAPIMetricCollection() + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMetadataAPIMetricCollection(String queryResultFormat) throws SQLException, ExecutionException, InterruptedException { + Statement stmt = createStatement(queryResultFormat); + stmt.close(); Telemetry telemetry = connection.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); DatabaseMetaData metadata = connection.getMetaData(); @@ -276,9 +278,10 @@ public void 
testMetadataAPIMetricCollection() * * @throws SQLException */ - @Test - public void testGetCharacterStreamNull() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetCharacterStreamNull(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table JDBC_NULL_CHARSTREAM (col1 varchar(16))"); statement.execute("insert into JDBC_NULL_CHARSTREAM values(NULL)"); try (ResultSet rs = statement.executeQuery("select * from JDBC_NULL_CHARSTREAM")) { @@ -293,9 +296,10 @@ public void testGetCharacterStreamNull() throws SQLException { * * @throws SQLException arises if any exception occurs */ - @Test - public void testMultipleChunks() throws Exception { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMultipleChunks(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat); // 10000 rows should be enough to force result into multiple chunks ResultSet resultSet = @@ -345,10 +349,11 @@ public void testMultipleChunks() throws Exception { * * @throws SQLException arises if any exception occurs */ - @Test - public void testResultSetMetadata() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testResultSetMetadata(String queryResultFormat) throws SQLException { final Map params = getConnectionParameters(); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); @@ -396,9 +401,10 @@ public void testResultSetMetadata() 
throws SQLException { * * @throws SQLException */ - @Test - public void testEmptyResultSet() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testEmptyResultSet(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); // the only function that returns ResultSetV1.emptyResultSet() ResultSet rs = statement.getGeneratedKeys()) { assertFalse(rs.next()); @@ -505,9 +511,10 @@ public void testEmptyResultSet() throws SQLException { * * @throws Exception arises if any exception occurs. */ - @Test - public void testBytesCrossTypeTests() throws Exception { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBytesCrossTypeTests(String queryResultFormat) throws Exception { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -538,9 +545,11 @@ public void testBytesCrossTypeTests() throws Exception { // SNOW-204185 // 30s for timeout. This test usually finishes in around 10s. 
- @Test(timeout = 30000) - public void testResultChunkDownloaderException() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @Timeout(30) + public void testResultChunkDownloaderException(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { // The generated resultSet must be big enough for triggering result chunk downloader String query = @@ -578,8 +587,7 @@ public void testResultChunkDownloaderException() throws SQLException { */ @Test public void testGetObjectWithBigInt() throws SQLException { - try (Statement statement = connection.createStatement()) { - statement.execute("alter session set jdbc_query_result_format ='json'"); + try (Statement statement = createStatement("json")) { // test with greatest possible number and greatest negative possible number String[] extremeNumbers = { "99999999999999999999999999999999999999", "-99999999999999999999999999999999999999" @@ -608,9 +616,10 @@ private byte[] floatToByteArray(float i) { * * @throws SQLException */ - @Test - public void testGetBigDecimalWithScale() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBigDecimalWithScale(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table test_get(colA number(38,9))"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into test_get values(?)")) { @@ -634,11 +643,13 @@ public void testGetBigDecimalWithScale() throws SQLException { } } - @Test - public void testGetDataTypeWithTimestampTz() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetDataTypeWithTimestampTz(String queryResultFormat) 
throws Exception { try (Connection connection = getConnection()) { ResultSetMetaData resultSetMetaData = null; try (Statement statement = connection.createStatement()) { + setQueryResultFormat(statement, queryResultFormat); statement.executeQuery("create or replace table ts_test(ts timestamp_tz)"); try (ResultSet resultSet = statement.executeQuery("select * from ts_test")) { resultSetMetaData = resultSet.getMetaData(); @@ -669,13 +680,14 @@ public void testGetDataTypeWithTimestampTz() throws Exception { * * @throws SQLException */ - @Test - public void testGetEmptyOrNullClob() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetEmptyOrNullClob(String queryResultFormat) throws SQLException { Clob clob = connection.createClob(); clob.setString(1, "hello world"); Clob emptyClob = connection.createClob(); emptyClob.setString(1, ""); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute( "create or replace table test_get_clob(colA varchar, colNull varchar, colEmpty text)"); try (PreparedStatement preparedStatement = @@ -703,10 +715,11 @@ public void testGetEmptyOrNullClob() throws SQLException { * * @throws SQLException */ - @Test - public void testSetNullClob() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetNullClob(String queryResultFormat) throws SQLException { Clob clob = null; - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table test_set_clob(colNull varchar)"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into test_set_clob values(?)")) { @@ -722,12 +735,14 @@ public void testSetNullClob() throws SQLException { } } - @Test - public void testCallStatementType() throws SQLException { + 
@ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testCallStatementType(String queryResultFormat) throws SQLException { Properties props = new Properties(); props.put("USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS", "true"); try (Connection connection = getConnection(props); Statement statement = connection.createStatement()) { + setQueryResultFormat(statement, queryResultFormat); try { String sp = "CREATE OR REPLACE PROCEDURE \"SP_ZSDLEADTIME_ARCHIVE_DAILY\"()\n" @@ -793,9 +808,10 @@ public void testCallStatementType() throws SQLException { * Test that new query error message function for checking async query error messages is not * implemented for synchronous queries * */ - @Test - public void testNewFeaturesNotSupportedExeceptions() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testNewFeaturesNotSupportedExeceptions(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet rs = statement.executeQuery("select 1")) { expectSnowflakeLoggedFeatureNotSupportedException( rs.unwrap(SnowflakeResultSet.class)::getQueryErrorMessage); @@ -841,9 +857,10 @@ public void testNewFeaturesNotSupportedExeceptions() throws SQLException { } } - @Test - public void testInvalidUnWrap() throws SQLException { - try (ResultSet rs = connection.createStatement().executeQuery("select 1")) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInvalidUnWrap(String queryResultFormat) throws SQLException { + try (ResultSet rs = createStatement(queryResultFormat).executeQuery("select 1")) { try { rs.unwrap(SnowflakeUtil.class); } catch (SQLException ex) { @@ -856,9 +873,8 @@ public void testInvalidUnWrap() throws SQLException { @Test public void testGetObjectJsonResult() throws SQLException { - try (Statement statement = 
connection.createStatement()) { + try (Statement statement = createStatement("json")) { try { - statement.execute("alter session set jdbc_query_result_format ='json'"); statement.execute("create or replace table testObj (colA double, colB boolean)"); try (PreparedStatement preparedStatement = @@ -878,9 +894,10 @@ public void testGetObjectJsonResult() throws SQLException { } } - @Test - public void testMetadataIsCaseSensitive() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMetadataIsCaseSensitive(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { String sampleCreateTableWithAllColTypes = "CREATE or replace TABLE case_sensitive (" @@ -929,14 +946,14 @@ public void testMetadataIsCaseSensitive() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAutoIncrementJsonResult() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testAutoIncrementResult(String queryResultFormat) throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("ENABLE_FIX_759900", true); - try (Connection connection = init(paramProperties); + try (Connection connection = init(paramProperties, queryResultFormat); Statement statement = connection.createStatement()) { - statement.execute("alter session set jdbc_query_result_format ='json'"); statement.execute( "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); @@ -953,34 +970,11 @@ public void testAutoIncrementJsonResult() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAutoIncrementArrowResult() throws SQLException { - 
Properties paramProperties = new Properties(); - paramProperties.put("ENABLE_FIX_759900", true); - try (Connection connection = init(paramProperties); - Statement statement = connection.createStatement()) { - statement.execute("alter session set jdbc_query_result_format ='arrow'"); - - statement.execute( - "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); - statement.execute("insert into auto_inc(name) values('test1')"); - - try (ResultSet resultSet = statement.executeQuery("select * from auto_inc")) { - assertTrue(resultSet.next()); - - ResultSetMetaData metaData = resultSet.getMetaData(); - assertTrue(metaData.isAutoIncrement(1)); - assertFalse(metaData.isAutoIncrement(2)); - assertTrue(metaData.isAutoIncrement(3)); - } - } - } - - @Test - public void testGranularTimeFunctionsInSessionTimezone() throws SQLException { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGranularTimeFunctionsInSessionTimezone(String queryResultFormat) + throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table testGranularTime(t time)"); statement.execute("insert into testGranularTime values ('10:10:10')"); @@ -997,39 +991,43 @@ public void testGranularTimeFunctionsInSessionTimezone() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGranularTimeFunctionsInUTC() throws SQLException { - try (Connection connection = getConnection()) { - TimeZone origTz = TimeZone.getDefault(); - try (Statement statement = connection.createStatement()) { - try { - TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); - statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); - statement.execute("create or replace 
table testGranularTime(t time)"); - statement.execute("insert into testGranularTime values ('10:10:10')"); - try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { - assertTrue(resultSet.next()); - assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); - assertEquals(02, resultSet.getTime(1).getHours()); - assertEquals(10, resultSet.getTime(1).getMinutes()); - assertEquals(10, resultSet.getTime(1).getSeconds()); - } - } finally { - TimeZone.setDefault(origTz); - statement.execute("drop table if exists testGranularTime"); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testGranularTimeFunctionsInUTC(String queryResultFormat) throws SQLException { + TimeZone origTz = TimeZone.getDefault(); + try (Statement statement = createStatement(queryResultFormat)) { + try { + TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); + statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); + statement.execute("create or replace table testGranularTime(t time)"); + statement.execute("insert into testGranularTime values ('10:10:10')"); + try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { + assertTrue(resultSet.next()); + assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); + assertEquals(02, resultSet.getTime(1).getHours()); + assertEquals(10, resultSet.getTime(1).getMinutes()); + assertEquals(10, resultSet.getTime(1).getSeconds()); } + } finally { + TimeZone.setDefault(origTz); + statement.execute("drop table if exists testGranularTime"); } } } /** Added in > 3.14.5 */ - @Test - public void testLargeStringRetrieval() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testLargeStringRetrieval(String queryResultFormat) throws SQLException { + String originalMaxJsonStringLength = + System.getProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM); + 
System.clearProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM); String tableName = "maxJsonStringLength_table"; int colLength = 16777216; try (Connection con = getConnection(); Statement statement = con.createStatement()) { + setQueryResultFormat(statement, queryResultFormat); SFBaseSession session = con.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); Integer maxVarcharSize = (Integer) session.getOtherParameter("VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"); @@ -1039,7 +1037,6 @@ public void testLargeStringRetrieval() throws SQLException { statement.execute("create or replace table " + tableName + " (c1 string(" + colLength + "))"); statement.execute( "insert into " + tableName + " select randstr(" + colLength + ", random())"); - assertNull(System.getProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM)); try (ResultSet rs = statement.executeQuery("select * from " + tableName)) { assertTrue(rs.next()); assertEquals(colLength, rs.getString(1).length()); @@ -1047,25 +1044,30 @@ public void testLargeStringRetrieval() throws SQLException { } } catch (Exception e) { fail("executeQuery should not fail"); + } finally { + if (originalMaxJsonStringLength != null) { + System.setProperty( + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, originalMaxJsonStringLength); + } } } private static void assertAllColumnsAreLongButBigIntIsBigDecimal(ResultSet rs) throws SQLException { while (rs.next()) { - assertEquals(java.lang.Long.class, rs.getObject(1).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(2).getClass()); - assertEquals(java.lang.Long.class, rs.getObject(3).getClass()); - assertEquals(java.lang.Long.class, rs.getObject(4).getClass()); + assertEquals(Long.class, rs.getObject(1).getClass()); + assertEquals(BigDecimal.class, rs.getObject(2).getClass()); + assertEquals(Long.class, rs.getObject(3).getClass()); + assertEquals(Long.class, rs.getObject(4).getClass()); } } private static void assertAllColumnsAreBigDecimal(ResultSet rs) throws 
SQLException { while (rs.next()) { - assertEquals(java.math.BigDecimal.class, rs.getObject(1).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(2).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(3).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(4).getClass()); + assertEquals(BigDecimal.class, rs.getObject(1).getClass()); + assertEquals(BigDecimal.class, rs.getObject(2).getClass()); + assertEquals(BigDecimal.class, rs.getObject(3).getClass()); + assertEquals(BigDecimal.class, rs.getObject(4).getClass()); } } @@ -1140,9 +1142,10 @@ public void testGetObjectForJSONResultFormatUsingJDBCDecimalAsInt() throws SQLEx } } - @Test - public void testGetObjectWithType() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetObjectWithType(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute( " CREATE OR REPLACE TABLE test_all_types (" + " string VARCHAR, " @@ -1178,7 +1181,7 @@ public void testGetObjectWithType() throws SQLException { assertResultValueAndType(statement, BigDecimal.valueOf(3.3), "bd", BigDecimal.class); assertResultValueAndType(statement, "FALSE", "bool", String.class); assertResultValueAndType(statement, Boolean.FALSE, "bool", Boolean.class); - assertResultValueAndType(statement, Long.valueOf(0), "bool", Long.class); + assertResultValueAndType(statement, 0L, "bool", Long.class); assertResultValueAsString( statement, new SnowflakeTimestampWithTimezone( diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java index c0a494613..93266290c 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java @@ -5,11 +5,11 
@@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.Date; @@ -22,46 +22,74 @@ import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; -import java.util.Collection; import java.util.List; -import java.util.Properties; import java.util.TimeZone; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Test ResultSet */ -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) 
+@Tag(TestTags.RESULT_SET) public class ResultSetMultiTimeZoneIT extends BaseJDBCTest { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - // all tests in this class need to run for both query result formats json and arrow - String[] timeZones = new String[] {"UTC", "Asia/Singapore", "MEZ"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); + static TimeZone ogTz; + + static class DataProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("UTC"); + add("Asia/Singapore"); + add("CET"); + } + }; + List queryFormats = + new ArrayList() { + { + add("json"); + add("arrow"); + } + }; + + List args = new ArrayList<>(); + for (String timeZone : timezones) { + for (String queryFormat : queryFormats) { + args.add(Arguments.argumentSet(timeZone + " " + queryFormat, timeZone, queryFormat)); + } } + + return args.stream(); } - return ret; } - private final String queryResultFormat; + @BeforeAll + public static void setDefaultTimezone() { + ogTz = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + } - public ResultSetMultiTimeZoneIT(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; + private static void setTimezone(String timeZone) { System.setProperty("user.timezone", timeZone); } - public Connection init() throws SQLException { + @AfterAll + public static void clearTimezone() { + TimeZone.setDefault(ogTz); + System.clearProperty("user.timezone"); + } + + public Connection init(String queryResultFormat) throws SQLException { Connection connection = BaseJDBCTest.getConnection(); try (Statement statement = connection.createStatement()) { @@ -78,15 +106,23 @@ public Connection 
init() throws SQLException { return connection; } - public Connection init(Properties paramProperties) throws SQLException { - Connection conn = getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); - try (Statement stmt = conn.createStatement()) { - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + public Connection init() throws SQLException { + Connection connection = BaseJDBCTest.getConnection(); + + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); } - return conn; + return connection; } - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection con = init(); Statement statement = con.createStatement()) { @@ -107,21 +143,21 @@ public void setUp() throws SQLException { + "error_on_column_count_mismatch=false)"); // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc"), + "Failed to put a file"); assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc"), + "Failed to put a file"); int numRows = statement.executeUpdate("copy into orders_jdbc"); - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + assertEquals(73, numRows, "Unexpected number of rows copied: " + numRows); } } - @After + @AfterEach public void 
tearDown() throws SQLException { System.clearProperty("user.timezone"); try (Connection con = init(); @@ -131,10 +167,12 @@ public void tearDown() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetDateAndTime() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testGetDateAndTime(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table dateTime(colA Date, colB Timestamp, colC Time)"); @@ -189,11 +227,13 @@ public void testGetDateAndTime() throws SQLException { } // SNOW-25029: The driver should reduce Time milliseconds mod 24h. - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testTimeRange() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testTimeRange(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); final String insertTime = "insert into timeTest values (?), (?), (?), (?)"; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table timeTest (c1 time)"); @@ -243,11 +283,13 @@ public void testTimeRange() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testCurrentTime() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testCurrentTime(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); final String insertTime = "insert into 
datetime values (?, ?, ?)"; - try (Connection connection = init()) { + try (Connection connection = init(queryResultFormat)) { assertFalse(connection.createStatement().execute("alter session set TIMEZONE='UTC'")); @@ -285,10 +327,12 @@ public void testCurrentTime() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBindTimestampTZ() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testBindTimestampTZ(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -315,10 +359,12 @@ public void testBindTimestampTZ() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetOldDate() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testGetOldDate(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table testOldDate(d date)"); @@ -353,9 +399,11 @@ public void testGetOldDate() throws SQLException { } } - @Test - public void testGetStringForDates() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testGetStringForDates(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { String expectedDate1 = "2020-08-01"; String expectedDate2 = "1920-11-11"; @@ 
-370,10 +418,13 @@ public void testGetStringForDates() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testDateTimeRelatedTypeConversion() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testDateTimeRelatedTypeConversion(String tz, String queryResultFormat) + throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -437,10 +488,12 @@ public void testDateTimeRelatedTypeConversion() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetOldTimestamp() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testGetOldTimestamp(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table testOldTs(cola timestamp_ntz)"); @@ -464,12 +517,14 @@ public void testGetOldTimestamp() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testPrepareOldTimestamp() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testPrepareOldTimestamp(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); TimeZone origTz = TimeZone.getDefault(); TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table 
testPrepOldTs(cola timestamp_ntz, colb date)"); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java index e03dc35df..2d0bbd6b6 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java @@ -1,7 +1,7 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.PreparedStatement; @@ -10,51 +10,67 @@ import java.sql.Statement; import java.sql.Timestamp; import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Calendar; -import java.util.Collection; import java.util.List; import java.util.TimeZone; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.BooleanProvider; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import 
org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * ResultSet multi timezone tests for the latest JDBC driver. This cannot run for the old driver. */ -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetMultiTimeZoneLatestIT extends BaseJDBCWithSharedConnectionIT { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - // all tests in this class need to run for both query result formats json and arrow - // UTC and Europe/London have different offsets during daylight savings time so it is important - // to test both to ensure daylight savings time is correct - String[] timeZones = new String[] {"UTC", "Asia/Singapore", "MEZ", "Europe/London"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); - } + + private static String originalTz; + + private static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new SimpleResultFormatProvider(), new TimezoneProvider(4)); } - return ret; } - private final String queryResultFormat; + private static class DataWithFlagProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct(context, new DataProvider(), new BooleanProvider()); + } + } + + @BeforeAll + public static void saveTimezone() { + originalTz = System.getProperty("user.timezone"); + } + + @AfterAll + public static void restoreTimezone() { + if (originalTz != null) { + System.setProperty("user.timezone", originalTz); + } else { + System.clearProperty("user.timezone"); + } + } - public ResultSetMultiTimeZoneLatestIT(String 
queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; - System.setProperty("user.timezone", timeZone); + private static void setTimezone(String tz) { + System.setProperty("user.timezone", tz); } - @Before - public void init() throws SQLException { + public void init(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); try (Statement statement = connection.createStatement()) { statement.execute( "alter session set " @@ -74,9 +90,11 @@ public void init() throws SQLException { * * @throws SQLException */ - @Test - public void testTimesWithGetTimestamp() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimesWithGetTimestamp(String queryResultFormat, String tz) throws SQLException { + init(queryResultFormat, tz); + try (Statement statement = createStatement(queryResultFormat)) { String timeStringValue = "10:30:50.123456789"; String timestampStringValue = "1970-01-01 " + timeStringValue; int length = timestampStringValue.length(); @@ -108,9 +126,12 @@ public void testTimesWithGetTimestamp() throws SQLException { * * @throws SQLException */ - @Test - public void testTimestampNTZWithDaylightSavings() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZWithDaylightSavings(String queryResultFormat, String tz) + throws SQLException { + init(queryResultFormat, tz); + try (Statement statement = createStatement(queryResultFormat)) { statement.execute( "alter session set TIMESTAMP_TYPE_MAPPING='TIMESTAMP_NTZ'," + "TIMEZONE='Europe/London'"); try (ResultSet rs = statement.executeQuery("select TIMESTAMP '2011-09-04 00:00:00'")) { @@ -125,13 +146,18 @@ public void testTimestampNTZWithDaylightSavings() throws SQLException { * Test for getDate(int columnIndex, Calendar cal) function to ensure it 
matches values with * getTimestamp function */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testDateAndTimestampWithTimezone() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testDateAndTimestampWithTimezone(String queryResultFormat, String tz) + throws SQLException { + init(queryResultFormat, tz); Calendar cal = null; SimpleDateFormat sdf = null; - - try (Statement statement = connection.createStatement()) { + // The following line allows for the tests to work locally. This should be removed when the + // tests are properly fixed. + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); try (ResultSet rs = statement.executeQuery( @@ -186,31 +212,6 @@ public void testDateAndTimestampWithTimezone() throws SQLException { } } - /** - * Tests that formats are correct when JDBC_USE_SESSION_TIMEZONE=true and other related time/date - * formatting parameters are at their default values - * - * @throws SQLException - */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSessionTimezoneUsage() throws SQLException { - testUseSessionTimeZoneHelper(true); - } - - /** - * Tests that the new param overrides previous time/date/timestamp formatting parameters such as - * JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC, CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ, and - * JDBC_FORMAT_DATE_WITH_TIMEZONE. - * - * @throws SQLException - */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testUseSessionTimeZoneOverrides() throws SQLException { - testUseSessionTimeZoneHelper(false); - } - /** * Helper function to test behavior of parameter JDBC_USE_SESSION_TIMEZONE. 
When * JDBC_USE_SESSION_TIMEZONE=true, time/date/timestamp values are displayed using the session @@ -226,8 +227,12 @@ public void testUseSessionTimeZoneOverrides() throws SQLException { * parameters * @throws SQLException */ - private void testUseSessionTimeZoneHelper(boolean useDefaultParamSettings) throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(DataWithFlagProvider.class) + public void testUseSessionTimeZoneHelper( + String queryResultFormat, String tz, boolean useDefaultParamSettings) throws SQLException { + init(queryResultFormat, tz); + try (Statement statement = createStatement(queryResultFormat)) { try { // create table with all timestamp types, time, and date statement.execute( diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java index bc553b2f6..23cdc5b6b 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java @@ -1,9 +1,10 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.jdbc.SnowflakeUtil.EXTRA_TYPES_VECTOR; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -12,11 +13,11 @@ import java.sql.Types; import java.util.Arrays; import java.util.List; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import 
net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * ResultSet integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -25,69 +26,60 @@ * If it is applicable, move tests to ResultSetVectorIT so that both the latest and oldest supported * driver run the tests. */ -@Category(TestCategoryResultSet.class) -@RunWith(Parameterized.class) +@Tag(TestTags.RESULT_SET) public class ResultSetVectorLatestIT extends ResultSet0IT { - private final String queryResultFormat; - - public ResultSetVectorLatestIT(String queryResultFormat) { - super(queryResultFormat); - this.queryResultFormat = queryResultFormat; - } - - @Parameterized.Parameters(name = "format={0}") - public static List queryResultFormats() { - return Arrays.asList("json", "arrow"); - } - - @Test - public void testGetIntVectorAsIntArray() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetIntVectorAsIntArray(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Integer[] vector = {-1, 5}; try (ResultSet resultSet = stmt.executeQuery("select " + vectorToString(vector, "int"))) { assertTrue(resultSet.next()); Integer[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Integer.class); - assertEquals(vector, result); + assertArrayEquals(vector, result); assertVectorMetadata(resultSet, 1, Types.INTEGER, 1); } } } - @Test - public void testGetIntVectorAsLongArray() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + 
@ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetIntVectorAsLongArray(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Long[] vector = {-1L, 5L}; try (ResultSet resultSet = stmt.executeQuery("select " + vectorToString(vector, "int"))) { assertTrue(resultSet.next()); Long[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Long.class); - assertEquals(vector, result); + assertArrayEquals(vector, result); assertVectorMetadata(resultSet, 1, Types.INTEGER, 1); } } } - @Test - public void testGetFloatVectorAsFloatArray() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetFloatVectorAsFloatArray(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Float[] vector = {-1.2f, 5.1f, 15.87f}; try (ResultSet resultSet = stmt.executeQuery("select " + vectorToString(vector, "float"))) { assertTrue(resultSet.next()); Float[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Float.class); - assertEquals(vector, result); + assertArrayEquals(vector, result); assertVectorMetadata(resultSet, 1, Types.FLOAT, 1); } } } - @Test - public void testGetNullAsIntVector() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetNullAsIntVector(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); try (ResultSet resultSet = stmt.executeQuery("select null::vector(int, 2)")) { assertTrue(resultSet.next()); 
Integer[] result = @@ -98,10 +90,11 @@ public void testGetNullAsIntVector() throws SQLException { } } - @Test - public void testGetNullAsFloatVector() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetNullAsFloatVector(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); try (ResultSet resultSet = stmt.executeQuery("select null::vector(float, 2)")) { assertTrue(resultSet.next()); Integer[] result = @@ -112,42 +105,46 @@ public void testGetNullAsFloatVector() throws SQLException { } } - @Test - public void testGetIntVectorFromTable() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetIntVectorFromTable(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); stmt.execute("create or replace table test_vector_int(x vector(int, 2), y int)"); stmt.execute("insert into test_vector_int select [3, 7]::vector(int, 2), 15"); try (ResultSet resultSet = stmt.executeQuery("select x, y from test_vector_int")) { assertTrue(resultSet.next()); Integer[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Integer.class); - assertEquals(new Integer[] {3, 7}, result); + assertArrayEquals(new Integer[] {3, 7}, result); assertVectorMetadata(resultSet, 1, Types.INTEGER, 2); } } } - @Test - public void testGetFloatVectorFromTable() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void 
testGetFloatVectorFromTable(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); stmt.execute("create or replace table test_vector_float(x vector(float, 2), y float)"); stmt.execute("insert into test_vector_float select [-3, 7.1]::vector(float, 2), 20.3"); try (ResultSet resultSet = stmt.executeQuery("select x, y from test_vector_float")) { assertTrue(resultSet.next()); Float[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Float.class); - assertEquals(new Float[] {-3f, 7.1f}, result); + assertArrayEquals(new Float[] {-3f, 7.1f}, result); assertVectorMetadata(resultSet, 1, Types.FLOAT, 2); } } } /** Added in > 3.16.1 */ - @Test - public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetVectorViaGetStringIsEqualToTheGetObject(String queryResultFormat) + throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Integer[] intVector = {-1, 5}; Float[] floatVector = {-1.2f, 5.1f, 15.87f}; try (ResultSet resultSet = @@ -164,7 +161,7 @@ public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException assertTrue(resultSet.next()); assertGetObjectAndGetStringBeTheSame(resultSet, "[-1,5]", 1); String floatArrayRepresentation = - "json".equals(queryResultFormat) + "json".equalsIgnoreCase(queryResultFormat) // in json we have slightly different format that we accept in the result ? 
"[-1.200000,5.100000,15.870000]" : "[-1.2,5.1,15.87]"; @@ -195,7 +192,8 @@ private String nullVectorToString(String vectorType) { return "null::vector(" + vectorType + ", 2)"; } - private void enforceQueryResultFormat(Statement stmt) throws SQLException { + private void enforceQueryResultFormat(Statement stmt, String queryResultFormat) + throws SQLException { String sql = String.format( "alter session set jdbc_query_result_format = '%s'", queryResultFormat.toUpperCase()); diff --git a/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java b/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java index 51c9179b4..6fbadd92f 100644 --- a/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java @@ -6,7 +6,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.mock; @@ -38,7 +38,7 @@ import net.snowflake.common.core.ClientAuthnDTO; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.HttpPost; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; diff --git a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java index bd51ef533..737cc1ffe 100644 --- a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java @@ -13,7 +13,7 @@ import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFSessionProperty; import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Test; +import org.junit.jupiter.api.Test; import 
org.mockito.MockedStatic; import org.mockito.Mockito; diff --git a/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java index a91fa4a89..4056dda1b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java @@ -1,11 +1,14 @@ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.fasterxml.jackson.databind.ObjectMapper; import java.util.Map; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SessionUtil; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SessionUtilTest { @Test @@ -16,56 +19,56 @@ public void testGetCommonParams() throws Exception { Map result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": true}]")); - Assert.assertTrue((boolean) result.get("testParam")); + assertTrue((boolean) result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": false}]")); - Assert.assertFalse((boolean) result.get("testParam")); + assertFalse((boolean) result.get("testParam")); result = SessionUtil.getCommonParams(mapper.readTree("[{\"name\": \"testParam\", \"value\": 0}]")); - Assert.assertEquals(0, (int) result.get("testParam")); + assertEquals(0, (int) result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": 1000}]")); - Assert.assertEquals(1000, (int) result.get("testParam")); + assertEquals(1000, (int) result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": \"\"}]")); - Assert.assertEquals("", result.get("testParam")); + assertEquals("", 
result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": \"value\"}]")); - Assert.assertEquals("value", result.get("testParam")); + assertEquals("value", result.get("testParam")); // Test known param name result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"CLIENT_DISABLE_INCIDENTS\", \"value\": true}]")); - Assert.assertTrue((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); + assertTrue((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"CLIENT_DISABLE_INCIDENTS\", \"value\": false}]")); - Assert.assertFalse((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); + assertFalse((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); result = SessionUtil.getCommonParams( mapper.readTree( "[{\"name\": \"CLIENT_STAGE_ARRAY_BINDING_THRESHOLD\", \"value\": 0}]")); - Assert.assertEquals(0, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); + assertEquals(0, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); result = SessionUtil.getCommonParams( mapper.readTree( "[{\"name\": \"CLIENT_STAGE_ARRAY_BINDING_THRESHOLD\", \"value\": 1000}]")); - Assert.assertEquals(1000, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); + assertEquals(1000, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); result = SessionUtil.getCommonParams(mapper.readTree("[{\"name\": \"TIMEZONE\", \"value\": \"\"}]")); - Assert.assertEquals("", result.get("TIMEZONE")); + assertEquals("", result.get("TIMEZONE")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"TIMEZONE\", \"value\": \"value\"}]")); - Assert.assertEquals("value", result.get("TIMEZONE")); + assertEquals("value", result.get("TIMEZONE")); } } diff --git a/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java b/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java index 5a8d28922..c5f3c8a1f 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.sql.Connection; import java.sql.ResultSet; @@ -14,11 +14,11 @@ import java.util.Map; import java.util.Properties; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public final class SessionVariablesIT extends AbstractDriverIT { @Test public void testSettingSessionVariablesInConnectionProperties() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java index c0c5dc18d..b57bdc86b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java @@ -3,6 +3,10 @@ */ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.StorageExtendedErrorInformation; import java.io.File; @@ -13,26 +17,23 @@ import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import 
net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.cloud.storage.SnowflakeAzureClient; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.Mockito; /** Test for SnowflakeAzureClient handle exception function */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeAzureClientHandleExceptionLatestIT extends AbstractDriverIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private Connection connection; private SFStatement sfStatement; private SFSession sfSession; @@ -41,7 +42,7 @@ public class SnowflakeAzureClientHandleExceptionLatestIT extends AbstractDriverI private int overMaxRetry; private int maxRetry; - @Before + @BeforeEach public void setup() throws SQLException { connection = getConnection("azureaccount"); sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -60,7 +61,7 @@ public void setup() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void error403RenewExpired() throws SQLException, InterruptedException { // Unauthenticated, renew is called. 
spyingClient.handleStorageException( @@ -101,99 +102,130 @@ public void run() { thread.start(); thread.interrupt(); thread.join(); - Assert.assertNull("Exception must not have been thrown in here", exceptionContainer[0]); + assertNull(exceptionContainer[0], "Exception must not have been thrown in here"); Mockito.verify(spyingClient, Mockito.times(4)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void error403OverMaxRetryThrow() throws SQLException { - spyingClient.handleStorageException( - new StorageException( - "403", "Unauthenticated", 403, new StorageExtendedErrorInformation(), new Exception()), - overMaxRetry, - "upload", - sfSession, - command, - null); + @Test + @DontRunOnGithubActions + public void error403OverMaxRetryThrow() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + "403", + "Unauthenticated", + 403, + new StorageExtendedErrorInformation(), + new Exception()), + overMaxRetry, + "upload", + sfSession, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void error403NullSession() throws SQLException { - spyingClient.handleStorageException( - new StorageException( - "403", "Unauthenticated", 403, new StorageExtendedErrorInformation(), new Exception()), - 0, - "upload", - null, - command, - null); + @Test + @DontRunOnGithubActions + public void error403NullSession() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + "403", + "Unauthenticated", + 403, + new StorageExtendedErrorInformation(), + new Exception()), + 0, + "upload", + null, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnGithubAction.class) - public void errorInvalidKey() throws SQLException { - spyingClient.handleStorageException( - new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorInvalidKey() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Test + @DontRunOnGithubActions public void errorInterruptedException() throws SQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new InterruptedException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new InterruptedException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new InterruptedException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorSocketTimeoutException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorSocketTimeoutException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new SocketTimeoutException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new 
SocketTimeoutException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new SocketTimeoutException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorUnknownException() throws SQLException { - spyingClient.handleStorageException(new Exception(), 0, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorUnknownException() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNoSpaceLeftOnDevice() throws SQLException, IOException { - File destFolder = tmpFolder.newFolder(); + @Test + @DontRunOnGithubActions + public void errorNoSpaceLeftOnDevice() throws IOException { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file://" + destFolderCanonicalPath + "'"; - spyingClient.handleStorageException( - new StorageException( - "", - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - 0, - "download", - null, - getCommand, - null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + "", + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + 0, + "download", + null, + getCommand, + null)); } - @After + @AfterEach public void cleanUp() throws SQLException { sfStatement.close(); connection.close(); diff --git 
a/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java index 8df351889..a2f4638b1 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java @@ -5,12 +5,12 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.SQLException; import java.util.Properties; import net.snowflake.client.core.SFSessionProperty; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Data source unit test */ public class SnowflakeBasicDataSourceTest { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java index b597c4dd0..0251f3984 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -11,11 +11,40 @@ import java.sql.Statement; import java.util.List; import java.util.Properties; -import org.junit.Test; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; +@Tag(TestTags.CORE) public class SnowflakeChunkDownloaderLatestIT extends BaseJDBCTest { + private static String originalProxyHost; + private static String originalProxyPort; + private static String originalNonProxyHosts; + @BeforeAll + public 
static void setUp() throws Exception { + originalProxyHost = System.getProperty("https.proxyHost"); + originalProxyPort = System.getProperty("https.proxyPort"); + originalNonProxyHosts = System.getProperty("https.nonProxyHosts"); + } + + private static void restoreProperty(String key, String value) { + if (value != null) { + System.setProperty(key, value); + } else { + System.clearProperty(key); + } + } + + @AfterAll + public static void tearDown() throws Exception { + restoreProperty("https.proxyHost", originalProxyHost); + restoreProperty("https.proxyPort", originalProxyPort); + restoreProperty("https.nonProxyHosts", originalNonProxyHosts); + } /** * Tests that the chunk downloader uses the maxHttpRetries and doesn't enter and infinite loop of * retries. diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java index fa3d4de6e..b08221a41 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.InputStream; @@ -13,7 +13,7 @@ import java.io.Reader; import java.nio.charset.StandardCharsets; import java.sql.SQLException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeClobTest extends BaseJDBCTest { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java index a17a89b15..de6c4fb70 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java +++ 
b/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java @@ -6,7 +6,7 @@ import java.util.Map; import java.util.Properties; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Created by hyu on 2/2/18. */ public class SnowflakeConnectionV1Test { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java index 161e9c939..b50388d5d 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java @@ -4,7 +4,7 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.sql.Connection; import java.sql.ResultSet; diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java index a540adcec..b245f8c0b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -5,13 +5,14 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; 
+import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -24,6 +25,7 @@ import java.nio.channels.FileChannel; import java.sql.Connection; import java.sql.DatabaseMetaData; +import java.sql.Date; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -49,23 +51,21 @@ import java.util.logging.Level; import java.util.logging.Logger; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.RunningOnTestaccount; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.DontRunOnTestaccount; +import net.snowflake.client.category.TestTags; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.SqlState; import org.apache.commons.io.FileUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** General integration tests */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeDriverIT extends BaseJDBCTest { private static final int MAX_CONCURRENT_QUERIES_PER_USER = 50; private static final String getCurrenTransactionStmt = "SELECT CURRENT_TRANSACTION()"; @@ -73,15 +73,15 @@ public class SnowflakeDriverIT extends BaseJDBCTest { private static String ORDERS_JDBC = "ORDERS_JDBC"; - @Rule 
public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private ObjectMapper mapper = new ObjectMapper(); - @Rule public TemporaryFolder tmpFolder2 = new TemporaryFolder(); + @TempDir public File tmpFolder2; public String testStageName = String.format("test_stage_%s", UUID.randomUUID().toString()).replaceAll("-", "_"); - @BeforeClass + @BeforeAll public static void setUp() throws Throwable { try (Connection connection = getConnection()) { try (Statement statement = connection.createStatement()) { @@ -99,22 +99,22 @@ public static void setUp() throws Throwable { // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc"), + "Failed to put a file"); assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc"), + "Failed to put a file"); int numRows = statement.executeUpdate("copy into orders_jdbc"); - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + assertEquals(73, numRows, "Unexpected number of rows copied: " + numRows); } } } - @AfterClass + @AfterAll public static void tearDown() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -145,7 +145,7 @@ public static Connection getConnection() throws SQLException { /** Test connection to database using Snowflake Oauth instead of username/pw * */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOauthConnection() throws SQLException { Map params = getConnectionParameters(); String role = null; @@ -182,7 +182,7 @@ public void testOauthConnection() throws SQLException { } } - 
@Ignore + @Disabled @Test public void testConnections() throws Throwable { ExecutorService executorService = Executors.newFixedThreadPool(MAX_CONCURRENT_QUERIES_PER_USER); @@ -237,7 +237,7 @@ public void testShowColumns() throws Throwable { try (Connection connection = getConnection(paramProperties); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("show columns in clustered_jdbc")) { - assertEquals("number of columns", 2, countRows(resultSet)); + assertEquals(2, countRows(resultSet), "number of columns"); } } @@ -259,7 +259,7 @@ public void testRowsPerResultset() throws Throwable { ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); int numColumns = resultSetMetaData.getColumnCount(); assertEquals(9, numColumns); - assertEquals("number of columns", 73, countRows(resultSet)); + assertEquals(73, countRows(resultSet), "number of columns"); } } } @@ -356,18 +356,18 @@ private void assertConstraintResults( // primary key for testConstraintsP1 should contain two rows for (int i = 0; i < numRows; i++) { - assertTrue("get constraint result row count", resultSet.next()); + assertTrue(resultSet.next(), "get constraint result row count"); if (pkTableName != null) { assertTrue( - "get constraint result primary table name", - pkTableName.equalsIgnoreCase(resultSet.getString(3))); + pkTableName.equalsIgnoreCase(resultSet.getString(3)), + "get constraint result primary table name"); } if (fkTableName != null) { assertTrue( - "get constraint result foreign table name", - fkTableName.equalsIgnoreCase(resultSet.getString(7))); + fkTableName.equalsIgnoreCase(resultSet.getString(7)), + "get constraint result foreign table name"); } } } @@ -513,8 +513,8 @@ public void testConstraints() throws Throwable { null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF1"); assertFalse( - "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty", - manualResultSet.next()); + manualResultSet.next(), + 
"cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty"); manualResultSet.close(); assertFalse(manualResultSet.next()); } finally { @@ -543,7 +543,7 @@ public void testQueryWithMaxRows() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testCancelQueryBySystemFunction() throws Throwable { try (Connection connection = getConnection(); Statement getSessionIdStmt = connection.createStatement()) { @@ -579,7 +579,7 @@ public void run() { fail("should raise an exception"); } catch (SQLException ex) { // assert the sqlstate is what we expect (QUERY CANCELLED) - assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); + assertEquals(SqlState.QUERY_CANCELED, ex.getSQLState(), "sqlstate mismatch"); } } @@ -595,34 +595,34 @@ public void testDBMetadata() throws Throwable { // the following will issue try (ResultSet databaseSet = metaData.getCatalogs()) { - assertTrue("databases shouldn't be empty", databaseSet.next()); + assertTrue(databaseSet.next(), "databases shouldn't be empty"); // "show schemas in [databaseName]" ResultSet schemaSet = metaData.getSchemas(connection.getCatalog(), connection.getSchema()); - assertTrue("schemas shouldn't be empty", schemaSet.next()); + assertTrue(schemaSet.next(), "schemas shouldn't be empty"); assertTrue( - "database should be " + connection.getCatalog(), - connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2)), + "database should be " + connection.getCatalog()); assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1)), + "schema should be " + connection.getSchema()); // snow tables in a schema try (ResultSet tableSet = metaData.getTables( connection.getCatalog(), 
connection.getSchema(), ORDERS_JDBC, null)) { // types assertTrue( + tableSet.next(), String.format( "table %s should exists in db: %s, schema: %s", - ORDERS_JDBC, connection.getCatalog(), connection.getSchema()), - tableSet.next()); + ORDERS_JDBC, connection.getCatalog(), connection.getSchema())); assertTrue( - "database should be " + connection.getCatalog(), - connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2)), + "database should be " + connection.getCatalog()); assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1)), + "schema should be " + connection.getSchema()); assertTrue( - "table should be orders_jdbc", ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3))); + ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3)), "table should be orders_jdbc"); } } @@ -643,7 +643,7 @@ public void testDBMetadata() throws Throwable { assertTrue(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))); ++cnt; } - assertEquals("number of tables", 1, cnt); + assertEquals(1, cnt, "number of tables"); } // test pattern try (ResultSet tableMetaDataResultSet = @@ -667,7 +667,7 @@ public void testDBMetadata() throws Throwable { break; } } - assertTrue("orders_jdbc not found", found); + assertTrue(found, "orders_jdbc not found"); } // get column metadata @@ -734,7 +734,7 @@ public void testDBMetadata() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutWithWildcardGCP() throws Throwable { Properties _connectionProperties = new Properties(); _connectionProperties.put("inject_wait_in_put", 5); @@ -748,21 +748,22 @@ public void testPutWithWildcardGCP() throws Throwable { // replace file name with wildcard character sourceFilePath = 
sourceFilePath.replace("orders_100.csv", "orders_10*.csv"); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); statement.execute("CREATE OR REPLACE STAGE wildcard_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage")); + statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage"), + "Failed to put a file"); findFile(statement, "ls @wildcard_stage/"); assertTrue( - "Failed to get files", statement.execute( - "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); File downloaded; // download the files we just uploaded to stage @@ -808,23 +809,26 @@ private void copyContentFrom(File file1, File file2) throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetLargeFileGCP() throws Throwable { try (Connection connection = getConnection("gcpaccount"); Statement statement = connection.createStatement()) { try { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File largeTempFile = tmpFolder.newFile("largeFile.csv"); + File largeTempFile = new File(tmpFolder, "largeFile.csv"); + largeTempFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { bw.write("Creating large test file for GCP PUT/GET test"); 
bw.write(System.lineSeparator()); bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); } - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + File largeTempFile2 = new File(tmpFolder, "largeFile2.csv"); + largeTempFile2.createNewFile(); String sourceFilePath = largeTempFile.getCanonicalPath(); @@ -840,8 +844,8 @@ public void testPutGetLargeFileGCP() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE largefile_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @largefile_stage/"); @@ -856,9 +860,9 @@ public void testPutGetLargeFileGCP() throws Throwable { // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); @@ -885,15 +889,17 @@ public void testPutGetLargeFileGCP() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutOverwrite() throws Throwable { // create 2 files: an original, and one that will overwrite the original - File file1 = tmpFolder.newFile("testfile.csv"); + File file1 = new File(tmpFolder, "testfile.csv"); + file1.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { bw.write("Writing original file content. 
This should get overwritten."); } - File file2 = tmpFolder2.newFile("testfile.csv"); + File file2 = new File(tmpFolder2, "testfile.csv"); + file2.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { bw.write("This is all new! This should be the result of the overwriting."); } @@ -901,7 +907,8 @@ public void testPutOverwrite() throws Throwable { String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -915,25 +922,25 @@ public void testPutOverwrite() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE testing_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); + statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // put another file in same stage with same filename with overwrite = true assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true")); + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File 
downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); @@ -955,7 +962,7 @@ public void testPutOverwrite() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPut() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); @@ -971,11 +978,11 @@ public void testPut() throws Throwable { // put files assertTrue( - "Failed to put a file", statement.execute( "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) - + " @%testLoadToLocalFS/orders parallel=10")); + + " @%testLoadToLocalFS/orders parallel=10"), + "Failed to put a file"); try (ResultSet resultSet = statement.getResultSet()) { @@ -1042,16 +1049,16 @@ static void findFile(Statement statement, String checkSQL) throws Throwable { } // give enough time for s3 eventual consistency for US region Thread.sleep(1000); - assertTrue("Could not find a file", fileFound); + assertTrue(fileFound, "Could not find a file"); // assert the first column not null - assertNotNull("Null result", resultSet.getString(1)); + assertNotNull(resultSet.getString(1), "Null result"); } } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSQLError42S02() throws SQLException { try (Connection connection = getConnection(); @@ -1061,13 +1068,13 @@ public void testSQLError42S02() throws SQLException { fail("SQL exception not raised"); } catch (SQLException ex1) { // assert the sqlstate "42S02" which means BASE_TABLE_OR_VIEW_NOT_FOUND - assertEquals("sqlstate mismatch", "42S02", ex1.getSQLState()); + assertEquals("42S02", ex1.getSQLState(), "sqlstate mismatch"); } } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testExplainPlan() throws Throwable { try (Connection connection = getConnection(); Statement statement 
= connection.createStatement(); @@ -1077,8 +1084,8 @@ public void testExplainPlan() throws Throwable { statement.executeQuery("EXPLAIN PLAN FOR SELECT c1 FROM orders_jdbc")) { ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertTrue("must return more than 4 columns", resultSetMetaData.getColumnCount() >= 4); - assertTrue("must return more than 3 rows", countRows(resultSet) > 3); + assertTrue(resultSetMetaData.getColumnCount() >= 4, "must return more than 4 columns"); + assertTrue(countRows(resultSet) > 3, "must return more than 3 rows"); } } @@ -1175,7 +1182,7 @@ public void testUpdateCount() throws Throwable { int numRows = statement.executeUpdate("INSERT INTO testUpdateCount values (1, 'a'), (2, 'b')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 2, numRows); + assertEquals(2, numRows, "Unexpected number of rows inserted: " + numRows); } finally { statement.execute("DROP TABLE if exists testUpdateCount"); } @@ -1202,7 +1209,7 @@ public void testSnow4245() throws Throwable { "insert into testSnow4245 values(NULL,NULL,NULL)," + "('2013-06-04 01:00:04','2013-06-04 01:00:04','2013-06-04 01:00:04')," + "('2013-06-05 23:00:05','2013-06-05 23:00:05','2013-06-05 23:00:05')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 3, numRows); + assertEquals(3, numRows, "Unexpected number of rows inserted: " + numRows); // query the data try (ResultSet resultSet = @@ -1251,7 +1258,7 @@ public void testSnow4394() throws Throwable { int numRows = statement.executeUpdate( String.format("INSERT INTO %s(str) values('%s')", tableName, data)); - assertEquals("Unexpected number of rows inserted: " + numRows, 1, numRows); + assertEquals(1, numRows, "Unexpected number of rows inserted: " + numRows); try (ResultSet rset = statement.executeQuery(String.format("SELECT str FROM %s", tableName))) { @@ -1259,7 +1266,7 @@ public void testSnow4394() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - 
assertEquals("Unexpected string value: " + ret, data, ret); + assertEquals(data, ret, "Unexpected string value: " + ret); } } finally { statement.execute(String.format("DROP TABLE if exists %s", tableName)); @@ -1318,8 +1325,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals(1, resultSet.getInt(1), "integer"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind float preparedStatement.setDouble(1, 1.2); @@ -1332,8 +1339,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("double", 1.2, resultSet.getDouble(1), 0); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals(1.2, resultSet.getDouble(1), 0, "double"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind string preparedStatement.setString(1, "hello"); @@ -1346,8 +1353,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("string1", "hello", resultSet.getString(1)); - assertEquals("string2", "hello", resultSet.getString(2)); + assertEquals("hello", resultSet.getString(1), "string1"); + assertEquals("hello", resultSet.getString(2), "string2"); } // bind date sqlDate = java.sql.Date.valueOf("2014-08-26"); @@ -1361,8 +1368,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("string", "2014-08-26", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("2014-08-26", resultSet.getString(1), "string"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); @@ -1378,8 +1385,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); assertEquals( - 
"Incorrect timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1), "Incorrect timestamp"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind time tm = new Time(12345678); // 03:25:45.678 @@ -1393,8 +1400,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("Incorrect time", "03:25:45", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("03:25:45", resultSet.getString(1), "Incorrect time"); + assertEquals("hello", resultSet.getString(2), "string"); } } // bind in where clause @@ -1412,8 +1419,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("c1", "100", resultSet.getString(1)); - assertEquals("c2", "147004", resultSet.getString(2)); + assertEquals("100", resultSet.getString(1), "c1"); + assertEquals("147004", resultSet.getString(2), "c2"); } } @@ -1437,20 +1444,20 @@ public void testBind() throws Throwable { int rowCount = preparedStatement.executeUpdate(); // update count should be 1 - assertEquals("update count", 1, rowCount); + assertEquals(1, rowCount, "update count"); // test the inserted rows try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); + assertEquals(1, resultSet.getInt(1), "int"); + assertEquals("hello", 
resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); + assertNull(resultSet.getString(7), "date"); } } // bind in update statement @@ -1465,13 +1472,13 @@ public void testBind() throws Throwable { try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "world", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); + assertEquals(1, resultSet.getInt(1), "int"); + assertEquals("world", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); + assertNull(resultSet.getString(7), "date"); } // array bind for insert try (PreparedStatement preparedStatement = @@ -1499,11 +1506,11 @@ public void testBind() throws Throwable { // GS optimizes this into one insert execution, but we expand the // return count into an array - assertEquals("Number of update counts", 2, updateCounts.length); + assertEquals(2, updateCounts.length, "Number of update counts"); // update count should be 1 for each - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + assertEquals(1, updateCounts[0], "update count"); + 
assertEquals(1, updateCounts[1], "update count"); } // test the inserted rows try (ResultSet resultSet = @@ -1511,12 +1518,12 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 2, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); + assertEquals(2, resultSet.getInt(1), "int"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); } try (ResultSet resultSet = @@ -1524,12 +1531,12 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 3, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); + assertEquals(3, resultSet.getInt(1), "int"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); } // describe mode @@ -1620,10 +1627,10 @@ public void testBind() throws Throwable { updateCounts = 
preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 16, updateCounts.length); + assertEquals(16, updateCounts.length, "Number of update counts"); for (int idx = 0; idx < 16; idx++) { - assertEquals("update count", 1, updateCounts[idx]); + assertEquals(1, updateCounts[idx], "update count"); } } } @@ -1798,7 +1805,7 @@ public void testBindTimestampNTZ() throws Throwable { int updateCount = preparedStatement.executeUpdate(); // update count should be 1 - assertEquals("update count", 1, updateCount); + assertEquals(1, updateCount, "update count"); // test the inserted rows try (ResultSet resultSet = @@ -1806,7 +1813,7 @@ public void testBindTimestampNTZ() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("timestamp", "Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1)); + assertEquals("Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1), "timestamp"); regularStatement.executeUpdate("truncate table testBindTimestampNTZ"); @@ -1816,7 +1823,7 @@ public void testBindTimestampNTZ() throws Throwable { updateCount = preparedStatement.executeUpdate(); // update count should be 1 - assertEquals("update count", 1, updateCount); + assertEquals(1, updateCount, "update count"); } // test the inserted rows try (ResultSet resultSet = @@ -1852,11 +1859,11 @@ public void testNullBind() throws Throwable { int[] updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + assertEquals(2, updateCounts.length, "Number of update counts"); // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + assertEquals(1, updateCounts[0], "update count"); + assertEquals(1, updateCounts[1], "update count"); preparedStatement.clearBatch(); @@ -1869,11 +1876,11 @@ public void testNullBind() throws Throwable { 
updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + assertEquals(2, updateCounts.length, "Number of update counts"); // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + assertEquals(1, updateCounts[0], "update count"); + assertEquals(1, updateCounts[1], "update count"); preparedStatement.clearBatch(); @@ -1883,10 +1890,10 @@ public void testNullBind() throws Throwable { updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 1, updateCounts.length); + assertEquals(1, updateCounts.length, "Number of update counts"); // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); + assertEquals(1, updateCounts[0], "update count"); preparedStatement.clearBatch(); @@ -1956,12 +1963,12 @@ public void testSnow12603() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + assertEquals(1, resultSet.getInt(1), "integer"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(new BigDecimal("1.3"), resultSet.getBigDecimal(3), "decimal"); + assertEquals(1.3, resultSet.getDouble(4), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(5), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6), "timestamp"); preparedStatement.setObject(1, 1, Types.INTEGER); preparedStatement.setObject(2, "hello", Types.VARCHAR); @@ -1986,12 
+1993,12 @@ public void testSnow12603() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + assertEquals(1, resultSet.getInt(1), "integer"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(new BigDecimal("1.3"), resultSet.getBigDecimal(3), "decimal"); + assertEquals(1.3, resultSet.getDouble(4), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(5), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6), "timestamp"); } } } @@ -2016,11 +2023,11 @@ public void testSnow6290() throws Throwable { ResultSet res = statement.executeQuery("select ts from testSnow6290"); - assertTrue("expect a row", res.next()); + assertTrue(res.next(), "expect a row"); Timestamp tsFromDB = res.getTimestamp(1); - assertEquals("timestamp mismatch", ts.getTime(), tsFromDB.getTime()); + assertEquals(ts.getTime(), tsFromDB.getTime(), "timestamp mismatch"); } finally { statement.execute("DROP TABLE if exists testSnow6290"); } @@ -2056,28 +2063,28 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=BigDecimal", Long.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=BigDecimal"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("integer", resultSet.getObject(1) instanceof Long); + assertTrue(resultSet.getObject(1) instanceof Long, "integer"); } preparedStatement.setString(1, "hello"); try (ResultSet resultSet = preparedStatement.executeQuery()) { 
resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=String", String.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=String"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("string", resultSet.getObject(1) instanceof String); + assertTrue(resultSet.getObject(1) instanceof String, "string"); } preparedStatement.setDouble(1, 1.2); @@ -2086,14 +2093,14 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=Double", Double.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=Double"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("double", resultSet.getObject(1) instanceof Double); + assertTrue(resultSet.getObject(1) instanceof Double, "double"); } preparedStatement.setTimestamp(1, new Timestamp(0)); @@ -2102,14 +2109,14 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=Timestamp", Timestamp.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=Timestamp"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("timestamp", resultSet.getObject(1) instanceof Timestamp); + assertTrue(resultSet.getObject(1) instanceof Timestamp, "timestamp"); } preparedStatement.setDate(1, new java.sql.Date(0)); @@ -2117,14 +2124,14 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=Date", - java.sql.Date.class.getName(), - resultSetMetaData.getColumnClassName(1)); + Date.class.getName(), + resultSetMetaData.getColumnClassName(1), + "column class name=Date"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("date", resultSet.getObject(1) 
instanceof java.sql.Date); + assertTrue(resultSet.getObject(1) instanceof Date, "date"); } } } @@ -2135,7 +2142,7 @@ public void testGetDoubleForNull() throws Throwable { Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery("select cast(null as int) as null_int")) { assertTrue(resultSet.next()); - assertEquals("0 for null", 0, resultSet.getDouble(1), 0.0001); + assertEquals(0, resultSet.getDouble(1), 0.0001, "0 for null"); } } @@ -2182,7 +2189,7 @@ public void testPutViaExecuteQuery() throws Throwable { } } - @Ignore("takes 7 min. enable this for long running tests") + @Disabled("takes 7 min. enable this for long running tests") @Test public void testSnow16332() throws Throwable { // use v1 query request API and inject 200ms socket timeout for first @@ -2292,7 +2299,7 @@ public void run() { fail("should be canceled"); } catch (SQLException ex) { // assert the sqlstate is what we expect (QUERY CANCELLED) - assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); + assertEquals(SqlState.QUERY_CANCELED, ex.getSQLState(), "sqlstate mismatch"); } } } @@ -2329,7 +2336,7 @@ public void testSnow14774() throws Throwable { tsStrInLA = sdf.format(tsInLA); // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + assertEquals(tsStrInUTC, tsStrInLA, "timestamp values not equal"); } // 30 minutes before daylight saving change try (ResultSet res = statement.executeQuery("select '2015-03-08 01:30:00'::timestamp_ntz")) { @@ -2351,7 +2358,7 @@ public void testSnow14774() throws Throwable { tsStrInLA = sdf.format(tsInLA); // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + assertEquals(tsStrInUTC, tsStrInLA, "timestamp values not equal"); } } } @@ -2416,7 +2423,7 @@ public void testSnow19819() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnTestaccount.class) + @DontRunOnTestaccount public void testClientInfo() throws Throwable { System.setProperty( "snowflake.client.info", @@ -2425,22 +2432,22 @@ public void testClientInfo() throws Throwable { Statement statement = connection.createStatement(); ResultSet res = statement.executeQuery("select current_session_client_info()")) { - assertTrue("result expected", res.next()); + assertTrue(res.next(), "result expected"); String clientInfoJSONStr = res.getString(1); JsonNode clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spark version and spark app are found - assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); + assertEquals("3.0.0", clientInfoJSON.get("spark.version").asText(), "spark version mismatch"); assertEquals( - "snowflakedb version mismatch", "2.8.5", - clientInfoJSON.get("spark.snowflakedb.version").asText()); + clientInfoJSON.get("spark.snowflakedb.version").asText(), + "snowflakedb version mismatch"); assertEquals( - "spark app mismatch", "SnowflakeSourceSuite", - clientInfoJSON.get("spark.app.name").asText()); + clientInfoJSON.get("spark.app.name").asText(), + "spark app mismatch"); closeSQLObjects(res, statement, connection); } @@ -2466,7 +2473,7 @@ public void testLargeResultSet() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSnow26503() throws Throwable { ResultSetMetaData resultSetMetaData; String queryId = null; @@ -2630,7 +2637,7 @@ public void testSnow31104() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGet() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); @@ -2640,7 +2647,8 @@ public void testPutGet() throws Throwable { try { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - 
File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -2648,16 +2656,16 @@ public void testPutGet() throws Throwable { statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage")); + statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage"), + "Failed to put a file"); findFile(statement, "ls @testPutGet_stage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", statement.execute( - "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); @@ -2685,7 +2693,7 @@ public void testPutGet() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetToUnencryptedStage() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); @@ -2695,7 +2703,8 @@ public void testPutGetToUnencryptedStage() throws Throwable { try { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -2705,18 +2714,16 @@ public void testPutGetToUnencryptedStage() throws Throwable { "CREATE OR REPLACE 
STAGE testPutGet_unencstage encryption=(TYPE='SNOWFLAKE_SSE')"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @testPutGet_unencstage")); + statement.execute("PUT file://" + sourceFilePath + " @testPutGet_unencstage"), + "Failed to put a file"); findFile(statement, "ls @testPutGet_unencstage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", statement.execute( - "GET @testPutGet_unencstage 'file://" - + destFolderCanonicalPath - + "' parallel=8")); + "GET @testPutGet_unencstage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); @@ -2738,15 +2745,15 @@ public void testPutGetToUnencryptedStage() throws Throwable { } /** Prepare statement will fail if the connection is already closed. */ - @Test(expected = SQLException.class) - public void testNotClosedSession() throws Throwable { + @Test + public void testNotClosedSession() throws SQLException { Connection connection = getConnection(); connection.close(); - connection.prepareStatement("select 1"); + assertThrows(SnowflakeSQLException.class, () -> connection.prepareStatement("select 1")); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testToTimestampNullBind() throws Throwable { try (Connection connection = getConnection(); PreparedStatement preparedStatement = diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index 989f1211a..fde744f15 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -7,10 +7,11 @@ import static 
net.snowflake.client.jdbc.SnowflakeDriver.implementVersion; import static net.snowflake.client.jdbc.SnowflakeDriverIT.findFile; import static net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1.mapper; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import com.google.cloud.storage.StorageException; @@ -39,11 +40,10 @@ import java.util.Properties; import java.util.UUID; import java.util.zip.GZIPInputStream; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.RunningOnTestaccount; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.DontRunOnTestaccount; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.SFSession; @@ -58,11 +58,10 @@ import net.snowflake.common.core.SqlState; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * General JDBC tests for the latest JDBC driver. 
This doesn't work for the oldest supported driver. @@ -70,10 +69,10 @@ * is not applicable. If it is applicable, move tests to SnowflakeDriverIT so that both the latest * and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeDriverLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); - @Rule public TemporaryFolder tmpFolder2 = new TemporaryFolder(); + @TempDir private File tmpFolder; + @TempDir private File tmpFolder2; public String testStageName = String.format("test_stage_%s", UUID.randomUUID().toString()).replaceAll("-", "_"); @@ -105,7 +104,7 @@ public void testStaticVersionMatchesManifest() { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) + @DontRunOnTestaccount public void testClientInfoConnectionProperty() throws Throwable { String clientInfoJSONStr = null; JsonNode clientInfoJSON = null; @@ -122,11 +121,11 @@ public void testClientInfoConnectionProperty() throws Throwable { clientInfoJSONStr = res.getString(1); clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spart version and spark app are found - assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); + assertEquals("3.0.0", clientInfoJSON.get("spark.version").asText(), "spark version mismatch"); assertEquals( - "spark app mismatch", "SnowflakeSourceSuite", - clientInfoJSON.get("spark.app.name").asText()); + clientInfoJSON.get("spark.app.name").asText(), + "spark app mismatch"); } // Test that when session property is set, connection parameter overrides it @@ -142,11 +141,11 @@ public void testClientInfoConnectionProperty() throws Throwable { clientInfoJSONStr = res.getString(1); clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spart version and spark app are found - assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); + 
assertEquals("3.0.0", clientInfoJSON.get("spark.version").asText(), "spark version mismatch"); assertEquals( - "spark app mismatch", "SnowflakeSourceSuite", - clientInfoJSON.get("spark.app.name").asText()); + clientInfoJSON.get("spark.app.name").asText(), + "spark app mismatch"); } System.clearProperty("snowflake.client.info"); } @@ -163,7 +162,7 @@ public void testGetSessionID() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutThreshold() throws SQLException { try (Connection connection = getConnection()) { // assert that threshold equals default 200 from server side @@ -202,9 +201,10 @@ public void testPutThreshold() throws SQLException { /** Test API for Spark connector for FileTransferMetadata */ @Test - @Ignore + @Disabled public void testGCPFileTransferMetadataWithOneFile() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); try (Connection connection = getConnection("gcpaccount"); @@ -266,9 +266,9 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { // Download two files and verify their content. 
assertTrue( - "Failed to get files", statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8"), + "Failed to get files"); // Make sure that the downloaded files are EQUAL, // they should be gzip compressed @@ -284,9 +284,10 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { /** Test API for Kafka connector for FileTransferMetadata */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); @@ -353,13 +354,13 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { // Download two files and verify their content. assertTrue( - "Failed to get files", statement.execute( "GET @" + testStageName + " 'file://" + destFolderCanonicalPath - + "/' parallel=8")); + + "/' parallel=8"), + "Failed to get files"); // Make sure that the downloaded files are EQUAL, // they should be gzip compressed @@ -376,7 +377,7 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { /** Negative test for FileTransferMetadata. It is only supported for PUT. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGCPFileTransferMetadataNegativeOnlySupportPut() throws Throwable { int expectExceptionCount = 1; int actualExceptionCount = -1; @@ -392,7 +393,8 @@ public void testGCPFileTransferMetadataNegativeOnlySupportPut() throws Throwable SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @" + testStageName + " file://" + destFolderCanonicalPath; @@ -487,23 +489,26 @@ public void testGetPropertyInfo() throws SQLException { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutOverwriteFalseNoDigest() throws Throwable { // create 2 files: an original, and one that will overwrite the original - File file1 = tmpFolder.newFile("testfile.csv"); + File file1 = new File(tmpFolder, "testfile.csv"); + file1.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { bw.write("Writing original file content. This should get overwritten."); } - File file2 = tmpFolder2.newFile("testfile.csv"); + File file2 = new File(tmpFolder2, "testfile.csv"); + file2.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { bw.write("This is all new! 
This should be the result of the overwriting."); } String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -518,25 +523,25 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE testing_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); + statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // put another file in same stage with same filename with overwrite = true assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false")); + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); @@ -564,11 +569,12 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public 
void testPutDisable() throws Throwable { // create a file - File file = tmpFolder.newFile("testfile99.csv"); + File file = new File(tmpFolder, "testfile99.csv"); + file.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) { bw.write("This content won't be uploaded as PUT is disabled."); } @@ -584,7 +590,7 @@ public void testPutDisable() throws Throwable { Statement statement = connection.createStatement()) { statement.execute("PUT file://" + sourceFilePathOriginal + " @testPutGet_disable_stage"); - assertTrue("Shouldn't come here", false); + assertTrue(false, "Shouldn't come here"); } catch (Exception ex) { // Expected assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); @@ -598,11 +604,12 @@ public void testPutDisable() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetDisable() throws Throwable { // create a folder - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); Properties paramProperties = new Properties(); @@ -616,7 +623,7 @@ public void testGetDisable() throws Throwable { statement.execute( "GET @testPutGet_disable_stage 'file://" + destFolderCanonicalPath + "' parallel=8"); - assertTrue("Shouldn't come here", false); + assertTrue(false, "Shouldn't come here"); } catch (Exception ex) { // Expected assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); @@ -798,7 +805,7 @@ public void testSnow76376() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeoOutputTypes() throws Throwable { Properties paramProperties = new Properties(); @@ -862,7 +869,7 @@ private void testGeoOutputTypeSingle( } @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeoMetadata() throws Throwable { Properties paramProperties = new Properties(); @@ -913,7 +920,7 @@ private void testGeoMetadataSingle( } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeometryOutputTypes() throws Throwable { Properties paramProperties = new Properties(); @@ -967,7 +974,7 @@ private void testGeometryOutputTypeSingle( } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeometryMetadata() throws Throwable { Properties paramProperties = new Properties(); @@ -1015,7 +1022,7 @@ private void testGeometryMetadataSingle( * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetGcsDownscopedCredential() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -1027,7 +1034,7 @@ public void testPutGetGcsDownscopedCredential() throws Throwable { /** Added in > 3.15.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetGcsDownscopedCredentialWithDisabledDefaultCredentials() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -1041,7 +1048,8 @@ public void testPutGetGcsDownscopedCredentialWithDisabledDefaultCredentials() th private void putAndGetFile(Statement statement) throws Throwable { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE_2); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String 
destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -1049,16 +1057,16 @@ private void putAndGetFile(Statement statement) throws Throwable { statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage")); + statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage"), + "Failed to put a file"); findFile(statement, "ls @testPutGet_stage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", statement.execute( - "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE_2 + ".gz"); @@ -1088,25 +1096,28 @@ private void putAndGetFile(Statement statement) throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); try (Connection connection = getConnection("gcpaccount", paramProperties); Statement statement = connection.createStatement()) { try { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File largeTempFile = tmpFolder.newFile("largeFile.csv"); + File largeTempFile = new File(tmpFolder, "largeFile.csv"); + largeTempFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new 
FileWriter(largeTempFile))) { bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); } - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + File largeTempFile2 = new File(tmpFolder, "largeFile2.csv"); + largeTempFile2.createNewFile(); String sourceFilePath = largeTempFile.getCanonicalPath(); @@ -1120,8 +1131,8 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE largefile_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @largefile_stage/"); @@ -1136,9 +1147,9 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); @@ -1165,24 +1176,27 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetLargeFileAzure() throws Throwable { Properties paramProperties = new Properties(); try (Connection connection = getConnection("azureaccount", paramProperties); Statement statement = connection.createStatement()) { try { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, 
"dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File largeTempFile = tmpFolder.newFile("largeFile.csv"); + File largeTempFile = new File(tmpFolder, "largeFile.csv"); + largeTempFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { bw.write("Creating large test file for Azure PUT/GET test"); bw.write(System.lineSeparator()); bw.write("Creating large test file for Azure PUT/GET test"); bw.write(System.lineSeparator()); } - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + File largeTempFile2 = new File(tmpFolder, "largeFile2.csv"); + largeTempFile2.createNewFile(); String sourceFilePath = largeTempFile.getCanonicalPath(); @@ -1196,8 +1210,8 @@ public void testPutGetLargeFileAzure() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE largefile_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @largefile_stage/"); @@ -1212,9 +1226,9 @@ public void testPutGetLargeFileAzure() throws Throwable { // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); @@ -1259,9 +1273,10 @@ private void copyContentFrom(File file1, File file2) throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + 
@DontRunOnGithubActions public void testPutS3RegionalUrl() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); @@ -1344,13 +1359,13 @@ public void testPutS3RegionalUrl() throws Throwable { // Download two files and verify their content. assertTrue( - "Failed to get files", statement.execute( "GET @" + testStageName + " 'file://" + destFolderCanonicalPath - + "/' parallel=8")); + + "/' parallel=8"), + "Failed to get files"); // Make sure that the downloaded files are EQUAL, // they should be gzip compressed @@ -1370,7 +1385,7 @@ public void testPutS3RegionalUrl() throws Throwable { * and Azure */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { String clientName = "clientName"; String clientKey = "clientKey"; @@ -1433,7 +1448,8 @@ public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { } } - @Test(expected = SnowflakeSQLException.class) + @Test + @DontRunOnGithubActions public void testNoSpaceLeftOnDeviceException() throws SQLException { List supportedAccounts = Arrays.asList("gcpaccount", "s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { @@ -1452,16 +1468,19 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException { SnowflakeStorageClient client = StorageClientFactory.getFactory().createClient(info, 1, null, /* session= */ null); - client.handleStorageException( - new StorageException( - client.getMaxRetries(), - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - client.getMaxRetries(), - "download", - null, - command, - null); + assertThrows( + SnowflakeSQLException.class, + () -> + 
client.handleStorageException( + new StorageException( + client.getMaxRetries(), + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + client.getMaxRetries(), + "download", + null, + command, + null)); } finally { statement.execute("DROP STAGE if exists testPutGet_stage"); } @@ -1471,9 +1490,10 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException { } @Test - @Ignore // ignored until SNOW-1616480 is resolved + @Disabled // TODO: ignored until SNOW-1616480 is resolved public void testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); // set parameter for presignedUrl upload instead of downscoped token Properties paramProperties = new Properties(); @@ -1508,9 +1528,9 @@ public void testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { } assertTrue( - "Failed to get files", statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8"), + "Failed to get files"); assertTrue(isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); } finally { statement.execute("DROP STAGE if exists " + testStageName); @@ -1519,14 +1539,14 @@ public void testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testUploadWithGCSDownscopedCredentialWithoutConnection() throws Throwable { uploadWithGCSDownscopedCredentialWithoutConnection(); } /** Added in > 3.15.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void 
testUploadWithGCSDownscopedCredentialAndDisabledGcsDefaultCredentialsWithoutConnection() throws Throwable { @@ -1539,7 +1559,8 @@ public void testUploadWithGCSDownscopedCredentialWithoutConnection() throws Thro } private void uploadWithGCSDownscopedCredentialWithoutConnection() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -1577,9 +1598,9 @@ private void uploadWithGCSDownscopedCredentialWithoutConnection() throws Throwab .setOcspMode(OCSPMode.FAIL_OPEN) .build()); assertTrue( - "Failed to get files with down-scoped token", statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/'")); + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/'"), + "Failed to get files with down-scoped token"); assertTrue( isFileContentEqual( srcPath, false, destFolderCanonicalPath + "/" + targetFileName, true)); @@ -1599,7 +1620,7 @@ private void uploadWithGCSDownscopedCredentialWithoutConnection() throws Throwab * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testHTAPOptimizations() throws SQLException { try { // Set the HTAP test parameter to true @@ -1671,7 +1692,7 @@ public void testHTAPOptimizations() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testHTAPStatementParameterCaching() throws SQLException { // Set the HTAP test parameter to true try (Connection con = getSnowflakeAdminConnection()) { @@ -1730,9 +1751,10 @@ public void testHTAPStatementParameterCaching() throws SQLException { } @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testS3PutInGS() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); Properties paramProperties = new Properties(); try (Connection connection = getConnection("s3testaccount", paramProperties); @@ -1759,7 +1781,7 @@ public void testS3PutInGS() throws Throwable { new FileInputStream(destFolderCanonicalPath + "/" + fileName); String downloadedFile = IOUtils.toString(downloadedFileStream, StandardCharsets.UTF_8); assertTrue( - "downloaded content does not equal uploaded content", content.equals(downloadedFile)); + content.equals(downloadedFile), "downloaded content does not equal uploaded content"); } finally { statement.execute("DROP STAGE if exists " + testStageName); } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java index 8b42be5c4..f268577e1 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java @@ -3,13 +3,13 @@ */ package net.snowflake.client.jdbc; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import 
java.sql.SQLException; import java.util.ArrayList; @@ -19,7 +19,7 @@ import java.util.Locale; import java.util.Map; import java.util.Properties; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Driver unit test */ public class SnowflakeDriverTest { @@ -52,16 +52,16 @@ void match(String url, SnowflakeConnectString sc) { int port = sc.getPort(); Map parameters = sc.getParameters(); - assertEquals("URL scheme: " + url, this.scheme, scheme); - assertEquals("URL scheme: " + url, this.host, host); - assertEquals("URL scheme: " + url, this.port, port); - assertEquals("URL scheme: " + url, this.parameters.size(), parameters.size()); - assertEquals("URL scheme. " + url, this.account, account); + assertEquals(this.scheme, scheme, "URL scheme: " + url); + assertEquals(this.host, host, "URL scheme: " + url); + assertEquals(this.port, port, "URL scheme: " + url); + assertEquals(this.parameters.size(), parameters.size(), "URL scheme: " + url); + assertEquals(this.account, account, "URL scheme. 
" + url); for (Map.Entry entry : this.parameters.entrySet()) { String k = entry.getKey().toUpperCase(Locale.US); Object v = parameters.get(k); - assertEquals("URL scheme: " + url + ", key: " + k, entry.getValue(), v); + assertEquals(entry.getValue(), v, "URL scheme: " + url + ", key: " + k); } } } @@ -355,7 +355,7 @@ public void testAcceptUrls() throws Exception { expectedParameters)); for (TestCase t : testCases) { - assertTrue("URL is not valid: " + t.url, snowflakeDriver.acceptsURL(t.url)); + assertTrue(snowflakeDriver.acceptsURL(t.url), "URL is not valid: " + t.url); t.match(t.url, SnowflakeConnectString.parse(t.url, SnowflakeDriver.EMPTY_PROPERTIES)); } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java index 22c26465d..fd6ee0d81 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java @@ -1,5 +1,9 @@ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + import com.google.cloud.storage.StorageException; import java.io.File; import java.io.IOException; @@ -10,26 +14,23 @@ import java.sql.Statement; import java.util.Properties; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import 
net.snowflake.client.jdbc.cloud.storage.SnowflakeGCSClient; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.Mockito; /** Test for SnowflakeGcsClient handle exception function, only work with latest driver */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeGcsClientHandleExceptionLatestIT extends AbstractDriverIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private Connection connection; private SFStatement sfStatement; private SFSession sfSession; @@ -38,7 +39,7 @@ public class SnowflakeGcsClientHandleExceptionLatestIT extends AbstractDriverIT private int overMaxRetry; private int maxRetry; - @Before + @BeforeEach public void setup() throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -59,7 +60,7 @@ public void setup() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void error401RenewExpired() throws SQLException, InterruptedException { // Unauthenticated, renew is called. 
spyingClient.handleStorageException( @@ -94,94 +95,117 @@ public void run() { thread.start(); thread.interrupt(); thread.join(); - Assert.assertNull("Exception must not have been thrown in here", exceptionContainer[0]); + assertNull(exceptionContainer[0], "Exception must not have been thrown in here"); Mockito.verify(spyingClient, Mockito.times(2)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void error401OverMaxRetryThrow() throws SQLException { - spyingClient.handleStorageException( - new StorageException(401, "Unauthenticated"), - overMaxRetry, - "upload", - sfSession, - command, - null); + @Test + @DontRunOnGithubActions + public void error401OverMaxRetryThrow() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException(401, "Unauthenticated"), + overMaxRetry, + "upload", + sfSession, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInvalidKey() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorInvalidKey() { // Unauthenticated, renew is called. 
- spyingClient.handleStorageException( - new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Test + @DontRunOnGithubActions public void errorInterruptedException() throws SQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new InterruptedException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new InterruptedException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new InterruptedException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorSocketTimeoutException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorSocketTimeoutException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new SocketTimeoutException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new SocketTimeoutException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + 
new SocketTimeoutException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorUnknownException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorUnknownException() { // Unauthenticated, renew is called. - spyingClient.handleStorageException(new Exception(), 0, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorWithNullSession() throws SQLException { - spyingClient.handleStorageException( - new StorageException(401, "Unauthenticated"), 0, "upload", null, command, null); + @Test + @DontRunOnGithubActions + public void errorWithNullSession() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException(401, "Unauthenticated"), 0, "upload", null, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNoSpaceLeftOnDevice() throws SQLException, IOException { - File destFolder = tmpFolder.newFolder(); + @Test + @DontRunOnGithubActions + public void errorNoSpaceLeftOnDevice() throws IOException { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file://" + destFolderCanonicalPath + "'"; - spyingClient.handleStorageException( - new StorageException( - maxRetry, - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - 0, - "download", - null, - 
getCommand, - null); + assertThrows( + SQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + maxRetry, + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + 0, + "download", + null, + getCommand, + null)); } - @After + @AfterEach public void cleanUp() throws SQLException { sfStatement.close(); connection.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java deleted file mode 100644 index d9cb057d2..000000000 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java +++ /dev/null @@ -1,12 +0,0 @@ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** Test SnowflakeResultSetSerializable for Arrow */ -@Category(TestCategoryArrow.class) -public class SnowflakeResultSetSerializableArrowIT extends SnowflakeResultSetSerializableIT { - public SnowflakeResultSetSerializableArrowIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java index f9c2bb66d..3b6206f55 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java @@ -1,10 +1,10 @@ package net.snowflake.client.jdbc; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import 
static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.FileInputStream; @@ -20,41 +20,33 @@ import java.util.List; import java.util.Properties; import javax.annotation.Nullable; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** SnowflakeResultSetSerializable tests */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class SnowflakeResultSetSerializableIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private static boolean developPrint = false; - private String queryResultFormat; - // sfFullURL is used to support private link URL. // This test case is not for private link env, so just use a valid URL for testing purpose. 
private String sfFullURL = "https://sfctest0.snowflakecomputing.com"; - public SnowflakeResultSetSerializableIT() { - this("json"); - } - - SnowflakeResultSetSerializableIT(String format) { - queryResultFormat = format; + public Connection init(String queryResultFormat) throws SQLException { + return init(null, queryResultFormat); } - public Connection init() throws SQLException { - return init(null); - } - - public Connection init(@Nullable Properties properties) throws SQLException { + public Connection init(@Nullable Properties properties, String queryResultFormat) + throws SQLException { Connection conn = BaseJDBCTest.getConnection(properties); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -121,7 +113,7 @@ private List serializeResultSet( SnowflakeResultSetSerializable entry = resultSetChunks.get(i); // Write object to file - String tmpFileName = tmpFolder.getRoot().getPath() + "_result_" + i + "." + fileNameAppendix; + String tmpFileName = tmpFolder.getPath() + "_result_" + i + "." + fileNameAppendix; try (FileOutputStream fo = new FileOutputStream(tmpFileName); ObjectOutputStream so = new ObjectOutputStream(fo)) { so.writeObject(entry); @@ -238,11 +230,16 @@ private String deserializeResultSetWithProperties(List files, Properties * @throws Throwable If any error happens. 
*/ private void testBasicTableHarness( - int rowCount, long maxSizeInBytes, String whereClause, boolean needSetupTable, boolean async) + int rowCount, + long maxSizeInBytes, + String whereClause, + boolean needSetupTable, + boolean async, + String queryResultFormat) throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init()) { + try (Connection connection = init(queryResultFormat)) { Statement statement = connection.createStatement(); if (developPrint) { @@ -289,57 +286,61 @@ private void testBasicTableHarness( assertEquals(chunkResultString, originalResultCSVString); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithEmptyResult() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithEmptyResult(String queryResultFormat) throws Throwable { // Use complex WHERE clause in order to test both ARROW and JSON. // It looks GS only generates JSON format result. - testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, false); + testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, true); + testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, true, queryResultFormat); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithOnlyFirstChunk() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithOnlyFirstChunk(String queryResultFormat) throws Throwable { // Result only includes first data chunk, test maxSize is small. 
- testBasicTableHarness(1, 1, "", true, false); + testBasicTableHarness(1, 1, "", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(1, 1, "", true, true); + testBasicTableHarness(1, 1, "", true, true, queryResultFormat); // Result only includes first data chunk, test maxSize is big. - testBasicTableHarness(1, 1024 * 1024, "", false, false); + testBasicTableHarness(1, 1024 * 1024, "", false, false, queryResultFormat); // Test async mode - testBasicTableHarness(1, 1024 * 1024, "", false, true); + testBasicTableHarness(1, 1024 * 1024, "", false, true, queryResultFormat); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithOneFileChunk() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithOneFileChunk(String queryResultFormat) throws Throwable { // Result only includes first data chunk, test maxSize is small. - testBasicTableHarness(300, 1, "", true, false); + testBasicTableHarness(300, 1, "", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(300, 1, "", true, true); + testBasicTableHarness(300, 1, "", true, true, queryResultFormat); // Result only includes first data chunk, test maxSize is big. 
- testBasicTableHarness(300, 1024 * 1024, "", false, false); + testBasicTableHarness(300, 1024 * 1024, "", false, false, queryResultFormat); // Test Async mode - testBasicTableHarness(300, 1024 * 1024, "", false, true); + testBasicTableHarness(300, 1024 * 1024, "", false, true, queryResultFormat); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithSomeFileChunks() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithSomeFileChunks(String queryResultFormat) throws Throwable { // Result only includes first data chunk, test maxSize is small. - testBasicTableHarness(90000, 1, "", true, false); + testBasicTableHarness(90000, 1, "", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(90000, 1, "", true, true); + testBasicTableHarness(90000, 1, "", true, true, queryResultFormat); // Result only includes first data chunk, test maxSize is median. - testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, false); + testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, false, queryResultFormat); // Test Async mode - testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, true); + testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, true, queryResultFormat); // Result only includes first data chunk, test maxSize is big. 
- testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, false); + testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, false, queryResultFormat); // Test Async mode - testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, true); + testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, true, queryResultFormat); } /** @@ -365,11 +366,12 @@ private void testTimestampHarness( String format_ntz, String format_ltz, String format_tz, - String timezone) + String timezone, + String queryResultFormat) throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("alter session set DATE_OUTPUT_FORMAT = '" + format_date + "'"); statement.execute("alter session set TIME_OUTPUT_FORMAT = '" + format_time + "'"); @@ -419,9 +421,10 @@ private void testTimestampHarness( assertEquals(chunkResultString, originalResultCSVString); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testTimestamp() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testTimestamp(String queryResultFormat) throws Throwable { String[] dateFormats = {"YYYY-MM-DD", "DD-MON-YYYY", "MM/DD/YYYY"}; String[] timeFormats = {"HH24:MI:SS.FFTZH:TZM", "HH24:MI:SS.FF", "HH24:MI:SS"}; String[] timestampFormats = { @@ -441,16 +444,19 @@ public void testTimestamp() throws Throwable { timestampFormats[i], timestampFormats[i], timestampFormats[i], - timezones[i]); + timezones[i], + queryResultFormat); } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithSerializeObjectsAfterReadResultSet() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + 
public void testBasicTableWithSerializeObjectsAfterReadResultSet(String queryResultFormat) + throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("create or replace schema testschema"); @@ -528,13 +534,14 @@ private synchronized List splitResultSetSerializables( return resultFileList; } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSplitResultSetSerializable() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testSplitResultSetSerializable(String queryResultFormat) throws Throwable { List fileNameList = null; String originalResultCSVString = null; int rowCount = 90000; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( @@ -594,10 +601,11 @@ private void hackToSetupWrongURL(List resultSetS } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testCloseUnconsumedResultSet() throws Throwable { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testCloseUnconsumedResultSet(String queryResultFormat) throws Throwable { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -624,13 +632,14 @@ public void testCloseUnconsumedResultSet() throws Throwable { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testNegativeWithChunkFileNotExist() throws Throwable { + @ParameterizedTest + 
@ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testNegativeWithChunkFileNotExist(String queryResultFormat) throws Throwable { // This test takes about (download worker retry times * networkTimeout) long to finish Properties properties = new Properties(); properties.put("networkTimeout", 10000); // 10000 millisec - try (Connection connection = init(properties)) { + try (Connection connection = init(properties, queryResultFormat)) { try (Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -678,10 +687,11 @@ public void testNegativeWithChunkFileNotExist() throws Throwable { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testNegativeWithClosedResultSet() throws Throwable { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testNegativeWithClosedResultSet(String queryResultFormat) throws Throwable { + try (Connection connection = init(queryResultFormat)) { Statement statement = connection.createStatement(); statement.execute( @@ -730,15 +740,16 @@ public void testNegativeWithClosedResultSet() throws Throwable { * * @throws Throwable */ - @Test - @Ignore - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testCustomProxyWithFiles() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @Disabled + @DontRunOnGithubActions + public void testCustomProxyWithFiles(String queryResultFormat) throws Throwable { boolean generateFiles = false; boolean correctProxy = false; if (generateFiles) { - generateTestFiles(); + generateTestFiles(queryResultFormat); fail("This is generate test file."); } @@ -775,8 +786,8 @@ public void testCustomProxyWithFiles() throws Throwable { } } - private 
void generateTestFiles() throws Throwable { - try (Connection connection = init(); + private void generateTestFiles(String queryResultFormat) throws Throwable { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( @@ -800,15 +811,16 @@ private void generateTestFiles() throws Throwable { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testRetrieveMetadata() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testRetrieveMetadata(String queryResultFormat) throws Throwable { List fileNameList; int rowCount = 90000; long expectedTotalRowCount = 0; long expectedTotalCompressedSize = 0; long expectedTotalUncompressedSize = 0; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java index aed4d1f39..e104abc66 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java @@ -3,6 +3,10 @@ */ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; @@ -16,27 +20,24 @@ import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import 
net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.cloud.storage.SnowflakeS3Client; import net.snowflake.client.jdbc.cloud.storage.StageInfo; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.Mockito; /** Test for SnowflakeS3Client handle exception function */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeS3ClientHandleExceptionLatestIT extends AbstractDriverIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private Connection connection; private SFStatement sfStatement; private SFSession sfSession; @@ -46,7 +47,7 @@ public class SnowflakeS3ClientHandleExceptionLatestIT extends AbstractDriverIT { private int maxRetry; private static final String EXPIRED_AWS_TOKEN_ERROR_CODE = "ExpiredToken"; - @Before + @BeforeEach public void setup() throws SQLException { connection = getConnection("s3testaccount"); sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -75,7 +76,7 @@ public void setup() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void errorRenewExpired() throws SQLException, InterruptedException { AmazonS3Exception ex = new 
AmazonS3Exception("unauthenticated"); ex.setErrorCode(EXPIRED_AWS_TOKEN_ERROR_CODE); @@ -100,19 +101,27 @@ public void run() { thread.start(); thread.interrupt(); thread.join(); - Assert.assertNull("Exception must not have been thrown in here", exceptionContainer[0]); + assertNull(exceptionContainer[0], "Exception must not have been thrown in here"); Mockito.verify(spyingClient, Mockito.times(2)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNotFound() throws SQLException { - spyingClient.handleStorageException( - new AmazonS3Exception("Not found"), overMaxRetry, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorNotFound() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new AmazonS3Exception("Not found"), + overMaxRetry, + "upload", + sfSession, + command, + null)); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void errorBadRequestTokenExpired() throws SQLException { AmazonServiceException ex = new AmazonServiceException("Bad Request"); ex.setServiceName("Amazon S3"); @@ -126,91 +135,113 @@ public void errorBadRequestTokenExpired() throws SQLException { Mockito.verify(spyingClient, Mockito.times(1)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorClientUnknown() throws SQLException { - spyingClient.handleStorageException( - new AmazonClientException("Not found", new IOException()), - overMaxRetry, - "upload", - sfSession, - command, - null); + @Test + @DontRunOnGithubActions + public void errorClientUnknown() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new AmazonClientException("Not 
found", new IOException()), + overMaxRetry, + "upload", + sfSession, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInvalidKey() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorInvalidKey() { // Unauthenticated, renew is called. - spyingClient.handleStorageException( - new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInterruptedException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorInterruptedException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new InterruptedException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new InterruptedException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new InterruptedException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorSocketTimeoutException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorSocketTimeoutException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new 
SocketTimeoutException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new SocketTimeoutException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new SocketTimeoutException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorUnknownException() throws SQLException { - spyingClient.handleStorageException(new Exception(), 0, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorUnknownException() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorRenewExpiredNullSession() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorRenewExpiredNullSession() { // Unauthenticated, renew is called. 
AmazonS3Exception ex = new AmazonS3Exception("unauthenticated"); ex.setErrorCode(EXPIRED_AWS_TOKEN_ERROR_CODE); - spyingClient.handleStorageException(ex, 0, "upload", null, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> spyingClient.handleStorageException(ex, 0, "upload", null, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNoSpaceLeftOnDevice() throws SQLException, IOException { - File destFolder = tmpFolder.newFolder(); + @Test + @DontRunOnGithubActions + public void errorNoSpaceLeftOnDevice() throws IOException { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file://" + destFolderCanonicalPath + "'"; - spyingClient.handleStorageException( - new StorageException( - maxRetry, - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - 0, - "download", - null, - getCommand, - null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + maxRetry, + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + 0, + "download", + null, + getCommand, + null)); } - @After + @AfterEach public void cleanUp() throws SQLException { sfStatement.close(); connection.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java index 4cd2fa7e8..92d00affc 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java @@ -2,10 +2,10 @@ import static net.snowflake.client.jdbc.SnowflakeChunkDownloader.NoOpChunkDownloader; import static 
net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1.ChunkFileMetadata; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -16,7 +16,7 @@ import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFBaseStatement; import net.snowflake.client.core.SFStatementType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeSerializableTest { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java index ebf32dcef..41bee6e85 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java @@ -3,88 +3,81 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Timestamp; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.TimeZone; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * Tests SnowflakeTimestampWithTimezone to ensure the output is not impacted by Day Light Saving * Time. Not this test case is not thread safe, because TimeZone.setDefault is called. */ -@RunWith(Parameterized.class) public class SnowflakeTimestampWithTimezoneTest extends BaseJDBCTest { private static TimeZone orgTimeZone; - private final String timeZone; - private final String inputTimestamp; - private final String outputTimestamp; + static class Params implements ArgumentsProvider { + public Stream provideArguments(ExtensionContext context) { + String[] timeZoneList = {"PST", "America/New_York", "UTC", "Asia/Singapore"}; - public SnowflakeTimestampWithTimezoneTest( - String timeZone, String inputTimestamp, String outputTimestamp) { - this.timeZone = timeZone; - this.inputTimestamp = inputTimestamp; - this.outputTimestamp = outputTimestamp; - } - - @Parameterized.Parameters(name = "tz={0}, input={1}, output={2}") - public static Collection convert() { - String[] timeZoneList = {"PST", "America/New_York", "UTC", "Asia/Singapore"}; + String[] dateTimeList = { + "2018-03-11 01:10:34.0123456", + "2018-03-11 02:10:34.0123456", + "2018-03-11 03:10:34.0123456", + "2018-11-04 01:10:34.123", + "2018-11-04 02:10:34.123", + "2018-11-04 03:10:34.123", + "2020-03-11 01:10:34.456", + "2020-03-11 02:10:34.456", + "2020-03-11 03:10:34.456", + "2020-11-01 01:10:34.123", + "2020-11-01 02:10:34.123", + "2020-11-01 03:10:34.123" + }; - String[] dateTimeList = { - "2018-03-11 01:10:34.0123456", - "2018-03-11 02:10:34.0123456", - "2018-03-11 03:10:34.0123456", - "2018-11-04 01:10:34.123", - "2018-11-04 02:10:34.123", - "2018-11-04 03:10:34.123", - "2020-03-11 01:10:34.456", - "2020-03-11 02:10:34.456", - 
"2020-03-11 03:10:34.456", - "2020-11-01 01:10:34.123", - "2020-11-01 02:10:34.123", - "2020-11-01 03:10:34.123" - }; - - List testCases = new ArrayList<>(); - for (String timeZone : timeZoneList) { - for (String dateTime : dateTimeList) { - testCases.add(new Object[] {timeZone, dateTime, dateTime}); + List testCases = new ArrayList<>(); + for (String timeZone : timeZoneList) { + for (String dateTime : dateTimeList) { + testCases.add(Arguments.of(timeZone, dateTime, dateTime)); + } } + return testCases.stream(); } - return testCases; } /** Records the original TimeZone */ - @BeforeClass + @BeforeAll public static void keepOriginalTimeZone() { orgTimeZone = TimeZone.getDefault(); } - @AfterClass + @AfterAll public static void restoreTimeZone() { TimeZone.setDefault(orgTimeZone); } - @Test - public void testTimestampNTZ() throws Throwable { + @ParameterizedTest(name = "{index}: {1} {0}") + @ArgumentsSource(Params.class) + public void testTimestampNTZ(String timeZone, String inputTimestamp, String outputTimestamp) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); - LocalDateTime dt = parseTimestampNTZ(this.inputTimestamp); + LocalDateTime dt = parseTimestampNTZ(inputTimestamp); SnowflakeTimestampWithTimezone stn = new SnowflakeTimestampWithTimezone( dt.toEpochSecond(ZoneOffset.UTC) * 1000, dt.getNano(), TimeZone.getTimeZone("UTC")); - assertEquals(this.outputTimestamp, stn.toString()); + assertEquals(outputTimestamp, stn.toString()); } @Test diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java index 29c58b787..b24825c96 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java @@ -2,15 +2,16 @@ import static net.snowflake.client.jdbc.SnowflakeType.convertStringToType; import static net.snowflake.client.jdbc.SnowflakeType.getJavaType; -import static org.junit.Assert.assertEquals; -import 
static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.math.BigDecimal; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; import java.sql.Types; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeTypeTest { @@ -97,7 +98,7 @@ public void testJavaTypeToSFType() throws SnowflakeSQLException { @Test public void testJavaTypeToClassName() throws SQLException { assertEquals(SnowflakeType.javaTypeToClassName(Types.DECIMAL), BigDecimal.class.getName()); - assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), java.sql.Time.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), Time.class.getName()); assertEquals(SnowflakeType.javaTypeToClassName(Types.BOOLEAN), Boolean.class.getName()); assertThrows( SQLFeatureNotSupportedException.class, diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 6e61d82dc..054aef9fe 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -3,11 +3,13 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap; +import static net.snowflake.client.jdbc.SnowflakeUtil.extractColumnMetadata; import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -16,12 +18,17 @@ import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; -import net.snowflake.client.category.TestCategoryCore; +import java.util.HashMap; +import java.util.Map; +import java.util.TreeMap; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ObjectMapperFactory; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.apache.http.Header; +import org.apache.http.message.BasicHeader; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SnowflakeUtilTest extends BaseJDBCTest { private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); @@ -39,8 +46,7 @@ public void testCreateMetadata() throws Throwable { SnowflakeColumnMetadata expectedColumnMetadata = createExpectedMetadata(rootNode, fieldOne, fieldTwo); // when - SnowflakeColumnMetadata columnMetadata = - SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + SnowflakeColumnMetadata columnMetadata = extractColumnMetadata(rootNode, false, null); // then assertNotNull(columnMetadata); assertEquals( @@ -62,8 +68,7 @@ public void testCreateFieldsMetadataForObject() throws Throwable { rootNode.putIfAbsent("fields", fields); // when - SnowflakeColumnMetadata columnMetadata = - SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + SnowflakeColumnMetadata columnMetadata = extractColumnMetadata(rootNode, false, null); // then assertNotNull(columnMetadata); assertEquals("OBJECT", columnMetadata.getTypeName()); @@ -82,6 +87,36 @@ public void testCreateFieldsMetadataForObject() throws Throwable { assertTrue(secondField.isNullable()); } + @Test + public void shouldConvertCreateCaseInsensitiveMap() { 
+ Map map = new HashMap<>(); + map.put("key1", "value1"); + + map = SnowflakeUtil.createCaseInsensitiveMap(map); + assertTrue(map instanceof TreeMap); + assertEquals(String.CASE_INSENSITIVE_ORDER, ((TreeMap) map).comparator()); + assertEquals("value1", map.get("key1")); + assertEquals("value1", map.get("Key1")); + assertEquals("value1", map.get("KEy1")); + + map.put("KEY1", "changed_value1"); + assertEquals("changed_value1", map.get("KEY1")); + } + + @Test + public void shouldConvertHeadersCreateCaseInsensitiveMap() { + Header[] headers = + new Header[] {new BasicHeader("key1", "value1"), new BasicHeader("key2", "value2")}; + + Map map = createCaseInsensitiveMap(headers); + assertTrue(map instanceof TreeMap); + assertEquals(String.CASE_INSENSITIVE_ORDER, ((TreeMap) map).comparator()); + assertEquals("value1", map.get("key1")); + assertEquals("value2", map.get("key2")); + assertEquals("value1", map.get("Key1")); + assertEquals("value2", map.get("Key2")); + } + private static SnowflakeColumnMetadata createExpectedMetadata( JsonNode rootNode, JsonNode fieldOne, JsonNode fieldTwo) throws SnowflakeSQLLoggedException { ColumnTypeInfo columnTypeInfo = diff --git a/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java b/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java index 37819457c..03fa47418 100644 --- a/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java @@ -1,10 +1,10 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.node.ObjectNode; import net.minidev.json.JSONObject; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SqlFeatureNotSupportedTelemetryTest { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java 
b/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java index 08bf2ed72..268173e92 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java @@ -3,17 +3,17 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementAlreadyClosedIT extends BaseJDBCTest { @Test public void testStatementAlreadyClosed() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java b/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java index 061edb528..f66bbb7c2 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc; -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class StatementArrowIT extends StatementIT { public StatementArrowIT() { super(); diff --git a/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java index 01be27150..b6c62ddc1 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java @@ -6,11 +6,11 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementIT.java b/src/test/java/net/snowflake/client/jdbc/StatementIT.java index 2fa713308..075889834 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementIT.java @@ -6,13 +6,13 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.sql.BatchUpdateException; @@ -24,21 +24,19 @@ import 
java.time.Duration; import java.util.List; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryClient; import net.snowflake.common.core.SqlState; import org.awaitility.Awaitility; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** Statement tests */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; @@ -50,7 +48,7 @@ public static Connection getConnection() throws SQLException { return conn; } - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; @Test public void testFetchDirection() throws SQLException { @@ -64,7 +62,7 @@ public void testFetchDirection() throws SQLException { } } - @Ignore("Not working for setFetchSize") + @Disabled("Not working for setFetchSize") @Test public void testFetchSize() throws SQLException { try (Statement statement = connection.createStatement()) { @@ -362,7 +360,8 @@ public void testExecuteBatch() throws Exception { "put file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%test_batch auto_compress=false"); - File tempFolder = tmpFolder.newFolder("test_downloads_folder"); + File tempFolder = new File(tmpFolder, "test_downloads_folder"); + tempFolder.mkdirs(); statement.addBatch("get @%test_batch file://" + 
tempFolder.getCanonicalPath()); rowCounts = statement.executeBatch(); @@ -423,7 +422,7 @@ public void testExecuteLargeBatch() throws SQLException { * @throws SQLException if any error occurs */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testExecuteUpdateZeroCount() throws SQLException { try (Connection connection = getConnection()) { String[] testCommands = { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java index d041b1694..b0eefd096 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java @@ -1,15 +1,15 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Large update test. No JSON/ARROW specific test case is required. 
*/ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementLargeUpdateIT extends BaseJDBCTest { @Test public void testLargeUpdate() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java index 9d96f44ea..e2f030464 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java @@ -6,11 +6,11 @@ import static net.snowflake.client.jdbc.ErrorCode.ROW_DOES_NOT_EXIST; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.net.URL; @@ -22,17 +22,15 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.bind.BindUploader; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * Statement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -40,7 +38,7 @@ * if the tests still is not applicable. If it is applicable, move tests to StatementIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementLatestIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; @@ -52,7 +50,7 @@ public static Connection getConnection() throws SQLException { return conn; } - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; @Test public void testExecuteCreateAndDrop() throws SQLException { @@ -83,9 +81,10 @@ public void testExecuteCreateAndDrop() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testCopyAndUpload() throws Exception { - File tempFolder = tmpFolder.newFolder("test_downloads_folder"); + File tempFolder = new File(tmpFolder, "test_downloads_folder"); + tempFolder.mkdirs(); List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { String fileName = "test_copy.csv"; @@ -198,7 +197,7 @@ public void testExecuteOpenResultSets() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPreparedStatementLogging() throws SQLException { try (Connection con = getConnection(); Statement stmt = con.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java index 12aa69882..22b58584d 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java @@ -4,17 +4,17 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementNoOpLatestIT { @Test public void testSnowflakeNoOpStatement() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/StreamIT.java b/src/test/java/net/snowflake/client/jdbc/StreamIT.java index d1762904d..e6407c16c 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.InputStream; import java.io.StringWriter; @@ -14,15 +14,14 @@ import java.sql.Statement; import java.util.Arrays; import java.util.List; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.IOUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** 
Stream interface tests. Snowflake JDBC specific API */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class StreamIT extends BaseJDBCTest { /** * Test Upload Stream @@ -53,7 +52,7 @@ public void testUploadStream() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } } finally { statement.execute("rm @~/" + DEST_PREFIX); @@ -69,7 +68,7 @@ public void testUploadStream() throws Throwable { * @throws Throwable if any error occurs. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDownloadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); @@ -132,7 +131,7 @@ public void testCompressAndUploadStream() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } } finally { diff --git a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java index 3ab179b70..af7c8eea3 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedWriter; import java.io.File; @@ -19,15 +19,13 @@ import 
java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * Stream API tests for the latest JDBC driver. This doesn't work for the oldest supported driver. @@ -35,10 +33,10 @@ * is not applicable. If it is applicable, move tests to StreamIT so that both the latest and oldest * supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class StreamLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /** * Test Upload Stream with atypical stage names @@ -72,7 +70,7 @@ public void testUnusualStageName() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); @@ -92,7 +90,7 @@ public void testUnusualStageName() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } } finally { statement.execute("DROP TABLE IF EXISTS \"ice cream (nice)\""); @@ -101,7 +99,7 @@ public 
void testUnusualStageName() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; Properties paramProperties = new Properties(); @@ -118,8 +116,8 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { } catch (Exception ex) { assertTrue(ex instanceof SQLException); assertTrue( - "Wrong exception message: " + ex.getMessage(), - ex.getMessage().matches(".*Blob.*not found in bucket.*")); + ex.getMessage().contains("File not found"), + "Wrong exception message: " + ex.getMessage()); } finally { statement.execute("rm @~/" + DEST_PREFIX); } @@ -127,7 +125,7 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { } @Test - @Ignore + @Disabled public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOException { final String DEST_PREFIX = "testUploadStream"; @@ -141,7 +139,7 @@ public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOExcepti + " @testgcpstage/" + DEST_PREFIX)) { assertTrue(rset.next()); - assertEquals("Error message:" + rset.getString(8), "UPLOADED", rset.getString(7)); + assertEquals("UPLOADED", rset.getString(7), "Error message:" + rset.getString(8)); InputStream out = connection @@ -162,7 +160,7 @@ public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOExcepti } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDownloadToStreamGCS() throws SQLException, IOException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; Properties paramProperties = new Properties(); @@ -202,7 +200,8 @@ public void testSpecialCharactersInFileName() throws SQLException, IOException { Statement statement = connection.createStatement()) { try { // Create a temporary file with special 
characters in the name and write to it - File specialCharFile = tmpFolder.newFile("(special char@).txt"); + File specialCharFile = new File(tmpFolder, "(special char@).txt"); + specialCharFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(specialCharFile))) { bw.write("Creating test file for downloadStream test"); } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java index 20a070a02..d09182860 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java @@ -2,26 +2,28 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.InputStream; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import java.util.UUID; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class CloudStorageClientLatestIT extends BaseJDBCTest { /** * Test for SNOW-565154 - it was waiting for ~5 minutes so the test is waiting much shorter time */ - @Test(timeout = 30000L) + @Test + @Timeout(30) public void testDownloadStreamShouldFailFastOnNotExistingFile() throws Throwable { String stageName = "testDownloadStream_stage_" + UUID.randomUUID().toString().replaceAll("-", "_"); diff --git 
a/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java index a560d1f81..61cd07769 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -16,8 +16,8 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; public class EncryptionProviderTest { @@ -41,7 +41,7 @@ public class EncryptionProviderTest { byte[] plainText = "the quick brown fox jumps over the lazy dog".getBytes(StandardCharsets.UTF_8); - @Before + @BeforeEach public void setUp() { encMat.setQueryStageMasterKey(queryStageMasterKey); encMat.setSmkId(123); diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java index b853ef639..f883324d3 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -25,8 +25,8 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; public class GcmEncryptionProviderTest { @@ -58,7 +58,7 @@ public class GcmEncryptionProviderTest { byte[] dataAad = "data aad".getBytes(StandardCharsets.UTF_8); byte[] keyAad = "key aad".getBytes(StandardCharsets.UTF_8); - @Before + @BeforeEach public void setUp() { encMat.setQueryStageMasterKey(queryStageMasterKey); encMat.setSmkId(123); @@ -138,21 +138,21 @@ public void testDecryptStreamWithInvalidKey() throws Exception { InputStream plainTextStream = new ByteArrayInputStream(plainText); byte[] cipherText = encryptStream(plainTextStream, dataAad, keyAad); + + byte[] encryptedKey = encKeyArgumentCaptor.getValue(); + encryptedKey[0] = (byte) ((encryptedKey[0] + 1) % 255); assertThrows( AEADBadTagException.class, - () -> { - byte[] encryptedKey = encKeyArgumentCaptor.getValue(); - encryptedKey[0] = (byte) ((encryptedKey[0] + 1) % 255); - IOUtils.toByteArray( - GcmEncryptionProvider.decryptStream( - new ByteArrayInputStream(cipherText), - Base64.getEncoder().encodeToString(encryptedKey), - Base64.getEncoder().encodeToString(dataIvDataArgumentCaptor.getValue()), - Base64.getEncoder().encodeToString(keyIvDataArgumentCaptor.getValue()), - encMat, - dataAad == null ? "" : Base64.getEncoder().encodeToString(dataAad), - keyAad == null ? 
"" : Base64.getEncoder().encodeToString(keyAad))); - }); + () -> + IOUtils.toByteArray( + GcmEncryptionProvider.decryptStream( + new ByteArrayInputStream(cipherText), + Base64.getEncoder().encodeToString(encryptedKey), + Base64.getEncoder().encodeToString(dataIvDataArgumentCaptor.getValue()), + Base64.getEncoder().encodeToString(keyIvDataArgumentCaptor.getValue()), + encMat, + dataAad == null ? "" : Base64.getEncoder().encodeToString(dataAad), + keyAad == null ? "" : Base64.getEncoder().encodeToString(keyAad)))); } @Test @@ -160,12 +160,12 @@ public void testDecryptStreamWithInvalidDataIV() throws Exception { InputStream plainTextStream = new ByteArrayInputStream(plainText); byte[] cipherText = encryptStream(plainTextStream, dataAad, keyAad); + byte[] dataIvBase64 = dataIvDataArgumentCaptor.getValue(); + dataIvBase64[0] = (byte) ((dataIvBase64[0] + 1) % 255); IOException ioException = assertThrows( IOException.class, () -> { - byte[] dataIvBase64 = dataIvDataArgumentCaptor.getValue(); - dataIvBase64[0] = (byte) ((dataIvBase64[0] + 1) % 255); IOUtils.toByteArray( GcmEncryptionProvider.decryptStream( new ByteArrayInputStream(cipherText), @@ -184,11 +184,11 @@ public void testDecryptStreamWithInvalidKeyIV() throws Exception { InputStream plainTextStream = new ByteArrayInputStream(plainText); byte[] cipherText = encryptStream(plainTextStream, dataAad, keyAad); + byte[] keyIvBase64 = keyIvDataArgumentCaptor.getValue(); + keyIvBase64[0] = (byte) ((keyIvBase64[0] + 1) % 255); assertThrows( AEADBadTagException.class, () -> { - byte[] keyIvBase64 = keyIvDataArgumentCaptor.getValue(); - keyIvBase64[0] = (byte) ((keyIvBase64[0] + 1) % 255); IOUtils.toByteArray( GcmEncryptionProvider.decryptStream( new ByteArrayInputStream(cipherText), diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java index 93539005a..4bca15b3a 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java @@ -1,15 +1,18 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.spy; +import com.amazonaws.services.kms.model.UnsupportedOperationException; import com.microsoft.azure.storage.blob.ListBlobItem; import java.sql.Connection; import java.sql.SQLException; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; +import java.util.ArrayList; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.BaseJDBCTest; @@ -17,11 +20,13 @@ import net.snowflake.client.jdbc.SnowflakeFileTransferAgent; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; -import org.junit.Test; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +@Tag(TestTags.OTHERS) public class SnowflakeAzureClientLatestIT extends BaseJDBCTest { @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { try (Connection connection = getConnection("azureaccount")) { SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -42,11 +47,11 @@ public void 
testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { @Test public void testCloudExceptionTest() { - Iterable mockList = null; + Iterable mockList = new ArrayList<>(); AzureObjectSummariesIterator iterator = new AzureObjectSummariesIterator(mockList); AzureObjectSummariesIterator spyIterator = spy(iterator); UnsupportedOperationException ex = assertThrows(UnsupportedOperationException.class, () -> spyIterator.remove()); - assertEquals(ex.getMessage(), "remove() method not supported"); + assertTrue(ex.getMessage().startsWith("remove() method not supported")); } } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java index f0ba5b3d4..efc49f41f 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java @@ -4,11 +4,11 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.microsoft.azure.storage.StorageExtendedErrorInformation; import java.util.LinkedHashMap; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeAzureClientTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java index de241162f..b46064778 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java @@ -3,8 +3,9 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; @@ -13,23 +14,24 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnectionV1; import net.snowflake.client.jdbc.SnowflakeFileTransferAgent; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; +@Tag(TestTags.OTHERS) public class SnowflakeS3ClientLatestIT extends BaseJDBCTest { @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testS3Client256Encryption() throws SQLException { try (Connection connection = getConnection("s3testaccount")) { SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -70,7 +72,7 @@ public void testS3Client256Encryption() throws SQLException { * @throws SQLException */ @Test - @Ignore + @Disabled public void testS3ConnectionWithProxyEnvVariablesSet() throws SQLException { String testStageName = "s3TestStage"; @@ -100,7 +102,7 @@ public void testS3ConnectionWithProxyEnvVariablesSet() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testIsClientException400Or404() throws SQLException { 
AmazonServiceException servEx = new AmazonServiceException("S3 operation failed"); servEx.setServiceName("Amazon S3"); @@ -135,7 +137,7 @@ public void testIsClientException400Or404() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetMaxRetries() throws SQLException { Properties props = new Properties(); props.put("putGetMaxRetries", 1); @@ -167,7 +169,7 @@ public void testPutGetMaxRetries() throws SQLException { spy.handleStorageException( new InterruptedException(), 0, "download", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spy, Mockito.never()).renew(Mockito.anyMap()); spy.handleStorageException( diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java index 3daddf3df..91366cbaf 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeS3ClientTest { diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java new file mode 100644 index 000000000..6b27066c6 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.jdbc.cloud.storage; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Optional; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; + +public class StageInfoGcsCustomEndpointTest { + + private static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return Arrays.asList( + Arguments.of("US-CENTRAL1", false, null, Optional.empty()), + Arguments.of("US-CENTRAL1", false, "", Optional.empty()), + Arguments.of("US-CENTRAL1", false, "null", Optional.empty()), + Arguments.of("US-CENTRAL1", false, " ", Optional.empty()), + Arguments.of("US-CENTRAL1", false, "example.com", Optional.of("example.com")), + Arguments.of( + "ME-CENTRAL2", false, null, Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of( + "ME-CENTRAL2", true, null, Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of( + "ME-CENTRAL2", true, "", Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of( + "ME-CENTRAL2", true, " ", Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of("ME-CENTRAL2", true, "example.com", Optional.of("example.com")), + Arguments.of( + "US-CENTRAL1", true, null, Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of( + "US-CENTRAL1", true, "", Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of( + "US-CENTRAL1", true, " ", Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of( + "US-CENTRAL1", true, "null", Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of("US-CENTRAL1", true, "example.com", 
Optional.of("example.com"))); + } + } + + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void shouldReturnEmptyGCSRegionalUrlWhenNotMeCentral1AndNotUseRegionalUrl( + String region, boolean useRegionalUrl, String endPoint, Optional expectedHost) { + StageInfo stageInfo = + StageInfo.createStageInfo("GCS", "bla", new HashMap<>(), region, endPoint, "account", true); + stageInfo.setUseRegionalUrl(useRegionalUrl); + assertEquals(expectedHost, stageInfo.gcsCustomEndpoint()); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java index 042c6b0f4..8df4f988e 100644 --- a/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java @@ -1,8 +1,8 @@ package net.snowflake.client.jdbc.diagnostic; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.net.InetSocketAddress; @@ -11,15 +11,15 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import net.snowflake.client.category.TestCategoryDiagnostic; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSessionProperty; -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryDiagnostic.class) +@Tag(TestTags.DIAGNOSTIC) 
public class DiagnosticContextLatestIT { private static final String HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts"; @@ -34,7 +34,7 @@ public class DiagnosticContextLatestIT { private static String oldJvmHttpsProxyHost; private static String oldJvmHttpsProxyPort; - @BeforeClass + @BeforeAll public static void init() { oldJvmNonProxyHosts = System.getProperty(HTTP_NON_PROXY_HOSTS); oldJvmHttpProxyHost = System.getProperty(HTTP_PROXY_HOST); @@ -43,7 +43,7 @@ public static void init() { oldJvmHttpsProxyPort = System.getProperty(HTTPS_PROXY_PORT); } - @Before + @BeforeEach public void clearJvmProperties() { System.clearProperty(HTTP_NON_PROXY_HOSTS); System.clearProperty(HTTP_PROXY_HOST); @@ -97,7 +97,7 @@ public void parseAllowListFileTest() { String testFailedMessage = "The lists of SnowflakeEndpoints in mockEndpoints and endpointsFromTestFile should be identical"; - assertTrue(testFailedMessage, endpointsFromTestFile.containsAll(mockEndpoints)); + assertTrue(endpointsFromTestFile.containsAll(mockEndpoints), testFailedMessage); } /** @@ -112,24 +112,24 @@ public void testEmptyProxyConfig() { DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); - assertFalse("Proxy configurations should be empty", diagnosticContext.isProxyEnabled()); + assertFalse(diagnosticContext.isProxyEnabled(), "Proxy configurations should be empty"); assertTrue( - "getHttpProxyHost() must return an empty string in the absence of proxy configuration", - diagnosticContext.getHttpProxyHost().isEmpty()); + diagnosticContext.getHttpProxyHost().isEmpty(), + "getHttpProxyHost() must return an empty string in the absence of proxy configuration"); assertEquals( - "getHttpProxyPort() must return -1 in the absence of proxy configuration", -1, - diagnosticContext.getHttpProxyPort()); + diagnosticContext.getHttpProxyPort(), + "getHttpProxyPort() must return -1 in the absence of proxy configuration"); assertTrue( - "getHttpsProxyHost() must return an empty string in the absence 
of proxy configuration", - diagnosticContext.getHttpsProxyHost().isEmpty()); + diagnosticContext.getHttpsProxyHost().isEmpty(), + "getHttpsProxyHost() must return an empty string in the absence of proxy configuration"); assertEquals( - "getHttpsProxyPort() must return -1 in the absence of proxy configuration", -1, - diagnosticContext.getHttpsProxyPort()); + diagnosticContext.getHttpsProxyPort(), + "getHttpsProxyPort() must return -1 in the absence of proxy configuration"); assertTrue( - "getHttpNonProxyHosts() must return an empty string in the absence of proxy configuration", - diagnosticContext.getHttpNonProxyHosts().isEmpty()); + diagnosticContext.getHttpNonProxyHosts().isEmpty(), + "getHttpNonProxyHosts() must return an empty string in the absence of proxy configuration"); } /** Test added in version > 3.16.1 */ @@ -329,7 +329,7 @@ public void testgetNoProxyAfterOverridingJvm() { assertEquals(noProxy, diagnosticContext.getProxy(host4)); } - @After + @AfterEach public void restoreJvmArguments() { System.clearProperty(HTTP_NON_PROXY_HOSTS); System.clearProperty(HTTP_PROXY_HOST); diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java index a926a649e..6072c3453 100644 --- a/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java @@ -1,10 +1,10 @@ package net.snowflake.client.jdbc.diagnostic; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.HashMap; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeEndpointTest { @@ -20,9 +20,9 @@ public void shouldDetectPrivateLinkEndpoint() { (host, expectedToBePrivateLink) -> { SnowflakeEndpoint endpoint = new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", host, 443); assertEquals( - 
String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), expectedToBePrivateLink, - endpoint.isPrivateLink()); + endpoint.isPrivateLink(), + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink)); }); } } diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java index b1da95b99..1660ea57d 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java @@ -3,11 +3,11 @@ */ package net.snowflake.client.jdbc.structuredtypes; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.Connection; @@ -24,10 +24,10 @@ import java.time.ZoneId; import java.util.List; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.TestUtil; import net.snowflake.client.ThrowingConsumer; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.ResultSetFormatType; @@ 
-38,34 +38,17 @@ import net.snowflake.client.jdbc.structuredtypes.sqldata.NullableFieldsSqlData; import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.StringClass; -import org.junit.After; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +import net.snowflake.client.providers.ResultFormatProvider; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.RESULT_SET) public class ResultSetStructuredTypesLatestIT extends BaseJDBCTest { - - @Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - return new Object[][] { - {ResultSetFormatType.JSON}, - {ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES}, - {ResultSetFormatType.NATIVE_ARROW} - }; - } - - private final ResultSetFormatType queryResultFormat; - - public ResultSetStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - @Before + @BeforeEach public void setup() { SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); @@ -73,7 +56,7 @@ public void setup() { SnowflakeObjectTypeFactories.register(NullableFieldsSqlData.class, NullableFieldsSqlData::new); } - @After + @AfterEach public void clean() { SnowflakeObjectTypeFactories.unregister(StringClass.class); SnowflakeObjectTypeFactories.unregister(SimpleClass.class); @@ -81,7 +64,7 @@ public void clean() { 
SnowflakeObjectTypeFactories.unregister(NullableFieldsSqlData.class); } - public Connection init() throws SQLException { + public Connection init(ResultSetFormatType format) throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); @@ -89,9 +72,9 @@ public Connection init() throws SQLException { stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); stmt.execute( "alter session set jdbc_query_result_format = '" - + queryResultFormat.sessionParameterTypeValue + + format.sessionParameterTypeValue + "'"); - if (queryResultFormat == ResultSetFormatType.NATIVE_ARROW) { + if (format == ResultSetFormatType.NATIVE_ARROW) { stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); stmt.execute("alter session set FORCE_ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); } @@ -99,20 +82,23 @@ public Connection init() throws SQLException { return conn; } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructToObjectWithFactory() throws SQLException { - testMapJson(true); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructToObjectWithFactory(ResultSetFormatType format) throws SQLException { + testMapJson(true, format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructToObjectWithReflection() throws SQLException { - testMapJson(false); - testMapJson(true); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructToObjectWithReflection(ResultSetFormatType format) throws SQLException { + testMapJson(false, format); + testMapJson(true, format); } - private void testMapJson(boolean registerFactory) 
throws SQLException { + private void testMapJson(boolean registerFactory, ResultSetFormatType format) + throws SQLException { if (registerFactory) { SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } else { @@ -123,64 +109,32 @@ private void testMapJson(boolean registerFactory) throws SQLException { (resultSet) -> { StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); - }); + }, + format); SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapNullStruct() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapNullStruct(ResultSetFormatType format) throws SQLException { withFirstRow( "select null::OBJECT(string VARCHAR)", (resultSet) -> { StringClass object = resultSet.getObject(1, StringClass.class); assertNull(object); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructAllTypes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructAllTypes(ResultSetFormatType format) throws SQLException { + try (Connection connection = init(format); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); - try (ResultSet resultSet = - statement.executeQuery( - "select {" - + "'string': 'a', " - + "'b': 1, " - + "'s': 2, " - + "'i': 3, " - + "'l': 4, " - + "'f': 1.1, " - + "'d': 2.2, " - + "'bd': 3.3, " - + "'bool': true, " - + "'timestamp_ltz': '2021-12-22 09:43:44'::TIMESTAMP_LTZ, " - + "'timestamp_ntz': '2021-12-23 09:44:44'::TIMESTAMP_NTZ, " - + "'timestamp_tz': '2021-12-24 09:45:45 +0800'::TIMESTAMP_TZ, " 
- + "'date': '2023-12-24'::DATE, " - + "'time': '12:34:56'::TIME, " - + "'binary': TO_BINARY('616263', 'HEX'), " - + "'simpleClass': {'string': 'b', 'intValue': 2}" - + "}::OBJECT(" - + "string VARCHAR, " - + "b TINYINT, " - + "s SMALLINT, " - + "i INTEGER, " - + "l BIGINT, " - + "f FLOAT, " - + "d DOUBLE, " - + "bd DOUBLE, " - + "bool BOOLEAN, " - + "timestamp_ltz TIMESTAMP_LTZ, " - + "timestamp_ntz TIMESTAMP_NTZ, " - + "timestamp_tz TIMESTAMP_TZ, " - + "date DATE, " - + "time TIME, " - + "binary BINARY, " - + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" - + ")"); ) { + try (ResultSet resultSet = statement.executeQuery(AllTypesClass.ALL_TYPES_QUERY); ) { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); assertEquals("a", object.getString()); @@ -213,15 +167,25 @@ public void testMapStructAllTypes() throws SQLException { assertTrue(object.getBool()); assertEquals("b", object.getSimpleClass().getString()); assertEquals(Integer.valueOf(2), object.getSimpleClass().getIntValue()); + + if (format == ResultSetFormatType.NATIVE_ARROW) { + // Only verify getString for Arrow since JSON representations have difficulties with + // floating point toString conversion (3.300000000000000e+00 vs 3.3 in native arrow) + String expectedArrowGetStringResult = + "{\"string\": \"a\",\"b\": 1,\"s\": 2,\"i\": 3,\"l\": 4,\"f\": 1.1,\"d\": 2.2,\"bd\": 3.3,\"bool\": true,\"timestamp_ltz\": \"Wed, 22 Dec 2021 09:43:44 +0100\",\"timestamp_ntz\": \"Thu, 23 Dec 2021 09:44:44 Z\",\"timestamp_tz\": \"Fri, 24 Dec 2021 09:45:45 +0800\",\"date\": \"2023-12-24\",\"time\": \"12:34:56\",\"binary\": \"616263\",\"simpleClass\": {\"string\": \"b\",\"intValue\": 2}}"; + assertEquals(expectedArrowGetStringResult, resultSet.getString(1)); + } } } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException { - Assume.assumeTrue(queryResultFormat != 
ResultSetFormatType.NATIVE_ARROW); - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnStructAsStringIfTypeWasNotIndicated(ResultSetFormatType format) + throws SQLException { + Assumptions.assumeTrue(format != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(format); Statement statement = connection.createStatement()) { statement.execute( "alter session set " @@ -234,43 +198,7 @@ public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException + "TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + "TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3'"); - try (ResultSet resultSet = - statement.executeQuery( - "select {" - + "'string': 'a', " - + "'b': 1, " - + "'s': 2, " - + "'i': 3, " - + "'l': 4, " - + "'f': 1.1, " - + "'d': 2.2, " - + "'bd': 3.3, " - + "'bool': true, " - + "'timestamp_ltz': '2021-12-22 09:43:44'::TIMESTAMP_LTZ, " - + "'timestamp_ntz': '2021-12-23 09:44:44'::TIMESTAMP_NTZ, " - + "'timestamp_tz': '2021-12-24 09:45:45 +0800'::TIMESTAMP_TZ, " - + "'date': '2023-12-24'::DATE, " - + "'time': '12:34:56'::TIME, " - + "'binary': TO_BINARY('616263', 'HEX'), " - + "'simpleClass': {'string': 'b', 'intValue': 2}" - + "}::OBJECT(" - + "string VARCHAR, " - + "b TINYINT, " - + "s SMALLINT, " - + "i INTEGER, " - + "l BIGINT, " - + "f FLOAT, " - + "d DOUBLE, " - + "bd DOUBLE, " - + "bool BOOLEAN, " - + "timestamp_ltz TIMESTAMP_LTZ, " - + "timestamp_ntz TIMESTAMP_NTZ, " - + "timestamp_tz TIMESTAMP_TZ, " - + "date DATE, " - + "time TIME, " - + "binary BINARY, " - + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" - + ")"); ) { + try (ResultSet resultSet = statement.executeQuery(AllTypesClass.ALL_TYPES_QUERY); ) { resultSet.next(); String object = (String) resultSet.getObject(1); String expected = @@ -300,26 +228,30 @@ public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException } } - 
@Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testThrowingGettingObjectIfTypeWasNotIndicatedAndFormatNativeArrow() - throws SQLException { - Assume.assumeTrue(queryResultFormat == ResultSetFormatType.NATIVE_ARROW); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testThrowingGettingObjectIfTypeWasNotIndicatedAndFormatNativeArrow( + ResultSetFormatType format) throws SQLException { + Assumptions.assumeTrue(format == ResultSetFormatType.NATIVE_ARROW); withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR)", (resultSet) -> { assertThrows(SQLException.class, () -> resultSet.getObject(1)); - }); + }, + format); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { assertThrows(SQLException.class, () -> resultSet.getObject(1, Map.class)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfSqlData() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfSqlData(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", (resultSet) -> { @@ -328,12 +260,15 @@ public void testReturnAsArrayOfSqlData() throws SQLException { assertEquals("one", resultArray[0].getString()); assertEquals("two", resultArray[1].getString()); assertEquals("three", resultArray[2].getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + 
public void testReturnAsArrayOfNullableFieldsInSqlData(ResultSetFormatType format) + throws SQLException { withFirstRow( "SELECT OBJECT_CONSTRUCT_KEEP_NULL('string', null, 'nullableIntValue', null, 'nullableLongValue', null, " + "'date', null, 'bd', null, 'bytes', null, 'longValue', null)" @@ -350,13 +285,15 @@ public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { assertNull(result.getBd()); assertNull(result.getBytes()); assertEquals(Long.valueOf(0), result.getLongValue()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullsForAllTpesInSqlData() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullsForAllTpesInSqlData(ResultSetFormatType format) throws SQLException { + try (Connection connection = init(format); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); try (ResultSet resultSet = @@ -387,9 +324,10 @@ public void testReturnNullsForAllTpesInSqlData() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfString() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfString(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT('one', 'two','three')::ARRAY(VARCHAR)", (resultSet) -> { @@ -398,13 +336,15 @@ public void testReturnAsArrayOfString() throws SQLException { assertEquals("one", resultArray[0]); assertEquals("two", resultArray[1]); assertEquals("three", resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void 
testReturnAsArrayOfNullableString() throws SQLException { - Assume.assumeTrue(queryResultFormat == ResultSetFormatType.NATIVE_ARROW); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfNullableString(ResultSetFormatType format) throws SQLException { + Assumptions.assumeTrue(format == ResultSetFormatType.NATIVE_ARROW); withFirstRow( "SELECT ARRAY_CONSTRUCT('one', 'two', null)::ARRAY(VARCHAR)", (resultSet) -> { @@ -413,24 +353,28 @@ public void testReturnAsArrayOfNullableString() throws SQLException { assertEquals("one", resultArray[0]); assertEquals("two", resultArray[1]); assertNull(resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullAsArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullAsArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT null::ARRAY(VARCHAR)", (resultSet) -> { String[] resultArray = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, String.class); assertNull(resultArray); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsListOfIntegers() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsListOfIntegers(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(1,2,3)::ARRAY(INTEGER)", (resultSet) -> { @@ -439,12 +383,14 @@ public void testReturnAsListOfIntegers() throws SQLException { assertEquals(Integer.valueOf(1), resultList.get(0)); assertEquals(Integer.valueOf(2), resultList.get(1)); assertEquals(Integer.valueOf(3), resultList.get(2)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnGithubAction.class) - public void testReturnAsListOfFloat() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsListOfFloat(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(1.1,2.2,3.3)::ARRAY(FLOAT)", (resultSet) -> { @@ -453,12 +399,14 @@ public void testReturnAsListOfFloat() throws SQLException { assertEquals(Float.valueOf(1.1f), resultList[0]); assertEquals(Float.valueOf(2.2f), resultList[1]); assertEquals(Float.valueOf(3.3f), resultList[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsListOfDouble() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsListOfDouble(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(1.1,2.2,3.3)::ARRAY(DOUBLE)", (resultSet) -> { @@ -467,12 +415,14 @@ public void testReturnAsListOfDouble() throws SQLException { assertEquals(Double.valueOf(1.1), resultList.get(0)); assertEquals(Double.valueOf(2.2), resultList.get(1)); assertEquals(Double.valueOf(3.3), resultList.get(2)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMap(ResultSetFormatType format) throws SQLException { withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -481,13 +431,15 @@ public void testReturnAsMap() throws SQLException { assertEquals("one", map.get("x").getString()); assertEquals("two", map.get("y").getString()); assertEquals("three", map.get("z").getString()); - }); + }, 
+ format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapByGetObject() throws SQLException { - Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapByGetObject(ResultSetFormatType format) throws SQLException { + Assumptions.assumeTrue(format != ResultSetFormatType.NATIVE_ARROW); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -495,12 +447,14 @@ public void testReturnAsMapByGetObject() throws SQLException { assertEquals("one", map.get("x").get("string")); assertEquals("two", map.get("y").get("string")); assertEquals("three", map.get("z").get("string")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapWithNullableValues() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapWithNullableValues(ResultSetFormatType format) throws SQLException { withFirstRow( "select {'x':{'string':'one'},'y':null,'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -509,36 +463,42 @@ public void testReturnAsMapWithNullableValues() throws SQLException { assertEquals("one", map.get("x").getString()); assertNull(map.get("y")); assertEquals("three", map.get("z").getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullAsObjectOfTypeMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullAsObjectOfTypeMap(ResultSetFormatType format) throws SQLException { 
withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { Map map = resultSet.unwrap(SnowflakeBaseResultSet.class).getObject(1, Map.class); assertNull(map); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullAsMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullAsMap(ResultSetFormatType format) throws SQLException { withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { Map map = resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertNull(map); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfTimestampsNtz() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfTimestampsNtz(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x': TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), 'y': TO_TIMESTAMP_NTZ('2021-12-24 09:55:55')}::MAP(VARCHAR, TIMESTAMP)", (resultSet) -> { @@ -554,12 +514,14 @@ public void testReturnAsMapOfTimestampsNtz() throws SQLException { .atZone(ZoneId.of("Europe/Warsaw")) .toInstant(), map.get("y").toInstant()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfTimestampsLtz() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfTimestampsLtz(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x': TO_TIMESTAMP_LTZ('2021-12-23 09:44:44'), 'y': TO_TIMESTAMP_LTZ('2021-12-24 09:55:55')}::MAP(VARCHAR, TIMESTAMP_LTZ)", (resultSet) -> { @@ -575,12 +537,14 @@ public void 
testReturnAsMapOfTimestampsLtz() throws SQLException { .atZone(ZoneId.of("Europe/Warsaw")) .toInstant(), map.get("y").toInstant()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfLong() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfLong(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':1, 'y':2, 'z':3}::MAP(VARCHAR, BIGINT)", (resultSet) -> { @@ -589,12 +553,14 @@ public void testReturnAsMapOfLong() throws SQLException { assertEquals(Long.valueOf(1), map.get("x")); assertEquals(Long.valueOf(2), map.get("y")); assertEquals(Long.valueOf(3), map.get("z")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfDate() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfDate(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':'2023-12-24', 'y':'2023-12-25'}::MAP(VARCHAR, DATE)", (resultSet) -> { @@ -604,12 +570,14 @@ public void testReturnAsMapOfDate() throws SQLException { Date.valueOf(LocalDate.of(2023, 12, 24)).toString(), map.get("x").toString()); assertEquals( Date.valueOf(LocalDate.of(2023, 12, 25)).toString(), map.get("y").toString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfTime() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfTime(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':'12:34:56', 'y':'12:34:58'}::MAP(VARCHAR, TIME)", (resultSet) -> { @@ -617,12 +585,14 @@ public void 
testReturnAsMapOfTime() throws SQLException { resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Time.class); assertEquals(Time.valueOf(LocalTime.of(12, 34, 56)), map.get("x")); assertEquals(Time.valueOf(LocalTime.of(12, 34, 58)), map.get("y")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfBoolean() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfBoolean(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':'true', 'y':0}::MAP(VARCHAR, BOOLEAN)", (resultSet) -> { @@ -630,12 +600,14 @@ public void testReturnAsMapOfBoolean() throws SQLException { resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Boolean.class); assertEquals(Boolean.TRUE, map.get("x")); assertEquals(Boolean.FALSE, map.get("y")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsList() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsList(ResultSetFormatType format) throws SQLException { withFirstRow( "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", (resultSet) -> { @@ -643,12 +615,14 @@ public void testReturnAsList() throws SQLException { resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, StringClass.class); assertEquals("one", map.get(0).getString()); assertEquals("two", map.get(1).getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructsFromChunks() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructsFromChunks(ResultSetFormatType format) throws SQLException { 
withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))", (resultSet) -> { @@ -656,12 +630,14 @@ public void testMapStructsFromChunks() throws SQLException { StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); } - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapIntegerArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapIntegerArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(INTEGER)", (resultSet) -> { @@ -669,12 +645,14 @@ public void testMapIntegerArray() throws SQLException { assertEquals(Long.valueOf(10), resultArray[0]); assertEquals(Long.valueOf(20), resultArray[1]); assertEquals(Long.valueOf(30), resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapFixedToLongArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapFixedToLongArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(SMALLINT)", (resultSet) -> { @@ -682,14 +660,16 @@ public void testMapFixedToLongArray() throws SQLException { assertEquals(Long.valueOf("10"), resultArray[0]); assertEquals(Long.valueOf("20"), resultArray[1]); assertEquals(Long.valueOf("30"), resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapDecimalArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapDecimalArray(ResultSetFormatType format) throws SQLException { // 
when: jdbc_treat_decimal_as_int=true scale=0 - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery( @@ -702,7 +682,7 @@ public void testMapDecimalArray() throws SQLException { } // when: jdbc_treat_decimal_as_int=true scale=2 - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery( @@ -715,7 +695,7 @@ public void testMapDecimalArray() throws SQLException { } // when: jdbc_treat_decimal_as_int=false scale=0 - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = connection.createStatement(); ) { statement.execute("alter session set jdbc_treat_decimal_as_int = false"); try (ResultSet resultSet = @@ -729,9 +709,10 @@ public void testMapDecimalArray() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapVarcharArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapVarcharArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT 'text', ARRAY_CONSTRUCT('10', '20','30')::ARRAY(VARCHAR)", (resultSet) -> { @@ -740,12 +721,14 @@ public void testMapVarcharArray() throws SQLException { assertEquals("10", resultArray[0]); assertEquals("20", resultArray[1]); assertEquals("30", resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapDatesArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapDatesArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT 
ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD'))::ARRAY(DATE)", (resultSet) -> { @@ -754,12 +737,14 @@ public void testMapDatesArray() throws SQLException { Date.valueOf(LocalDate.of(2023, 12, 24)).toString(), resultArray[0].toString()); assertEquals( Date.valueOf(LocalDate.of(2023, 12, 25)).toString(), resultArray[1].toString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapTimeArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapTimeArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(to_time('15:39:20.123'), to_time('09:12:20.123'))::ARRAY(TIME)", (resultSet) -> { @@ -767,12 +752,14 @@ public void testMapTimeArray() throws SQLException { assertEquals( Time.valueOf(LocalTime.of(15, 39, 20)).toString(), resultArray[0].toString()); assertEquals(Time.valueOf(LocalTime.of(9, 12, 20)).toString(), resultArray[1].toString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapTimestampArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapTimestampArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), TO_TIMESTAMP_NTZ('2021-12-24 09:55:55'))::ARRAY(TIMESTAMP)", (resultSet) -> { @@ -787,36 +774,42 @@ public void testMapTimestampArray() throws SQLException { .atZone(ZoneId.of("Europe/Warsaw")) .toInstant(), resultArray[1].toInstant()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapBooleanArray() throws SQLException { + @ParameterizedTest + 
@ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapBooleanArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(true,false)::ARRAY(BOOLEAN)", (resultSet) -> { Boolean[] resultArray = (Boolean[]) resultSet.getArray(1).getArray(); assertEquals(true, resultArray[0]); assertEquals(false, resultArray[1]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapBinaryArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapBinaryArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(TO_BINARY('616263', 'HEX'),TO_BINARY('616263', 'HEX'))::ARRAY(BINARY)", (resultSet) -> { Byte[][] resultArray = (Byte[][]) resultSet.getArray(1).getArray(); assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[0]); assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[1]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapArrayOfStructToMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapArrayOfStructToMap(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2} )::ARRAY(OBJECT(x VARCHAR, y INTEGER))", (resultSet) -> { @@ -827,12 +820,14 @@ public void testMapArrayOfStructToMap() throws SQLException { assertEquals(firstEntry.get("y").toString(), "1"); assertEquals(secondEntry.get("x").toString(), "def"); assertEquals(secondEntry.get("y").toString(), "2"); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapArrayOfArrays() throws SQLException { + @ParameterizedTest + 
@ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapArrayOfArrays(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2}) )::ARRAY(ARRAY(OBJECT(x VARCHAR, y INTEGER)))", (resultSet) -> { @@ -843,13 +838,15 @@ public void testMapArrayOfArrays() throws SQLException { assertEquals(firstEntry.get("y").toString(), "1"); assertEquals(secondEntry.get("x").toString(), "def"); assertEquals(secondEntry.get("y").toString(), "2"); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapNestedStructures() throws SQLException { - withFirstRow( + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapNestedStructures(ResultSetFormatType format) throws SQLException { + String structSelectStatement = "SELECT {'simpleClass': {'string': 'a', 'intValue': 2}, " + "'simpleClasses': ARRAY_CONSTRUCT({'string': 'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " + "'arrayOfSimpleClasses': ARRAY_CONSTRUCT({'string': 'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " @@ -863,7 +860,11 @@ public void testMapNestedStructures() throws SQLException { + "mapOfSimpleClasses MAP(VARCHAR, OBJECT(string VARCHAR, intValue INTEGER))," + "texts ARRAY(VARCHAR)," + "arrayOfDates ARRAY(DATE)," - + "mapOfIntegers MAP(VARCHAR, INTEGER))", + + "mapOfIntegers MAP(VARCHAR, INTEGER))"; + String expectedQueryResult = + "{\"simpleClass\": {\"string\": \"a\",\"intValue\": 2},\"simpleClasses\": [{\"string\": \"a\",\"intValue\": 2},{\"string\": \"b\",\"intValue\": 2}],\"arrayOfSimpleClasses\": [{\"string\": \"a\",\"intValue\": 2},{\"string\": \"b\",\"intValue\": 2}],\"mapOfSimpleClasses\": {\"x\": {\"string\": \"c\",\"intValue\": 2},\"y\": {\"string\": \"d\",\"intValue\": 2}},\"texts\": [\"string\",\"a\"],\"arrayOfDates\": 
[\"2023-12-24\",\"2023-12-25\"],\"mapOfIntegers\": {\"x\": 3,\"y\": 4}}"; + withFirstRow( + structSelectStatement, (resultSet) -> { NestedStructSqlData nestedStructSqlData = resultSet.getObject(1, NestedStructSqlData.class); @@ -908,22 +909,29 @@ public void testMapNestedStructures() throws SQLException { assertEquals(Integer.valueOf(3), nestedStructSqlData.getMapOfIntegers().get("x")); assertEquals(Integer.valueOf(4), nestedStructSqlData.getMapOfIntegers().get("y")); - }); + TestUtil.assertEqualsIgnoringWhitespace(expectedQueryResult, resultSet.getString(1)); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testColumnTypeWhenStructureTypeIsDisabled() throws Exception { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testColumnTypeWhenStructureTypeIsDisabled(ResultSetFormatType format) + throws Exception { withFirstRow( "SELECT {'string':'a'}", resultSet -> { assertEquals(Types.VARCHAR, resultSet.getMetaData().getColumnType(1)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testColumnTypeAndFieldsWhenStructureTypeIsReturned() throws Exception { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testColumnTypeAndFieldsWhenStructureTypeIsReturned(ResultSetFormatType format) + throws Exception { withFirstRow( "SELECT {'string':'a'}::OBJECT(string VARCHAR)", resultSet -> { @@ -951,12 +959,16 @@ public void testColumnTypeAndFieldsWhenStructureTypeIsReturned() throws Exceptio .getColumnFields(1) .get(0) .getName()); - }); + }, + format); } - private void withFirstRow(String sqlText, ThrowingConsumer consumer) + private void withFirstRow( + String sqlText, + ThrowingConsumer consumer, + ResultSetFormatType format) throws SQLException { - try (Connection connection = init(); + try (Connection 
connection = init(format); Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sqlText); ) { assertTrue(rs.next()); diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java new file mode 100644 index 000000000..c3ae5fdd8 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java @@ -0,0 +1,162 @@ +package net.snowflake.client.jdbc.structuredtypes; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.jdbc.ResultSetFormatType; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.ResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.RESULT_SET) +public class StructuredTypesGetStringArrowJsonCompatibilityIT + extends StructuredTypesGetStringBaseIT { + private static Map connections = new HashMap<>(); + + @BeforeAll + public static void setUpConnections() throws SQLException { + // We initialize connection here since we need to set server properties that cannot be set in GH + // actions and before class is running even when all the tests have conditional ignore of tests + for (ResultSetFormatType queryResultFormat : 
ResultSetFormatType.values()) { + connections.put(queryResultFormat, initConnection(queryResultFormat)); + } + } + + @AfterAll + public static void closeConnections() throws SQLException { + for (Connection connection : connections.values()) { + connection.close(); + } + } + + @ParameterizedTest + @DontRunOnGithubActions + @ArgumentsSource(DataProvider.class) + public void testRunAsGetString( + ResultSetFormatType queryResultFormat, + String selectSql, + String expectedStructureTypeRepresentation) + throws SQLException { + withFirstRow( + connections.get(queryResultFormat), + selectSql, + (resultSet) -> assertGetStringIsCompatible(resultSet, expectedStructureTypeRepresentation)); + } + + public static class SampleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + List samples = new LinkedList<>(); + samples.add(Arguments.of("select {'a':3}::map(text, int);", "{\"a\":3}")); + samples.add( + Arguments.of( + "select {'a':'zaÅ¼Ă³Å‚Ä‡ gęślÄ… jaźń'}::map(text, text);", + "{\"a\":\"zaÅ¼Ă³Å‚Ä‡ gęślÄ… jaźń\"}")); + samples.add(Arguments.of("select {'a':'bla'}::map(text, text);", "{\"a\":\"bla\"}")); + samples.add(Arguments.of("select {'1':'bla'}::map(int, text);", "{\"1\":\"bla\"}")); + samples.add(Arguments.of("select {'1':[1,2,3]}::map(int, ARRAY(int));", "{\"1\":[1,2,3]}")); + samples.add( + Arguments.of( + "select {'1':{'string':'a'}}::map(int, OBJECT(string VARCHAR));", + "{\"1\":{\"string\":\"a\"}}")); + samples.add( + Arguments.of( + "select {'1':{'string':'a'}}::map(int, map(string, string));", + "{\"1\":{\"string\":\"a\"}}")); + samples.add( + Arguments.of( + "select {'1':[{'string':'a'},{'bla':'ble'}]}::map(int, array(map(string, string)));", + "{\"1\":[{\"string\":\"a\"},{\"bla\":\"ble\"}]}")); + samples.add(Arguments.of("select [1,2,3]::array(int)", "[1,2,3]")); + samples.add( + Arguments.of( + "select [{'a':'a'}, {'b':'b'}]::array(map(string, string))", + "[{\"a\":\"a\"}, {\"b\":\"b\"}]")); + 
samples.add( + Arguments.of( + "select [{'a':true}, {'b':false}]::array(map(string, boolean))", + "[{\"a\":true}, {\"b\":false}]")); + samples.add( + Arguments.of( + "select [{'string':'a'}, {'string':'b'}]::array(object(string varchar))", + "[{\"string\":\"a\"}, {\"string\":\"b\"}]")); + samples.add( + Arguments.of("select {'string':'a'}::object(string varchar)", "{\"string\":\"a\"}")); + samples.add( + Arguments.of( + "select {'x':'a','b':'a','c':'a','d':'a','e':'a'}::object(x varchar,b varchar,c varchar,d varchar,e varchar)", + "{\"x\":\"a\",\"b\":\"a\",\"c\":\"a\",\"d\":\"a\",\"e\":\"a\"}")); + samples.add( + Arguments.of( + "select {'string':[1,2,3]}::object(string array(int))", "{\"string\":[1,2,3]}")); + samples.add( + Arguments.of( + "select {'string':{'a':15}}::object(string object(a int))", + "{\"string\":{\"a\":15}}")); + samples.add( + Arguments.of( + "select {'string':{'a':15}}::object(string map(string,int))", + "{\"string\":{\"a\":15}}")); + samples.add( + Arguments.of( + "select {'string':{'a':{'b':15}}}::object(string object(a map(string, int)))", + "{\"string\":{\"a\":{\"b\":15}}}")); + + samples.add( + Arguments.of( + "select {'string':{'a':{'b':[{'c': 15}]}}}::object(string map(string, object(b array(object(c int)))))", + "{\"string\":{\"a\":{\"b\":[{\"c\":15}]}}}")); + // DY, DD MON YYYY HH24:MI:SS TZHTZM + samples.add( + Arguments.of( + "select {'ltz': '2024-05-20 11:22:33'::TIMESTAMP_LTZ}::object(ltz TIMESTAMP_LTZ)", + "{\"ltz\":\"Mon, 20 May 2024 11:22:33 +0200\"}")); + samples.add( + Arguments.of( + "select {'ntz': '2024-05-20 11:22:33'::TIMESTAMP_NTZ}::object(ntz TIMESTAMP_NTZ)", + "{\"ntz\":\"Mon, 20 May 2024 11:22:33 Z\"}")); + samples.add( + Arguments.of( + "select {'tz': '2024-05-20 11:22:33+0800'::TIMESTAMP_TZ}::object(tz TIMESTAMP_TZ)", + "{\"tz\":\"Mon, 20 May 2024 11:22:33 +0800\"}")); + samples.add( + Arguments.of( + "select {'date': '2024-05-20'::DATE}::object(date DATE)", + "{\"date\":\"2024-05-20\"}")); + samples.add( + 
Arguments.of( + "select {'time': '22:14:55'::TIME}::object(time TIME)", "{\"time\":\"22:14:55\"}")); + samples.add(Arguments.of("select {'bool': TRUE}::object(bool BOOLEAN)", "{\"bool\":true}")); + samples.add(Arguments.of("select {'bool': 'y'}::object(bool BOOLEAN)", "{\"bool\":true}")); + samples.add( + Arguments.of( + "select {'binary': TO_BINARY('616263', 'HEX')}::object(binary BINARY)", + "{\"binary\":\"616263\"}")); + samples.add(Arguments.of("select [1,2,3]::VECTOR(INT, 3)", "[1,2,3]")); + samples.add(Arguments.of("select ['a','b','c']::ARRAY(varchar)", "[\"a\",\"b\",\"c\"]")); + + return samples; + } + } + + private static class DataProvider extends SnowflakeArgumentsProvider { + + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new ResultFormatProvider(), new SampleProvider()); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java new file mode 100644 index 000000000..35d10c4b1 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java @@ -0,0 +1,63 @@ +package net.snowflake.client.jdbc.structuredtypes; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import net.snowflake.client.TestUtil; +import net.snowflake.client.ThrowingConsumer; +import net.snowflake.client.jdbc.BaseJDBCTest; +import net.snowflake.client.jdbc.ResultSetFormatType; + +abstract class StructuredTypesGetStringBaseIT extends BaseJDBCTest { + public StructuredTypesGetStringBaseIT() {} + + protected Connection init(ResultSetFormatType queryResultFormat) throws SQLException { + return initConnection(queryResultFormat); + } + + protected static Connection 
initConnection(ResultSetFormatType queryResultFormat) + throws SQLException { + Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set USE_CACHED_RESULT = false"); + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); + stmt.execute("alter session set IGNORE_CLIENT_VESRION_IN_STRUCTURED_TYPES_RESPONSE = true"); + stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + stmt.execute( + "alter session set " + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + stmt.execute( + "alter session set jdbc_query_result_format = '" + + queryResultFormat.sessionParameterTypeValue + + "'"); + if (queryResultFormat == ResultSetFormatType.NATIVE_ARROW) { + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); + stmt.execute("alter session set FORCE_ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); + } + } + return conn; + } + + protected void assertGetStringIsCompatible(ResultSet resultSet, String expected) + throws SQLException { + String result = resultSet.getString(1); + TestUtil.assertEqualsIgnoringWhitespace(expected, result); + } + + protected void withFirstRow( + Connection connection, String sqlText, ThrowingConsumer consumer) + throws SQLException { + try (Statement statement = connection.createStatement(); + ResultSet rs = statement.executeQuery(sqlText); ) { + assertTrue(rs.next()); + consumer.accept(rs); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/sqldata/AllTypesClass.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/sqldata/AllTypesClass.java index 
3f12a9f63..f8b494a87 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/sqldata/AllTypesClass.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/sqldata/AllTypesClass.java @@ -11,6 +11,43 @@ import net.snowflake.client.jdbc.SnowflakeColumn; public class AllTypesClass implements SQLData { + public static String ALL_TYPES_QUERY = + "select {" + + "'string': 'a', " + + "'b': 1, " + + "'s': 2, " + + "'i': 3, " + + "'l': 4, " + + "'f': 1.1, " + + "'d': 2.2, " + + "'bd': 3.3, " + + "'bool': true, " + + "'timestamp_ltz': '2021-12-22 09:43:44'::TIMESTAMP_LTZ, " + + "'timestamp_ntz': '2021-12-23 09:44:44'::TIMESTAMP_NTZ, " + + "'timestamp_tz': '2021-12-24 09:45:45 +0800'::TIMESTAMP_TZ, " + + "'date': '2023-12-24'::DATE, " + + "'time': '12:34:56'::TIME, " + + "'binary': TO_BINARY('616263', 'HEX'), " + + "'simpleClass': {'string': 'b', 'intValue': 2}" + + "}::OBJECT(" + + "string VARCHAR, " + + "b TINYINT, " + + "s SMALLINT, " + + "i INTEGER, " + + "l BIGINT, " + + "f FLOAT, " + + "d DOUBLE, " + + "bd DOUBLE, " + + "bool BOOLEAN, " + + "timestamp_ltz TIMESTAMP_LTZ, " + + "timestamp_ntz TIMESTAMP_NTZ, " + + "timestamp_tz TIMESTAMP_TZ, " + + "date DATE, " + + "time TIME, " + + "binary BINARY, " + + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + + ")"; + private String string; private Byte b; private Short s; diff --git a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java index e100534e7..302146801 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc.telemetry; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -18,25 +18,23 @@ import java.sql.Statement; import java.util.Map; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SessionUtil; import org.apache.http.impl.client.CloseableHttpClient; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class TelemetryIT extends AbstractDriverIT { private Connection connection = null; private static final ObjectMapper mapper = new ObjectMapper(); - @Before + @BeforeEach public void init() throws SQLException, IOException { this.connection = getConnection(); } @@ -47,23 +45,23 @@ public void testTelemetry() throws Exception { testTelemetryInternal(telemetry); } - @Ignore + @Disabled @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSessionlessTelemetry() throws Exception, SFException { testTelemetryInternal(createSessionlessTelemetry()); } - @Ignore + @Disabled @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testJWTSessionlessTelemetry() throws Exception, SFException 
{ testTelemetryInternal(createJWTSessionlessTelemetry()); } - @Ignore + @Disabled @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOAuthSessionlessTelemetry() throws Exception, SFException { testTelemetryInternal(createOAuthSessionlessTelemetry()); } @@ -143,13 +141,13 @@ public void testDisableTelemetry() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDisableJWTSessionlessTelemetry() throws Exception, SFException { testDisableTelemetryInternal(createJWTSessionlessTelemetry()); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDisableOAuthSessionlessTelemetry() throws Exception, SFException { testDisableTelemetryInternal(createOAuthSessionlessTelemetry()); } @@ -181,7 +179,7 @@ public void testDisableTelemetryInternal(TelemetryClient telemetry) throws Excep } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testClosedJWTSessionlessTelemetry() throws Exception, SFException { TelemetryClient telemetry = createJWTSessionlessTelemetry(); telemetry.close(); @@ -189,11 +187,11 @@ public void testClosedJWTSessionlessTelemetry() throws Exception, SFException { node.put("type", "query"); node.put("query_id", "sdasdasdasdasds"); telemetry.addLogToBatch(node, 1234567); - Assert.assertFalse(telemetry.sendBatchAsync().get()); + assertFalse(telemetry.sendBatchAsync().get()); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testClosedOAuthSessionlessTelemetry() throws Exception, SFException { TelemetryClient telemetry = createOAuthSessionlessTelemetry(); telemetry.close(); @@ -201,7 +199,7 @@ public void testClosedOAuthSessionlessTelemetry() throws 
Exception, SFException node.put("type", "query"); node.put("query_id", "sdasdasdasdasds"); telemetry.addLogToBatch(node, 1234567); - Assert.assertFalse(telemetry.sendBatchAsync().get()); + assertFalse(telemetry.sendBatchAsync().get()); } // Helper function to create a sessionless telemetry diff --git a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java index 6fc0c86d6..3d1471eb7 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java @@ -3,13 +3,13 @@ */ package net.snowflake.client.jdbc.telemetry; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import java.util.LinkedList; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Telemetry unit tests */ public class TelemetryTest { diff --git a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java index 1d8ec8c9e..347bc97e3 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java @@ -2,9 +2,10 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import 
java.sql.SQLException; @@ -13,9 +14,8 @@ import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningNotOnTestaccount; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.annotations.RunOnTestaccountNotOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnectionV1; @@ -23,19 +23,19 @@ import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.common.core.SqlState; import org.apache.commons.lang3.time.StopWatch; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Standalone test cases for the out of band telemetry service */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class TelemetryServiceIT extends BaseJDBCTest { private static final int WAIT_FOR_TELEMETRY_REPORT_IN_MILLISECS = 5000; private boolean defaultState; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); Map connectionParams = getConnectionParameters(); @@ -45,7 +45,7 @@ public void setUp() { service.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { // wait 5 seconds while the service is flushing TimeUnit.SECONDS.sleep(5); @@ -58,7 +58,7 @@ public void tearDown() throws InterruptedException { } @SuppressWarnings("divzero") - @Ignore + @Disabled @Test public void testCreateException() { TelemetryService service = TelemetryService.getInstance(); @@ -82,7 +82,7 @@ public void 
testCreateException() { } /** test wrong server url. */ - @Ignore + @Disabled @Test public void testWrongServerURL() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); @@ -102,7 +102,7 @@ public void testWrongServerURL() throws InterruptedException { assertThat("WrongServerURL do not block.", service.getEventCount() > count); } - @Ignore + @Disabled @Test public void testCreateLog() { // this log will be delivered to snowflake @@ -114,7 +114,7 @@ public void testCreateLog() { service.report(log); } - @Ignore + @Disabled @Test public void testCreateLogWithAWSSecret() { // this log will be delivered to snowflake @@ -135,7 +135,7 @@ public void testCreateLogWithAWSSecret() { service.report(log); } - @Ignore + @Disabled @Test public void stressTestCreateLog() { // this log will be delivered to snowflake @@ -161,7 +161,7 @@ public void stressTestCreateLog() { sw.stop(); } - @Ignore + @Disabled @Test public void testCreateLogInBlackList() { // this log will be delivered to snowflake @@ -172,7 +172,7 @@ public void testCreateLogInBlackList() { service.report(log); } - @Ignore + @Disabled @Test public void testCreateUrgentEvent() { // this log will be delivered to snowflake @@ -184,7 +184,7 @@ public void testCreateUrgentEvent() { service.report(log); } - @Ignore + @Disabled @Test public void stressTestCreateUrgentEvent() { // this log will be delivered to snowflake @@ -229,7 +229,7 @@ private int generateSQLFeatureNotSupportedException() throws SQLFeatureNotSuppor * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testSnowflakeSQLLoggedExceptionOOBTelemetry() throws SQLException, InterruptedException { // make a connection to initialize telemetry instance @@ -264,7 +264,7 @@ public void testSnowflakeSQLLoggedExceptionOOBTelemetry() * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testSQLFeatureNotSupportedOOBTelemetry() throws InterruptedException { // with null session, OOB telemetry will be thrown try { @@ -290,7 +290,7 @@ public void testSQLFeatureNotSupportedOOBTelemetry() throws InterruptedException * * @throws SQLException */ - @Ignore + @Disabled @Test public void testHTAPTelemetry() throws SQLException { Properties properties = new Properties(); @@ -317,7 +317,7 @@ public void testHTAPTelemetry() throws SQLException { * Requires part 2 of SNOW-844477. Make sure CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED is true at * account level. Tests connection property CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED=true */ - @Ignore + @Disabled @Test public void testOOBTelemetryEnabled() throws SQLException { Properties properties = new Properties(); @@ -334,7 +334,7 @@ public void testOOBTelemetryEnabled() throws SQLException { * Requires part 2 of SNOW-844477. Make sure CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED is false at * account level. Tests connection property CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED=false */ - @Ignore + @Disabled @Test public void testOOBTelemetryDisabled() throws SQLException { Properties properties = new Properties(); @@ -352,7 +352,7 @@ public void testOOBTelemetryDisabled() throws SQLException { * account level. Tests connection property CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED=false but * CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED is enabled on account level */ - @Ignore + @Disabled @Test public void testOOBTelemetryEnabledOnServerDisabledOnClient() throws SQLException { Properties properties = new Properties(); @@ -392,16 +392,15 @@ public void testSnowflakeSQLLoggedExceptionIBTelemetry() throws SQLException { * telemetry should be used. * *

After running test, check for telemetry message in client_telemetry_v table. - * - * @throws SQLException */ - @Test(expected = SQLFeatureNotSupportedException.class) + @Test public void testSqlFeatureNotSupportedExceptionIBTelemetry() throws SQLException { // make a connection to initialize telemetry instance try (Connection con = getConnection()) { Statement statement = con.createStatement(); // try to execute a statement that throws a SQLFeatureNotSupportedException - statement.execute("select 1", new int[] {}); + assertThrows( + SQLFeatureNotSupportedException.class, () -> statement.execute("select 1", new int[] {})); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java index 5103348fa..fe359b0ec 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java @@ -6,21 +6,21 @@ import java.util.HashMap; import java.util.Map; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TelemetryServiceTest { private boolean defaultState; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); defaultState = service.isEnabled(); service.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); if (defaultState) { diff --git a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java index 86f8caf5a..dae7fc196 100644 --- a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java +++ 
b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java @@ -16,13 +16,13 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class FlatfileReadMultithreadIT { private final int NUM_RECORDS = 100000; @@ -30,7 +30,7 @@ public class FlatfileReadMultithreadIT { private static String TARGET_SCHEMA; private static String TARGET_DB; - @BeforeClass + @BeforeAll public static void setUpClass() throws Throwable { try (Connection testConnection = AbstractDriverIT.getConnection(); // NOTE: the stage object must be created right after the connection @@ -43,7 +43,7 @@ public static void setUpClass() throws Throwable { } } - @AfterClass + @AfterAll public static void tearDownClass() throws Throwable { try (Connection testConnection = AbstractDriverIT.getConnection(); Statement statement = testConnection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/loader/LoaderBase.java b/src/test/java/net/snowflake/client/loader/LoaderBase.java index ea0c29fdf..853955862 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderBase.java +++ b/src/test/java/net/snowflake/client/loader/LoaderBase.java @@ -6,8 +6,8 @@ import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.AbstractDriverIT; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; public class LoaderBase { static final String TARGET_TABLE_NAME = 
"LOADER_test_TABLE"; @@ -16,7 +16,7 @@ public class LoaderBase { static Connection putConnection; static String SCHEMA_NAME; - @BeforeClass + @BeforeAll public static void setUpClass() throws Throwable { testConnection = AbstractDriverIT.getConnection(); putConnection = AbstractDriverIT.getConnection(); @@ -40,7 +40,7 @@ public static void setUpClass() throws Throwable { .execute("alter session set JDBC_QUERY_RESULT_FORMAT='ARROW', QUERY_RESULT_FORMAT='ARROW'"); } - @AfterClass + @AfterAll public static void tearDownClass() throws SQLException { testConnection .createStatement() diff --git a/src/test/java/net/snowflake/client/loader/LoaderIT.java b/src/test/java/net/snowflake/client/loader/LoaderIT.java index 00fea060f..7f4e3ee97 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderIT.java @@ -9,8 +9,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.ResultSet; import java.sql.SQLException; @@ -22,13 +22,13 @@ import java.util.Date; import java.util.Random; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Loader IT */ -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderIT extends LoaderBase { @Test public void testInjectBadStagedFileInsert() throws Exception { @@ -93,7 +93,7 @@ public void testExecuteBeforeAfterSQLError() throws Exception { * * @throws Exception 
raises an exception if any error occurs. */ - @Ignore("Performance test") + @Disabled("Performance test") @Test public void testLoaderLargeInsert() throws Exception { new TestDataConfigBuilder(testConnection, putConnection) diff --git a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java index e10a606d4..72212171b 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java @@ -4,8 +4,8 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -13,9 +13,9 @@ import java.util.Arrays; import java.util.Collections; import java.util.Date; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Loader API tests for the latest JDBC driver. This doesn't work for the oldest supported driver. @@ -23,7 +23,7 @@ * is not applicable. If it is applicable, move tests to LoaderIT so that both the latest and oldest * supported driver run the tests. 
*/ -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderLatestIT extends LoaderBase { @Test public void testLoaderUpsert() throws Exception { diff --git a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java index 859533686..a01dfffa5 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java @@ -5,16 +5,16 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.Statement; import java.util.List; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderMultipleBatchIT extends LoaderBase { @Test public void testLoaderMultipleBatch() throws Exception { diff --git a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java index 790249e96..9c418c421 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java @@ -5,7 +5,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.Statement; @@ -14,11 +14,11 @@ import java.util.Arrays; import java.util.Date; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryLoader; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderTimestampIT extends LoaderBase { @Test public void testLoadTimestamp() throws Exception { diff --git a/src/test/java/net/snowflake/client/loader/OnErrorTest.java b/src/test/java/net/snowflake/client/loader/OnErrorTest.java index db31b59b5..062621051 100644 --- a/src/test/java/net/snowflake/client/loader/OnErrorTest.java +++ b/src/test/java/net/snowflake/client/loader/OnErrorTest.java @@ -6,7 +6,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class OnErrorTest { @Test diff --git a/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java b/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java index 006574d66..15ee56e6a 100644 --- a/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java +++ b/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java @@ -3,21 +3,21 @@ */ package net.snowflake.client.log; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** A base class for testing implementations of {@link SFLogger} */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public abstract class AbstractLoggerIT { public static final String fakeCreds = "credentials=(aws_key_id='abc123' aws_secret_key='rtyuiop')"; - @Before + 
@BeforeEach void setUp() { setLogLevel(LogLevel.TRACE); } @@ -36,7 +36,7 @@ public void TestLambdaIsNotEvaluatedIfMsgIsNotLogged() { "Value: {}", (ArgSupplier) () -> { - Assert.fail("Lambda expression evaluated even though message " + "is not logged"); + fail("Lambda expression evaluated even though message " + "is not logged"); return 0; }); } @@ -103,19 +103,19 @@ private void logAndVerifyAtEachLogLevel(String expectedLogMsg, String msg, Objec String loggedMsg = getLoggedMessage(); assertEquals( + expectedLogMsg, + loggedMsg, String.format( "Message logged did not match expected value. " + "expected=%s actual=%s", - expectedLogMsg, loggedMsg), - expectedLogMsg, - loggedMsg); + expectedLogMsg, loggedMsg)); LogLevel loggedMsgLevel = getLoggedMessageLevel(); assertEquals( + level, + loggedMsgLevel, String.format( "Message was not logged at expected log level. " + "expected=%s actual=%s", - level.toString(), loggedMsgLevel.toString()), - level, - loggedMsgLevel); + level.toString(), loggedMsgLevel.toString())); } } diff --git a/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java index 033a15457..b8c2b63e5 100644 --- a/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java @@ -3,21 +3,21 @@ */ package net.snowflake.client.log; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogRecord; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.After; -import org.junit.Before; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class JDK14JCLWrapperLatestIT { JDK14JCLWrapper wrapper = new JDK14JCLWrapper(JDK14JCLWrapperLatestIT.class.getName()); JDK14Logger logger = (JDK14Logger) wrapper.getLogger(); @@ -66,7 +66,7 @@ private enum LogLevel { private TestJDK14LogHandler handler = new TestJDK14LogHandler(new SFFormatter()); - @Before + @BeforeEach public void setUp() { logLevelToRestore = logger.getLevel(); // Set debug level to lowest so that all possible messages can be sent. @@ -75,7 +75,7 @@ public void setUp() { logger.setUseParentHandlers(false); } - @After + @AfterEach public void tearDown() { logger.setUseParentHandlers(true); logger.setLevel(logLevelToRestore); diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java index 7bcfaa216..54d21f4e6 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java @@ -8,15 +8,15 @@ import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; /** A class for testing {@link JDK14Logger} */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public 
class JDK14LoggerLatestIT extends AbstractLoggerIT { /** {@link JDK14Logger} instance that will be tested in this class */ private static final JDK14Logger LOGGER = new JDK14Logger(JDK14LoggerLatestIT.class.getName()); @@ -53,7 +53,7 @@ public class JDK14LoggerLatestIT extends AbstractLoggerIT { /** Level at which last message was logged using JDK14Logger. */ private Level lastLogMessageLevel = null; - @BeforeClass + @BeforeAll public static void oneTimeSetUp() { logLevelToRestore = internalLogger.getLevel(); useParentHandlersToRestore = internalLogger.getUseParentHandlers(); @@ -61,19 +61,19 @@ public static void oneTimeSetUp() { internalLogger.setUseParentHandlers(false); } - @AfterClass + @AfterAll public static void oneTimeTearDown() { internalLogger.setLevel(logLevelToRestore); internalLogger.setUseParentHandlers(useParentHandlersToRestore); } - @Before + @BeforeEach public void setUp() { super.setUp(); internalLogger.addHandler(this.handler); } - @After + @AfterEach public void tearDown() { internalLogger.removeHandler(this.handler); } diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java b/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java index e4aadfb14..101c5f9c8 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java @@ -4,17 +4,19 @@ package net.snowflake.client.log; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.file.Paths; import java.util.logging.Level; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; public class JDK14LoggerTest { @Test + @Disabled public void testLegacyLoggerInit() throws IOException { 
System.setProperty("snowflake.jdbc.log.size", "100000"); System.setProperty("snowflake.jdbc.log.count", "3"); diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java index 232da8451..c1f9df5df 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.log; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -18,18 +18,46 @@ import java.util.Properties; import java.util.logging.Level; import net.snowflake.client.AbstractDriverIT; +import net.snowflake.client.annotations.DontRunOnWindows; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import org.apache.commons.io.FileUtils; -import org.junit.Test; - +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +@Tag(TestTags.OTHERS) public class JDK14LoggerWithClientLatestIT extends AbstractDriverIT { + @TempDir public File tmpFolder; String homePath = systemGetProperty("user.home"); + private static Level originalLevel; + + @BeforeAll + static void saveLevel() { + originalLevel = JDK14Logger.getLevel(); + } + + @AfterAll + static void restoreLevel() { + JDK14Logger.setLevel(originalLevel); + } @Test - 
public void testJDK14LoggingWithClientConfig() { - Path configFilePath = Paths.get("config.json"); - String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}"; + @Disabled + public void testJDK14LoggingWithClientConfig() throws IOException { + File configFile = new File(tmpFolder, "config.json"); + configFile.createNewFile(); + Path configFilePath = configFile.toPath(); + File logFolder = new File(tmpFolder, "logs"); + logFolder.createNewFile(); + Path logFolderPath = logFolder.toPath(); + String configJson = + "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"" + logFolderPath + "\"}}"; try { Files.write(configFilePath, configJson.getBytes()); Properties properties = new Properties(); @@ -38,11 +66,8 @@ public void testJDK14LoggingWithClientConfig() { Statement statement = connection.createStatement()) { statement.executeQuery("select 1"); - File file = new File("logs/jdbc/"); + File file = new File(Paths.get(logFolderPath.toString(), "jdbc").toString()); assertTrue(file.exists()); - - Files.deleteIfExists(configFilePath); - FileUtils.deleteDirectory(new File("logs")); } } catch (IOException e) { fail("testJDK14LoggingWithClientConfig failed"); @@ -51,35 +76,42 @@ public void testJDK14LoggingWithClientConfig() { } } - @Test(expected = SQLException.class) - public void testJDK14LoggingWithClientConfigInvalidConfigFilePath() throws SQLException { + @Test + public void testJDK14LoggingWithClientConfigInvalidConfigFilePath() { Path configFilePath = Paths.get("invalid.json"); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); - try (Connection connection = getConnection(properties)) { - connection.createStatement().executeQuery("select 1"); - } + assertThrows( + SnowflakeSQLException.class, + () -> { + try (Connection connection = getConnection(properties)) { + connection.createStatement().executeQuery("select 1"); + } + }); } @Test - public void 
testJDK14LoggingWithClientConfigPermissionError() throws IOException, SQLException { - Path configFilePath = Paths.get("config.json"); - String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}"; - Path directoryPath = Files.createDirectory(Paths.get("logs")); - File directory = directoryPath.toFile(); + @Disabled + @DontRunOnWindows + public void testJDK14LoggingWithClientConfigPermissionError() throws IOException { + File configFile = new File(tmpFolder, "config.json"); + configFile.createNewFile(); + Path configFilePath = configFile.toPath(); + File logFolder = new File(tmpFolder, "logs"); + logFolder.createNewFile(); + Path logFolderPath = logFolder.toPath(); + String configJson = + "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"" + logFolderPath + "\"}}"; HashSet perms = new HashSet<>(); perms.add(PosixFilePermission.OWNER_READ); perms.add(PosixFilePermission.GROUP_READ); perms.add(PosixFilePermission.OTHERS_READ); - Files.setPosixFilePermissions(directoryPath, perms); + Files.setPosixFilePermissions(logFolderPath, perms); Files.write(configFilePath, configJson.getBytes()); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); assertThrows(SQLException.class, () -> getConnection(properties)); - - Files.delete(configFilePath); - directory.delete(); } @Test @@ -99,11 +131,16 @@ public void testJDK14LoggerWithQuotesInMessage() { } @Test + @Disabled public void testJDK14LoggingWithMissingLogPathClientConfig() throws Exception { - Path configFilePath = Paths.get("config.json"); + File configFile = new File(tmpFolder, "config.json"); + configFile.createNewFile(); + Path configFilePath = configFile.toPath(); String configJson = "{\"common\":{\"log_level\":\"debug\"}}"; + Path home = tmpFolder.toPath(); + System.setProperty("user.home", home.toString()); - Path homeLogPath = Paths.get(homePath, "jdbc"); + Path homeLogPath = Paths.get(home.toString(), "jdbc"); 
Files.write(configFilePath, configJson.getBytes()); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); @@ -119,21 +156,23 @@ public void testJDK14LoggingWithMissingLogPathClientConfig() throws Exception { Files.deleteIfExists(configFilePath); FileUtils.deleteDirectory(new File(homeLogPath.toString())); } + } finally { + System.setProperty("user.home", homePath); } } @Test + @Disabled public void testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig() throws Exception { System.clearProperty("user.home"); - Path configFilePath = Paths.get("config.json"); + File configFile = new File(tmpFolder, "config.json"); + Path configFilePath = configFile.toPath(); String configJson = "{\"common\":{\"log_level\":\"debug\"}}"; Files.write(configFilePath, configJson.getBytes()); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); - try (Connection connection = getConnection(properties); - Statement statement = connection.createStatement()) { - + try (Connection connection = getConnection(properties)) { fail("testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig failed"); } catch (SnowflakeSQLLoggedException e) { // Succeed diff --git a/src/test/java/net/snowflake/client/log/SFFormatterTest.java b/src/test/java/net/snowflake/client/log/SFFormatterTest.java index 3255a7357..04ef08c02 100644 --- a/src/test/java/net/snowflake/client/log/SFFormatterTest.java +++ b/src/test/java/net/snowflake/client/log/SFFormatterTest.java @@ -4,7 +4,7 @@ package net.snowflake.client.log; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.text.DateFormat; import java.text.ParseException; @@ -15,8 +15,8 @@ import java.util.logging.Formatter; import java.util.logging.Level; import java.util.logging.LogRecord; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import 
org.junit.jupiter.api.Test; public class SFFormatterTest { // Change these numbers if necessary @@ -28,7 +28,7 @@ public class SFFormatterTest { /** Log record generator */ private LRGenerator recordGenerator; - @Before + @BeforeEach public void setUp() { recordGenerator = new LRGenerator(SFFormatter.CLASS_NAME_PREFIX + "TestClass", "TestMethod"); recordGenerator.setFormatter(new SFFormatter()); @@ -56,8 +56,8 @@ public void testUTCTimeStampSimple() throws ParseException { Date date = extractDate(record); long nowInMs = Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis(); assertTrue( - "Time difference boundary should be less than " + TIME_DIFFERENCE_BOUNDARY + "ms", - nowInMs - date.getTime() < TIME_DIFFERENCE_BOUNDARY); + nowInMs - date.getTime() < TIME_DIFFERENCE_BOUNDARY, + "Time difference boundary should be less than " + TIME_DIFFERENCE_BOUNDARY + "ms"); } finally { TimeZone.setDefault(originalTz); } diff --git a/src/test/java/net/snowflake/client/log/SFLogLevelTest.java b/src/test/java/net/snowflake/client/log/SFLogLevelTest.java index 5604fa013..e12271639 100644 --- a/src/test/java/net/snowflake/client/log/SFLogLevelTest.java +++ b/src/test/java/net/snowflake/client/log/SFLogLevelTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.log; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SFLogLevelTest { diff --git a/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java b/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java index cd3f73898..a79e25de8 100644 --- a/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java +++ b/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.log; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import 
org.junit.jupiter.api.Test; public class SFLoggerFactoryTest { diff --git a/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java b/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java index 49ee89d60..16280b1aa 100644 --- a/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java +++ b/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java @@ -1,20 +1,20 @@ package net.snowflake.client.log; import static net.snowflake.client.log.SFToJavaLogMapper.toJavaUtilLoggingLevel; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.logging.Level; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SFToJavaLogMapperTest { @Test public void testToJavaUtilLoggingLevel() { - assertEquals(toJavaUtilLoggingLevel(SFLogLevel.OFF), java.util.logging.Level.OFF); - assertEquals(toJavaUtilLoggingLevel(SFLogLevel.ERROR), java.util.logging.Level.SEVERE); - assertEquals(toJavaUtilLoggingLevel(SFLogLevel.WARN), java.util.logging.Level.WARNING); - assertEquals(toJavaUtilLoggingLevel(SFLogLevel.INFO), java.util.logging.Level.INFO); + assertEquals(toJavaUtilLoggingLevel(SFLogLevel.OFF), Level.OFF); + assertEquals(toJavaUtilLoggingLevel(SFLogLevel.ERROR), Level.SEVERE); + assertEquals(toJavaUtilLoggingLevel(SFLogLevel.WARN), Level.WARNING); + assertEquals(toJavaUtilLoggingLevel(SFLogLevel.INFO), Level.INFO); assertEquals(toJavaUtilLoggingLevel(SFLogLevel.DEBUG), Level.FINE); - assertEquals(toJavaUtilLoggingLevel(SFLogLevel.TRACE), java.util.logging.Level.FINEST); + assertEquals(toJavaUtilLoggingLevel(SFLogLevel.TRACE), Level.FINEST); } } diff --git a/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java b/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java index 0b7d55a3c..008f356a0 100644 --- a/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java +++ 
b/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java @@ -3,22 +3,22 @@ */ package net.snowflake.client.log; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.Appender; import ch.qos.logback.core.AppenderBase; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SLF4JJJCLWrapperLatestIT { /** Message last logged using SLF4JLogger. 
*/ @@ -55,7 +55,7 @@ private enum LogLevel { Logger logger = (Logger) wrapper.getLogger(); private final Appender testAppender = new TestAppender(); - @Before + @BeforeEach public void setUp() { levelToRestore = logger.getLevel(); if (!testAppender.isStarted()) { @@ -66,7 +66,7 @@ public void setUp() { logger.addAppender(testAppender); } - @After + @AfterEach public void tearDown() { logger.setLevel(levelToRestore); logger.detachAppender(testAppender); diff --git a/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java b/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java index 79e9829f7..9e515b03a 100644 --- a/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java +++ b/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java @@ -11,16 +11,16 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.slf4j.LoggerFactory; /** A class for testing {@link SLF4JLogger} */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SLF4JLoggerLatestIT extends AbstractLoggerIT { /** {@link SLF4JLogger} instance that will be tested in this class */ private static final SLF4JLogger LOGGER = new SLF4JLogger(SLF4JLoggerLatestIT.class); @@ -65,7 +65,7 @@ public class SLF4JLoggerLatestIT extends AbstractLoggerIT { /** Level at which last message was logged using SLF4JLogger. 
*/ private Level lastLogMessageLevel = null; - @BeforeClass + @BeforeAll public static void oneTimeSetUp() { logLevelToRestore = internalLogger.getLevel(); additivityToRestore = internalLogger.isAdditive(); @@ -85,7 +85,7 @@ public static void oneTimeSetUp() { internalLogger.setAdditive(false); } - @AfterClass + @AfterAll public static void oneTimeTearDown() { // Restore original configuration internalLogger.setLevel(logLevelToRestore); @@ -96,10 +96,9 @@ public static void oneTimeTearDown() { appendersToRestore.forEach(internalLogger::addAppender); } - @Before + @BeforeEach public void setUp() { super.setUp(); - if (!testAppender.isStarted()) { testAppender.start(); } @@ -107,7 +106,7 @@ public void setUp() { internalLogger.addAppender(testAppender); } - @After + @AfterEach public void tearDown() { internalLogger.detachAppender(testAppender); } diff --git a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java index eadd984cc..09ffe213a 100644 --- a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java +++ b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java @@ -8,7 +8,7 @@ import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.SQLException; @@ -20,11 +20,11 @@ import javax.sql.ConnectionEventListener; import javax.sql.PooledConnection; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) 
+@Tag(TestTags.CONNECTION) public class ConnectionPoolingDataSourceIT extends AbstractDriverIT { @Test public void testPooledConnection() throws SQLException { diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java index ce93928ac..268989657 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java @@ -7,12 +7,12 @@ import java.sql.SQLException; import java.util.Map; import javax.sql.PooledConnection; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class LogicalConnectionAlreadyClosedLatestIT extends BaseJDBCTest { @Test diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java index 39df72aa2..d3d19c8cf 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java @@ -11,12 +11,12 @@ import java.util.HashMap; import java.util.Map; import javax.sql.PooledConnection; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; 
-@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class LogicalConnectionFeatureNotSupportedLatestIT extends BaseJDBCTest { @Test diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java index d25cdb485..70afaf2bc 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java @@ -3,12 +3,12 @@ */ package net.snowflake.client.pooling; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -27,14 +27,14 @@ import java.util.Map; import java.util.Properties; import javax.sql.PooledConnection; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnectionV1; import net.snowflake.client.jdbc.SnowflakeDriver; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class LogicalConnectionLatestIT extends BaseJDBCTest { Map properties = 
getConnectionParameters(); diff --git a/src/test/java/net/snowflake/client/providers/BooleanProvider.java b/src/test/java/net/snowflake/client/providers/BooleanProvider.java new file mode 100644 index 000000000..24d2a09d3 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/BooleanProvider.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.providers; + +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; + +public class BooleanProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return Arrays.asList(Arguments.of(true), Arguments.of(false)); + } +} diff --git a/src/test/java/net/snowflake/client/providers/ProvidersUtil.java b/src/test/java/net/snowflake/client/providers/ProvidersUtil.java new file mode 100644 index 000000000..05b4f64c4 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/ProvidersUtil.java @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.providers; + +import java.util.ArrayList; +import java.util.List; +import org.apache.commons.lang3.ArrayUtils; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; + +public class ProvidersUtil { + private ProvidersUtil() {} + + private static List cartesianProduct( + ExtensionContext context, List a, SnowflakeArgumentsProvider b) { + List argsB = b.rawArguments(context); + List result = new ArrayList<>(); + for (Arguments args : a) { + for (Arguments args2 : argsB) { + result.add(Arguments.of(ArrayUtils.addAll(args.get(), args2.get()))); + } + } + return result; + } + + public static List cartesianProduct( + ExtensionContext context, + SnowflakeArgumentsProvider provider, + SnowflakeArgumentsProvider... 
providers) { + List args = provider.rawArguments(context); + for (SnowflakeArgumentsProvider argProvider : providers) { + args = cartesianProduct(context, args, argProvider); + } + return args; + } +} diff --git a/src/test/java/net/snowflake/client/providers/ResultFormatProvider.java b/src/test/java/net/snowflake/client/providers/ResultFormatProvider.java new file mode 100644 index 000000000..8f7ffbac4 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/ResultFormatProvider.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.providers; + +import java.util.Arrays; +import java.util.List; +import net.snowflake.client.jdbc.ResultSetFormatType; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; + +public class ResultFormatProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return Arrays.asList( + Arguments.of(ResultSetFormatType.JSON), + Arguments.of(ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES), + Arguments.of(ResultSetFormatType.NATIVE_ARROW)); + } +} diff --git a/src/test/java/net/snowflake/client/providers/ScaleProvider.java b/src/test/java/net/snowflake/client/providers/ScaleProvider.java new file mode 100644 index 000000000..e94421cb0 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/ScaleProvider.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.providers; + +import java.util.ArrayList; +import java.util.List; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; + +public class ScaleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + ArrayList scales = new ArrayList<>(); + for (int scale = 0; scale < 10; scale++) { + scales.add(Arguments.of(scale)); + } + return scales; + } +} diff --git a/src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java b/src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java new file mode 100644 index 000000000..1b973f966 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.providers; + +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; + +public class SimpleResultFormatProvider extends SnowflakeArgumentsProvider { + private static List arguments = + Arrays.asList(Arguments.of("JSON"), Arguments.of("ARROW")); + + public static void setSupportedFormats(List supportedFormats) { + arguments = supportedFormats; + } + + public static void resetSupportedFormats() { + setSupportedFormats(Arrays.asList(Arguments.of("JSON"), Arguments.of("ARROW"))); + } + + @Override + protected List rawArguments(ExtensionContext context) { + return arguments; + } +} diff --git a/src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java b/src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java new file mode 100644 index 000000000..28d9d48d7 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2024 Snowflake 
Computing Inc. All rights reserved. + */ +package net.snowflake.client.providers; + +import java.util.List; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; + +public abstract class SnowflakeArgumentsProvider implements ArgumentsProvider { + protected abstract List rawArguments(ExtensionContext context); + + @Override + public Stream provideArguments(ExtensionContext context) { + return rawArguments(context).stream(); + } +} diff --git a/src/test/java/net/snowflake/client/providers/TimezoneProvider.java b/src/test/java/net/snowflake/client/providers/TimezoneProvider.java new file mode 100644 index 000000000..163b982c7 --- /dev/null +++ b/src/test/java/net/snowflake/client/providers/TimezoneProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.providers; + +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; + +public class TimezoneProvider extends SnowflakeArgumentsProvider { + private int length; + + private static List timeZones = + Arrays.asList( + Arguments.of("UTC"), + Arguments.of("America/Los_Angeles"), + Arguments.of("America/New_York"), + Arguments.of("Pacific/Honolulu"), + Arguments.of("Asia/Singapore"), + Arguments.of("CET"), + Arguments.of("GMT+0200")); + + public TimezoneProvider(int length) { + this.length = length; + } + + public TimezoneProvider() { + this.length = timeZones.size(); + } + + @Override + protected List rawArguments(ExtensionContext context) { + return timeZones.subList(0, length); + } +} diff --git a/src/test/java/net/snowflake/client/suites/ArrowTestSuite.java b/src/test/java/net/snowflake/client/suites/ArrowTestSuite.java new file mode 100644 index 000000000..b0bfa532a --- /dev/null +++ 
b/src/test/java/net/snowflake/client/suites/ArrowTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.ARROW) +public class ArrowTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/BaseTestSuite.java b/src/test/java/net/snowflake/client/suites/BaseTestSuite.java new file mode 100644 index 000000000..42b3d9a53 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/BaseTestSuite.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.platform.suite.api.ExcludePackages; +import org.junit.platform.suite.api.IncludeClassNamePatterns; +import org.junit.platform.suite.api.SelectPackages; +import org.junit.platform.suite.api.Suite; +import org.junit.platform.suite.api.SuiteDisplayName; + +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Suite +@SuiteDisplayName("Testowanie") +@SelectPackages("net.snowflake.client") +@ExcludePackages("net.snowflake.client.suites") +@IncludeClassNamePatterns(".+") +public @interface BaseTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java new file mode 100644 index 000000000..6dc07481d --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.CONNECTION) +public class ConnectionOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java b/src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java new file mode 100644 index 000000000..6ebbd1237 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.CONNECTION) +public class ConnectionTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java new file mode 100644 index 000000000..be0763f55 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.CORE) +public class CoreOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/CoreTestSuite.java b/src/test/java/net/snowflake/client/suites/CoreTestSuite.java new file mode 100644 index 000000000..3e7a15db1 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/CoreTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.CORE) +public class CoreTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java new file mode 100644 index 000000000..cdc925ecb --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.DIAGNOSTIC) +public class DiagnosticOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java b/src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java new file mode 100644 index 000000000..18a53668c --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.DIAGNOSTIC) +public class DiagnosticTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java new file mode 100644 index 000000000..897613378 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.LOADER) +public class LoaderOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/LoaderTestSuite.java b/src/test/java/net/snowflake/client/suites/LoaderTestSuite.java new file mode 100644 index 000000000..7d4952e57 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/LoaderTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.LOADER) +public class LoaderTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java new file mode 100644 index 000000000..363ad3d2a --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import java.util.Arrays; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.platform.suite.api.AfterSuite; +import org.junit.platform.suite.api.BeforeSuite; + +@BaseTestSuite +public abstract class OldDriverTestSuite { + @BeforeSuite + public static void beforeAll() { + SimpleResultFormatProvider.setSupportedFormats(Arrays.asList(Arguments.of("JSON"))); + } + + @AfterSuite + public static void afterAll() { + SimpleResultFormatProvider.resetSupportedFormats(); + } +} diff --git a/src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java new file mode 100644 index 000000000..3562d9c0e --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.OTHERS) +public class OthersOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/OthersTestSuite.java b/src/test/java/net/snowflake/client/suites/OthersTestSuite.java new file mode 100644 index 000000000..02f9f3630 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/OthersTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.OTHERS) +public class OthersTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java new file mode 100644 index 000000000..a57873e80 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.RESULT_SET) +public class ResultSetOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java b/src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java new file mode 100644 index 000000000..0032593c2 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.RESULT_SET) +public class ResultSetTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java new file mode 100644 index 000000000..62ece4cec --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@IncludeTags(TestTags.STATEMENT) +public class StatementOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/StatementTestSuite.java b/src/test/java/net/snowflake/client/suites/StatementTestSuite.java new file mode 100644 index 000000000..19b96cf34 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/StatementTestSuite.java @@ -0,0 +1,11 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.IncludeTags; + +@BaseTestSuite +@IncludeTags(TestTags.STATEMENT) +public class StatementTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java new file mode 100644 index 000000000..8c9a9f470 --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.platform.suite.api.ExcludeTags; + +@ExcludeTags({ + TestTags.CORE, + TestTags.ARROW, + TestTags.DIAGNOSTIC, + TestTags.CONNECTION, + TestTags.LOADER, + TestTags.OTHERS, + TestTags.RESULT_SET, + TestTags.STATEMENT +}) +public class UnitOldDriverTestSuite extends OldDriverTestSuite {} diff --git a/src/test/java/net/snowflake/client/suites/UnitTestSuite.java b/src/test/java/net/snowflake/client/suites/UnitTestSuite.java new file mode 100644 index 000000000..5bd5904fe --- /dev/null +++ b/src/test/java/net/snowflake/client/suites/UnitTestSuite.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.suites; + +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.DisplayName; +import org.junit.platform.suite.api.ExcludeTags; + +@BaseTestSuite +@DisplayName("Unit tests") +@ExcludeTags({ + TestTags.CORE, + TestTags.ARROW, + TestTags.DIAGNOSTIC, + TestTags.CONNECTION, + TestTags.LOADER, + TestTags.OTHERS, + TestTags.RESULT_SET, + TestTags.STATEMENT +}) +public class UnitTestSuite {} diff --git a/src/test/java/net/snowflake/client/util/SecretDetectorTest.java b/src/test/java/net/snowflake/client/util/SecretDetectorTest.java index aa3339309..1b936b929 100644 --- a/src/test/java/net/snowflake/client/util/SecretDetectorTest.java +++ b/src/test/java/net/snowflake/client/util/SecretDetectorTest.java @@ -1,7 +1,7 @@ package net.snowflake.client.util; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -12,7 +12,7 @@ import net.minidev.json.JSONObject; import net.snowflake.client.core.ObjectMapperFactory; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SecretDetectorTest { @Test diff --git a/src/test/java/net/snowflake/client/util/StopwatchTest.java b/src/test/java/net/snowflake/client/util/StopwatchTest.java index 9e44ce18a..066b450fa 100644 --- a/src/test/java/net/snowflake/client/util/StopwatchTest.java +++ b/src/test/java/net/snowflake/client/util/StopwatchTest.java @@ -7,18 +7,18 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.concurrent.TimeUnit; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class StopwatchTest { Stopwatch stopwatch = new Stopwatch(); - @Before + @BeforeEach public void before() { stopwatch = new Stopwatch(); } @@ -36,7 +36,7 @@ public void testGetMillisWhenStopped() throws InterruptedException { @Test public void testGetMillisWithoutStopping() throws InterruptedException { stopwatch.start(); - TimeUnit.MILLISECONDS.sleep(20); + TimeUnit.MILLISECONDS.sleep(100); assertThat( stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(500L))); } diff --git a/src/test/resources/FileUploaderPrep/exampleAzure.json b/src/test/resources/FileUploaderPrep/exampleAzure.json new file mode 100644 index 000000000..a2b1835c3 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleAzure.json @@ -0,0 +1,51 @@ +{ + "data": { + "uploadInfo": { + "locationType": "AZURE", + "location": "EXAMPLE_LOCATION/", + "path": "EXAMPLE_PATH/", + "region": "westus", + "storageAccount": "sfcdev2stage", + "isClientSideEncrypted": true, + "creds": { + "AZURE_SAS_TOKEN": "EXAMPLE_AZURE_SAS_TOKEN" + }, + "presignedUrl": null, + "endPoint": "blob.core.windows.net" + }, + "src_locations": [ + "/foo/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "AZURE", + "location": "EXAMPLE_LOCATION/", + "path": "EXAMPLE_PATH/", + "region": "westus", + "storageAccount": 
"EXAMPLE_STORAGE_ACCOUNT", + "isClientSideEncrypted": true, + "creds": { + "AZURE_SAS_TOKEN": "EXAMPLE_AZURE_SAS_TOKEN" + }, + "presignedUrl": null, + "endPoint": "blob.core.windows.net" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleGCS.json b/src/test/resources/FileUploaderPrep/exampleGCS.json new file mode 100644 index 000000000..8cd605f1c --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleGCS.json @@ -0,0 +1,47 @@ +{ + "data": { + "uploadInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "src_locations": [ + "/foo/bart/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json b/src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json new file mode 100644 index 000000000..8ba946c76 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json @@ -0,0 +1,47 @@ +{ + "data": { + 
"uploadInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "example.com" + }, + "src_locations": [ + "/foo/bart/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "example.com" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json b/src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json new file mode 100644 index 000000000..79f4dc678 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json @@ -0,0 +1,49 @@ +{ + "data": { + "uploadInfo": { + "locationType": "GCS", + "useRegionalUrl": true, + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "src_locations": [ + "/foo/bart/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + 
"queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "GCS", + "useRegionalUrl": true, + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleS3.json b/src/test/resources/FileUploaderPrep/exampleS3.json new file mode 100644 index 000000000..eadc166d8 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleS3.json @@ -0,0 +1,60 @@ +{ + "data": { + "uploadInfo": { + "locationType": "S3", + "location": "example/location", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": null + }, + "src_locations": [ + "/tmp/files/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": true, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "S3", + "location": "stage/location/foo/", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "useS3RegionalUrl": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": 
"EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": null + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json b/src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json new file mode 100644 index 000000000..32b8a66a1 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json @@ -0,0 +1,59 @@ +{ + "data": { + "uploadInfo": { + "locationType": "S3", + "location": "example/location", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": null + }, + "src_locations": [ + "/tmp/files/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": true, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "S3", + "location": "stage/location/foo/", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": "s3-fips.us-east-1.amazonaws.com" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + 
"message": null, + "success": true +} \ No newline at end of file diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 31a1aedee..09c6bf079 100644 --- a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -37,33 +37,33 @@ 4.4.16 1.12.655 5.0.0 - 1.74 + 1.78.1 1.17.0 - 2.11.0 + 2.17.0 1.2 - 2.21.0 - 2.22.6 + 1.29.0 + 2.47.0 + 2.44.1 24.3.25 - 1.19.0 - 2.31.0 - 32.1.1-jre - 1.43.3 + 2.57.0 + 33.3.1-jre + 1.45.0 3.0.2 - 3.23.3 - 1.60.0 - 2.17.2 + 4.28.2 + 1.68.1 + 2.18.1 3.1.0 5.13.0 2.8.1 2.4.9 1.15.3 2.2.0 - 4.1.111.Final + 4.1.115.Final 9.37.3 UTF-8 UTF-8 2.0.13 - 1.6.9 + 1.5.6-5 @@ -117,6 +117,11 @@ bcprov-jdk18on ${bouncycastle.version} + + org.bouncycastle + bcutil-jdk18on + ${bouncycastle.version} + com.amazonaws aws-java-sdk-core @@ -150,12 +155,6 @@ gax ${google.gax.version} - - - org.threeten - threetenbp - ${threeten.version} - com.google.auth google-auth-library-oauth2-http @@ -262,6 +261,11 @@ jsoup ${jsoup.version} + + com.github.luben + zstd-jni + ${zstd-jni.version} + org.slf4j slf4j-api