diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 9ef1e0ed3..d07631635 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -7,6 +7,7 @@ SNOW-XXXXX - [ ] The code is correctly formatted (run `mvn -P check-style validate`) - [ ] New public API is not unnecessary exposed (run `mvn verify` and inspect `target/japicmp/japicmp.html`) - [ ] The pull request name is prefixed with `SNOW-XXXX: ` +- [ ] Code is in compliance with internal logging requirements ## External contributors - please answer these questions before submitting a pull request. Thanks! diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index ba43fe9b5..c18f81c8d 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -36,16 +36,78 @@ jobs: WHITESOURCE_API_KEY: ${{ secrets.WHITESOURCE_API_KEY }} run: ./ci/build.sh + test-windows: + needs: build + name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + runs-on: windows-latest + strategy: + fail-fast: false + matrix: + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + additionalMavenProfile: [''] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + java-version: ${{ matrix.runConfig.javaVersion }} + distribution: 'temurin' + cache: maven + - uses: actions/setup-python@v4 + with: + python-version: '3.7' + architecture: 'x64' + - name: Tests + shell: cmd + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} + JDBC_TEST_CATEGORY: ${{ matrix.category 
}} + ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} + run: ci\\test_windows.bat + + test-mac: + needs: build + name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + runs-on: macos-13 + strategy: + fail-fast: false + matrix: + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + additionalMavenProfile: [''] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + java-version: ${{ matrix.runConfig.javaVersion }} + distribution: 'temurin' + cache: maven + - uses: actions/setup-python@v4 + with: + python-version: '3.7' + - name: Install Homebrew Bash + shell: bash + run: brew install bash + - name: Tests + shell: bash + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} + JDBC_TEST_CATEGORY: ${{ matrix.category }} + ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} + run: /usr/local/bin/bash ./ci/test_mac.sh + test-linux: needs: build - name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} on ${{ matrix.image }} + name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: - image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17' ] - cloud: [ 'AWS' ] - category: ['TestCategoryResultSet,TestCategoryStatement', 'TestCategoryOthers,TestCategoryCore', 'TestCategoryArrow,TestCategoryLoader,TestCategoryConnection', 'TestCategoryFips'] + image: [ 'jdbc-centos7-openjdk8', 
'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ] + cloud: [ 'AWS', 'AZURE', 'GCP' ] + category: ['TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader', 'TestCategoryOthers', 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] steps: - uses: actions/checkout@v1 diff --git a/.github/workflows/parameters_azure.json.gpg b/.github/workflows/parameters_azure.json.gpg new file mode 100644 index 000000000..ea6fbdb51 Binary files /dev/null and b/.github/workflows/parameters_azure.json.gpg differ diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 180cb41e5..d8ea55f6d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,15 @@ +**JDBC Driver 3.17.0** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + +**JDBC Driver 3.16.1** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + +**JDBC Driver 3.16.0** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + **JDBC Driver 3.15.1** - \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 5766a8243..0ba788c0a 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.15.2-SNAPSHOT + 3.17.1-SNAPSHOT ../parent-pom.xml snowflake-jdbc-fips - 3.15.2-SNAPSHOT + 3.17.1-SNAPSHOT jar snowflake-jdbc-fips @@ -323,23 +323,6 @@ - - org.codehaus.mojo - exec-maven-plugin - ${version.plugin.exec} - - - check-shaded-content - verify - - exec - - - ${basedir}/scripts/check_content.sh - - - - @@ -466,13 +449,6 @@ org.objectweb ${shadeBase}.org.objectweb - - com.sun - ${shadeBase}.com.sun - - com.sun.jna.** - - io.netty ${shadeBase}.io.netty @@ -669,6 +645,35 @@ + + check-content + + + !windows + + + + + + 
org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + verify + + exec + + + ${basedir}/scripts/check_content.sh + + + + + + + java-9 diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java new file mode 100644 index 000000000..c902dc5f9 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningOnGCP implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && cloudProvider.equalsIgnoreCase("GCP"); + } +} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java b/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java new file mode 100644 index 000000000..e69de29bb diff --git a/FIPS/src/test/java/net/snowflake/client/TestUtil.java b/FIPS/src/test/java/net/snowflake/client/TestUtil.java index 3c7f04958..703d59953 100644 --- a/FIPS/src/test/java/net/snowflake/client/TestUtil.java +++ b/FIPS/src/test/java/net/snowflake/client/TestUtil.java @@ -37,7 +37,7 @@ public interface TestRunInterface { /** * System.getenv wrapper. If System.getenv raises an SecurityException, it is ignored and returns * null. - * + * @deprecated This method should be replaced by SnowflakeUtil.systemGetEnv. *

This is replicated from SnowflakeUtil.systemGetEnv, because the old driver doesn't have that * function for the tests to use it. Replace this function call with SnowflakeUtil.systemGetEnv * when it is available. @@ -45,6 +45,7 @@ public interface TestRunInterface { * @param env the environment variable name. * @return the environment variable value if set, otherwise null. */ + @Deprecated public static String systemGetEnv(String env) { try { return System.getenv(env); diff --git a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java index a10924432..c1509a6a8 100644 --- a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java +++ b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java @@ -21,6 +21,7 @@ import javax.net.ssl.HttpsURLConnection; import net.snowflake.client.AbstractDriverIT; import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGCP; import net.snowflake.client.RunningOnGithubActions; import net.snowflake.client.category.TestCategoryFips; import net.snowflake.client.core.SecurityUtil; @@ -289,7 +290,12 @@ public void testConnectUsingKeyPair() throws Exception { DriverManager.getConnection(uri, properties).close(); } + /** + * Test case for connecting with FIPS and executing a query. 
+ * Currently ignored execution on GCP due to exception thrown "SSlException Could not generate XDH keypair" + */ @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGCP.class) public void connectWithFipsAndQuery() throws SQLException { try (Connection con = getConnection()) { Statement statement = con.createStatement(); diff --git a/Jenkinsfile b/Jenkinsfile index 5e62aab1b..8e5925b8c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -56,7 +56,7 @@ timestamps { e.printStackTrace() } - jdkToParams = ['openjdk8': 'jdbc-centos7-openjdk8', 'openjdk11': 'jdbc-centos7-openjdk11', 'openjdk17': 'jdbc-centos7-openjdk17'].collectEntries { jdk, image -> + jdkToParams = ['openjdk8': 'jdbc-centos7-openjdk8', 'openjdk11': 'jdbc-centos7-openjdk11', 'openjdk17': 'jdbc-centos7-openjdk17', 'openjdk21': 'jdbc-centos7-openjdk21'].collectEntries { jdk, image -> return [(jdk): [ string(name: 'client_git_branch', value: scmInfo.GIT_BRANCH), string(name: 'client_git_commit', value: scmInfo.GIT_COMMIT), diff --git a/ci/_init.sh b/ci/_init.sh index c91f03c31..5df299949 100755 --- a/ci/_init.sh +++ b/ci/_init.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/usr/local/bin/env bash set -e export PLATFORM=$(echo $(uname) | tr '[:upper:]' '[:lower:]') @@ -23,6 +23,7 @@ declare -A TEST_IMAGE_NAMES=( [$DRIVER_NAME-centos7-openjdk8]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk8-test:$TEST_IMAGE_VERSION [$DRIVER_NAME-centos7-openjdk11]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk11-test:$TEST_IMAGE_VERSION [$DRIVER_NAME-centos7-openjdk17]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk17-test:$TEST_IMAGE_VERSION + [$DRIVER_NAME-centos7-openjdk21]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk21-test:$TEST_IMAGE_VERSION ) export TEST_IMAGE_NAMES @@ -30,11 +31,13 @@ declare -A TEST_IMAGE_DOCKERFILES=( [$DRIVER_NAME-centos7-openjdk8]=jdbc-centos7-openjdk-test [$DRIVER_NAME-centos7-openjdk11]=jdbc-centos7-openjdk-test 
[$DRIVER_NAME-centos7-openjdk17]=jdbc-centos7-openjdk-test + [$DRIVER_NAME-centos7-openjdk21]=jdbc-centos7-openjdk-test ) declare -A TEST_IMAGE_BUILD_ARGS=( [$DRIVER_NAME-centos7-openjdk8]="--target jdbc-centos7-openjdk-yum --build-arg=JDK_PACKAGE=java-1.8.0-openjdk-devel" [$DRIVER_NAME-centos7-openjdk11]="--target jdbc-centos7-openjdk-yum --build-arg=JDK_PACKAGE=java-11-openjdk-devel" # pragma: allowlist secret [$DRIVER_NAME-centos7-openjdk17]="--target jdbc-centos7-openjdk17" + [$DRIVER_NAME-centos7-openjdk21]="--target jdbc-centos7-openjdk21" ) diff --git a/ci/container/test_component.sh b/ci/container/test_component.sh index fa550217d..da245a627 100755 --- a/ci/container/test_component.sh +++ b/ci/container/test_component.sh @@ -1,11 +1,12 @@ #!/bin/bash -e # -# Test JDBC for Linux +# Test JDBC for Linux/MAC # set -o pipefail THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" export WORKSPACE=${WORKSPACE:-/mnt/workspace} export SOURCE_ROOT=${SOURCE_ROOT:-/mnt/host} +MVNW_EXE=$SOURCE_ROOT/mvnw echo "[INFO] Download JDBC Integration test cases and libraries" source $THIS_DIR/download_artifact.sh @@ -76,15 +77,15 @@ export MAVEN_OPTS="$MAVEN_OPTS -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=fa cd $SOURCE_ROOT # Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched -mvn --batch-mode --show-version dependency:resolve-plugins +$MVNW_EXE --batch-mode --show-version dependency:go-offline for c in "${CATEGORY[@]}"; do c=$(echo $c | sed 's/ *$//g') if [[ "$is_old_driver" == "true" ]]; then pushd TestOnly >& /dev/null - JDBC_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") + JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") echo "[INFO] Run JDBC $JDBC_VERSION tests" - mvn -DjenkinsIT \ + $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE 
\ -Djacoco.skip.instrument=false \ -DtestCategory=net.snowflake.client.category.$c \ @@ -95,7 +96,7 @@ for c in "${CATEGORY[@]}"; do elif [[ "$c" == "TestCategoryFips" ]]; then pushd FIPS >& /dev/null echo "[INFO] Run Fips tests" - mvn -DjenkinsIT \ + $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ @@ -105,7 +106,7 @@ for c in "${CATEGORY[@]}"; do popd >& /dev/null else echo "[INFO] Run $c tests" - mvn -DjenkinsIT \ + $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ -DtestCategory=net.snowflake.client.category.$c \ diff --git a/ci/image/Dockerfile.jdbc-centos7-openjdk-test b/ci/image/Dockerfile.jdbc-centos7-openjdk-test index 15e351530..e6adfb975 100644 --- a/ci/image/Dockerfile.jdbc-centos7-openjdk-test +++ b/ci/image/Dockerfile.jdbc-centos7-openjdk-test @@ -88,4 +88,22 @@ RUN export JAVA_HOME=/opt/jdk-17 && \ -Dnot-self-contained-jar \ --batch-mode --fail-never compile && \ mv $HOME/.m2 /home/user && \ - chmod -R 777 /home/user/.m2 \ No newline at end of file + chmod -R 777 /home/user/.m2 + +###### OpenJDK 21 from archive (not available in yum) +FROM jdbc-centos7-openjdk-base AS jdbc-centos7-openjdk21 + +# Java +RUN curl -o - https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz | tar xfz - -C /opt && \ + ln -s /opt/jdk-21.0.2 /opt/jdk-21 + +RUN sed -i /usr/local/bin/entrypoint.sh -e '/^exec/i export JAVA_HOME=/opt/jdk-21' +RUN sed -i /usr/local/bin/entrypoint.sh -e '/^exec/i export PATH=$JAVA_HOME/bin:$PATH' + +RUN export JAVA_HOME=/opt/jdk-21 && \ + cd /root && \ + mvn -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar \ + --batch-mode --fail-never compile && \ + mv $HOME/.m2 /home/user && \ + chmod -R 777 /home/user/.m2 diff --git a/ci/log_analyze_setup.sh 
b/ci/log_analyze_setup.sh index b77d473c0..fd573d194 100755 --- a/ci/log_analyze_setup.sh +++ b/ci/log_analyze_setup.sh @@ -31,19 +31,36 @@ fi # The new complex password we use for jenkins test export SNOWFLAKE_TEST_PASSWORD_NEW="ThisIsRandomPassword123!" -LOG_PROPERTY_FILE_DOCKER=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resources/logging.properties +LOG_PROPERTY_FILE=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resources/logging.properties export CLIENT_DRIVER_NAME=JDBC function setup_log_env() { - sed -i "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH_DOCKER|" ${LOG_PROPERTY_FILE_DOCKER} + if ["$WORKSPACE" == "/mnt/workspace"]; then + CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH_DOCKER + CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH_DOCKER + CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER + else + CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH + CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH + CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH + fi + echo "[INFO] CLIENT_LOG_DIR_PATH=$CLIENT_LOG_DIR_PATH" + echo "[INFO] CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH" + echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH" + echo "[INFO] Replace file handler for log file $LOG_PROPERTY_FILE" + + sed -i'' -e "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH|" ${LOG_PROPERTY_FILE} - if [[ ! -d ${LOCAL_CLIENT_LOG_DIR_PATH_DOCKER} ]]; then - mkdir -p ${LOCAL_CLIENT_LOG_DIR_PATH_DOCKER} + if [[ ! 
-d ${CLIENT_LOG_DIR_PATH} ]]; then + echo "[INFO] create clien log directory $CLIENT_LOG_DIR_PATH" + mkdir -p ${CLIENT_LOG_DIR_PATH} fi - if [[ -f $CLIENT_KNOWN_SSM_FILE_PATH_DOCKER ]]; then - rm -f $CLIENT_KNOWN_SSM_FILE_PATH_DOCKER + if [[ -f $CLIENT_KNOWN_SSM_FILE_PATH ]]; then + rm -f $CLIENT_KNOWN_SSM_FILE_PATH fi - touch $CLIENT_KNOWN_SSM_FILE_PATH_DOCKER + + touch $CLIENT_KNOWN_SSM_FILE_PATH + echo "[INFO] finish setup log env" } diff --git a/ci/test.sh b/ci/test.sh index 49a999d41..03c66c502 100755 --- a/ci/test.sh +++ b/ci/test.sh @@ -58,6 +58,7 @@ for name in "${!TARGET_TEST_IMAGES[@]}"; do -e BUILD_NUMBER \ -e JDBC_TEST_CATEGORY \ -e ADDITIONAL_MAVEN_PROFILE \ + -e CLOUD_PROVIDER \ -e is_old_driver \ --add-host=snowflake.reg.local:${IP_ADDR} \ --add-host=s3testaccount.reg.local:${IP_ADDR} \ diff --git a/ci/test_mac.sh b/ci/test_mac.sh new file mode 100755 index 000000000..7f98e9add --- /dev/null +++ b/ci/test_mac.sh @@ -0,0 +1,21 @@ +#!/bin/bash -e +# +# Test JDBC for Mac +# + +echo "DOWNLOADED" +set -o pipefail +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +source $THIS_DIR/_init.sh +source $THIS_DIR/scripts/set_git_info.sh + + +export WORKSPACE=$GITHUB_WORKSPACE +export SOURCE_ROOT=$GITHUB_WORKSPACE + +python3 --version +python3 -m venv venv +source venv/bin/activate +pip3 install -U pip +pip3 install -U snowflake-connector-python +$THIS_DIR/container/test_component.sh diff --git a/ci/test_windows.bat b/ci/test_windows.bat new file mode 100644 index 000000000..4a5a8ebe3 --- /dev/null +++ b/ci/test_windows.bat @@ -0,0 +1,161 @@ +REM +REM Tests JDBC Driver on Windows +REM +setlocal +setlocal EnableDelayedExpansion +python -m venv venv +call venv\scripts\activate +pip install -U snowflake-connector-python + +cd %GITHUB_WORKSPACE% + +if "%CLOUD_PROVIDER%"=="AZURE" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_azure.json.gpg +) else if "%CLOUD_PROVIDER%"=="GCP" ( + set 
ENCODED_PARAMETERS_FILE=.github/workflows/parameters_gcp.json.gpg +) else if "%CLOUD_PROVIDER%"=="AWS" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_aws.json.gpg +) else ( + echo === unknown cloud provider + exit /b 1 +) + +gpg --quiet --batch --yes --decrypt --passphrase=%PARAMETERS_SECRET% --output parameters.json %ENCODED_PARAMETERS_FILE% + +REM DON'T FORGET TO include @echo off here or the password may be leaked! +echo @echo off>parameters.bat +jq -r ".testconnection | to_entries | map(\"set \(.key)=\(.value)\") | .[]" parameters.json >> parameters.bat +call parameters.bat +if %ERRORLEVEL% NEQ 0 ( + echo === failed to set the test parameters + exit /b 1 +) +echo @echo off>parametersorg.bat +jq -r ".orgconnection | to_entries | map(\"set \(.key)=\(.value)\") | .[]" parameters.json >> parametersorg.bat +call parametersorg.bat +if %ERRORLEVEL% NEQ 0 ( + echo === failed to set the org parameters + exit /b 1 +) +set SNOWFLAKE_TEST_SCHEMA=%RUNNER_TRACKING_ID:-=_%_%GITHUB_SHA% +set TARGET_SCHEMA_NAME=%SNOWFLAKE_TEST_SCHEMA% + +echo [INFO] Account: %SNOWFLAKE_TEST_ACCOUNT% +echo [INFO] User : %SNOWFLAKE_TEST_USER% +echo [INFO] Database: %SNOWFLAKE_TEST_DATABASE% +echo [INFO] Schema: %SNOWFLAKE_TEST_SCHEMA% +echo [INFO] Warehouse: %SNOWFLAKE_TEST_WAREHOUSE% +echo [INFO] Role: %SNOWFLAKE_TEST_ROLE% +echo [INFO] PROVIDER: %CLOUD_PROVIDER% + +echo [INFO] Creating schema %SNOWFLAKE_TEST_SCHEMA% +pushd %GITHUB_WORKSPACE%\ci\container +python create_schema.py +popd + +REM setup log + +set CLIENT_LOG_DIR_PATH=%GITHUB_WORKSPACE%\jenkins_rt_logs +echo "[INFO] CLIENT_LOG_DIR_PATH=%CLIENT_LOG_DIR_PATH%" + +set CLIENT_LOG_FILE_PATH=%CLIENT_LOG_DIR_PATH%\ssnowflake_ssm_rt.log +echo "[INFO] CLIENT_LOG_FILE_PATH=%CLIENT_LOG_FILE_PATH%" + +set CLIENT_KNOWN_SSM_FILE_PATH=%CLIENT_LOG_DIR_PATH%\rt_jenkins_log_known_ssm.txt +echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=%CLIENT_KNOWN_SSM_FILE_PATH%" + +REM To close log analyze, just set ENABLE_CLIENT_LOG_ANALYZE to not "true", 
e.g. "false". +set ENABLE_CLIENT_LOG_ANALYZE=true + +REM The new complex password we use for jenkins test +set SNOWFLAKE_TEST_PASSWORD_NEW="ThisIsRandomPassword123!" + +set LOG_PROPERTY_FILE=%GITHUB_WORKSPACE%\src\test\resources\logging.properties + +echo "[INFO] LOG_PROPERTY_FILE=%LOG_PROPERTY_FILE%" + +set CLIENT_DRIVER_NAME=JDBC + +powershell -Command "(Get-Content %LOG_PROPERTY_FILE%) | Foreach-Object { $_ -replace '^java.util.logging.FileHandler.pattern.*', 'java.util.logging.FileHandler.pattern = %CLIENT_LOG_FILE_PATH%' } | Set-Content %LOG_PROPERTY_FILE%" + +echo "[INFO] Create log directory" + +IF NOT EXIST %CLIENT_LOG_DIR_PATH% MD %CLIENT_LOG_DIR_PATH% 2>nul + +echo "[INFO] Delete ssm file" +IF EXIST "%CLIENT_KNOWN_SSM_FILE_PATH%" DEL /F /Q "%CLIENT_KNOWN_SSM_FILE_PATH%" + +echo "[INFO] Create ssm file" +echo.>"%CLIENT_KNOWN_SSM_FILE_PATH%" + +echo "[INFO] Finish log setup" +REM end setup log + +for /F "tokens=1,* delims==" %%i in ('set ^| findstr /I /R "^SNOWFLAKE_[^=]*$" ^| findstr /I /V /R "^SNOWFLAKE_PASS_[^=]*$" ^| sort') do ( + echo %%i=%%j +) + +echo [INFO] Starting hang_webserver.py 12345 +pushd %GITHUB_WORKSPACE%\ci\container +start /b python hang_webserver.py 12345 > hang_webserver.out 2>&1 +popd + +echo [INFO] Testing + +set MVNW_EXE=%GITHUB_WORKSPACE%\mvnw.cmd + +REM Avoid connection timeouts +set MAVEN_OPTS="-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120" +echo "MAVEN OPTIONS %MAVEN_OPTS%" + +REM Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched +cmd /c %MVNW_EXE% --batch-mode --show-version dependency:go-offline + +echo list = "%JDBC_TEST_CATEGORY%" +for %%a in ("%JDBC_TEST_CATEGORY:,=" "%") do ( + echo "Current category to execute" %%a + if /i %%a=="TestCategoryFips" ( + pushd FIPS + echo "[INFO] Run Fips tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + 
-Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + popd ) else ( + echo "[INFO] Run %%a tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DtestCategory=net.snowflake.client.category.%%a ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! 
equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + ) +) + +echo [INFO] Dropping schema %SNOWFLAKE_TEST_SCHEMA% +pushd %GITHUB_WORKSPACE%\ci\container +python drop_schema.py +popd diff --git a/dependencies/arrow-format-10.0.1.jar b/dependencies/arrow-format-10.0.1.jar index 5822d5a9f..26b30d66b 100644 Binary files a/dependencies/arrow-format-10.0.1.jar and b/dependencies/arrow-format-10.0.1.jar differ diff --git a/dependencies/arrow-memory-core-10.0.1.jar b/dependencies/arrow-memory-core-10.0.1.jar index 176e8e711..264c9e373 100644 Binary files a/dependencies/arrow-memory-core-10.0.1.jar and b/dependencies/arrow-memory-core-10.0.1.jar differ diff --git a/dependencies/arrow-memory-netty-10.0.1.jar b/dependencies/arrow-memory-netty-10.0.1.jar index 721e029c1..fa3ff089d 100644 Binary files a/dependencies/arrow-memory-netty-10.0.1.jar and b/dependencies/arrow-memory-netty-10.0.1.jar differ diff --git a/dependencies/arrow-memory-unsafe-10.0.1.jar b/dependencies/arrow-memory-unsafe-10.0.1.jar index 5960de229..45868d0e7 100644 Binary files a/dependencies/arrow-memory-unsafe-10.0.1.jar and b/dependencies/arrow-memory-unsafe-10.0.1.jar differ diff --git a/dependencies/arrow-vector-10.0.1.jar b/dependencies/arrow-vector-10.0.1.jar index efd86ab9a..fcc53e3f6 100644 Binary files a/dependencies/arrow-vector-10.0.1.jar and b/dependencies/arrow-vector-10.0.1.jar differ diff --git a/parent-pom.xml b/parent-pom.xml index ac2940dfd..466d4fa8e 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.15.2-SNAPSHOT + 3.17.1-SNAPSHOT pom @@ -28,6 +28,7 @@ 1.74 1.0.2.4 1.0.5 + 1.14.17 1.1 3.33.0 1.2 @@ -63,7 +64,7 @@ 1.3.6 2.2.0 4.11.0 - 4.1.100.Final + 4.1.111.Final 9.37.3 0.31.1 1.0-alpha-9-stable-1 @@ -482,12 +483,30 @@ ${mockito.version} test + + net.bytebuddy + byte-buddy + ${bytebuddy.version} + test + org.awaitility awaitility ${awaitility.version} test + + 
org.apache.maven.surefire + surefire-junit4 + ${version.plugin.surefire} + test + + + org.apache.maven.surefire + common-junit48 + ${version.plugin.surefire} + test + @@ -516,6 +535,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.dataformat + jackson-dataformat-toml + com.google.api gax @@ -725,5 +748,14 @@ org.awaitility awaitility + + + org.apache.maven.surefire + surefire-junit4 + + + org.apache.maven.surefire + common-junit48 + diff --git a/pom.xml b/pom.xml index a72cb7f43..d71b46639 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.15.2-SNAPSHOT + 3.17.1-SNAPSHOT ./parent-pom.xml ${artifactId} - 3.15.2-SNAPSHOT + 3.17.1-SNAPSHOT jar ${artifactId} @@ -746,25 +746,6 @@ - - org.codehaus.mojo - exec-maven-plugin - - - check-shaded-content - - exec - - verify - - ${basedir}/ci/scripts/check_content.sh - - -thin - - - - - @@ -886,13 +867,6 @@ org.objectweb ${shadeBase}.org.objectweb - - com.sun - ${shadeBase}.com.sun - - com.sun.jna.** - - io.netty ${shadeBase}.io.netty @@ -1094,22 +1068,6 @@ - - org.codehaus.mojo - exec-maven-plugin - - - check-shaded-content - - exec - - verify - - ${basedir}/ci/scripts/check_content.sh - - - - @@ -1238,6 +1196,75 @@ + + + check-content + + + !windows + + + !thin-jar + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + + exec + + verify + + ${basedir}/ci/scripts/check_content.sh + + + + + + + + + + check-content-thin + + + !windows + + + thin-jar + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + + exec + + verify + + ${basedir}/ci/scripts/check_content.sh + + -thin + + + + + + + + qa1IT diff --git a/src/main/java/net/snowflake/client/config/ConnectionParameters.java b/src/main/java/net/snowflake/client/config/ConnectionParameters.java new file mode 100644 index 000000000..5fa97ac91 --- /dev/null +++ 
b/src/main/java/net/snowflake/client/config/ConnectionParameters.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.config; + +import java.util.Properties; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +public class ConnectionParameters { + private final String url; + private final Properties params; + + public ConnectionParameters(String uri, Properties params) { + this.url = uri; + this.params = params; + } + + public String getUrl() { + return url; + } + + public Properties getParams() { + return params; + } +} diff --git a/src/main/java/net/snowflake/client/config/SFClientConfig.java b/src/main/java/net/snowflake/client/config/SFClientConfig.java index 1029b1167..a11071b1f 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfig.java +++ b/src/main/java/net/snowflake/client/config/SFClientConfig.java @@ -1,11 +1,20 @@ package net.snowflake.client.config; +import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; /** POJO class for Snowflake's client config. 
*/ public class SFClientConfig { + // Used to keep the unknown properties when deserializing + @JsonIgnore @JsonAnySetter private Map unknownParams = new LinkedHashMap<>(); + @JsonProperty("common") private CommonProps commonProps; @@ -33,6 +42,18 @@ public void setConfigFilePath(String configFilePath) { this.configFilePath = configFilePath; } + Set getUnknownParamKeys() { + Set unknownParamKeys = new LinkedHashSet<>(unknownParams.keySet()); + + if (!commonProps.unknownParams.isEmpty()) { + unknownParamKeys.addAll( + commonProps.unknownParams.keySet().stream() + .map(s -> "common:" + s) + .collect(Collectors.toCollection(LinkedHashSet::new))); + } + return unknownParamKeys; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -51,6 +72,9 @@ public int hashCode() { } public static class CommonProps { + // Used to keep the unknown properties when deserializing + @JsonIgnore @JsonAnySetter Map unknownParams = new LinkedHashMap<>(); + @JsonProperty("log_level") private String logLevel; diff --git a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java index 2f3ee3b91..a0ca0fa11 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java @@ -8,8 +8,11 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import net.snowflake.client.core.Constants; import net.snowflake.client.jdbc.SnowflakeDriver; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -25,41 +28,61 @@ public class SFClientConfigParser { * connection property. 2. Environment variable: SF_CLIENT_CONFIG_FILE containing full path to * sf_client_config file. 3. 
Searches for default config file name(sf_client_config.json under the * driver directory from where the driver gets loaded. 4. Searches for default config file - * name(sf_client_config.json) under user home directory 5. Searches for default config file - * name(sf_client_config.json) under tmp directory. + * name(sf_client_config.json) under user home directory. * * @param configFilePath SF_CLIENT_CONFIG_FILE parameter read from connection URL or connection * properties * @return SFClientConfig */ public static SFClientConfig loadSFClientConfig(String configFilePath) throws IOException { + if (configFilePath != null) { + logger.info("Attempting to enable easy logging with file path {}", configFilePath); + } String derivedConfigFilePath = null; if (configFilePath != null && !configFilePath.isEmpty()) { // 1. Try to read the file at configFilePath. + logger.info("Using config file specified from connection string: {}", configFilePath); derivedConfigFilePath = configFilePath; } else if (System.getenv().containsKey(SF_CLIENT_CONFIG_ENV_NAME)) { // 2. If SF_CLIENT_CONFIG_ENV_NAME is set, read from env. - derivedConfigFilePath = systemGetEnv(SF_CLIENT_CONFIG_ENV_NAME); + String filePath = systemGetEnv(SF_CLIENT_CONFIG_ENV_NAME); + logger.info("Using config file specified from environment variable: {}", filePath); + derivedConfigFilePath = filePath; } else { // 3. Read SF_CLIENT_CONFIG_FILE_NAME from where jdbc jar is loaded. String driverLocation = Paths.get(getConfigFilePathFromJDBCJarLocation(), SF_CLIENT_CONFIG_FILE_NAME).toString(); if (Files.exists(Paths.get(driverLocation))) { + logger.info("Using config file specified from driver directory: {}", driverLocation); derivedConfigFilePath = driverLocation; } else { // 4. Read SF_CLIENT_CONFIG_FILE_NAME if it is present in user home directory. 
String userHomeFilePath = Paths.get(systemGetProperty("user.home"), SF_CLIENT_CONFIG_FILE_NAME).toString(); if (Files.exists(Paths.get(userHomeFilePath))) { + logger.info("Using config file specified from home directory: {}", userHomeFilePath); derivedConfigFilePath = userHomeFilePath; } } } if (derivedConfigFilePath != null) { try { + checkConfigFilePermissions(derivedConfigFilePath); + File configFile = new File(derivedConfigFilePath); ObjectMapper objectMapper = new ObjectMapper(); SFClientConfig clientConfig = objectMapper.readValue(configFile, SFClientConfig.class); + logger.info( + "Reading values logLevel {} and logPath {} from client configuration", + clientConfig.getCommonProps().getLogLevel(), + clientConfig.getCommonProps().getLogPath()); + + Set unknownParams = clientConfig.getUnknownParamKeys(); + if (!unknownParams.isEmpty()) { + for (String unknownParam : unknownParams) { + logger.warn("Unknown field from config: {}", unknownParam); + } + } clientConfig.setConfigFilePath(derivedConfigFilePath); return clientConfig; @@ -99,6 +122,31 @@ && systemGetProperty("os.name").toLowerCase().startsWith("windows")) { } } + private static void checkConfigFilePermissions(String derivedConfigFilePath) throws IOException { + try { + if (Constants.getOS() != Constants.OS.WINDOWS) { + // Check permissions of config file + if (checkGroupOthersWritePermissions(derivedConfigFilePath)) { + String error = + String.format( + "Error due to other users having permission to modify the config file: %s", + derivedConfigFilePath); + // TODO: SNOW-1503722 to change warning log to throw an error instead + logger.warn(error); + } + } + } catch (IOException e) { + throw e; + } + } + + static Boolean checkGroupOthersWritePermissions(String configFilePath) throws IOException { + Set folderPermissions = + Files.getPosixFilePermissions(Paths.get(configFilePath)); + return folderPermissions.contains(PosixFilePermission.GROUP_WRITE) + || 
folderPermissions.contains(PosixFilePermission.OTHERS_WRITE); + } + static String convertToWindowsPath(String filePath) { // Find the Windows file path pattern: ex) C:\ or D:\ Pattern windowsFilePattern = Pattern.compile("[C-Z]:[\\\\/]"); @@ -110,6 +158,8 @@ static String convertToWindowsPath(String filePath) { filePath = filePath.substring(1); } else if (filePath.startsWith("file:\\")) { filePath = filePath.substring(6); + } else if (filePath.startsWith("nested:\\")) { + filePath = filePath.substring(8); } else if (filePath.startsWith("\\")) { filePath = filePath.substring(2); } else if (matcher.find() && matcher.start() != 0) { diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java new file mode 100644 index 000000000..405dd09db --- /dev/null +++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java @@ -0,0 +1,168 @@ +package net.snowflake.client.config; + +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetEnv; + +import com.fasterxml.jackson.dataformat.toml.TomlMapper; +import com.google.common.base.Strings; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import net.snowflake.client.core.Constants; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class SFConnectionConfigParser { + + private static final SFLogger logger = 
SFLoggerFactory.getLogger(SFConnectionConfigParser.class); + private static final TomlMapper mapper = new TomlMapper(); + public static final String SNOWFLAKE_HOME_KEY = "SNOWFLAKE_HOME"; + public static final String SNOWFLAKE_DIR = ".snowflake"; + public static final String SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY = + "SNOWFLAKE_DEFAULT_CONNECTION_NAME"; + public static final String DEFAULT = "default"; + public static final String SNOWFLAKE_TOKEN_FILE_PATH = "/snowflake/session/token"; + + private static Map loadDefaultConnectionConfiguration( + String defaultConnectionName) throws SnowflakeSQLException { + String configDirectory = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_HOME_KEY)) + .orElse(Paths.get(System.getProperty("user.home"), SNOWFLAKE_DIR).toString()); + Path configFilePath = Paths.get(configDirectory, "connections.toml"); + + if (Files.exists(configFilePath)) { + logger.debug( + "Reading connection parameters from file using key: {} []", + configFilePath, + defaultConnectionName); + Map parametersMap = readParametersMap(configFilePath); + Map defaultConnectionParametersMap = parametersMap.get(defaultConnectionName); + return defaultConnectionParametersMap; + } else { + logger.debug("Connection configuration file does not exist"); + return new HashMap<>(); + } + } + + private static Map readParametersMap(Path configFilePath) + throws SnowflakeSQLException { + try { + File file = new File(configFilePath.toUri()); + varifyFilePermissionSecure(configFilePath); + return mapper.readValue(file, Map.class); + } catch (IOException ex) { + throw new SnowflakeSQLException(ex, "Problem during reading a configuration file."); + } + } + + private static void varifyFilePermissionSecure(Path configFilePath) + throws IOException, SnowflakeSQLException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + PosixFileAttributeView posixFileAttributeView = + Files.getFileAttributeView(configFilePath, PosixFileAttributeView.class); + if 
(!posixFileAttributeView.readAttributes().permissions().stream() + .allMatch( + o -> + Arrays.asList(PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_READ) + .contains(o))) { + logger.error( + "Reading from file {} is not safe because of insufficient permissions", configFilePath); + throw new SnowflakeSQLException( + String.format( + "Reading from file %s is not safe because of insufficient permissions", + configFilePath)); + } + } + } + + public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { + String defaultConnectionName = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); + Map fileConnectionConfiguration = + loadDefaultConnectionConfiguration(defaultConnectionName); + + if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { + Properties conectionProperties = new Properties(); + conectionProperties.putAll(fileConnectionConfiguration); + + String url = createUrl(fileConnectionConfiguration); + logger.debug("Url created using parameters from connection configuration file: {}", url); + + if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) + && fileConnectionConfiguration.get("token") == null) { + Path path = + Paths.get( + Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) + .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); + logger.debug("Token used in connect is read from file: {}", path); + try { + String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); + if (!token.isEmpty()) { + putPropertyIfNotNull(conectionProperties, "token", token.trim()); + } else { + logger.warn("The token has empty value"); + } + } catch (IOException ex) { + throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); + } + } + return new ConnectionParameters(url, conectionProperties); + } else { + return null; + } + } + + private static String createUrl(Map fileConnectionConfiguration) + 
throws SnowflakeSQLException { + Optional maybeAccount = Optional.ofNullable(fileConnectionConfiguration.get("account")); + Optional maybeHost = Optional.ofNullable(fileConnectionConfiguration.get("host")); + if (maybeAccount.isPresent() + && maybeHost.isPresent() + && !maybeHost.get().contains(maybeAccount.get())) { + logger.warn( + String.format( + "Inconsistent host and account values in file configuration. ACCOUNT: {} , HOST: {}. The host value will be used.", + maybeAccount.get(), + maybeHost.get())); + } + String host = + maybeHost.orElse( + maybeAccount + .map(acnt -> String.format("%s.snowflakecomputing.com", acnt)) + .orElse(null)); + if (host == null || host.isEmpty()) { + logger.warn("Neither host nor account is specified in connection parameters"); + throw new SnowflakeSQLException( + "Unable to connect because neither host nor account is specified in connection parameters"); + } + logger.debug("Host created using parameters from connection configuration file: {}", host); + String port = fileConnectionConfiguration.get("port"); + String protocol = fileConnectionConfiguration.get("protocol"); + if (Strings.isNullOrEmpty(port)) { + if ("https".equals(protocol)) { + port = "443"; + } else { + port = "80"; + } + } + return String.format("jdbc:snowflake://%s:%s", host, port); + } + + private static void putPropertyIfNotNull(Properties props, Object key, Object value) { + if (key != null && value != null) { + props.put(key, value); + } + } +} diff --git a/src/main/java/net/snowflake/client/core/ArrowSqlInput.java b/src/main/java/net/snowflake/client/core/ArrowSqlInput.java index 61cf39674..f7093504e 100644 --- a/src/main/java/net/snowflake/client/core/ArrowSqlInput.java +++ b/src/main/java/net/snowflake/client/core/ArrowSqlInput.java @@ -4,7 +4,6 @@ package net.snowflake.client.core; -import static net.snowflake.client.core.SFResultSet.logger; import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException; import java.math.BigDecimal; @@ 
-22,12 +21,15 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.core.structs.SQLDataCreationHelper; import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.ThrowingBiFunction; import org.apache.arrow.vector.util.JsonStringArrayList; import org.apache.arrow.vector.util.JsonStringHashMap; @SnowflakeJdbcInternalApi public class ArrowSqlInput extends BaseSqlInput { + private static final SFLogger logger = SFLoggerFactory.getLogger(ArrowSqlInput.class); private final Map input; private int currentIndex = 0; diff --git a/src/main/java/net/snowflake/client/core/CredentialManager.java b/src/main/java/net/snowflake/client/core/CredentialManager.java index 0cd91f9ce..a5b919d3d 100644 --- a/src/main/java/net/snowflake/client/core/CredentialManager.java +++ b/src/main/java/net/snowflake/client/core/CredentialManager.java @@ -40,6 +40,7 @@ private void initSecureStorageManager() { /** Helper function for tests to go back to normal settings. 
*/ void resetSecureStorageManager() { + logger.debug("Resetting the secure storage manager"); initSecureStorageManager(); } @@ -49,6 +50,7 @@ void resetSecureStorageManager() { * @param manager */ void injectSecureStorageManager(SecureStorageManager manager) { + logger.debug("Injecting secure storage manager"); secureStorageManager = manager; } @@ -66,6 +68,10 @@ public static CredentialManager getInstance() { * @param loginInput login input to attach id token */ void fillCachedIdToken(SFLoginInput loginInput) throws SFException { + logger.debug( + "Looking for cached id token for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); fillCachedCredential(loginInput, ID_TOKEN); } @@ -75,6 +81,10 @@ void fillCachedIdToken(SFLoginInput loginInput) throws SFException { * @param loginInput login input to attach mfa token */ void fillCachedMfaToken(SFLoginInput loginInput) throws SFException { + logger.debug( + "Looking for cached mfa token for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); fillCachedCredential(loginInput, MFA_TOKEN); } @@ -106,16 +116,26 @@ synchronized void fillCachedCredential(SFLoginInput loginInput, String credType) } if (cred == null) { - logger.debug("retrieved %s is null", credType); + logger.debug("Retrieved {} is null", credType); } // cred can be null if (credType == ID_TOKEN) { + logger.debug( + "Setting {}id token for user: {}, host: {}", + cred == null ? "null " : "", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); loginInput.setIdToken(cred); } else if (credType == MFA_TOKEN) { + logger.debug( + "Setting {}mfa token for user: {}, host: {}", + cred == null ? 
"null " : "", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); loginInput.setMfaToken(cred); } else { - logger.debug("unrecognized type %s for local cached credential", credType); + logger.debug("Unrecognized type {} for local cached credential", credType); } return; } @@ -127,6 +147,10 @@ synchronized void fillCachedCredential(SFLoginInput loginInput, String credType) * @param loginOutput loginOutput to denote to the cache */ void writeIdToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SFException { + logger.debug( + "Caching id token in a secure storage for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); writeTemporaryCredential(loginInput, loginOutput.getIdToken(), ID_TOKEN); } @@ -137,6 +161,10 @@ void writeIdToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SFE * @param loginOutput loginOutput to denote to the cache */ void writeMfaToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SFException { + logger.debug( + "Caching mfa token in a secure storage for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); writeTemporaryCredential(loginInput, loginOutput.getMfaToken(), MFA_TOKEN); } @@ -150,7 +178,7 @@ void writeMfaToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SF synchronized void writeTemporaryCredential(SFLoginInput loginInput, String cred, String credType) throws SFException { if (Strings.isNullOrEmpty(cred)) { - logger.debug("no %s is given.", credType); + logger.debug("No {} is given.", credType); return; // no credential } @@ -173,11 +201,15 @@ synchronized void writeTemporaryCredential(SFLoginInput loginInput, String cred, /** Delete the id token cache */ void deleteIdTokenCache(String host, String user) { + logger.debug( + "Removing cached id token from a secure storage for user: {}, host: {}", user, host); deleteTemporaryCredential(host, user, ID_TOKEN); } /** Delete the mfa token cache */ void 
deleteMfaTokenCache(String host, String user) { + logger.debug( + "Removing cached mfa token from a secure storage for user: {}, host: {}", user, host); deleteTemporaryCredential(host, user, MFA_TOKEN); } diff --git a/src/main/java/net/snowflake/client/core/EventHandler.java b/src/main/java/net/snowflake/client/core/EventHandler.java index 11acd00fa..e6eccc060 100644 --- a/src/main/java/net/snowflake/client/core/EventHandler.java +++ b/src/main/java/net/snowflake/client/core/EventHandler.java @@ -263,7 +263,7 @@ public void dumpLogBuffer(String identifier) { cleanupSfDumps(true); String logDumpPath = - logDumpPathPrefix + "/" + LOG_DUMP_FILE_NAME + identifier + LOG_DUMP_FILE_EXT; + logDumpPathPrefix + File.separator + LOG_DUMP_FILE_NAME + identifier + LOG_DUMP_FILE_EXT; if (!disableCompression) { logDumpPath += LOG_DUMP_COMP_EXT; diff --git a/src/main/java/net/snowflake/client/core/EventUtil.java b/src/main/java/net/snowflake/client/core/EventUtil.java index e4cde5502..d45cd0676 100644 --- a/src/main/java/net/snowflake/client/core/EventUtil.java +++ b/src/main/java/net/snowflake/client/core/EventUtil.java @@ -6,6 +6,7 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import java.io.File; import java.util.concurrent.atomic.AtomicReference; /** @@ -80,7 +81,7 @@ public static void triggerStateTransition(BasicEvent.QueryState newState, String } public static String getDumpPathPrefix() { - return DUMP_PATH_PREFIX + "/" + DUMP_SUBDIR; + return DUMP_PATH_PREFIX + File.separator + DUMP_SUBDIR; } public static String getDumpFileId() { diff --git a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java index d4dd1ecf0..91d45f29f 100644 --- a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java +++ b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java @@ -5,24 +5,18 @@ import com.google.common.base.Strings; import 
net.minidev.json.JSONObject; -import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.util.TimeMeasurement; public class ExecTimeTelemetryData { - private long queryStart; - private long bindStart; - private long bindEnd; - private long gzipStart; - private long gzipEnd; - private long httpClientStart; - private long httpClientEnd; - private long responseIOStreamStart; - private long responseIOStreamEnd; - private long processResultChunkStart; - private long processResultChunkEnd; - private long createResultSetStart; - private long createResultSetEnd; - private long queryEnd; + private final TimeMeasurement query = new TimeMeasurement(); + private final TimeMeasurement bind = new TimeMeasurement(); + private final TimeMeasurement gzip = new TimeMeasurement(); + private final TimeMeasurement httpClient = new TimeMeasurement(); + private final TimeMeasurement responseIOStream = new TimeMeasurement(); + private final TimeMeasurement processResultChunk = new TimeMeasurement(); + private final TimeMeasurement createResultSet = new TimeMeasurement(); + private String batchId; private String queryId; private String queryFunction; @@ -34,11 +28,10 @@ public class ExecTimeTelemetryData { private String requestId; public ExecTimeTelemetryData(String queryFunction, String batchId) { - if (TelemetryService.getInstance().isHTAPEnabled()) { - this.queryStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); - this.queryFunction = queryFunction; - this.batchId = batchId; - } else { + this.query.setStart(); + this.queryFunction = queryFunction; + this.batchId = batchId; + if (!TelemetryService.getInstance().isHTAPEnabled()) { this.sendData = false; } } @@ -48,128 +41,74 @@ public ExecTimeTelemetryData() { } public void setBindStart() { - if (!this.sendData) { - return; - } - this.bindStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + bind.setStart(); } public void setOCSPStatus(Boolean ocspEnabled) 
{ - if (!this.sendData) { - return; - } this.ocspEnabled = ocspEnabled; } public void setBindEnd() { - if (!this.sendData) { - return; - } - this.bindEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + this.bind.setEnd(); } public void setHttpClientStart() { - if (!this.sendData) { - return; - } - this.httpClientStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + httpClient.setStart(); } public void setHttpClientEnd() { - if (!this.sendData) { - return; - } - this.httpClientEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + httpClient.setEnd(); } public void setGzipStart() { - if (!this.sendData) { - return; - } - this.gzipStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + gzip.setStart(); } public void setGzipEnd() { - if (!this.sendData) { - return; - } - this.gzipEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + gzip.setEnd(); } public void setQueryEnd() { - if (!this.sendData) { - return; - } - this.queryEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + query.setEnd(); } public void setQueryId(String queryId) { - if (!this.sendData) { - return; - } this.queryId = queryId; } public void setProcessResultChunkStart() { - if (!this.sendData) { - return; - } - this.processResultChunkStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + processResultChunk.setStart(); } public void setProcessResultChunkEnd() { - if (!this.sendData) { - return; - } - this.processResultChunkEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + processResultChunk.setEnd(); } public void setResponseIOStreamStart() { - if (!this.sendData) { - return; - } - this.responseIOStreamStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + responseIOStream.setStart(); } public void setResponseIOStreamEnd() { - if (!this.sendData) { - return; - } - this.responseIOStreamEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + responseIOStream.setEnd(); } public void setCreateResultSetStart() { - if (!this.sendData) { - return; - } - this.createResultSetStart = 
SnowflakeUtil.getEpochTimeInMicroSeconds(); + createResultSet.setStart(); } public void setCreateResultSetEnd() { - if (!this.sendData) { - return; - } - this.createResultSetEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + createResultSet.setEnd(); } public void incrementRetryCount() { - if (!this.sendData) { - return; - } this.retryCount++; } public void setRequestId(String requestId) { - if (!this.sendData) { - return; - } this.requestId = requestId; } public void addRetryLocation(String location) { - if (!this.sendData) { - return; - } if (Strings.isNullOrEmpty(this.retryLocations)) { this.retryLocations = location; } else { @@ -177,26 +116,46 @@ public void addRetryLocation(String location) { } } + long getTotalQueryTime() { + return query.getTime(); + } + + long getResultProcessingTime() { + if (createResultSet.getEnd() == 0 || processResultChunk.getStart() == 0) { + return -1; + } + + return createResultSet.getEnd() - processResultChunk.getStart(); + } + + long getHttpRequestTime() { + return httpClient.getTime(); + } + + long getResultSetCreationTime() { + return createResultSet.getTime(); + } + public String generateTelemetry() { if (this.sendData) { String eventType = "ExecutionTimeRecord"; JSONObject value = new JSONObject(); String valueStr; value.put("eventType", eventType); - value.put("QueryStart", this.queryStart); - value.put("BindStart", this.bindStart); - value.put("BindEnd", this.bindEnd); - value.put("GzipStart", this.gzipStart); - value.put("GzipEnd", this.gzipEnd); - value.put("HttpClientStart", this.httpClientStart); - value.put("HttpClientEnd", this.httpClientEnd); - value.put("ResponseIOStreamStart", this.responseIOStreamStart); - value.put("ResponseIOStreamEnd", this.responseIOStreamEnd); - value.put("ProcessResultChunkStart", this.processResultChunkStart); - value.put("ProcessResultChunkEnd", this.processResultChunkEnd); - value.put("CreateResultSetStart", this.createResultSetStart); - value.put("CreatResultSetEnd", 
this.createResultSetEnd); - value.put("QueryEnd", this.queryEnd); + value.put("QueryStart", this.query.getStart()); + value.put("BindStart", this.bind.getStart()); + value.put("BindEnd", this.bind.getEnd()); + value.put("GzipStart", this.gzip.getStart()); + value.put("GzipEnd", this.gzip.getEnd()); + value.put("HttpClientStart", this.httpClient.getStart()); + value.put("HttpClientEnd", this.httpClient.getEnd()); + value.put("ResponseIOStreamStart", this.responseIOStream.getStart()); + value.put("ResponseIOStreamEnd", this.responseIOStream.getEnd()); + value.put("ProcessResultChunkStart", this.processResultChunk.getStart()); + value.put("ProcessResultChunkEnd", this.processResultChunk.getEnd()); + value.put("CreateResultSetStart", this.createResultSet.getStart()); + value.put("CreateResultSetEnd", this.createResultSet.getEnd()); + value.put("QueryEnd", this.query.getEnd()); value.put("BatchID", this.batchId); value.put("QueryID", this.queryId); value.put("RequestID", this.requestId); @@ -204,9 +163,8 @@ public String generateTelemetry() { value.put("RetryCount", this.retryCount); value.put("RetryLocations", this.retryLocations); value.put("ocspEnabled", this.ocspEnabled); - value.put("ElapsedQueryTime", (this.queryEnd - this.queryStart)); - value.put( - "ElapsedResultProcessTime", (this.createResultSetEnd - this.processResultChunkStart)); + value.put("ElapsedQueryTime", getTotalQueryTime()); + value.put("ElapsedResultProcessTime", getResultProcessingTime()); value.put("Urgent", true); valueStr = value.toString(); // Avoid adding exception stacktrace to user logs. 
TelemetryService.getInstance().logExecutionTimeTelemetryEvent(value, eventType); @@ -214,4 +172,30 @@ public String generateTelemetry() { } return ""; } + + @SnowflakeJdbcInternalApi + public String getLogString() { + return "Query id: " + + this.queryId + + ", query function: " + + this.queryFunction + + ", batch id: " + + this.batchId + + ", request id: " + + this.requestId + + ", total query time: " + + getTotalQueryTime() / 1000 + + " ms" + + ", result processing time: " + + getResultProcessingTime() / 1000 + + " ms" + + ", result set creation time: " + + getResultSetCreationTime() / 1000 + + " ms" + + ", http request time: " + + getHttpRequestTime() / 1000 + + " ms" + + ", retry count: " + + this.retryCount; + } } diff --git a/src/main/java/net/snowflake/client/core/FieldSchemaCreator.java b/src/main/java/net/snowflake/client/core/FieldSchemaCreator.java new file mode 100644 index 000000000..b61dbd1f8 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/FieldSchemaCreator.java @@ -0,0 +1,96 @@ +package net.snowflake.client.core; + +import java.sql.SQLException; +import java.sql.Types; +import java.util.Optional; +import net.snowflake.client.jdbc.BindingParameterMetadata; +import net.snowflake.client.jdbc.SnowflakeColumn; +import net.snowflake.client.jdbc.SnowflakeType; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class FieldSchemaCreator { + static final SFLogger logger = SFLoggerFactory.getLogger(FieldSchemaCreator.class); + public static final int MAX_TEXT_COLUMN_SIZE = 134217728; + public static final int MAX_BINARY_COLUMN_SIZE = 67108864; + + public static BindingParameterMetadata buildSchemaForText( + String fieldName, Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse("text")) + .withLength(maybeColumn.map(cl -> 
cl.length()).orElse(MAX_TEXT_COLUMN_SIZE)) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .build(); + } + + public static BindingParameterMetadata buildSchemaForBytesType( + String fieldName, Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse("binary")) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .withLength(maybeColumn.map(cl -> cl.precision()).orElse(MAX_TEXT_COLUMN_SIZE)) + .withByteLength(maybeColumn.map(cl -> cl.byteLength()).orElse(MAX_BINARY_COLUMN_SIZE)) + .build(); + } + + public static BindingParameterMetadata buildSchemaTypeAndNameOnly( + String fieldName, String type, Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse(type)) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .build(); + } + + public static BindingParameterMetadata buildSchemaWithScaleAndPrecision( + String fieldName, + String type, + int scale, + int precision, + Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse(type)) + .withScale(maybeColumn.map(cl -> cl.scale()).filter(i -> i > 0).orElse(scale)) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .withPrecision(maybeColumn.map(cl -> cl.precision()).filter(i -> i > 0).orElse(precision)) + .build(); + } + + public static BindingParameterMetadata buildBindingSchemaForType(int baseType) + throws SQLException { + return buildBindingSchemaForType(baseType, true); + } + + public static BindingParameterMetadata 
buildBindingSchemaForType(int baseType, boolean addName) + throws SQLException { + String name = addName ? SnowflakeType.javaTypeToSFType(baseType, null).name() : null; + switch (baseType) { + case Types.VARCHAR: + case Types.CHAR: + return FieldSchemaCreator.buildSchemaForText(name, Optional.empty()); + case Types.FLOAT: + case Types.DOUBLE: + case Types.DECIMAL: + return FieldSchemaCreator.buildSchemaWithScaleAndPrecision( + name, "real", 9, 38, Optional.empty()); + case Types.NUMERIC: + case Types.INTEGER: + case Types.SMALLINT: + case Types.TINYINT: + case Types.BIGINT: + return FieldSchemaCreator.buildSchemaWithScaleAndPrecision( + null, "fixed", 0, 38, Optional.empty()); + case Types.BOOLEAN: + return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "boolean", Optional.empty()); + case Types.DATE: + return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "date", Optional.empty()); + case Types.TIMESTAMP: + case Types.TIME: + return FieldSchemaCreator.buildSchemaWithScaleAndPrecision( + name, "timestamp", 9, 0, Optional.empty()); + default: + logger.error("Could not create schema for type : " + baseType); + throw new SQLException("Could not create schema for type : " + baseType); + } + } +} diff --git a/src/main/java/net/snowflake/client/core/FileCacheManager.java b/src/main/java/net/snowflake/client/core/FileCacheManager.java index e635796c0..328aecc9c 100644 --- a/src/main/java/net/snowflake/client/core/FileCacheManager.java +++ b/src/main/java/net/snowflake/client/core/FileCacheManager.java @@ -28,7 +28,7 @@ import net.snowflake.client.log.SFLoggerFactory; class FileCacheManager { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(FileCacheManager.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(FileCacheManager.class); /** Object mapper for JSON encoding and decoding */ private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); @@ -87,6 +87,7 @@ void overrideCacheFile(File 
newCacheFile) { this.cacheFile = newCacheFile; this.cacheDir = newCacheFile.getParentFile(); this.baseCacheFileName = newCacheFile.getName(); + FileUtil.logFileUsage(cacheFile, "Override cache file", true); } FileCacheManager build() { @@ -102,8 +103,8 @@ FileCacheManager build() { ? systemGetEnv(this.cacheDirectoryEnvironmentVariable) : null; } catch (Throwable ex) { - LOGGER.debug( - "Cannot get environment variable for cache directory, " + "skip using cache", false); + logger.debug( + "Cannot get environment variable for cache directory, skip using cache", false); // In Boomi cloud, System.getenv is not allowed due to policy, // so we catch the exception and skip cache completely return this; @@ -122,7 +123,7 @@ FileCacheManager build() { // Checking if home directory is writable. File homeFile = new File(homeDir); if (!homeFile.canWrite()) { - LOGGER.debug("Home directory not writeable, using tmpdir", false); + logger.debug("Home directory not writeable, using tmpdir", false); homeDir = systemGetProperty("java.io.tmpdir"); } } @@ -142,11 +143,11 @@ FileCacheManager build() { } if (!this.cacheDir.mkdirs() && !this.cacheDir.exists()) { - LOGGER.debug( + logger.debug( "Cannot create the cache directory {}. Giving up.", this.cacheDir.getAbsolutePath()); return this; } - LOGGER.debug("Verified Directory {}", this.cacheDir.getAbsolutePath()); + logger.debug("Verified Directory {}", this.cacheDir.getAbsolutePath()); File cacheFileTmp = new File(this.cacheDir, this.baseCacheFileName).getAbsoluteFile(); try { @@ -155,15 +156,16 @@ FileCacheManager build() { // In this particular case, it doesn't matter as long as the file is // writable. 
if (cacheFileTmp.createNewFile()) { - LOGGER.debug("Successfully created a cache file {}", cacheFileTmp); + logger.debug("Successfully created a cache file {}", cacheFileTmp); } else { - LOGGER.debug("Cache file already exists {}", cacheFileTmp); + logger.debug("Cache file already exists {}", cacheFileTmp); } + FileUtil.logFileUsage(cacheFileTmp, "Cache file creation", false); this.cacheFile = cacheFileTmp.getCanonicalFile(); this.cacheLockFile = new File(this.cacheFile.getParentFile(), this.baseCacheFileName + ".lck"); } catch (IOException | SecurityException ex) { - LOGGER.info("Failed to touch the cache file. Ignored. {}", cacheFileTmp.getAbsoluteFile()); + logger.info("Failed to touch the cache file. Ignored. {}", cacheFileTmp.getAbsoluteFile()); } return this; } @@ -176,25 +178,26 @@ JsonNode readCacheFile() { } try { if (!cacheFile.exists()) { - LOGGER.debug("Cache file doesn't exists. File: {}", cacheFile); + logger.debug("Cache file doesn't exists. File: {}", cacheFile); return null; } try (Reader reader = new InputStreamReader(new FileInputStream(cacheFile), DEFAULT_FILE_ENCODING)) { + FileUtil.logFileUsage(cacheFile, "Read cache", false); return OBJECT_MAPPER.readTree(reader); } } catch (IOException ex) { - LOGGER.debug("Failed to read the cache file. No worry. File: {}, Err: {}", cacheFile, ex); + logger.debug("Failed to read the cache file. No worry. File: {}, Err: {}", cacheFile, ex); } return null; } void writeCacheFile(JsonNode input) { - LOGGER.debug("Writing cache file. File={}", cacheFile); + logger.debug("Writing cache file. File: {}", cacheFile); if (cacheFile == null || !tryLockCacheFile()) { // no cache file or it failed to lock file - LOGGER.debug( + logger.debug( "No cache file exists or failed to lock the file. 
Skipping writing the cache", false); return; } @@ -205,19 +208,20 @@ void writeCacheFile(JsonNode input) { } try (Writer writer = new OutputStreamWriter(new FileOutputStream(cacheFile), DEFAULT_FILE_ENCODING)) { + FileUtil.logFileUsage(cacheFile, "Write to cache", false); writer.write(input.toString()); } } catch (IOException ex) { - LOGGER.debug("Failed to write the cache file. File: {}", cacheFile); + logger.debug("Failed to write the cache file. File: {}", cacheFile); } finally { if (!unlockCacheFile()) { - LOGGER.debug("Failed to unlock cache file", false); + logger.debug("Failed to unlock cache file", false); } } } void deleteCacheFile() { - LOGGER.debug("Deleting cache file. File={}, Lock File={}", cacheFile, cacheLockFile); + logger.debug("Deleting cache file. File: {}, lock file: {}", cacheFile, cacheLockFile); if (cacheFile == null) { return; @@ -225,7 +229,7 @@ void deleteCacheFile() { unlockCacheFile(); if (!cacheFile.delete()) { - LOGGER.debug("Failed to delete the file: {}", cacheFile); + logger.debug("Failed to delete the file: {}", cacheFile); } } @@ -246,7 +250,7 @@ private boolean tryLockCacheFile() { ++cnt; } if (!locked) { - LOGGER.debug("Failed to lock the cache file.", false); + logger.debug("Failed to lock the cache file.", false); } return locked; } @@ -276,7 +280,7 @@ private boolean checkCacheLockFile() { if (!cacheLockFile.exists() && cacheFileTs > 0 && currentTime - this.cacheExpirationInMilliseconds <= cacheFileTs) { - LOGGER.debug("No cache file lock directory exists and cache file is up to date.", false); + logger.debug("No cache file lock directory exists and cache file is up to date.", false); return true; } @@ -288,13 +292,13 @@ private boolean checkCacheLockFile() { if (lockFileTs < currentTime - this.cacheFileLockExpirationInMilliseconds) { // old lock file if (!cacheLockFile.delete()) { - LOGGER.debug("Failed to delete the directory. Dir: {}", cacheLockFile); + logger.debug("Failed to delete the directory. 
Dir: {}", cacheLockFile); return false; } - LOGGER.debug("Deleted the cache lock directory, because it was old.", false); + logger.debug("Deleted the cache lock directory, because it was old.", false); return currentTime - this.cacheExpirationInMilliseconds <= cacheFileTs; } - LOGGER.debug("Failed to lock the file. Ignored.", false); + logger.debug("Failed to lock the file. Ignored.", false); return false; } @@ -305,7 +309,7 @@ private boolean checkCacheLockFile() { */ private static long fileCreationTime(File targetFile) { if (!targetFile.exists()) { - LOGGER.debug("File not exists. File: {}", targetFile); + logger.debug("File not exists. File: {}", targetFile); return -1; } try { @@ -313,8 +317,12 @@ private static long fileCreationTime(File targetFile) { BasicFileAttributes attr = Files.readAttributes(cacheFileLockPath, BasicFileAttributes.class); return attr.creationTime().toMillis(); } catch (IOException ex) { - LOGGER.debug("Failed to get creation time. File/Dir: {}, Err: {}", targetFile, ex); + logger.debug("Failed to get creation time. 
File/Dir: {}, Err: {}", targetFile, ex); } return -1; } + + String getCacheFilePath() { + return cacheFile.getAbsolutePath(); + } } diff --git a/src/main/java/net/snowflake/client/core/FileUtil.java b/src/main/java/net/snowflake/client/core/FileUtil.java new file mode 100644 index 000000000..3ae68909b --- /dev/null +++ b/src/main/java/net/snowflake/client/core/FileUtil.java @@ -0,0 +1,78 @@ +package net.snowflake.client.core; + +import com.google.common.base.Strings; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Arrays; +import java.util.Collection; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class FileUtil { + private static final SFLogger logger = SFLoggerFactory.getLogger(FileUtil.class); + private static final Collection WRITE_BY_OTHERS = + Arrays.asList(PosixFilePermission.GROUP_WRITE, PosixFilePermission.OTHERS_WRITE); + private static final Collection READ_BY_OTHERS = + Arrays.asList(PosixFilePermission.GROUP_READ, PosixFilePermission.OTHERS_READ); + + public static void logFileUsage(Path filePath, String context, boolean logReadAccess) { + logger.info("{}Accessing file: {}", getContextStr(context), filePath); + logWarnWhenAccessibleByOthers(filePath, context, logReadAccess); + } + + public static void logFileUsage(File file, String context, boolean logReadAccess) { + logFileUsage(file.toPath(), context, logReadAccess); + } + + public static void logFileUsage(String stringPath, String context, boolean logReadAccess) { + Path path = Paths.get(stringPath); + logFileUsage(path, context, logReadAccess); + } + + private static void logWarnWhenAccessibleByOthers( + Path filePath, String context, boolean logReadAccess) { + // we do not check the permissions for Windows + if (Constants.getOS() == Constants.OS.WINDOWS) { + 
return; + } + + try { + Collection filePermissions = Files.getPosixFilePermissions(filePath); + logger.debug( + "{}File {} access rights: {}", getContextStr(context), filePath, filePermissions); + + boolean isWritableByOthers = isPermPresent(filePermissions, WRITE_BY_OTHERS); + boolean isReadableByOthers = isPermPresent(filePermissions, READ_BY_OTHERS); + + if (isWritableByOthers || (isReadableByOthers && logReadAccess)) { + logger.warn( + "{}File {} is accessible by others to:{}{}", + getContextStr(context), + filePath, + isReadableByOthers && logReadAccess ? " read" : "", + isWritableByOthers ? " write" : ""); + } + } catch (IOException e) { + logger.warn( + "{}Unable to access the file to check the permissions: {}. Error: {}", + getContextStr(context), + filePath, + e); + } + } + + private static boolean isPermPresent( + Collection filePerms, Collection permsToCheck) + throws IOException { + return filePerms.stream().anyMatch(permsToCheck::contains); + } + + private static String getContextStr(String context) { + return Strings.isNullOrEmpty(context) ? "" : context + ": "; + } +} diff --git a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java index 152489b91..25ba5f946 100644 --- a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java +++ b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java @@ -23,7 +23,7 @@ public class HeartbeatBackground implements Runnable { private static HeartbeatBackground singleton = new HeartbeatBackground(); /** The logger. 
*/ - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(HeartbeatBackground.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(HeartbeatBackground.class); // default master token validity (in seconds) is 4 hours private long masterTokenValidityInSecs = 4 * 3600; @@ -79,7 +79,7 @@ protected synchronized void addSession( this.heartBeatIntervalInSecs = masterTokenValidityInSecs / 4; } - LOGGER.debug( + logger.debug( "update heartbeat interval" + " from {} to {}", oldHeartBeatIntervalInSecs, this.heartBeatIntervalInSecs); @@ -96,7 +96,7 @@ protected synchronized void addSession( * JVM from exiting. */ if (this.scheduler == null) { - LOGGER.debug("create heartbeat thread pool", false); + logger.debug("create heartbeat thread pool", false); this.scheduler = Executors.newScheduledThreadPool( 1, @@ -113,19 +113,19 @@ public Thread newThread(Runnable runnable) { // schedule a heartbeat task if none exists if (heartbeatFuture == null) { - LOGGER.debug("schedule heartbeat task", false); + logger.debug("Schedule heartbeat task", false); this.scheduleHeartbeat(); } // or reschedule if the master token validity has been reduced (rare event) else if (requireReschedule) { - LOGGER.debug("Cancel existing heartbeat task", false); + logger.debug("Cancel existing heartbeat task", false); // Cancel existing task if not started yet and reschedule if (heartbeatFuture.cancel(false)) { - LOGGER.debug("Canceled existing heartbeat task, reschedule", false); + logger.debug("Canceled existing heartbeat task, reschedule", false); this.scheduleHeartbeat(); } else { - LOGGER.debug("Failed to cancel existing heartbeat task", false); + logger.debug("Failed to cancel existing heartbeat task", false); } } } @@ -155,7 +155,7 @@ private void scheduleHeartbeat() { */ long initialDelay = Math.max(heartBeatIntervalInSecs - elapsedSecsSinceLastHeartBeat, 0); - LOGGER.debug("schedule heartbeat task with initial delay of {} seconds", initialDelay); + 
logger.debug("Schedule heartbeat task with initial delay of {} seconds", initialDelay); // Creates and executes a periodic action to send heartbeats this.heartbeatFuture = this.scheduler.schedule(this, initialDelay, TimeUnit.SECONDS); @@ -191,7 +191,7 @@ public void run() { try { session.heartbeat(); } catch (Throwable ex) { - LOGGER.error("heartbeat error - message=" + ex.getMessage(), ex); + logger.error("Heartbeat error - message=" + ex.getMessage(), ex); } } @@ -203,11 +203,11 @@ public void run() { synchronized (this) { // schedule next heartbeat if (sessions.size() > 0) { - LOGGER.debug("schedule next heartbeat run", false); + logger.debug("Schedule next heartbeat run", false); scheduleHeartbeat(); } else { - LOGGER.debug("no need for heartbeat since no more sessions", false); + logger.debug("No need for heartbeat since no more sessions", false); // no need to heartbeat if no more session this.heartbeatFuture = null; diff --git a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java index 0ca3df8b0..f65b9e29d 100644 --- a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java +++ b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java @@ -150,4 +150,35 @@ public HttpProtocol getProxyHttpProtocol() { public Boolean getGzipDisabled() { return gzipDisabled; } + + @Override + public String toString() { + return "HttpClientSettingsKey[" + + "ocspMode=" + + ocspMode + + ", useProxy=" + + useProxy + + ", proxyHost='" + + proxyHost + + '\'' + + ", proxyPort=" + + proxyPort + + ", nonProxyHosts='" + + nonProxyHosts + + '\'' + + ", proxyUser='" + + proxyUser + + '\'' + + ", proxyPassword is " + + (proxyPassword.isEmpty() ? 
"not set" : "set") + + ", proxyProtocol='" + + proxyProtocol + + '\'' + + ", userAgentSuffix='" + + userAgentSuffix + + '\'' + + ", gzipDisabled=" + + gzipDisabled + + ']'; + } } diff --git a/src/main/java/net/snowflake/client/core/HttpUtil.java b/src/main/java/net/snowflake/client/core/HttpUtil.java index 84f31c8e8..f67031102 100644 --- a/src/main/java/net/snowflake/client/core/HttpUtil.java +++ b/src/main/java/net/snowflake/client/core/HttpUtil.java @@ -39,7 +39,9 @@ import net.snowflake.client.log.ArgSupplier; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.log.SFLoggerUtil; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; import org.apache.http.HttpHost; @@ -131,7 +133,7 @@ public static long getDownloadedConditionTimeoutInSeconds() { public static void closeExpiredAndIdleConnections() { if (connectionManager != null) { synchronized (connectionManager) { - logger.debug("connection pool stats: {}", connectionManager.getTotalStats()); + logger.debug("Connection pool stats: {}", connectionManager.getTotalStats()); connectionManager.closeExpiredConnections(); connectionManager.closeIdleConnections(DEFAULT_IDLE_CONNECTION_TIMEOUT, TimeUnit.SECONDS); } @@ -196,8 +198,13 @@ public static void setSessionlessProxyForAzure( ErrorCode.INVALID_PROXY_PROPERTIES, "Could not parse port number"); } Proxy azProxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); + logger.debug("Setting sessionless Azure proxy. 
Host: {}, port: {}", proxyHost, proxyPort); opContext.setProxy(azProxy); + } else { + logger.debug("Omitting sessionless Azure proxy setup as proxy is disabled"); } + } else { + logger.debug("Omitting sessionless Azure proxy setup"); } } @@ -211,7 +218,11 @@ public static void setProxyForAzure(HttpClientSettingsKey key, OperationContext if (key != null && key.usesProxy()) { Proxy azProxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(key.getProxyHost(), key.getProxyPort())); + logger.debug( + "Setting Azure proxy. Host: {}, port: {}", key.getProxyHost(), key.getProxyPort()); opContext.setProxy(azProxy); + } else { + logger.debug("Omitting Azure proxy setup"); } } @@ -262,16 +273,21 @@ static String buildUserAgent(String customSuffix) { */ public static CloseableHttpClient buildHttpClient( @Nullable HttpClientSettingsKey key, File ocspCacheFile, boolean downloadUnCompressed) { + logger.debug( + "Building http client with client settings key: {}, ocsp cache file: {}, download uncompressed: {}", + key != null ? key.toString() : null, + ocspCacheFile, + downloadUnCompressed); // set timeout so that we don't wait forever. // Setup the default configuration for all requests on this client int timeToLive = SystemUtil.convertSystemPropertyToIntValue(JDBC_TTL, DEFAULT_TTL); - logger.debug("time to live in connection pooling manager: {}", timeToLive); long connectTimeout = getConnectionTimeout().toMillis(); long socketTimeout = getSocketTimeout().toMillis(); logger.debug( - "Connect timeout is {} ms and socket timeout is {} for connection pooling manager", + "Connection pooling manager connect timeout: {} ms, socket timeout: {} ms, ttl: {} s", connectTimeout, - socketTimeout); + socketTimeout, + timeToLive); // Set proxy settings for DefaultRequestConfig. If current proxy settings are the same as for // the last request, keep the current DefaultRequestConfig. 
If not, build a new @@ -295,9 +311,26 @@ public static CloseableHttpClient buildHttpClient( // only set the proxy settings if they are not null // but no value has been specified for nonProxyHosts // the route planner will determine whether to use a proxy based on nonProxyHosts value. + String logMessage = + "Rebuilding request config. Connect timeout: " + + connectTimeout + + " ms, connection request " + + "timeout: " + + connectTimeout + + " ms, socket timeout: " + + socketTimeout + + " ms"; if (proxy != null && Strings.isNullOrEmpty(key.getNonProxyHosts())) { builder.setProxy(proxy); + logMessage += + ", host: " + + key.getProxyHost() + + ", port: " + + key.getProxyPort() + + ", scheme: " + + key.getProxyHttpProtocol().getScheme(); } + logger.debug(logMessage); DefaultRequestConfig = builder.build(); } @@ -309,6 +342,11 @@ public static CloseableHttpClient buildHttpClient( // care OCSP checks. // OCSP FailOpen is ON by default try { + if (ocspCacheFile == null) { + logger.debug("Instantiating trust manager with default ocsp cache file"); + } else { + logger.debug("Instantiating trust manager with ocsp cache file: {}", ocspCacheFile); + } TrustManager[] tm = {new SFTrustManager(key, ocspCacheFile)}; trustManagers = tm; } catch (Exception | Error err) { @@ -318,8 +356,18 @@ public static CloseableHttpClient buildHttpClient( logger.error(errors.toString(), true); throw new RuntimeException(err); // rethrow the exception } + } else if (key != null) { + logger.debug( + "Omitting trust manager instantiation as OCSP mode is set to {}", key.getOcspMode()); + } else { + logger.debug("Omitting trust manager instantiation as configuration is not provided"); } try { + logger.debug( + "Registering https connection socket factory with socks proxy disabled: {} and http " + + "connection socket factory", + socksProxyDisabled); + Registry registry = RegistryBuilder.create() .register( @@ -344,6 +392,7 @@ public static CloseableHttpClient buildHttpClient( 
connectionManager.setMaxTotal(maxConnections); connectionManager.setDefaultMaxPerRoute(maxConnectionsPerRoute); + logger.debug("Disabling cookie management for http client"); String userAgentSuffix = key != null ? key.getUserAgentSuffix() : ""; HttpClientBuilder httpClientBuilder = HttpClientBuilder.create() @@ -355,6 +404,8 @@ public static CloseableHttpClient buildHttpClient( .disableCookieManagement(); // SNOW-39748 if (key != null && key.usesProxy()) { + logger.debug( + "Instantiating proxy route planner with non-proxy hosts: {}", key.getNonProxyHosts()); // use the custom proxy properties SnowflakeMutableProxyRoutePlanner sdkProxyRoutePlanner = httpClientRoutePlanner.computeIfAbsent( @@ -372,12 +423,19 @@ public static CloseableHttpClient buildHttpClient( new UsernamePasswordCredentials(key.getProxyUser(), key.getProxyPassword()); AuthScope authScope = new AuthScope(key.getProxyHost(), key.getProxyPort()); CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + logger.debug( + "Using user: {}, password is {} for proxy host: {}, port: {}", + key.getProxyUser(), + SFLoggerUtil.isVariableProvided(key.getProxyPassword()), + key.getProxyHost(), + key.getProxyPort()); credentialsProvider.setCredentials(authScope, credentials); httpClientBuilder = httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); } } httpClientBuilder.setDefaultRequestConfig(DefaultRequestConfig); if (downloadUnCompressed) { + logger.debug("Disabling content compression for http client"); httpClientBuilder = httpClientBuilder.disableContentCompression(); } return httpClientBuilder.build(); @@ -392,6 +450,11 @@ public static void updateRoutePlanner(HttpClientSettingsKey key) { .get(key) .getNonProxyHosts() .equalsIgnoreCase(key.getNonProxyHosts())) { + logger.debug( + "Updating route planner non-proxy hosts for proxy: {}:{} to: {}", + key.getProxyHost(), + key.getProxyPort(), + key.getNonProxyHosts()); 
httpClientRoutePlanner.get(key).setNonProxyHosts(key.getNonProxyHosts()); } } @@ -490,6 +553,7 @@ public static RequestConfig getRequestConfigWithoutCookies() { } public static void setRequestConfig(RequestConfig requestConfig) { + logger.debug("Setting default request config to: {}", requestConfig); DefaultRequestConfig = requestConfig; } @@ -508,6 +572,7 @@ private static String getHttpClientStats() { * @param socksProxyDisabled new value */ public static void setSocksProxyDisabled(boolean socksProxyDisabled) { + logger.debug("Setting socks proxy disabled to {}", socksProxyDisabled); HttpUtil.socksProxyDisabled = socksProxyDisabled; } @@ -545,6 +610,7 @@ static String executeRequestWithoutCookies( AtomicBoolean canceling, HttpClientSettingsKey ocspAndProxyKey) throws SnowflakeSQLException, IOException { + logger.debug("Executing request without cookies"); return executeRequestInternal( httpRequest, retryTimeout, @@ -582,6 +648,7 @@ public static String executeGeneralRequest( int retryCount, HttpClientSettingsKey ocspAndProxyAndGzipKey) throws SnowflakeSQLException, IOException { + logger.debug("Executing general request"); return executeRequest( httpRequest, retryTimeout, @@ -617,6 +684,7 @@ public static String executeGeneralRequest( int retryCount, CloseableHttpClient httpClient) throws SnowflakeSQLException, IOException { + logger.debug("Executing general request"); return executeRequestInternal( httpRequest, retryTimeout, @@ -664,6 +732,7 @@ public static String executeRequest( ExecTimeTelemetryData execTimeData) throws SnowflakeSQLException, IOException { boolean ocspEnabled = !(ocspAndProxyKey.getOcspMode().equals(OCSPMode.INSECURE)); + logger.debug("Executing request with OCSP enabled: {}", ocspEnabled); execTimeData.setOCSPStatus(ocspEnabled); return executeRequestInternal( httpRequest, @@ -729,6 +798,12 @@ private static String executeRequestInternal( String theString; StringWriter writer = null; CloseableHttpResponse response = null; + Stopwatch 
stopwatch = null; + + if (logger.isDebugEnabled()) { + stopwatch = new Stopwatch(); + stopwatch.start(); + } try { response = @@ -746,6 +821,9 @@ private static String executeRequestInternal( includeRequestGuid, retryOnHTTP403, execTimeData); + if (logger.isDebugEnabled() && stopwatch != null) { + stopwatch.stop(); + } if (response == null || response.getStatusLine().getStatusCode() != 200) { logger.error("Error executing request: {}", requestInfoScrubbed); @@ -778,9 +856,10 @@ private static String executeRequestInternal( } logger.debug( - "Pool: {} Request returned for: {}", + "Pool: {} Request returned for: {} took {} ms", (ArgSupplier) HttpUtil::getHttpClientStats, - requestInfoScrubbed); + requestInfoScrubbed, + stopwatch == null ? "n/a" : stopwatch.elapsedMillis()); return theString; } @@ -855,8 +934,10 @@ static final class SFConnectionSocketFactory extends PlainConnectionSocketFactor @Override public Socket createSocket(HttpContext ctx) throws IOException { if (socksProxyDisabled) { + logger.trace("Creating socket with no proxy"); return new Socket(Proxy.NO_PROXY); } + logger.trace("Creating socket with proxy"); return super.createSocket(ctx); } } diff --git a/src/main/java/net/snowflake/client/core/JsonSqlInput.java b/src/main/java/net/snowflake/client/core/JsonSqlInput.java index 6b0e6e34e..0e6b274bd 100644 --- a/src/main/java/net/snowflake/client/core/JsonSqlInput.java +++ b/src/main/java/net/snowflake/client/core/JsonSqlInput.java @@ -4,7 +4,6 @@ package net.snowflake.client.core; import static net.snowflake.client.core.SFBaseResultSet.OBJECT_MAPPER; -import static net.snowflake.client.core.SFResultSet.logger; import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException; import com.fasterxml.jackson.core.type.TypeReference; @@ -29,12 +28,16 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.core.structs.SQLDataCreationHelper; import net.snowflake.client.jdbc.FieldMetadata; +import 
net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.ThrowingBiFunction; import net.snowflake.common.core.SFTimestamp; import net.snowflake.common.core.SnowflakeDateTimeFormat; @SnowflakeJdbcInternalApi public class JsonSqlInput extends BaseSqlInput { + private static final SFLogger logger = SFLoggerFactory.getLogger(JsonSqlInput.class); + private final String text; private final JsonNode input; private final Iterator elements; private final TimeZone sessionTimeZone; @@ -42,12 +45,14 @@ public class JsonSqlInput extends BaseSqlInput { private boolean wasNull = false; public JsonSqlInput( + String text, JsonNode input, SFBaseSession session, Converters converters, List fields, TimeZone sessionTimeZone) { super(session, converters, fields); + this.text = text; this.input = input; this.elements = input.elements(); this.sessionTimeZone = sessionTimeZone; @@ -57,6 +62,10 @@ public JsonNode getInput() { return input; } + public String getText() { + return text; + } + @Override public String readString() throws SQLException { return withNextValue((this::convertString)); @@ -178,7 +187,7 @@ private T convertObject(Class type, TimeZone tz, Object value, FieldMetad JsonNode jsonNode = (JsonNode) value; SQLInput sqlInput = new JsonSqlInput( - jsonNode, session, converters, fieldMetadata.getFields(), sessionTimeZone); + null, jsonNode, session, converters, fieldMetadata.getFields(), sessionTimeZone); SQLData instance = (SQLData) SQLDataCreationHelper.create(type); instance.readSQL(sqlInput, null); return (T) instance; @@ -234,8 +243,12 @@ public List readList(Class type) throws SQLException { List result = new ArrayList(); if (ArrayNode.class.isAssignableFrom(value.getClass())) { for (JsonNode node : (ArrayNode) value) { - - result.add(convertObject(type, TimeZone.getDefault(), getValue(node), fieldMetadata)); + result.add( + convertObject( + type, + TimeZone.getDefault(), + getValue(node), + 
fieldMetadata.getFields().get(0))); } return result; } else { @@ -259,7 +272,11 @@ public T[] readArray(Class type) throws SQLException { int counter = 0; for (JsonNode node : valueNodes) { array[counter++] = - convertObject(type, TimeZone.getDefault(), getValue(node), fieldMetadata); + convertObject( + type, + TimeZone.getDefault(), + getValue(node), + fieldMetadata.getFields().get(0)); } return array; } else { @@ -306,7 +323,7 @@ private Timestamp convertTimestamp(TimeZone tz, Object value, FieldMetadata fiel int columnSubType = fieldMetadata.getType(); int scale = fieldMetadata.getScale(); Timestamp result = - SqlInputTimestampUtil.getTimestampFromType( + SfTimestampUtil.getTimestampFromType( columnSubType, (String) value, session, sessionTimeZone, tz); if (result != null) { return result; diff --git a/src/main/java/net/snowflake/client/core/JsonSqlOutput.java b/src/main/java/net/snowflake/client/core/JsonSqlOutput.java new file mode 100644 index 000000000..f3fb4c06c --- /dev/null +++ b/src/main/java/net/snowflake/client/core/JsonSqlOutput.java @@ -0,0 +1,397 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.core; + +import static net.snowflake.client.core.FieldSchemaCreator.buildSchemaTypeAndNameOnly; +import static net.snowflake.client.core.FieldSchemaCreator.buildSchemaWithScaleAndPrecision; + +import java.io.InputStream; +import java.io.Reader; +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLData; +import java.sql.SQLException; +import java.sql.SQLOutput; +import java.sql.SQLXML; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.TimeZone; +import java.util.stream.Collectors; +import net.minidev.json.JSONObject; +import net.snowflake.client.jdbc.BindingParameterMetadata; +import net.snowflake.client.jdbc.SnowflakeColumn; +import net.snowflake.client.jdbc.SnowflakeLoggedFeatureNotSupportedException; +import net.snowflake.client.jdbc.SnowflakeType; +import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.util.ThrowingTriCallable; +import net.snowflake.common.core.SFBinary; +import net.snowflake.common.core.SFTime; +import net.snowflake.common.core.SFTimestamp; +import net.snowflake.common.core.SnowflakeDateTimeFormat; + +@SnowflakeJdbcInternalApi +public class JsonSqlOutput implements SQLOutput { + static final SFLogger logger = SFLoggerFactory.getLogger(JsonSqlOutput.class); + private JSONObject json; + private SQLData original; + private SFBaseSession session; + private Iterator fields; + private BindingParameterMetadata schema; + private TimeZone sessionTimezone; + + public 
JsonSqlOutput(SQLData original, SFBaseSession sfBaseSession) { + this.original = original; + this.session = sfBaseSession; + this.sessionTimezone = getSessionTimezone(sfBaseSession); + fields = getClassFields(original).iterator(); + schema = new BindingParameterMetadata("object"); + schema.setFields(new ArrayList<>()); + json = new JSONObject(); + } + + private TimeZone getSessionTimezone(SFBaseSession sfBaseSession) { + String timeZoneName = + (String) ResultUtil.effectiveParamValue(sfBaseSession.getCommonParameters(), "TIMEZONE"); + return TimeZone.getTimeZone(timeZoneName); + } + + private static List getClassFields(SQLData original) { + return Arrays.stream(original.getClass().getDeclaredFields()) + .filter( + field -> + !Modifier.isStatic(field.getModifiers()) + && !Modifier.isTransient(field.getModifiers())) + .collect(Collectors.toList()); + } + + @Override + public void writeString(String value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(FieldSchemaCreator.buildSchemaForText(fieldName, maybeColumn)); + })); + } + + @Override + public void writeBoolean(boolean value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "boolean", maybeColumn)); + })); + } + + @Override + public void writeByte(byte value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeShort(short value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeInt(int input) throws 
SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, input); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeLong(long value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeFloat(float value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "real", maybeColumn)); + })); + } + + @Override + public void writeDouble(double value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "real", maybeColumn)); + })); + } + + @Override + public void writeBigDecimal(BigDecimal value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add( + buildSchemaWithScaleAndPrecision( + fieldName, "fixed", value.scale(), 38, maybeColumn)); + })); + } + + @Override + public void writeBytes(byte[] value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, new SFBinary(value).toHex()); + schema + .getFields() + .add(FieldSchemaCreator.buildSchemaForBytesType(fieldName, maybeColumn)); + })); + } + + @Override + public void writeDate(Date value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put( + fieldName, + ResultUtil.getDateAsString(value, getDateTimeFormat("DATE_OUTPUT_FORMAT"))); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "date", maybeColumn)); + })); + } + + @Override + public void writeTime(Time x) throws 
SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + long nanosSinceMidnight = SfTimestampUtil.getTimeInNanoseconds(x); + String result = + ResultUtil.getSFTimeAsString( + SFTime.fromNanoseconds(nanosSinceMidnight), + 9, + getDateTimeFormat("TIME_OUTPUT_FORMAT")); + + json.put(fieldName, result); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "time", 9, 0, maybeColumn)); + })); + } + + @Override + public void writeTimestamp(Timestamp value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + String timestampSessionType = + (String) + ResultUtil.effectiveParamValue( + session.getCommonParameters(), "CLIENT_TIMESTAMP_TYPE_MAPPING"); + SnowflakeType snowflakeType = + SnowflakeType.fromString( + maybeColumn + .map(cl -> cl.type()) + .filter(str -> !str.isEmpty()) + .orElse(timestampSessionType)); + int columnType = snowflakeTypeToJavaType(snowflakeType); + TimeZone timeZone = timeZoneDependOnType(snowflakeType, session, null); + String timestampAsString = + SnowflakeUtil.mapSFExceptionToSQLException( + () -> + ResultUtil.getSFTimestampAsString( + new SFTimestamp(value, timeZone), + columnType, + 9, + getDateTimeFormat("TIMESTAMP_NTZ_OUTPUT_FORMAT"), + getDateTimeFormat("TIMESTAMP_LTZ_OUTPUT_FORMAT"), + getDateTimeFormat("TIMESTAMP_TZ_OUTPUT_FORMAT"), + session)); + + json.put(fieldName, timestampAsString); + schema + .getFields() + .add( + buildSchemaWithScaleAndPrecision( + fieldName, snowflakeType.name(), 9, 0, maybeColumn)); + })); + } + + @Override + public void writeCharacterStream(Reader x) throws SQLException { + logger.debug(" Unsupported method writeCharacterStream(Reader x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeAsciiStream(InputStream x) throws SQLException { + logger.debug("Unsupported method writeAsciiStream(InputStream x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + 
@Override + public void writeBinaryStream(InputStream x) throws SQLException { + logger.debug("Unsupported method writeBinaryStream(InputStream x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeObject(SQLData sqlData) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + JsonSqlOutput jsonSqlOutput = new JsonSqlOutput(sqlData, session); + sqlData.writeSQL(jsonSqlOutput); + json.put(fieldName, jsonSqlOutput.getJsonObject()); + BindingParameterMetadata structSchema = jsonSqlOutput.getSchema(); + structSchema.setName(fieldName); + schema.getFields().add(structSchema); + })); + } + + @Override + public void writeRef(Ref x) throws SQLException { + logger.debug("Unsupported method writeRef(Ref x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeBlob(Blob x) throws SQLException { + logger.debug("Unsupported method writeBlob(Blob x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeClob(Clob x) throws SQLException { + logger.debug("Unsupported method writeClob(Clob x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeStruct(Struct x) throws SQLException { + logger.debug("Unsupported method writeStruct(Struct x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeArray(Array x) throws SQLException { + logger.debug("Unsupported method writeArray(Array x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeURL(URL x) throws SQLException { + logger.debug("Unsupported method writeURL(URL x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeNString(String x) throws SQLException { + logger.debug("Unsupported method 
writeNString(String x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeNClob(NClob x) throws SQLException { + logger.debug("Unsupported method writeNClob(NClob x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeRowId(RowId x) throws SQLException { + logger.debug("Unsupported method writeRowId(RowId x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeSQLXML(SQLXML x) throws SQLException { + logger.debug("Unsupported method writeSQLXML(SQLXML x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + public String getJsonString() { + return json.toJSONString(); + } + + public JSONObject getJsonObject() { + return json; + } + + private void withNextValue( + ThrowingTriCallable, SQLException> action) + throws SQLException { + Field field = fields.next(); + String fieldName = field.getName(); + Optional maybeColumn = + Optional.ofNullable(field.getAnnotation(SnowflakeColumn.class)); + action.apply(json, fieldName, maybeColumn); + } + + private SnowflakeDateTimeFormat getDateTimeFormat(String format) { + String rawFormat = (String) session.getCommonParameters().get(format); + if (rawFormat == null || rawFormat.isEmpty()) { + rawFormat = (String) session.getCommonParameters().get("TIMESTAMP_OUTPUT_FORMAT"); + } + SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat); + return formatter; + } + + public BindingParameterMetadata getSchema() { + return schema; + } + + private TimeZone timeZoneDependOnType( + SnowflakeType snowflakeType, SFBaseSession session, TimeZone tz) { + if (snowflakeType == SnowflakeType.TIMESTAMP_NTZ) { + return null; + } else if (snowflakeType == SnowflakeType.TIMESTAMP_LTZ) { + return getSessionTimezone(session); + } else if (snowflakeType == SnowflakeType.TIMESTAMP_TZ) { + return 
Optional.ofNullable(tz).orElse(sessionTimezone); + } + return TimeZone.getDefault(); + } + + private int snowflakeTypeToJavaType(SnowflakeType snowflakeType) { + if (snowflakeType == SnowflakeType.TIMESTAMP_NTZ) { + return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ; + } else if (snowflakeType == SnowflakeType.TIMESTAMP_LTZ) { + return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ; + } + return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ; + } +} diff --git a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java index c751d4d20..0f9a7b01f 100644 --- a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java +++ b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java @@ -1,6 +1,7 @@ package net.snowflake.client.core; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; @@ -10,8 +11,9 @@ */ public class ObjectMapperFactory { @SnowflakeJdbcInternalApi - // Snowflake allows up to 16M string size and returns base64 encoded value that makes it up to 23M - public static final int DEFAULT_MAX_JSON_STRING_LEN = 23_000_000; + // Snowflake allows up to 128M (after updating Max LOB size) string size and returns base64 + // encoded value that makes it up to 180M + public static final int DEFAULT_MAX_JSON_STRING_LEN = 180_000_000; @SnowflakeJdbcInternalApi public static final String MAX_JSON_STRING_LENGTH_JVM = @@ -21,6 +23,7 @@ public static ObjectMapper getObjectMapper() { ObjectMapper mapper = new ObjectMapper(); mapper.configure(MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS, false); mapper.configure(MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS, false); + mapper.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); // override the maxStringLength value in ObjectMapper int maxJsonStringLength = diff --git 
a/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java b/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java index 36b5727a9..98c6690dc 100644 --- a/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java +++ b/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java @@ -1,20 +1,36 @@ /* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. */ package net.snowflake.client.core; +import net.snowflake.client.jdbc.BindingParameterMetadata; + /** This class represents a binding object passed to server side Created by hyu on 6/15/17. */ public class ParameterBindingDTO { /** Type of binding */ private String type; + private String fmt; + private BindingParameterMetadata schema; + /** Value is a String object if it's a single bind, otherwise is an array of String */ private Object value; - public ParameterBindingDTO(String type, Object value) { + public ParameterBindingDTO( + String fmt, String type, Object value, BindingParameterMetadata schema) { + this.fmt = fmt; this.type = type; this.value = value; + this.schema = schema; + } + + public ParameterBindingDTO(String fmt, String type, Object value) { + this(fmt, type, value, null); + } + + public ParameterBindingDTO(String type, Object value) { + this(null, type, value, null); } public Object getValue() { @@ -32,4 +48,20 @@ public void setType(String type) { public void setValue(Object value) { this.value = value; } + + public String getFmt() { + return fmt; + } + + public void setFmt(String fmt) { + this.fmt = fmt; + } + + public BindingParameterMetadata getSchema() { + return schema; + } + + public void setSchema(BindingParameterMetadata schema) { + this.schema = schema; + } } diff --git a/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java new file mode 100644 index 000000000..8d4a01742 --- /dev/null +++ 
b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java @@ -0,0 +1,13 @@ +package net.snowflake.client.core; + +@SnowflakeJdbcInternalApi +public class PrivateLinkDetector { + /** + * We can only tell if private link is enabled for certain hosts when the hostname contains the + * word 'privatelink' but we don't have a good way of telling if a private link connection is + * expected for internal stages for example. + */ + public static boolean isPrivateLink(String host) { + return host.toLowerCase().contains(".privatelink.snowflakecomputing."); + } +} diff --git a/src/main/java/net/snowflake/client/core/QueryContextCache.java b/src/main/java/net/snowflake/client/core/QueryContextCache.java index ea47e6167..85fde42ac 100644 --- a/src/main/java/net/snowflake/client/core/QueryContextCache.java +++ b/src/main/java/net/snowflake/client/core/QueryContextCache.java @@ -109,7 +109,7 @@ else if (readTimestamp == qce.readTimestamp && qce.priority != priority) { /** Sync the newPriorityMap with the priorityMap at the end of current round of merge */ void syncPriorityMap() { logger.debug( - "syncPriorityMap called priorityMap size = {}, newPrioirtyMap size = {}", + "syncPriorityMap called priorityMap size: {}, newPrioirtyMap size: {}", priorityMap.size(), newPriorityMap.size()); for (Map.Entry entry : newPriorityMap.entrySet()) { @@ -125,7 +125,9 @@ void syncPriorityMap() { */ void checkCacheCapacity() { logger.debug( - "checkCacheCapacity() called. treeSet size {} cache capacity {}", treeSet.size(), capacity); + "checkCacheCapacity() called. treeSet size: {} cache capacity: {}", + treeSet.size(), + capacity); if (treeSet.size() > capacity) { // remove elements based on priority while (treeSet.size() > capacity) { @@ -135,18 +137,18 @@ void checkCacheCapacity() { } logger.debug( - "checkCacheCapacity() returns. treeSet size {} cache capacity {}", + "checkCacheCapacity() returns. treeSet size: {} cache capacity: {}", treeSet.size(), capacity); } /** Clear the cache. 
*/ public void clearCache() { - logger.debug("clearCache() called"); + logger.trace("clearCache() called"); idMap.clear(); priorityMap.clear(); treeSet.clear(); - logger.debug("clearCache() returns. Number of entries in cache now {}", treeSet.size()); + logger.trace("clearCache() returns. Number of entries in cache now: {}", treeSet.size()); } /** @@ -211,7 +213,7 @@ public void deserializeQueryContextJson(String data) { syncPriorityMap(); } } catch (Exception e) { - logger.debug("deserializeQueryContextJson: Exception = {}", e.getMessage()); + logger.debug("deserializeQueryContextJson: Exception: {}", e.getMessage()); // Not rethrowing. clear the cache as incomplete merge can lead to unexpected behavior. clearCache(); } @@ -306,7 +308,7 @@ public void deserializeQueryContextDTO(QueryContextDTO queryContextDTO) { // round of merge. syncPriorityMap(); } catch (Exception e) { - logger.debug("deserializeQueryContextDTO: Exception = {}", e.getMessage()); + logger.debug("deserializeQueryContextDTO: Exception: {}", e.getMessage()); // Not rethrowing. clear the cache as incomplete merge can lead to unexpected behavior. 
clearCache(); } @@ -359,7 +361,7 @@ public QueryContextDTO serializeQueryContextDTO() { return queryContextDTO; } catch (Exception e) { - logger.debug("serializQueryContextDTO(): Exception {}", e.getMessage()); + logger.debug("serializeQueryContextDTO(): Exception: {}", e.getMessage()); return null; } } diff --git a/src/main/java/net/snowflake/client/core/ResultUtil.java b/src/main/java/net/snowflake/client/core/ResultUtil.java index 8581df1fc..b894f4259 100644 --- a/src/main/java/net/snowflake/client/core/ResultUtil.java +++ b/src/main/java/net/snowflake/client/core/ResultUtil.java @@ -29,7 +29,7 @@ import net.snowflake.common.util.TimeUtil; public class ResultUtil { - static final SFLogger logger = SFLoggerFactory.getLogger(ResultUtil.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(ResultUtil.class); public static final int MILLIS_IN_ONE_DAY = 86400000; public static final int DEFAULT_SCALE_OF_SFTIME_FRACTION_SECONDS = @@ -184,7 +184,7 @@ public static SFTimestamp getSFTimestamp( TimeZone sessionTZ, SFBaseSession session) throws SFException { - logger.debug("public Timestamp getTimestamp(int columnIndex)", false); + logger.trace("Timestamp getTimestamp(int columnIndex)", false); try { TimeUtil.TimestampType tsType = null; @@ -278,7 +278,7 @@ public static String getSFTimestampAsString( throws SFException { // Derive the timestamp formatter to use SnowflakeDateTimeFormat formatter; - if (columnType == Types.TIMESTAMP) { + if (columnType == Types.TIMESTAMP || columnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ) { formatter = timestampNTZFormatter; } else if (columnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { formatter = timestampLTZFormatter; diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index 74be589ea..74e4c41db 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ 
b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -37,6 +37,7 @@ import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; +import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.client.jdbc.telemetry.TelemetryField; @@ -278,7 +279,9 @@ private boolean fetchNextRowUnsorted() throws SnowflakeSQLException { if (currentChunkIterator.next()) { logger.debug( - "Moving to chunk index {}, row count={}", nextChunkIndex, nextChunk.getRowCount()); + "Moving to chunk index: {}, row count: {}", + nextChunkIndex, + nextChunk.getRowCount()); nextChunkIndex++; return true; @@ -378,7 +381,7 @@ public SQLInput createSqlInputForColumn( SFBaseSession session, List fields) { if (parentObjectClass.equals(JsonSqlInput.class)) { - return createJsonSqlInputForColumn(input, columnIndex, session, fields); + return createJsonSqlInputForColumn(input, session, fields); } else { return new ArrowSqlInput((Map) input, session, converters, fields); } @@ -435,7 +438,7 @@ public boolean next() throws SFException, SnowflakeSQLException { } return true; } else { - logger.debug("end of result", false); + logger.debug("End of result", false); /* * Here we check if the result has been truncated and throw exception if @@ -557,6 +560,10 @@ public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { @Override public Object getObject(int columnIndex) throws SFException { + int type = resultSetMetaData.getColumnType(columnIndex); + if (type == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + return getString(columnIndex); + } ArrowVectorConverter converter = currentChunkIterator.getCurrentConverter(columnIndex - 1); int index = currentChunkIterator.getCurrentRowInRecordBatch(); wasNull = converter.isNull(index); @@ -564,7 +571,6 @@ 
public Object getObject(int columnIndex) throws SFException { converter.setUseSessionTimezone(useSessionTimezone); converter.setSessionTimeZone(sessionTimeZone); Object obj = converter.toObject(index); - int type = resultSetMetaData.getColumnType(columnIndex); boolean isStructuredType = resultSetMetaData.isStructuredTypeColumn(columnIndex); if (type == Types.STRUCT && isStructuredType) { if (converter instanceof VarCharConverter) { @@ -581,8 +587,10 @@ private Object createJsonSqlInput(int columnIndex, Object obj) throws SFExceptio if (obj == null) { return null; } - JsonNode jsonNode = OBJECT_MAPPER.readTree((String) obj); + String text = (String) obj; + JsonNode jsonNode = OBJECT_MAPPER.readTree(text); return new JsonSqlInput( + text, jsonNode, session, converters, @@ -595,6 +603,9 @@ private Object createJsonSqlInput(int columnIndex, Object obj) throws SFExceptio private Object createArrowSqlInput(int columnIndex, Map input) throws SFException { + if (input == null) { + return null; + } return new ArrowSqlInput( input, session, converters, resultSetMetaData.getColumnFields(columnIndex)); } @@ -800,7 +811,7 @@ public static void closeRootAllocator(RootAllocator rootAllocator) { rootAllocator.close(); } } catch (InterruptedException ie) { - logger.debug("interrupted during closing root allocator", false); + logger.debug("Interrupted during closing root allocator", false); } catch (Exception e) { logger.debug("Exception happened when closing rootAllocator: ", e.getLocalizedMessage()); } diff --git a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java index f7fde0790..71e56a515 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java @@ -139,12 +139,12 @@ public SFBaseSession getSession() { // default implementation public boolean next() throws SFException, SnowflakeSQLException { - logger.debug("public boolean 
next()", false); + logger.trace("boolean next()", false); return false; } public void close() throws SnowflakeSQLException { - logger.debug("public void close()", false); + logger.trace("void close()", false); // no exception even if already closed. resultSetMetaData = null; @@ -152,7 +152,7 @@ public void close() throws SnowflakeSQLException { } public boolean wasNull() { - logger.debug("public boolean wasNull() returning {}", wasNull); + logger.trace("boolean wasNull() returning {}", wasNull); return wasNull; } @@ -261,14 +261,15 @@ public Timestamp convertToTimestamp( @SnowflakeJdbcInternalApi protected SQLInput createJsonSqlInputForColumn( - Object input, int columnIndex, SFBaseSession session, List fields) { + Object input, SFBaseSession session, List fields) { JsonNode inputNode; if (input instanceof JsonNode) { inputNode = (JsonNode) input; } else { inputNode = OBJECT_MAPPER.convertValue(input, JsonNode.class); } - return new JsonSqlInput(inputNode, session, getConverters(), fields, sessionTimeZone); + return new JsonSqlInput( + input.toString(), inputNode, session, getConverters(), fields, sessionTimeZone); } @SnowflakeJdbcInternalApi diff --git a/src/main/java/net/snowflake/client/core/SFBaseSession.java b/src/main/java/net/snowflake/client/core/SFBaseSession.java index b0da3e9b4..c5191cb82 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseSession.java +++ b/src/main/java/net/snowflake/client/core/SFBaseSession.java @@ -46,7 +46,7 @@ * which signals whether to enable client telemetry */ public abstract class SFBaseSession { - static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseSession.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseSession.class); private final Properties clientInfo = new Properties(); private final AtomicBoolean autoCommit = new AtomicBoolean(true); // Injected delay for the purpose of connection timeout testing @@ -354,6 +354,8 @@ public HttpClientSettingsKey getHttpClientKey() throws 
SnowflakeSQLException { return ocspAndProxyAndGzipKey; } + OCSPMode ocspMode = getOCSPMode(); + Boolean gzipDisabled = false; if (connectionPropertiesMap.containsKey(SFSessionProperty.GZIP_DISABLED)) { gzipDisabled = (Boolean) connectionPropertiesMap.get(SFSessionProperty.GZIP_DISABLED); @@ -387,7 +389,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { String proxyProtocol = (String) connectionPropertiesMap.get(SFSessionProperty.PROXY_PROTOCOL); ocspAndProxyAndGzipKey = new HttpClientSettingsKey( - getOCSPMode(), + ocspMode, proxyHost, proxyPort, nonProxyHosts, @@ -397,6 +399,8 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); + return ocspAndProxyAndGzipKey; } // If JVM proxy parameters are specified, proxies need to go through the JDBC driver's @@ -417,7 +421,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { // log the JVM parameters that are being used if (httpUseProxy) { logger.debug( - "Proxy environment settings: http.useProxy={}, http.proxyHost={}, http.proxyPort={}, http.proxyUser={}, " + "Using JVM parameters for proxy setup: http.useProxy={}, http.proxyHost={}, http.proxyPort={}, http.proxyUser={}, " + "http.proxyPassword is {}, https.proxyHost={}, https.proxyPort={}, https.proxyUser={}, " + "https.proxyPassword is {}, http.nonProxyHosts={}, NO_PROXY={}, http.proxyProtocol={}", httpUseProxy, @@ -456,6 +460,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { if (proxyProtocol.equals("https") && !Strings.isNullOrEmpty(httpsProxyHost) && !Strings.isNullOrEmpty(httpsProxyPort)) { + logger.debug("Using https proxy configuration from JVM parameters"); int proxyPort; try { proxyPort = Integer.parseInt(httpsProxyPort); @@ -465,7 +470,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { } ocspAndProxyAndGzipKey = new 
HttpClientSettingsKey( - getOCSPMode(), + ocspMode, httpsProxyHost, proxyPort, combinedNonProxyHosts, @@ -474,9 +479,11 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { "https", userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } else if (proxyProtocol.equals("http") && !Strings.isNullOrEmpty(httpProxyHost) && !Strings.isNullOrEmpty(httpProxyPort)) { + logger.debug("Using http proxy configuration from JVM parameters"); int proxyPort; try { proxyPort = Integer.parseInt(httpProxyPort); @@ -486,7 +493,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { } ocspAndProxyAndGzipKey = new HttpClientSettingsKey( - getOCSPMode(), + ocspMode, httpProxyHost, proxyPort, combinedNonProxyHosts, @@ -495,25 +502,49 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { "http", userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } else { // Not enough parameters set to use the proxy. - logger.debug( - "http.useProxy={} but valid host and port were not provided. No proxy in use.", + logger.warn( + "Failed parsing the proxy settings from JVM parameters as http.useProxy={}," + + " but valid host and port were not provided.", httpUseProxy); ocspAndProxyAndGzipKey = - new HttpClientSettingsKey(getOCSPMode(), userAgentSuffix, gzipDisabled); + new HttpClientSettingsKey(ocspMode, userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } } else { // If no proxy is used or JVM http proxy is used, no need for setting parameters logger.debug("http.useProxy={}. 
JVM proxy not used.", httpUseProxy); unsetInvalidProxyHostAndPort(); - ocspAndProxyAndGzipKey = - new HttpClientSettingsKey(getOCSPMode(), userAgentSuffix, gzipDisabled); + ocspAndProxyAndGzipKey = new HttpClientSettingsKey(ocspMode, userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } } return ocspAndProxyAndGzipKey; } + private void logHttpClientInitInfo(HttpClientSettingsKey key) { + if (key.usesProxy()) { + logger.info( + "Driver OCSP mode: {}, gzip disabled: {}, proxy protocol: {}," + + " proxy host: {}, proxy port: {}, non proxy hosts: {}, proxy user: {}, proxy password is {}", + key.getOcspMode(), + key.getGzipDisabled(), + key.getProxyHttpProtocol(), + key.getProxyHost(), + key.getProxyPort(), + key.getNonProxyHosts(), + key.getProxyUser(), + key.getProxyPassword().isEmpty() ? "not set" : "set"); + } else { + logger.info( + "Driver OCSP mode: {}, gzip disabled: {} and no proxy", + key.getOcspMode(), + key.getGzipDisabled()); + } + } + public void unsetInvalidProxyHostAndPort() { // If proxyHost and proxyPort are used without http or https unset them, so they are not used // later by the ProxySelector. diff --git a/src/main/java/net/snowflake/client/core/SFBaseStatement.java b/src/main/java/net/snowflake/client/core/SFBaseStatement.java index 62d933b2e..17b2fd1b6 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseStatement.java +++ b/src/main/java/net/snowflake/client/core/SFBaseStatement.java @@ -20,7 +20,7 @@ public abstract class SFBaseStatement { // maximum number of parameters for the statement; if this threshold is exceeded, // we throw an exception protected static final int MAX_STATEMENT_PARAMETERS = 1000; - static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseStatement.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseStatement.class); // statement level parameters; just a string-key, object-value map. 
protected final Map statementParametersMap = new HashMap<>(); // timeout in seconds for queries @@ -125,7 +125,7 @@ public abstract SFBaseResultSet asyncExecute( * @param sql the set property sql */ public void executeSetProperty(final String sql) { - logger.debug("setting property", false); + logger.trace("Setting property", false); // tokenize the sql String[] tokens = sql.split("\\s+"); @@ -136,11 +136,11 @@ public void executeSetProperty(final String sql) { if ("sort".equalsIgnoreCase(tokens[1])) { if (tokens.length >= 3 && "on".equalsIgnoreCase(tokens[2])) { - logger.debug("setting sort on", false); + logger.debug("Setting sort on", false); this.getSFBaseSession().setSessionPropertyByKey("sort", true); } else { - logger.debug("setting sort off", false); + logger.debug("Setting sort off", false); this.getSFBaseSession().setSessionPropertyByKey("sort", false); } } diff --git a/src/main/java/net/snowflake/client/core/SFException.java b/src/main/java/net/snowflake/client/core/SFException.java index 37c47da32..facbf238d 100644 --- a/src/main/java/net/snowflake/client/core/SFException.java +++ b/src/main/java/net/snowflake/client/core/SFException.java @@ -11,7 +11,7 @@ /** Created by jhuang on 1/5/16. 
*/ public class SFException extends Throwable { - static final SFLogger logger = SFLoggerFactory.getLogger(SFException.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFException.class); private static final long serialVersionUID = 1L; diff --git a/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java b/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java index b3f797f6f..06c503ce4 100644 --- a/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java @@ -83,7 +83,7 @@ public SFFixedViewResultSet(SnowflakeFixedView fixedView, CommandType commandTyp */ @Override public boolean next() throws SFException { - logger.debug("next called", false); + logger.trace("next called", false); List nextRowList; try { @@ -98,7 +98,7 @@ public boolean next() throws SFException { row++; if (nextRowList == null) { - logger.debug("end of result", false); + logger.debug("End of result", false); return false; } @@ -112,7 +112,7 @@ public boolean next() throws SFException { @Override protected Object getObjectInternal(int columnIndex) throws SFException { - logger.debug("public Object getObjectInternal(int columnIndex)", false); + logger.trace("Object getObjectInternal(int columnIndex)", false); if (nextRow == null) { throw new SFException(ErrorCode.ROW_DOES_NOT_EXIST); diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 9ad7cb983..1011870df 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -18,6 +18,7 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.log.SFLogger; import 
net.snowflake.client.log.SFLoggerFactory; @@ -53,6 +54,7 @@ public Object getObject(int columnIndex) throws SFException { switch (type) { case Types.VARCHAR: case Types.CHAR: + case SnowflakeUtil.EXTRA_TYPES_VECTOR: return getString(columnIndex); case Types.BINARY: @@ -95,7 +97,6 @@ public Object getObject(int columnIndex) throws SFException { } else { throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } - default: throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } @@ -124,7 +125,7 @@ public Array getArray(int columnIndex) throws SFException { @Override public String getString(int columnIndex) throws SFException { - logger.debug("public String getString(int columnIndex)", false); + logger.trace("String getString(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getInternalColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -134,21 +135,21 @@ public String getString(int columnIndex) throws SFException { @Override public boolean getBoolean(int columnIndex) throws SFException { - logger.debug("public boolean getBoolean(int columnIndex)", false); + logger.trace("boolean getBoolean(int columnIndex)", false); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getBooleanConverter().getBoolean(getObjectInternal(columnIndex), columnType); } @Override public byte getByte(int columnIndex) throws SFException { - logger.debug("public short getByte(int columnIndex)", false); + logger.trace("short getByte(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); return converters.getNumberConverter().getByte(obj); } @Override public short getShort(int columnIndex) throws SFException { - logger.debug("public short getShort(int columnIndex)", false); + logger.trace("short getShort(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = 
resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getShort(obj, columnType); @@ -156,7 +157,7 @@ public short getShort(int columnIndex) throws SFException { @Override public int getInt(int columnIndex) throws SFException { - logger.debug("public int getInt(int columnIndex)", false); + logger.trace("int getInt(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getInt(obj, columnType); @@ -164,7 +165,7 @@ public int getInt(int columnIndex) throws SFException { @Override public long getLong(int columnIndex) throws SFException { - logger.debug("public long getLong(int columnIndex)", false); + logger.trace("long getLong(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getLong(obj, columnType); @@ -172,7 +173,7 @@ public long getLong(int columnIndex) throws SFException { @Override public BigDecimal getBigDecimal(int columnIndex) throws SFException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getBigDecimal(obj, columnType); @@ -180,7 +181,7 @@ public BigDecimal getBigDecimal(int columnIndex) throws SFException { @Override public BigDecimal getBigDecimal(int columnIndex, int scale) throws SFException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getBigDecimal(obj, columnType, scale); @@ -188,7 +189,7 @@ public 
BigDecimal getBigDecimal(int columnIndex, int scale) throws SFException { @Override public Time getTime(int columnIndex) throws SFException { - logger.debug("public Time getTime(int columnIndex)", false); + logger.trace("Time getTime(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -200,7 +201,7 @@ public Time getTime(int columnIndex) throws SFException { @Override public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { - logger.debug("public Timestamp getTimestamp(int columnIndex)", false); + logger.trace("Timestamp getTimestamp(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -212,7 +213,7 @@ public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { @Override public float getFloat(int columnIndex) throws SFException { - logger.debug("public float getFloat(int columnIndex)", false); + logger.trace("float getFloat(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getFloat(obj, columnType); @@ -220,7 +221,7 @@ public float getFloat(int columnIndex) throws SFException { @Override public double getDouble(int columnIndex) throws SFException { - logger.debug("public double getDouble(int columnIndex)", false); + logger.trace("double getDouble(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getDouble(obj, columnType); @@ -228,7 +229,7 @@ public double getDouble(int columnIndex) throws SFException { @Override public byte[] getBytes(int columnIndex) throws 
SFException { - logger.debug("public byte[] getBytes(int columnIndex)", false); + logger.trace("byte[] getBytes(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -242,7 +243,7 @@ public Date getDate(int columnIndex) throws SFException { @Override public Date getDate(int columnIndex, TimeZone tz) throws SFException { - logger.debug("public Date getDate(int columnIndex)", false); + logger.trace("Date getDate(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -258,7 +259,7 @@ public SQLInput createSqlInputForColumn( int columnIndex, SFBaseSession session, List fields) { - return createJsonSqlInputForColumn(input, columnIndex, session, fields); + return createJsonSqlInputForColumn(input, session, fields); } @Override @@ -294,6 +295,7 @@ private Object getSqlInput(String input, int columnIndex) throws SFException { try { JsonNode jsonNode = OBJECT_MAPPER.readTree(input); return new JsonSqlInput( + input, jsonNode, session, converters, diff --git a/src/main/java/net/snowflake/client/core/SFLoginInput.java b/src/main/java/net/snowflake/client/core/SFLoginInput.java index 3d53bf104..18ebfaa57 100644 --- a/src/main/java/net/snowflake/client/core/SFLoginInput.java +++ b/src/main/java/net/snowflake/client/core/SFLoginInput.java @@ -49,6 +49,7 @@ public class SFLoginInput { private String inFlightCtx; // Opaque string sent for Snowsight account activation private boolean disableConsoleLogin = true; + private boolean disableSamlURLCheck = false; // Additional headers to add for Snowsight. 
Map additionalHttpHeadersForSnowsight; @@ -378,6 +379,15 @@ SFLoginInput setInFlightCtx(String inFlightCtx) { return this; } + boolean getDisableSamlURLCheck() { + return disableSamlURLCheck; + } + + SFLoginInput setDisableSamlURLCheck(boolean disableSamlURLCheck) { + this.disableSamlURLCheck = disableSamlURLCheck; + return this; + } + Map getAdditionalHttpHeadersForSnowsight() { return additionalHttpHeadersForSnowsight; } @@ -416,7 +426,11 @@ static boolean getBooleanValue(Object v) { String getHostFromServerUrl() throws SFException { URL url; try { - url = new URL(serverUrl); + if (!serverUrl.startsWith("http")) { + url = new URL("https://" + serverUrl); + } else { + url = new URL(serverUrl); + } } catch (MalformedURLException e) { throw new SFException( e, ErrorCode.INTERNAL_ERROR, "Invalid serverUrl for retrieving host name"); diff --git a/src/main/java/net/snowflake/client/core/SFOCSPException.java b/src/main/java/net/snowflake/client/core/SFOCSPException.java index 6d96bd366..f4eb4360c 100644 --- a/src/main/java/net/snowflake/client/core/SFOCSPException.java +++ b/src/main/java/net/snowflake/client/core/SFOCSPException.java @@ -9,7 +9,7 @@ import net.snowflake.client.log.SFLoggerFactory; public class SFOCSPException extends Throwable { - static final SFLogger logger = SFLoggerFactory.getLogger(SFOCSPException.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFOCSPException.class); private static final long serialVersionUID = 1L; diff --git a/src/main/java/net/snowflake/client/core/SFResultSet.java b/src/main/java/net/snowflake/client/core/SFResultSet.java index b7698cf5d..2716cc780 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFResultSet.java @@ -35,7 +35,7 @@ * @author jhuang */ public class SFResultSet extends SFJsonResultSet { - static final SFLogger logger = SFLoggerFactory.getLogger(SFResultSet.class); + private static final SFLogger logger = 
SFLoggerFactory.getLogger(SFResultSet.class); private int columnCount = 0; @@ -309,7 +309,7 @@ public boolean next() throws SFException, SnowflakeSQLException { } return true; } else { - logger.debug("end of result", false); + logger.debug("End of result", false); /* * Here we check if the result has been truncated and throw exception if diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index 30072926f..c39b4ec86 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -25,7 +25,7 @@ /** Snowflake ResultSetMetaData */ public class SFResultSetMetaData { - static final SFLogger logger = SFLoggerFactory.getLogger(SFResultSetMetaData.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFResultSetMetaData.class); private int columnCount = 0; @@ -37,6 +37,8 @@ public class SFResultSetMetaData { private List precisions; + private List dimensions; + private List scales; private List nullables; @@ -143,6 +145,7 @@ public SFResultSetMetaData( this.columnTypeNames = new ArrayList<>(this.columnCount); this.columnTypes = new ArrayList<>(this.columnCount); this.precisions = new ArrayList<>(this.columnCount); + this.dimensions = new ArrayList<>(this.columnCount); this.scales = new ArrayList<>(this.columnCount); this.nullables = new ArrayList<>(this.columnCount); this.columnSrcDatabases = new ArrayList<>(this.columnCount); @@ -156,6 +159,7 @@ public SFResultSetMetaData( columnNames.add(columnMetadata.get(colIdx).getName()); columnTypeNames.add(columnMetadata.get(colIdx).getTypeName()); precisions.add(calculatePrecision(columnMetadata.get(colIdx))); + dimensions.add(calculateDimension(columnMetadata.get(colIdx))); columnTypes.add(columnMetadata.get(colIdx).getType()); scales.add(columnMetadata.get(colIdx).getScale()); nullables.add( @@ -200,6 +204,14 @@ private Integer 
calculatePrecision(SnowflakeColumnMetadata columnMetadata) { } } + private Integer calculateDimension(SnowflakeColumnMetadata columnMetadata) { + int columnType = columnMetadata.getType(); + if (columnType == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + return columnMetadata.getDimension(); + } + return 0; + } + private Integer calculateDisplaySize(SnowflakeColumnMetadata columnMetadata) { int columnType = columnMetadata.getType(); switch (columnType) { @@ -403,6 +415,14 @@ public int getPrecision(int column) { } } + public int getDimension(int column) { + if (dimensions != null && dimensions.size() >= column && column > 0) { + return dimensions.get(column - 1); + } else { + return 0; + } + } + public boolean isSigned(int column) { return (columnTypes.get(column - 1) == Types.INTEGER || columnTypes.get(column - 1) == Types.DECIMAL diff --git a/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java b/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java index bbd1e1c14..aca26a272 100644 --- a/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java +++ b/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java @@ -23,7 +23,8 @@ /** Snowflake custom SSLConnectionSocketFactory */ public class SFSSLConnectionSocketFactory extends SSLConnectionSocketFactory { - static final SFLogger logger = SFLoggerFactory.getLogger(SFSSLConnectionSocketFactory.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SFSSLConnectionSocketFactory.class); private static final String SSL_VERSION = "TLSv1.2"; diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index d7ee69a07..bb0b2b2a8 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -21,6 +21,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; 
import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; @@ -40,11 +41,14 @@ import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.jdbc.diagnostic.DiagnosticContext; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryClient; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.log.SFLoggerUtil; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.SqlState; import org.apache.http.HttpHeaders; @@ -58,7 +62,7 @@ public class SFSession extends SFBaseSession { public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION; public static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake"; public static final String SF_HEADER_TOKEN_TAG = "Token"; - static final SFLogger logger = SFLoggerFactory.getLogger(SFSession.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFSession.class); private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); private static final String SF_PATH_SESSION_HEARTBEAT = "/session/heartbeat"; private static final String SF_PATH_QUERY_MONITOR = "/monitoring/queries/"; @@ -94,6 +98,7 @@ public class SFSession extends SFBaseSession { *

Default:300 seconds */ private int loginTimeout = 300; + /** * Amount of milliseconds a user is willing to tolerate for network related issues (e.g. HTTP * 503/504) or database transient issues (e.g. GS not responding) @@ -213,7 +218,7 @@ private JsonNode getQueryMetadata(String queryID) throws SQLException { loginTimeout, authTimeout, (int) httpClientSocketTimeout.toMillis(), - 0, + maxHttpRetries, getHttpClientKey()); jsonNode = OBJECT_MAPPER.readTree(response); } catch (Exception e) { @@ -224,7 +229,7 @@ private JsonNode getQueryMetadata(String queryID) throws SQLException { // Get response as JSON and parse it to get the query status // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", response); + logger.debug("Response: {}", response); int errorCode = jsonNode.path("code").asInt(); // If the error is due to an expired session token, try renewing the session and trying @@ -511,21 +516,23 @@ public boolean containProperty(String key) { * @throws SnowflakeSQLException exception raised from Snowflake components */ public synchronized void open() throws SFException, SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); performSanityCheckOnProperties(); Map connectionPropertiesMap = getConnectionPropertiesMap(); - logger.debug( - "input: server={}, account={}, user={}, password={}, role={}, database={}, schema={}," - + " warehouse={}, validate_default_parameters={}, authenticator={}, ocsp_mode={}," - + " passcode_in_password={}, passcode={}, private_key={}, disable_socks_proxy={}," - + " application={}, app_id={}, app_version={}, login_timeout={}, retry_timeout={}, network_timeout={}," - + " query_timeout={}, tracing={}, private_key_file={}, private_key_file_pwd={}." 
- + " session_parameters: client_store_temporary_credential={}, gzip_disabled={}", + logger.info( + "Opening session with server: {}, account: {}, user: {}, password is {}, role: {}, database: {}, schema: {}," + + " warehouse: {}, validate default parameters: {}, authenticator: {}, ocsp mode: {}," + + " passcode in password: {}, passcode is {}, private key is {}, disable socks proxy: {}," + + " application: {}, app id: {}, app version: {}, login timeout: {}, retry timeout: {}, network timeout: {}," + + " query timeout: {}, tracing: {}, private key file: {}, private key file pwd is {}," + + " enable_diagnostics: {}, diagnostics_allowlist_path: {}," + + " session parameters: client store temporary credential: {}, gzip disabled: {}", connectionPropertiesMap.get(SFSessionProperty.SERVER_URL), connectionPropertiesMap.get(SFSessionProperty.ACCOUNT), connectionPropertiesMap.get(SFSessionProperty.USER), - !Strings.isNullOrEmpty((String) connectionPropertiesMap.get(SFSessionProperty.PASSWORD)) - ? "***" - : "(empty)", + SFLoggerUtil.isVariableProvided( + (String) connectionPropertiesMap.get(SFSessionProperty.PASSWORD)), connectionPropertiesMap.get(SFSessionProperty.ROLE), connectionPropertiesMap.get(SFSessionProperty.DATABASE), connectionPropertiesMap.get(SFSessionProperty.SCHEMA), @@ -534,12 +541,9 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.AUTHENTICATOR), getOCSPMode().name(), connectionPropertiesMap.get(SFSessionProperty.PASSCODE_IN_PASSWORD), - !Strings.isNullOrEmpty((String) connectionPropertiesMap.get(SFSessionProperty.PASSCODE)) - ? "***" - : "(empty)", - connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY) != null - ? 
"(not null)" - : "(null)", + SFLoggerUtil.isVariableProvided( + (String) connectionPropertiesMap.get(SFSessionProperty.PASSCODE)), + SFLoggerUtil.isVariableProvided(connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY)), connectionPropertiesMap.get(SFSessionProperty.DISABLE_SOCKS_PROXY), connectionPropertiesMap.get(SFSessionProperty.APPLICATION), connectionPropertiesMap.get(SFSessionProperty.APP_ID), @@ -550,22 +554,22 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.QUERY_TIMEOUT), connectionPropertiesMap.get(SFSessionProperty.TRACING), connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE), - !Strings.isNullOrEmpty( - (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD)) - ? "***" - : "(empty)", + SFLoggerUtil.isVariableProvided( + (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD)), + connectionPropertiesMap.get(SFSessionProperty.ENABLE_DIAGNOSTICS), + connectionPropertiesMap.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE), sessionParametersMap.get(CLIENT_STORE_TEMPORARY_CREDENTIAL), connectionPropertiesMap.get(SFSessionProperty.GZIP_DISABLED)); HttpClientSettingsKey httpClientSettingsKey = getHttpClientKey(); logger.debug( - "connection proxy parameters: use_proxy={}, proxy_host={}, proxy_port={}, proxy_user={}," - + " proxy_password={}, non_proxy_hosts={}, proxy_protocol={}", + "Connection proxy parameters: use proxy: {}, proxy host: {}, proxy port: {}, proxy user: {}," + + " proxy password is {}, non proxy hosts: {}, proxy protocol: {}", httpClientSettingsKey.usesProxy(), httpClientSettingsKey.getProxyHost(), httpClientSettingsKey.getProxyPort(), httpClientSettingsKey.getProxyUser(), - !Strings.isNullOrEmpty(httpClientSettingsKey.getProxyPassword()) ? 
"***" : "(empty)", + SFLoggerUtil.isVariableProvided(httpClientSettingsKey.getProxyPassword()), httpClientSettingsKey.getNonProxyHosts(), httpClientSettingsKey.getProxyHttpProtocol()); @@ -608,19 +612,26 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.DISABLE_CONSOLE_LOGIN) != null ? getBooleanValue( connectionPropertiesMap.get(SFSessionProperty.DISABLE_CONSOLE_LOGIN)) - : true); + : true) + .setDisableSamlURLCheck( + connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK) != null + ? getBooleanValue( + connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK)) + : false); - // Enable or disable OOB telemetry based on connection parameter. Default is disabled. - // The value may still change later when session parameters from the server are read. - if (getBooleanValue( - connectionPropertiesMap.get(SFSessionProperty.CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED))) { - TelemetryService.enable(); - } else { - TelemetryService.disable(); - } + logger.info( + "Connecting to {} Snowflake domain", + loginInput.getHostFromServerUrl().toLowerCase().endsWith(".cn") ? "CHINA" : "GLOBAL"); + + // we ignore the parameters CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED and htapOOBTelemetryEnabled + // OOB telemetry is disabled + TelemetryService.disableOOBTelemetry(); // propagate OCSP mode to SFTrustManager. Note OCSP setting is global on JVM. HttpUtil.initHttpClient(httpClientSettingsKey, null); + + runDiagnosticsIfEnabled(); + SFLoginOutput loginOutput = SessionUtil.openSession(loginInput, connectionPropertiesMap, tracingLevel.toString()); isClosed = false; @@ -644,13 +655,7 @@ public synchronized void open() throws SFException, SnowflakeSQLException { // Update common parameter values for this session SessionUtil.updateSfDriverParamValues(loginOutput.getCommonParams(), this); - // Enable or disable HTAP OOB telemetry based on connection parameter. Default is disabled. 
- if (getBooleanValue( - connectionPropertiesMap.get(SFSessionProperty.HTAP_OOB_TELEMETRY_ENABLED))) { - TelemetryService.enableHTAP(); - } else { - TelemetryService.disableHTAP(); - } + String loginDatabaseName = (String) connectionPropertiesMap.get(SFSessionProperty.DATABASE); String loginSchemaName = (String) connectionPropertiesMap.get(SFSessionProperty.SCHEMA); String loginRole = (String) connectionPropertiesMap.get(SFSessionProperty.ROLE); @@ -702,6 +707,8 @@ public synchronized void open() throws SFException, SnowflakeSQLException { // start heartbeat for this session so that the master token will not expire startHeartbeatForThisSession(); + stopwatch.stop(); + logger.info("Session {} opened in {} ms.", getSessionId(), stopwatch.elapsedMillis()); } /** @@ -762,10 +769,14 @@ boolean isUsernamePasswordMFAAuthenticator() { synchronized void renewSession(String prevSessionToken) throws SFException, SnowflakeSQLException { if (sessionToken != null && !sessionToken.equals(prevSessionToken)) { - logger.debug("not renew session because session token has not been updated.", false); + logger.debug( + "Not renewing session {} because session token has not been updated.", getSessionId()); return; } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + logger.debug("Renewing session {}", getSessionId()); SFLoginInput loginInput = new SFLoginInput(); loginInput .setServerUrl(getServerUrl()) @@ -786,6 +797,9 @@ synchronized void renewSession(String prevSessionToken) sessionToken = loginOutput.getSessionToken(); masterToken = loginOutput.getMasterToken(); + stopwatch.stop(); + logger.debug( + "Session {} renewed successfully in {} ms", getSessionId(), stopwatch.elapsedMillis()); } /** @@ -805,14 +819,17 @@ public String getSessionToken() { */ @Override public void close() throws SFException, SnowflakeSQLException { - logger.debug(" public void close()", false); + logger.debug("Closing session {}", getSessionId()); // stop heartbeat for this session 
stopHeartbeatForThisSession(); if (isClosed) { + logger.debug("Session {} is already closed", getSessionId()); return; } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); SFLoginInput loginInput = new SFLoginInput(); loginInput @@ -832,6 +849,11 @@ public void close() throws SFException, SnowflakeSQLException { qcc.clearCache(); } + stopwatch.stop(); + logger.info( + "Session {} has been successfully closed in {} ms", + getSessionId(), + stopwatch.elapsedMillis()); isClosed = true; } @@ -887,23 +909,26 @@ public Void call() throws SQLException { /** Start heartbeat for this session */ protected void startHeartbeatForThisSession() { if (getEnableHeartbeat() && !Strings.isNullOrEmpty(masterToken)) { - logger.debug("start heartbeat, master token validity: " + masterTokenValidityInSeconds); + logger.debug( + "Session {} start heartbeat, master token validity: {} s", + getSessionId(), + masterTokenValidityInSeconds); HeartbeatBackground.getInstance() .addSession(this, masterTokenValidityInSeconds, heartbeatFrequency); } else { - logger.debug("heartbeat not enabled for the session", false); + logger.debug("Heartbeat not enabled for the session {}", getSessionId()); } } /** Stop heartbeat for this session */ protected void stopHeartbeatForThisSession() { if (getEnableHeartbeat() && !Strings.isNullOrEmpty(masterToken)) { - logger.debug("stop heartbeat", false); + logger.debug("Session {} stop heartbeat", getSessionId()); HeartbeatBackground.getInstance().removeSession(this); } else { - logger.debug("heartbeat not enabled for the session", false); + logger.debug("Heartbeat not enabled for the session {}", getSessionId()); } } @@ -914,12 +939,15 @@ protected void stopHeartbeatForThisSession() { * @throws SQLException exception raised from SQL generic layers */ protected void heartbeat() throws SFException, SQLException { - logger.debug(" public void heartbeat()", false); + logger.debug("Session {} heartbeat", getSessionId()); if (isClosed) { return; } + Stopwatch 
stopwatch = new Stopwatch(); + stopwatch.start(); + HttpPost postRequest = null; String requestId = UUIDUtils.getUUID().toString(); @@ -969,14 +997,14 @@ protected void heartbeat() throws SFException, SQLException { JsonNode rootNode; - logger.debug("connection heartbeat response: {}", theResponse); + logger.debug("Connection heartbeat response: {}", theResponse); rootNode = OBJECT_MAPPER.readTree(theResponse); // check the response to see if it is session expiration response if (rootNode != null && (Constants.SESSION_EXPIRED_GS_CODE == rootNode.path("code").asInt())) { - logger.debug("renew session and retry", false); + logger.debug("Renew session and retry", false); this.renewSession(prevSessionToken); retry = true; continue; @@ -992,12 +1020,15 @@ protected void heartbeat() throws SFException, SQLException { throw (SnowflakeSQLException) ex; } - logger.error("unexpected exception", ex); + logger.error("Unexpected exception", ex); throw new SFException( ErrorCode.INTERNAL_ERROR, IncidentUtil.oneLiner("unexpected exception", ex)); } } while (retry); + stopwatch.stop(); + logger.debug( + "Session {} heartbeat successful in {} ms", getSessionId(), stopwatch.elapsedMillis()); } void injectedDelay() { @@ -1248,4 +1279,45 @@ public SFClientConfig getSfClientConfig() { public void setSfClientConfig(SFClientConfig sfClientConfig) { this.sfClientConfig = sfClientConfig; } + + /** + * If the JDBC driver starts in diagnostics mode then the method prints results of the + * connectivity tests it performs in the logs. A SQLException is thrown with a message indicating + * that the driver is in diagnostics mode, and that a connection was not created. 
+ */ + private void runDiagnosticsIfEnabled() throws SnowflakeSQLException { + Map connectionPropertiesMap = getConnectionPropertiesMap(); + boolean isDiagnosticsEnabled = + Optional.ofNullable(connectionPropertiesMap.get(SFSessionProperty.ENABLE_DIAGNOSTICS)) + .map(b -> (Boolean) b) + .orElse(false); + + if (!isDiagnosticsEnabled) { + return; + } + logger.info("Running diagnostics tests"); + String allowListFile = + (String) connectionPropertiesMap.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE); + + if (allowListFile == null || allowListFile.isEmpty()) { + logger.error( + "Diagnostics was enabled but an allowlist file was not provided." + + " Please provide an allowlist JSON file using the connection parameter {}", + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE); + throw new SnowflakeSQLException( + "Diagnostics was enabled but an allowlist file was not provided. " + + "Please provide an allowlist JSON file using the connection parameter " + + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE); + } else { + DiagnosticContext diagnosticContext = + new DiagnosticContext(allowListFile, connectionPropertiesMap); + diagnosticContext.runDiagnostics(); + } + + throw new SnowflakeSQLException( + "A connection was not created because the driver is running in diagnostics mode." 
+ + " If this is unintended then disable diagnostics check by removing the " + + SFSessionProperty.ENABLE_DIAGNOSTICS + + " connection parameter"); + } } diff --git a/src/main/java/net/snowflake/client/core/SFSessionProperty.java b/src/main/java/net/snowflake/client/core/SFSessionProperty.java index 0ca91809c..3dcb09602 100644 --- a/src/main/java/net/snowflake/client/core/SFSessionProperty.java +++ b/src/main/java/net/snowflake/client/core/SFSessionProperty.java @@ -77,12 +77,15 @@ public enum SFSessionProperty { PUT_GET_MAX_RETRIES("putGetMaxRetries", false, Integer.class), RETRY_TIMEOUT("retryTimeout", false, Integer.class), + ENABLE_DIAGNOSTICS("ENABLE_DIAGNOSTICS", false, Boolean.class), + DIAGNOSTICS_ALLOWLIST_FILE("DIAGNOSTICS_ALLOWLIST_FILE", false, String.class), ENABLE_PATTERN_SEARCH("enablePatternSearch", false, Boolean.class), DISABLE_GCS_DEFAULT_CREDENTIALS("disableGcsDefaultCredentials", false, Boolean.class), - JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class); + JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class), + DISABLE_SAML_URL_CHECK("disableSamlURLCheck", false, Boolean.class); // property key in string private String propertyKey; diff --git a/src/main/java/net/snowflake/client/core/SFSqlInput.java b/src/main/java/net/snowflake/client/core/SFSqlInput.java index b3efa6893..2b3d6ba95 100644 --- a/src/main/java/net/snowflake/client/core/SFSqlInput.java +++ b/src/main/java/net/snowflake/client/core/SFSqlInput.java @@ -4,7 +4,6 @@ package net.snowflake.client.core; import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLInput; import java.util.List; import java.util.Map; @@ -31,8 +30,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @param tz timezone to consider. 
* @return the attribute; if the value is SQL NULL, returns null * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.2 */ java.sql.Timestamp readTimestamp(TimeZone tz) throws SQLException; /** @@ -43,8 +40,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code Object} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.8 */ T readObject(Class type, TimeZone tz) throws SQLException; /** @@ -55,8 +50,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code List} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.8 */ List readList(Class type) throws SQLException; @@ -68,8 +61,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code Map} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.8 */ Map readMap(Class type) throws SQLException; /** @@ -80,8 +71,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code Array} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not 
support this method - * @since 1.8 */ T[] readArray(Class type) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/core/SFStatement.java b/src/main/java/net/snowflake/client/core/SFStatement.java index 1dd555bb5..6142b8eb9 100644 --- a/src/main/java/net/snowflake/client/core/SFStatement.java +++ b/src/main/java/net/snowflake/client/core/SFStatement.java @@ -44,7 +44,7 @@ /** Snowflake statement */ public class SFStatement extends SFBaseStatement { - static final SFLogger logger = SFLoggerFactory.getLogger(SFStatement.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFStatement.class); private SFSession session; @@ -80,7 +80,7 @@ public class SFStatement extends SFBaseStatement { private long conservativeMemoryLimit; // in bytes public SFStatement(SFSession session) { - logger.debug(" public SFStatement(SFSession session)", false); + logger.trace("SFStatement(SFSession session)", false); this.session = session; Integer queryTimeout = session == null ? 
null : session.getQueryTimeout(); @@ -91,7 +91,7 @@ public SFStatement(SFSession session) { private void verifyArrowSupport() { if (SnowflakeDriver.isDisableArrowResultFormat()) { logger.debug( - "disable arrow support: {}", SnowflakeDriver.getDisableArrowResultFormatMessage()); + "Disable arrow support: {}", SnowflakeDriver.getDisableArrowResultFormatMessage()); statementParametersMap.put("JDBC_QUERY_RESULT_FORMAT", "JSON"); } } @@ -205,7 +205,7 @@ SFBaseResultSet executeQueryInternal( throws SQLException, SFException { resetState(); - logger.debug("executeQuery: {}", sql); + logger.debug("ExecuteQuery: {}", sql); if (session == null || session.isClosed()) { throw new SQLException("connection is closed"); @@ -771,9 +771,9 @@ public SFBaseResultSet execute( session.injectedDelay(); if (session.getPreparedStatementLogging()) { - logger.info("execute: {}", sql); + logger.info("Execute: {}", sql); } else { - logger.debug("execute: {}", sql); + logger.debug("Execute: {}", sql); } String trimmedSql = sql.trim(); @@ -798,7 +798,7 @@ private SFBaseResultSet executeFileTransfer(String sql) throws SQLException, SFE try { transferAgent.execute(); - logger.debug("setting result set", false); + logger.debug("Setting result set", false); resultSet = (SFFixedViewResultSet) transferAgent.getResultSet(); childResults = Collections.emptyList(); @@ -814,7 +814,7 @@ private SFBaseResultSet executeFileTransfer(String sql) throws SQLException, SFE @Override public void close() { - logger.debug("public void close()", false); + logger.trace("void close()", false); if (requestId != null) { EventUtil.triggerStateTransition( @@ -827,7 +827,7 @@ public void close() { isClosed = true; if (httpRequest != null) { - logger.debug("releasing connection for the http request", false); + logger.debug("Releasing connection for the http request", false); httpRequest.releaseConnection(); httpRequest = null; @@ -841,7 +841,7 @@ public void close() { @Override public void cancel() throws SFException, 
SQLException { - logger.debug("public void cancel()", false); + logger.trace("void cancel()", false); if (canceling.get()) { logger.debug("Query is already cancelled", false); diff --git a/src/main/java/net/snowflake/client/core/SFTrustManager.java b/src/main/java/net/snowflake/client/core/SFTrustManager.java index bd05729c3..740c70fe3 100644 --- a/src/main/java/net/snowflake/client/core/SFTrustManager.java +++ b/src/main/java/net/snowflake/client/core/SFTrustManager.java @@ -140,7 +140,7 @@ public class SFTrustManager extends X509ExtendedTrustManager { /** OCSP response cache file name. Should be identical to other driver's cache file name. */ static final String CACHE_FILE_NAME = "ocsp_response_cache.json"; - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(SFTrustManager.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManager.class); private static final ASN1ObjectIdentifier OIDocsp = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.48.1").intern(); private static final ASN1ObjectIdentifier SHA1RSA = @@ -167,8 +167,10 @@ public class SFTrustManager extends X509ExtendedTrustManager { private static final int DEFAULT_OCSP_CACHE_SERVER_CONNECTION_TIMEOUT = 5000; /** Default OCSP responder connection timeout */ private static final int DEFAULT_OCSP_RESPONDER_CONNECTION_TIMEOUT = 10000; + /** Default OCSP Cache server host name prefix */ + private static final String DEFAULT_OCSP_CACHE_HOST_PREFIX = "http://ocsp.snowflakecomputing."; /** Default OCSP Cache server host name */ - private static final String DEFAULT_OCSP_CACHE_HOST = "http://ocsp.snowflakecomputing.com"; + private static final String DEFAULT_OCSP_CACHE_HOST = DEFAULT_OCSP_CACHE_HOST_PREFIX + "com"; /** OCSP response file cache directory */ private static final FileCacheManager fileCacheManager; @@ -200,7 +202,7 @@ public class SFTrustManager extends X509ExtendedTrustManager { /** OCSP Response Cache server Retry URL pattern */ static String 
SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN; /** OCSP response cache server URL. */ - private static String SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE; + static String SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE; private static JcaX509CertificateConverter CONVERTER_X509 = new JcaX509CertificateConverter(); /** RootCA cache */ @@ -283,6 +285,9 @@ public class SFTrustManager extends X509ExtendedTrustManager { JsonNode res = fileCacheManager.readCacheFile(); readJsonStoreCache(res); } + + logger.debug( + "Initializing trust manager with OCSP mode: {}, cache file: {}", ocspMode, cacheFile); } /** Deletes OCSP response cache file from disk. */ @@ -312,7 +317,7 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I return; } SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheServerUrl; - if (!SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE.startsWith(DEFAULT_OCSP_CACHE_HOST)) { + if (!SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE.startsWith(DEFAULT_OCSP_CACHE_HOST_PREFIX)) { URL url = new URL(SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); if (url.getPort() > 0) { SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = @@ -322,10 +327,13 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = String.format("%s://%s/retry/%s", url.getProtocol(), url.getHost(), "%s/%s"); } + logger.debug( + "Reset OCSP response cache server URL to: {}", + SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); } } - private static void setOCSPResponseCacheServerURL() { + private static void setOCSPResponseCacheServerURL(String topLevelDomain) { String ocspCacheUrl = systemGetProperty(SF_OCSP_RESPONSE_CACHE_SERVER_URL); if (ocspCacheUrl != null) { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheUrl; @@ -336,30 +344,31 @@ private static void setOCSPResponseCacheServerURL() { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheUrl; } } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Failed to get environment 
variable " + SF_OCSP_RESPONSE_CACHE_SERVER_URL + ". Ignored", true); } if (SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE == null) { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = - String.format("%s/%s", DEFAULT_OCSP_CACHE_HOST, CACHE_FILE_NAME); + String.format("%s%s/%s", DEFAULT_OCSP_CACHE_HOST_PREFIX, topLevelDomain, CACHE_FILE_NAME); } + logger.debug("Set OCSP response cache server to: {}", SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); } private static boolean useOCSPResponseCacheServer() { String ocspCacheServerEnabled = systemGetProperty(SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED); if (Boolean.FALSE.toString().equalsIgnoreCase(ocspCacheServerEnabled)) { - LOGGER.debug("No OCSP Response Cache Server is used.", false); + logger.debug("No OCSP Response Cache Server is used.", false); return false; } try { ocspCacheServerEnabled = systemGetEnv(SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED); if (Boolean.FALSE.toString().equalsIgnoreCase(ocspCacheServerEnabled)) { - LOGGER.debug("No OCSP Response Cache Server is used.", false); + logger.debug("No OCSP Response Cache Server is used.", false); return false; } } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Failed to get environment variable " + SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED + ". Ignored", @@ -383,7 +392,7 @@ private static String encodeCacheKey(OcspResponseCacheKey ocsp_cache_key) { CertID cid = new CertID(algo, nameHash, keyHash, snumber); return Base64.encodeBase64String(cid.toASN1Primitive().getEncoded()); } catch (Exception ex) { - LOGGER.debug("Failed to encode cache key to base64 encoded cert id", false); + logger.debug("Failed to encode cache key to base64 encoded cert id", false); } return null; } @@ -423,7 +432,7 @@ private static SFPair> decodeCacheFro JsonNode ocspRespBase64 = elem.getValue(); if (!ocspRespBase64.isArray() || ocspRespBase64.size() != 2) { - LOGGER.debug("Invalid cache file format. Ignored", false); + logger.debug("Invalid cache file format. 
Ignored", false); return null; } long producedAt = ocspRespBase64.get(0).asLong(); @@ -465,14 +474,14 @@ private static ObjectNode encodeCacheToJSON() { } return out; } catch (IOException ex) { - LOGGER.debug("Failed to encode ASN1 object.", false); + logger.debug("Failed to encode ASN1 object.", false); } return null; } private static synchronized void readJsonStoreCache(JsonNode m) { if (m == null || !m.getNodeType().equals(JsonNodeType.OBJECT)) { - LOGGER.debug("Invalid cache file format.", false); + logger.debug("Invalid cache file format.", false); return; } try { @@ -489,7 +498,7 @@ private static synchronized void readJsonStoreCache(JsonNode m) { } } } catch (IOException ex) { - LOGGER.debug("Failed to decode the cache file", false); + logger.debug("Failed to decode the cache file", false); } } @@ -669,7 +678,7 @@ private void checkNewOCSPEndpointAvailability() { try { new_ocsp_ept = systemGetEnv("SF_OCSP_ACTIVATE_NEW_ENDPOINT"); } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Could not get environment variable to check for New OCSP Endpoint Availability", false); new_ocsp_ept = systemGetProperty("net.snowflake.jdbc.ocsp_activate_new_endpoint"); } @@ -784,22 +793,23 @@ void validateRevocationStatus(X509Certificate[] chain, String peerHost) ocspCacheServer.resetOCSPResponseCacheServer(peerHost); } - setOCSPResponseCacheServerURL(); + String topLevelDomain = peerHost.substring(peerHost.lastIndexOf(".") + 1); + setOCSPResponseCacheServerURL(topLevelDomain); boolean isCached = isCached(pairIssuerSubjectList); if (useOCSPResponseCacheServer() && !isCached) { if (!ocspCacheServer.new_endpoint_enabled) { - LOGGER.debug( + logger.debug( "Downloading OCSP response cache from the server. URL: {}", SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); } else { - LOGGER.debug( + logger.debug( "Downloading OCSP response cache from the server. 
URL: {}", ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER); } try { readOcspResponseCacheServer(); } catch (SFOCSPException ex) { - LOGGER.debug( + logger.debug( "Error downloading OCSP Response from cache server : {}." + "OCSP Responses will be fetched directly from the CA OCSP" + "Responder ", @@ -900,7 +910,7 @@ private void executeOneRevocationStatusCheck( telemetryData.setCacheHit(true); } } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Exception occurred while trying to fetch OCSP Response - {}", ex.getMessage()); throw new SFOCSPException( OCSPErrorCode.OCSP_RESPONSE_FETCH_FAILURE, @@ -908,8 +918,8 @@ private void executeOneRevocationStatusCheck( ex); } - LOGGER.debug( - "validating. {}", CertificateIDToString(req.getRequestList()[0].getCertID())); + logger.debug( + "Validating. {}", CertificateIDToString(req.getRequestList()[0].getCertID())); try { validateRevocationStatusMain(pairIssuerSubject, value0.right); success = true; @@ -930,12 +940,12 @@ private void executeOneRevocationStatusCheck( } catch (CertificateException ex) { WAS_CACHE_UPDATED.set(OCSP_RESPONSE_CACHE.remove(keyOcspResponse) != null); if (WAS_CACHE_UPDATED.get()) { - LOGGER.debug("deleting the invalid OCSP cache.", false); + logger.debug("Deleting the invalid OCSP cache.", false); } cause = ex; - LOGGER.debug( - "Retrying {}/{} after sleeping {}(ms)", retry + 1, maxRetryCounter, sleepTime); + logger.debug( + "Retrying {}/{} after sleeping {} ms", retry + 1, maxRetryCounter, sleepTime); try { if (retry + 1 < maxRetryCounter) { Thread.sleep(sleepTime); @@ -950,7 +960,7 @@ private void executeOneRevocationStatusCheck( error = new CertificateException(ex); ocspLog = telemetryData.generateTelemetry(SF_OCSP_EVENT_TYPE_REVOKED_CERTIFICATE_ERROR, error); - LOGGER.error(ocspLog, false); + logger.error(ocspLog, false); throw error; } @@ -960,21 +970,21 @@ private void executeOneRevocationStatusCheck( error = new CertificateException( "Certificate Revocation check failed. 
Could not retrieve OCSP Response.", cause); - LOGGER.debug(cause.getMessage(), false); + logger.debug(cause.getMessage(), false); } else { error = new CertificateException( "Certificate Revocation check failed. Could not retrieve OCSP Response."); - LOGGER.debug(error.getMessage(), false); + logger.debug(error.getMessage(), false); } ocspLog = telemetryData.generateTelemetry(SF_OCSP_EVENT_TYPE_VALIDATION_ERROR, error); if (isOCSPFailOpen()) { // Log includes fail-open warning. - LOGGER.error(generateFailOpenLog(ocspLog), false); + logger.error(generateFailOpenLog(ocspLog), false); } else { // still not success, raise an error. - LOGGER.debug(ocspLog, false); + logger.debug(ocspLog, false); throw error; } } @@ -993,7 +1003,7 @@ private boolean isCached(List> pairIssuerSubjec for (SFPair pairIssuerSubject : pairIssuerSubjectList) { OCSPReq req = createRequest(pairIssuerSubject); CertificateID certificateId = req.getRequestList()[0].getCertID(); - LOGGER.debug(CertificateIDToString(certificateId), false); + logger.debug(CertificateIDToString(certificateId), false); CertID cid = certificateId.toASN1Primitive(); OcspResponseCacheKey k = new OcspResponseCacheKey( @@ -1003,18 +1013,18 @@ private boolean isCached(List> pairIssuerSubjec SFPair res = OCSP_RESPONSE_CACHE.get(k); if (res == null) { - LOGGER.debug("Not all OCSP responses for the certificate is in the cache.", false); + logger.debug("Not all OCSP responses for the certificate is in the cache.", false); isCached = false; break; } else if (currentTimeSecond - CACHE_EXPIRATION_IN_SECONDS > res.left) { - LOGGER.debug("Cache for CertID expired.", false); + logger.debug("Cache for CertID expired.", false); isCached = false; break; } else { try { validateRevocationStatusMain(pairIssuerSubject, res.right); } catch (SFOCSPException ex) { - LOGGER.debug( + logger.debug( "Cache includes invalid OCSPResponse. 
" + "Will download the OCSP cache from Snowflake OCSP server", false); @@ -1023,7 +1033,7 @@ private boolean isCached(List> pairIssuerSubjec } } } catch (IOException ex) { - LOGGER.debug("Failed to encode CertID.", false); + logger.debug("Failed to encode CertID.", false); } return isCached; } @@ -1059,14 +1069,14 @@ private void readOcspResponseCacheServer() throws SFOCSPException { JsonNode m = OBJECT_MAPPER.readTree(out.toByteArray()); out.close(); readJsonStoreCache(m); - LOGGER.debug("Successfully downloaded OCSP cache from the server.", false); + logger.debug("Successfully downloaded OCSP cache from the server.", false); } catch (IOException ex) { - LOGGER.debug( + logger.debug( "Failed to read the OCSP response cache from the server. " + "Server: {}, Err: {}", ocspCacheServerInUse, ex); } catch (URISyntaxException ex) { - LOGGER.debug("Indicate that a string could not be parsed as a URI reference.", false); + logger.debug("Indicate that a string could not be parsed as a URI reference.", false); throw new SFOCSPException( OCSPErrorCode.INVALID_CACHE_SERVER_URL, "Invalid OCSP Cache Server URL used", ex); } finally { @@ -1141,11 +1151,11 @@ private OCSPResp fetchOcspResponse( } else { url = new URL(String.format("%s/%s", ocspUrlStr, urlEncodedOCSPReq)); } - LOGGER.debug("not hit cache. Fetching OCSP response from CA OCSP server. {}", url); + logger.debug("Not hit cache. Fetching OCSP response from CA OCSP server. {}", url); } else { url = new URL(ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL); - LOGGER.debug( - "not hit cache. Fetching OCSP response from Snowflake OCSP Response Fetcher. {}", url); + logger.debug( + "Not hit cache. Fetching OCSP response from Snowflake OCSP Response Fetcher. 
{}", url); } long sleepTime = INITIAL_SLEEPING_TIME_IN_MILLISECONDS; @@ -1180,12 +1190,12 @@ private OCSPResp fetchOcspResponse( break; } } catch (IOException ex) { - LOGGER.debug("Failed to reach out OCSP responder: {}", ex.getMessage()); + logger.debug("Failed to reach out OCSP responder: {}", ex.getMessage()); savedEx = ex; } IOUtils.closeQuietly(response); - LOGGER.debug("Retrying {}/{} after sleeping {}(ms)", retry + 1, maxRetryCounter, sleepTime); + logger.debug("Retrying {}/{} after sleeping {} ms", retry + 1, maxRetryCounter, sleepTime); try { if (retry + 1 < maxRetryCounter) { Thread.sleep(sleepTime); @@ -1245,8 +1255,10 @@ private int getOCSPResponderConnectionTimeout() { private String overrideOCSPURL(String ocspURL) { String ocspURLInput = systemGetProperty(SF_OCSP_TEST_RESPONDER_URL); if (ocspURLInput != null) { + logger.debug("Overriding OCSP url to: {}", ocspURLInput); return ocspURLInput; } + logger.debug("Overriding OCSP url to: {}", ocspURL); return ocspURL; } @@ -1272,7 +1284,7 @@ private void validateRevocationStatusMain( X509CertificateHolder signVerifyCert; checkInvalidSigningCertTestParameter(); if (attachedCerts.length > 0) { - LOGGER.debug( + logger.debug( "Certificate is attached for verification. 
" + "Verifying it by the issuer certificate.", false); @@ -1296,15 +1308,15 @@ private void validateRevocationStatusMain( CONVERTER_X509.getCertificate(signVerifyCert).getTBSCertificate(), signVerifyCert.getSignatureAlgorithm()); } catch (CertificateException ex) { - LOGGER.debug("OCSP Signing Certificate signature verification failed", false); + logger.debug("OCSP Signing Certificate signature verification failed", false); throw new SFOCSPException( OCSPErrorCode.INVALID_CERTIFICATE_SIGNATURE, "OCSP Signing Certificate signature verification failed", ex); } - LOGGER.debug("Verifying OCSP signature by the attached certificate public key.", false); + logger.debug("Verifying OCSP signature by the attached certificate public key.", false); } else { - LOGGER.debug( + logger.debug( "Certificate is NOT attached for verification. " + "Verifying OCSP signature by the issuer public key.", false); @@ -1317,7 +1329,7 @@ private void validateRevocationStatusMain( basicOcspResp.getTBSResponseData(), basicOcspResp.getSignatureAlgorithmID()); } catch (CertificateException ex) { - LOGGER.debug("OCSP signature verification failed", false); + logger.debug("OCSP signature verification failed", false); throw new SFOCSPException( OCSPErrorCode.INVALID_OCSP_RESPONSE_SIGNATURE, "OCSP signature verification failed", @@ -1376,7 +1388,7 @@ private void validateBasicOcspResponse(Date currentTime, BasicOCSPResp basicOcsp Date thisUpdate = singleResps.getThisUpdate(); Date nextUpdate = singleResps.getNextUpdate(); - LOGGER.debug( + logger.debug( "Current Time: {}, This Update: {}, Next Update: {}", currentTime, thisUpdate, @@ -1392,7 +1404,7 @@ private void validateBasicOcspResponse(Date currentTime, BasicOCSPResp basicOcsp DATE_FORMAT_UTC.format(nextUpdate))); } } - LOGGER.debug("OK. Verified the certificate revocation status.", false); + logger.debug("OK. 
Verified the certificate revocation status.", false); } private void checkCertUnknownTestParameter() throws SFOCSPException { @@ -1516,7 +1528,7 @@ private String ocspResponseToB64(OCSPResp ocspResp) { try { return Base64.encodeBase64String(ocspResp.getEncoded()); } catch (Throwable ex) { - LOGGER.debug("Could not convert OCSP Response to Base64", false); + logger.debug("Could not convert OCSP Response to Base64", false); return null; } } @@ -1525,7 +1537,7 @@ private OCSPResp b64ToOCSPResp(String ocspRespB64) { try { return new OCSPResp(Base64.decodeBase64(ocspRespB64)); } catch (Throwable ex) { - LOGGER.debug("Could not cover OCSP Response from Base64 to OCSPResp object", false); + logger.debug("Could not cover OCSP Response from Base64 to OCSPResp object", false); return null; } } @@ -1537,14 +1549,16 @@ static class OCSPCacheServer { void resetOCSPResponseCacheServer(String host) { String ocspCacheServerUrl; - if (host.indexOf(".global.snowflakecomputing.com") > 0) { + if (host.toLowerCase().contains(".global.snowflakecomputing.")) { ocspCacheServerUrl = String.format("https://ocspssd%s/%s", host.substring(host.indexOf('-')), "ocsp"); - } else if (host.indexOf(".snowflakecomputing.com") > 0) { + } else if (host.toLowerCase().contains(".snowflakecomputing.")) { ocspCacheServerUrl = String.format("https://ocspssd%s/%s", host.substring(host.indexOf('.')), "ocsp"); } else { - ocspCacheServerUrl = "https://ocspssd.snowflakecomputing.com/ocsp"; + String topLevelDomain = host.substring(host.lastIndexOf(".") + 1); + ocspCacheServerUrl = + String.format("https://ocspssd.snowflakecomputing.%s/ocsp", topLevelDomain); } SF_OCSP_RESPONSE_CACHE_SERVER = String.format("%s/%s", ocspCacheServerUrl, "fetch"); SF_OCSP_RESPONSE_RETRY_URL = String.format("%s/%s", ocspCacheServerUrl, "retry"); @@ -1624,7 +1638,7 @@ public byte[] getDigest() { String.format( "Failed to instantiate the algorithm: %s. 
err=%s", ALGORITHM_SHA1_NAME, ex.getMessage()); - LOGGER.error(errMsg, false); + logger.error(errMsg, false); throw new RuntimeException(errMsg); } } diff --git a/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java b/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java index 144caefec..5030e4603 100644 --- a/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java +++ b/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java @@ -22,6 +22,7 @@ private SecureStorageAppleManager() { } public static SecureStorageAppleManager builder() { + logger.info("Using Apple Keychain as a token cache storage"); return new SecureStorageAppleManager(); } diff --git a/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java b/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java index e1f352187..7663147b3 100644 --- a/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java +++ b/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java @@ -41,6 +41,8 @@ private SecureStorageLinuxManager() { .setCacheExpirationInSeconds(CACHE_EXPIRATION_IN_SECONDS) .setCacheFileLockExpirationInSeconds(CACHE_FILE_LOCK_EXPIRATION_IN_SECONDS) .build(); + logger.info( + "Using temporary file: {} as a token cache storage", fileCacheManager.getCacheFilePath()); } private static class SecureStorageLinuxManagerHolder { diff --git a/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java b/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java index f43952023..f38c1570b 100644 --- a/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java +++ b/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java @@ -33,6 +33,7 @@ private SecureStorageWindowsManager() { } public static SecureStorageWindowsManager builder() { + logger.info("Using Windows Credential Manager as a token cache storage"); return new 
SecureStorageWindowsManager(); } diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index a3421e841..6a9db988f 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -37,6 +37,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.ClientAuthnParameter; import net.snowflake.common.core.SqlState; @@ -71,12 +72,11 @@ public class SessionUtil { public static final String SF_QUERY_SESSION_DELETE = "delete"; // Headers - public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION; + @Deprecated + public static final String SF_HEADER_AUTHORIZATION = SFSession.SF_HEADER_AUTHORIZATION; // Authentication type private static final String SF_HEADER_BASIC_AUTHTYPE = "Basic"; - private static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake"; - private static final String SF_HEADER_TOKEN_TAG = "Token"; private static final String CLIENT_STORE_TEMPORARY_CREDENTIAL = "CLIENT_STORE_TEMPORARY_CREDENTIAL"; private static final String CLIENT_REQUEST_MFA_TOKEN = "CLIENT_REQUEST_MFA_TOKEN"; @@ -343,6 +343,8 @@ private static SFLoginOutput newSession( Map connectionPropertiesMap, String tracingLevel) throws SFException, SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); // build URL for login request URIBuilder uriBuilder; URI loginURI; @@ -369,6 +371,18 @@ private static SFLoginOutput newSession( final ClientAuthnDTO.AuthenticatorType authenticatorType = getAuthenticator(loginInput); Map commonParams; + String oktaUsername = loginInput.getOKTAUserName(); + logger.debug( + "Authenticating user: {}, host: {} with authentication method: {}." 
+ + " Login timeout: {} s, auth timeout: {} s, OCSP mode: {}{}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + authenticatorType, + loginInput.getLoginTimeout(), + loginInput.getAuthTimeout(), + loginInput.getOCSPMode(), + Strings.isNullOrEmpty(oktaUsername) ? "" : ", okta username: " + oktaUsername); + try { uriBuilder = new URIBuilder(loginInput.getServerUrl()); @@ -629,7 +643,7 @@ private static SFLoginOutput newSession( * HttpClient should take authorization header from char[] instead of * String. */ - postRequest.setHeader(SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE); + postRequest.setHeader(SFSession.SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE); setServiceNameHeader(loginInput, postRequest); @@ -639,6 +653,8 @@ private static SFLoginOutput newSession( int leftsocketTimeout = loginInput.getSocketTimeoutInMillis(); int retryCount = 0; + Exception lastRestException = null; + while (true) { try { theString = @@ -650,6 +666,7 @@ private static SFLoginOutput newSession( retryCount, loginInput.getHttpClientSettingsKey()); } catch (SnowflakeSQLException ex) { + lastRestException = ex; if (ex.getErrorCode() == ErrorCode.AUTHENTICATOR_REQUEST_TIMEOUT.getMessageCode()) { if (authenticatorType == ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT || authenticatorType == ClientAuthnDTO.AuthenticatorType.OKTA) { @@ -714,16 +731,42 @@ private static SFLoginOutput newSession( } else { throw ex; } + } catch (Exception ex) { + lastRestException = ex; } break; } + if (theString == null) { + if (lastRestException != null) { + logger.error( + "Failed to open new session for user: {}, host: {}. 
Error: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + lastRestException); + throw lastRestException; + } else { + SnowflakeSQLException exception = + new SnowflakeSQLException( + NO_QUERY_ID, + "empty authentication response", + SqlState.CONNECTION_EXCEPTION, + ErrorCode.CONNECTION_ERROR.getMessageCode()); + logger.error( + "Failed to open new session for user: {}, host: {}. Error: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + exception); + throw exception; + } + } + // general method, same as with data binding JsonNode jsonNode = mapper.readTree(theString); // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", theString); + logger.debug("Response: {}", theString); int errorCode = jsonNode.path("code").asInt(); if (errorCode == Constants.ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE) { @@ -741,9 +784,16 @@ private static SFLoginOutput newSession( deleteMfaTokenCache(loginInput.getHostFromServerUrl(), loginInput.getUserName()); } + String errorMessage = jsonNode.path("message").asText(); + + logger.error( + "Failed to open new session for user: {}, host: {}. 
Error: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + errorMessage); throw new SnowflakeSQLException( NO_QUERY_ID, - jsonNode.path("message").asText(), + errorMessage, SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION, errorCode); } @@ -769,7 +819,7 @@ private static SFLoginOutput newSession( commonParams = SessionUtil.getCommonParams(jsonNode.path("data").path("parameters")); if (serverVersion != null) { - logger.debug("server version = {}", serverVersion); + logger.debug("Server version: {}", serverVersion); if (serverVersion.indexOf(" ") > 0) { databaseVersion = serverVersion.substring(0, serverVersion.indexOf(" ")); @@ -777,7 +827,7 @@ private static SFLoginOutput newSession( databaseVersion = serverVersion; } } else { - logger.debug("server version is null", false); + logger.debug("Server version is null", false); } if (databaseVersion != null) { @@ -800,13 +850,13 @@ private static SFLoginOutput newSession( if (!jsonNode.path("data").path("newClientForUpgrade").isNull()) { newClientForUpgrade = jsonNode.path("data").path("newClientForUpgrade").asText(); - logger.debug("new client: {}", newClientForUpgrade); + logger.debug("New client: {}", newClientForUpgrade); } // get health check interval and adjust network timeouts if different int healthCheckIntervalFromGS = jsonNode.path("data").path("healthCheckInterval").asInt(); - logger.debug("health check interval = {}", healthCheckIntervalFromGS); + logger.debug("Health check interval: {}", healthCheckIntervalFromGS); if (healthCheckIntervalFromGS > 0 && healthCheckIntervalFromGS != healthCheckInterval) { // add health check interval to socket timeout @@ -821,9 +871,9 @@ private static SFLoginOutput newSession( HttpUtil.setRequestConfig(requestConfig); - logger.debug("adjusted connection timeout to = {}", loginInput.getConnectionTimeout()); + logger.debug("Adjusted connection timeout to: {}", loginInput.getConnectionTimeout()); - logger.debug("adjusted socket timeout to = {}", 
httpClientSocketTimeout); + logger.debug("Adjusted socket timeout to: {}", httpClientSocketTimeout); } } catch (SnowflakeSQLException ex) { throw ex; // must catch here to avoid Throwable to get the exception @@ -873,6 +923,13 @@ && asBoolean(loginInput.getSessionParameters().get(CLIENT_STORE_TEMPORARY_CREDEN CredentialManager.getInstance().writeMfaToken(loginInput, ret); } + stopwatch.stop(); + logger.debug( + "User: {}, host: {} with authentication method: {} authenticated successfully in {} ms", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + authenticatorType, + stopwatch.elapsedMillis()); return ret; } @@ -974,13 +1031,18 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT postRequest.addHeader("accept", "application/json"); postRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + " " + SF_HEADER_TOKEN_TAG + "=\"" + headerToken + "\""); + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + + " " + + SFSession.SF_HEADER_TOKEN_TAG + + "=\"" + + headerToken + + "\""); setServiceNameHeader(loginInput, postRequest); logger.debug( - "request type: {}, old session token: {}, " + "master token: {}", + "Request type: {}, old session token: {}, " + "master token: {}", requestType.value, (ArgSupplier) () -> loginInput.getSessionToken() != null ? "******" : null, (ArgSupplier) () -> loginInput.getMasterToken() != null ? 
"******" : null); @@ -999,7 +1061,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", theString); + logger.debug("Response: {}", theString); String errorCode = jsonNode.path("code").asText(); String message = jsonNode.path("message").asText(); @@ -1037,7 +1099,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT * @throws SFException if failed to close session */ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeSQLException { - logger.debug(" public void close() throws SFException"); + logger.trace("void close() throws SFException"); // assert the following inputs are valid AssertUtil.assertTrue( @@ -1068,10 +1130,10 @@ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeS postRequest, loginInput.getAdditionalHttpHeadersForSnowsight()); postRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + loginInput.getSessionToken() + "\""); @@ -1089,15 +1151,15 @@ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeS JsonNode rootNode; - logger.debug("connection close response: {}", theString); + logger.debug("Connection close response: {}", theString); rootNode = mapper.readTree(theString); SnowflakeUtil.checkErrorAndThrowException(rootNode); } catch (URISyntaxException ex) { - throw new RuntimeException("unexpected URI syntax exception", ex); + throw new RuntimeException("Unexpected URI syntax exception", ex); } catch (IOException ex) { - logger.error("unexpected IO exception for: " + postRequest, ex); + logger.error("Unexpected IO exception for: " + postRequest, ex); } catch (SnowflakeSQLException ex) { // ignore exceptions for session 
expiration exceptions and for // sessions that no longer exist @@ -1154,6 +1216,16 @@ private static String federatedFlowStep4( loginInput.getHttpClientSettingsKey()); // step 5 + validateSAML(responseHtml, loginInput); + } catch (IOException | URISyntaxException ex) { + handleFederatedFlowError(loginInput, ex); + } + return responseHtml; + } + + private static void validateSAML(String responseHtml, SFLoginInput loginInput) + throws SnowflakeSQLException, MalformedURLException { + if (!loginInput.getDisableSamlURLCheck()) { String postBackUrl = getPostBackUrlFromHTML(responseHtml); if (!isPrefixEqual(postBackUrl, loginInput.getServerUrl())) { URL idpDestinationUrl = new URL(postBackUrl); @@ -1167,18 +1239,13 @@ private static String federatedFlowStep4( clientDestinationHostName, idpDestinationHostName); - // Session is in process of getting created, so exception constructor takes in null session - // value + // Session is in process of getting created, so exception constructor takes in null throw new SnowflakeSQLLoggedException( null, ErrorCode.IDP_INCORRECT_DESTINATION.getMessageCode(), - SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION - /* session = */ ); + SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION); } - } catch (IOException | URISyntaxException ex) { - handleFederatedFlowError(loginInput, ex); } - return responseHtml; } /** @@ -1229,7 +1296,7 @@ private static String federatedFlowStep3(SFLoginInput loginInput, String tokenUr null, loginInput.getHttpClientSettingsKey()); - logger.debug("user is authenticated against {}.", loginInput.getAuthenticator()); + logger.debug("User is authenticated against {}.", loginInput.getAuthenticator()); // session token is in the data field of the returned json response final JsonNode jsonNode = mapper.readTree(idpResponse); @@ -1268,7 +1335,7 @@ private static void federatedFlowStep2(SFLoginInput loginInput, String tokenUrl, null, ErrorCode.IDP_CONNECTION_ERROR.getMessageCode(), 
SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION - /* session = */ ); + /* session= */ ); } } catch (MalformedURLException ex) { handleFederatedFlowError(loginInput, ex); @@ -1317,12 +1384,12 @@ private static JsonNode federatedFlowStep1(SFLoginInput loginInput) throws Snowf loginInput.getSocketTimeoutInMillis(), 0, loginInput.getHttpClientSettingsKey()); - logger.debug("authenticator-request response: {}", gsResponse); + logger.debug("Authenticator-request response: {}", gsResponse); JsonNode jsonNode = mapper.readTree(gsResponse); // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", gsResponse); + logger.debug("Response: {}", gsResponse); int errorCode = jsonNode.path("code").asInt(); throw new SnowflakeSQLException( @@ -1460,7 +1527,7 @@ public static Map getCommonParams(JsonNode paramsNode) { // What type of value is it and what's the value? if (!child.hasNonNull("value")) { - logger.debug("No value found for Common Parameter {}", child.path("name").asText()); + logger.debug("No value found for Common Parameter: {}", child.path("name").asText()); continue; } @@ -1495,7 +1562,7 @@ static void updateSfDriverParamValues(Map parameters, SFBaseSess session.setCommonParameters(parameters); } for (Map.Entry entry : parameters.entrySet()) { - logger.debug("processing parameter {}", entry.getKey()); + logger.debug("Processing parameter {}", entry.getKey()); if ("CLIENT_DISABLE_INCIDENTS".equalsIgnoreCase(entry.getKey())) { SnowflakeDriver.setDisableIncidents((Boolean) entry.getValue()); @@ -1592,11 +1659,9 @@ static void updateSfDriverParamValues(Map parameters, SFBaseSess session.setClientPrefetchThreads((int) entry.getValue()); } } else if (CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED.equalsIgnoreCase(entry.getKey())) { - if ((boolean) entry.getValue()) { - TelemetryService.enable(); - } else { - TelemetryService.disable(); - } + // we ignore the parameter CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED + // OOB telemetry is 
always disabled + TelemetryService.disableOOBTelemetry(); } else if (CLIENT_VALIDATE_DEFAULT_PARAMETERS.equalsIgnoreCase(entry.getKey())) { if (session != null) { session.setValidateDefaultParameters(SFLoginInput.getBooleanValue(entry.getValue())); @@ -1641,7 +1706,7 @@ enum TokenRequestType { * @param serverUrl The Snowflake URL includes protocol such as "https://" */ public static void resetOCSPUrlIfNecessary(String serverUrl) throws IOException { - if (serverUrl.indexOf(".privatelink.snowflakecomputing.com") > 0) { + if (PrivateLinkDetector.isPrivateLink(serverUrl)) { // Privatelink uses special OCSP Cache server URL url = new URL(serverUrl); String host = url.getHost(); diff --git a/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java b/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java index da7807b69..9db2f0589 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java +++ b/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java @@ -46,7 +46,8 @@ * user can type IdP username and password. 4. Return token and proof key to the GS to gain access. 
*/ public class SessionUtilExternalBrowser { - static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilExternalBrowser.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SessionUtilExternalBrowser.class); public interface AuthExternalBrowserHandlers { // build a HTTP post object @@ -202,14 +203,14 @@ private String getSSOUrl(int port) throws SFException, SnowflakeSQLException { 0, loginInput.getHttpClientSettingsKey()); - logger.debug("authenticator-request response: {}", theString); + logger.debug("Authenticator-request response: {}", theString); // general method, same as with data binding JsonNode jsonNode = mapper.readTree(theString); // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", theString); + logger.debug("Response: {}", theString); String errorCode = jsonNode.path("code").asText(); throw new SnowflakeSQLException( SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION, @@ -240,7 +241,7 @@ private String getConsoleLoginUrl(int port) throws SFException { String consoleLoginUrl = consoleLoginUriBuilder.build().toURL().toString(); - logger.debug("console login url: {}", consoleLoginUrl); + logger.debug("Console login url: {}", consoleLoginUrl); return consoleLoginUrl; } catch (Exception ex) { @@ -266,7 +267,7 @@ void authenticate() throws SFException, SnowflakeSQLException { try { // main procedure int port = this.getLocalPort(ssocket); - logger.debug("Listening localhost:{}", port); + logger.debug("Listening localhost: {}", port); if (loginInput.getDisableConsoleLogin()) { // Access GS to get SSO URL diff --git a/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java b/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java index 2bef91eda..ad63ea603 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java +++ b/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java @@ -17,6 +17,7 @@ import java.io.IOException; import 
java.io.StringReader; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.security.InvalidKeyException; import java.security.KeyFactory; @@ -52,7 +53,7 @@ /** Class used to compute jwt token for key pair authentication Created by hyu on 1/16/18. */ class SessionUtilKeyPair { - static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilKeyPair.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilKeyPair.class); // user name in upper case private final String userName; @@ -147,7 +148,6 @@ private SecretKeyFactory getSecretKeyFactory(String algorithm) throws NoSuchAlgo private PrivateKey extractPrivateKeyFromFile(String privateKeyFile, String privateKeyFilePwd) throws SFException { - if (isBouncyCastleProviderEnabled) { try { return extractPrivateKeyWithBouncyCastle(privateKeyFile, privateKeyFilePwd); @@ -234,8 +234,11 @@ public static int getTimeout() { private PrivateKey extractPrivateKeyWithBouncyCastle( String privateKeyFile, String privateKeyFilePwd) throws IOException, PKCSException, OperatorCreationException { + Path privKeyPath = Paths.get(privateKeyFile); + FileUtil.logFileUsage( + privKeyPath, "Extract private key from file using Bouncy Castle provider", true); PrivateKeyInfo privateKeyInfo = null; - PEMParser pemParser = new PEMParser(new FileReader(Paths.get(privateKeyFile).toFile())); + PEMParser pemParser = new PEMParser(new FileReader(privKeyPath.toFile())); Object pemObject = pemParser.readObject(); if (pemObject instanceof PKCS8EncryptedPrivateKeyInfo) { // Handle the case where the private key is encrypted. 
@@ -263,7 +266,9 @@ private PrivateKey extractPrivateKeyWithBouncyCastle( private PrivateKey extractPrivateKeyWithJdk(String privateKeyFile, String privateKeyFilePwd) throws IOException, NoSuchAlgorithmException, InvalidKeySpecException, InvalidKeyException { - String privateKeyContent = new String(Files.readAllBytes(Paths.get(privateKeyFile))); + Path privKeyPath = Paths.get(privateKeyFile); + FileUtil.logFileUsage(privKeyPath, "Extract private key from file using Jdk", true); + String privateKeyContent = new String(Files.readAllBytes(privKeyPath)); if (Strings.isNullOrEmpty(privateKeyFilePwd)) { // unencrypted private key file return generatePrivateKey(false, privateKeyContent, privateKeyFilePwd); diff --git a/src/main/java/net/snowflake/client/core/SfSqlArray.java b/src/main/java/net/snowflake/client/core/SfSqlArray.java index 83270796a..70682b4f4 100644 --- a/src/main/java/net/snowflake/client/core/SfSqlArray.java +++ b/src/main/java/net/snowflake/client/core/SfSqlArray.java @@ -1,11 +1,17 @@ package net.snowflake.client.core; +import static net.snowflake.client.core.FieldSchemaCreator.buildBindingSchemaForType; + +import com.fasterxml.jackson.core.JsonProcessingException; import java.sql.Array; import java.sql.JDBCType; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.util.Arrays; import java.util.Map; +import net.snowflake.client.jdbc.BindingParameterMetadata; +import net.snowflake.client.jdbc.SnowflakeUtil; @SnowflakeJdbcInternalApi public class SfSqlArray implements Array { @@ -74,4 +80,19 @@ public ResultSet getResultSet(long index, int count, Map> map) @Override public void free() throws SQLException {} + + public String getJsonString() throws SQLException { + try { + return SnowflakeUtil.mapJson(elements); + } catch (JsonProcessingException e) { + throw new SQLException("There is exception during array to json string.", e); + } + } + + public BindingParameterMetadata getSchema() throws 
SQLException { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType("array") + .withFields(Arrays.asList(buildBindingSchemaForType(getBaseType(), false))) + .build(); + } } diff --git a/src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java b/src/main/java/net/snowflake/client/core/SfTimestampUtil.java similarity index 76% rename from src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java rename to src/main/java/net/snowflake/client/core/SfTimestampUtil.java index b95c518c6..ed58f4481 100644 --- a/src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java +++ b/src/main/java/net/snowflake/client/core/SfTimestampUtil.java @@ -4,6 +4,7 @@ package net.snowflake.client.core; +import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.TimeZone; @@ -11,7 +12,9 @@ import net.snowflake.common.core.SnowflakeDateTimeFormat; @SnowflakeJdbcInternalApi -public class SqlInputTimestampUtil { +public class SfTimestampUtil { + + static final long MS_IN_DAY = 86400 * 1000; public static Timestamp getTimestampFromType( int columnSubType, @@ -25,7 +28,7 @@ public static Timestamp getTimestampFromType( } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ || columnSubType == Types.TIMESTAMP) { return getTimestampFromFormat( - "TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session, sessionTimeZone, tz); + "TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session, sessionTimeZone, TimeZone.getDefault()); } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) { return getTimestampFromFormat( "TIMESTAMP_TZ_OUTPUT_FORMAT", value, session, sessionTimeZone, tz); @@ -46,4 +49,13 @@ private static Timestamp getTimestampFromFormat( SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat); return formatter.parse(value, tz, 0, false).getTimestamp(); } + + public static long getTimeInNanoseconds(Time x) { + long msSinceEpoch = x.getTime(); + // Use % + 
% instead of just % to get the nonnegative remainder. + // TODO(mkember): Change to use Math.floorMod when Client is on Java 8. + long msSinceMidnight = (msSinceEpoch % MS_IN_DAY + MS_IN_DAY) % MS_IN_DAY; + long nanosSinceMidnight = msSinceMidnight * 1000 * 1000; + return nanosSinceMidnight; + } } diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java index a02fb4d7b..96fefe5dc 100644 --- a/src/main/java/net/snowflake/client/core/StmtUtil.java +++ b/src/main/java/net/snowflake/client/core/StmtUtil.java @@ -23,7 +23,6 @@ import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; import net.snowflake.common.api.QueryInProgressResponse; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; @@ -43,25 +42,15 @@ public class StmtUtil { private static final String SF_PATH_QUERY_RESULT = "/queries/%s/result"; - static final String SF_QUERY_REQUEST_ID = "requestId"; - private static final String SF_QUERY_COMBINE_DESCRIBE_EXECUTE = "combinedDescribe"; - private static final String SF_QUERY_CONTEXT = "queryContext"; - - private static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION; - - private static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake"; - - private static final String SF_HEADER_TOKEN_TAG = "Token"; - static final String SF_MEDIA_TYPE = "application/snowflake"; // we don't want to retry canceling forever so put a limit which is // twice as much as our default socket timeout static final int SF_CANCELING_RETRY_TIMEOUT_IN_MILLIS = 600000; // 10 min - static final SFLogger logger = SFLoggerFactory.getLogger(StmtUtil.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(StmtUtil.class); /** Input for executing a statement on server */ static class StmtInput { @@ -310,12 +299,12 @@ public 
static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec // don't need to execute the query again if (stmtInput.retry && stmtInput.prevGetResultURL != null) { logger.debug( - "retrying statement execution with get result URL: {}", stmtInput.prevGetResultURL); + "Retrying statement execution with get result URL: {}", stmtInput.prevGetResultURL); } else { URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl); uriBuilder.setPath(SF_PATH_QUERY_V1); - uriBuilder.addParameter(SF_QUERY_REQUEST_ID, stmtInput.requestId); + uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, stmtInput.requestId); if (stmtInput.combineDescribe) { uriBuilder.addParameter(SF_QUERY_COMBINE_DESCRIBE_EXECUTE, Boolean.TRUE.toString()); @@ -376,10 +365,10 @@ public static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec httpRequest.addHeader("accept", stmtInput.mediaType); httpRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\""); @@ -522,11 +511,11 @@ else if (stmtInput.asyncExec // simulate client pause before trying to fetch result so that // we can test query behavior related to disconnected client if (stmtInput.injectClientPause != 0) { - logger.debug("inject client pause for {} seconds", stmtInput.injectClientPause); + logger.debug("Inject client pause for {} seconds", stmtInput.injectClientPause); try { Thread.sleep(stmtInput.injectClientPause * 1000); } catch (InterruptedException ex) { - logger.debug("exception encountered while injecting pause", false); + logger.debug("Exception encountered while injecting pause", false); } } } @@ -606,14 +595,14 @@ protected static String getQueryResult( protected static String getQueryResult(String getResultPath, StmtInput stmtInput) throws SFException, SnowflakeSQLException { HttpGet httpRequest = null; - 
logger.debug("get query result: {}", getResultPath); + logger.debug("Get query result: {}", getResultPath); try { URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl); uriBuilder.setPath(getResultPath); - uriBuilder.addParameter(SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); + uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); httpRequest = new HttpGet(uriBuilder.build()); // Add custom headers before adding common headers @@ -623,10 +612,10 @@ protected static String getQueryResult(String getResultPath, StmtInput stmtInput httpRequest.addHeader("accept", stmtInput.mediaType); httpRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\""); @@ -717,7 +706,7 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1); - uriBuilder.addParameter(SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); + uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); httpRequest = new HttpPost(uriBuilder.build()); // Add custom headers before adding common headers @@ -742,10 +731,10 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE httpRequest.addHeader("accept", stmtInput.mediaType); httpRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\""); @@ -798,7 +787,7 @@ public static SFStatementType checkStageManageCommand(String sql) { // skip commenting prefixed with // while (trimmedSql.startsWith("//")) { if (logger.isDebugEnabled()) { - logger.debug("skipping // comments in: \n{}", trimmedSql); + 
logger.debug("Skipping // comments in: \n{}", trimmedSql); } if (trimmedSql.indexOf('\n') > 0) { diff --git a/src/main/java/net/snowflake/client/core/URLUtil.java b/src/main/java/net/snowflake/client/core/URLUtil.java index cd4129e4c..56fa0f266 100644 --- a/src/main/java/net/snowflake/client/core/URLUtil.java +++ b/src/main/java/net/snowflake/client/core/URLUtil.java @@ -3,8 +3,11 @@ */ package net.snowflake.client.core; +import static net.snowflake.client.core.SFSession.SF_QUERY_REQUEST_ID; + import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; +import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; @@ -15,10 +18,12 @@ import javax.annotation.Nullable; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import org.apache.http.NameValuePair; +import org.apache.http.client.utils.URLEncodedUtils; public class URLUtil { - static final SFLogger logger = SFLoggerFactory.getLogger(URLUtil.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(URLUtil.class); static final String validURLPattern = "^http(s?)\\:\\/\\/[0-9a-zA-Z]([-.\\w]*[0-9a-zA-Z@:])*(:(0-9)*)*(\\/?)([a-zA-Z0-9\\-\\.\\?\\,\\&\\(\\)\\/\\\\\\+&%\\$#_=@]*)?$"; static final Pattern pattern = Pattern.compile(validURLPattern); @@ -53,4 +58,20 @@ public static String urlEncode(String target) throws UnsupportedEncodingExceptio } return encodedTarget; } + + @SnowflakeJdbcInternalApi + public static String getRequestId(URI uri) { + return URLEncodedUtils.parse(uri, StandardCharsets.UTF_8).stream() + .filter(p -> p.getName().equals(SF_QUERY_REQUEST_ID)) + .findFirst() + .map(NameValuePair::getValue) + .orElse(null); + } + + @SnowflakeJdbcInternalApi + public static String getRequestIdLogStr(URI uri) { + String requestId = getRequestId(uri); + + return requestId == null ? 
"" : "[requestId=" + requestId + "] "; + } } diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java index 8eaaadc94..2ad5c3ef2 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java @@ -11,6 +11,7 @@ import java.util.TimeZone; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeTimestampWithTimezone; import net.snowflake.client.log.ArgSupplier; @@ -151,6 +152,19 @@ public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone ne * @return */ public static Timestamp toJavaTimestamp(long epoch, int scale) { + return toJavaTimestamp(epoch, scale, TimeZone.getDefault(), false); + } + + /** + * generate Java Timestamp object + * + * @param epoch the value since epoch time + * @param scale the scale of the value + * @return + */ + @SnowflakeJdbcInternalApi + public static Timestamp toJavaTimestamp( + long epoch, int scale, TimeZone sessionTimezone, boolean useSessionTimezone) { long seconds = epoch / powerOfTen(scale); int fraction = (int) ((epoch % powerOfTen(scale)) * powerOfTen(9 - scale)); if (fraction < 0) { @@ -158,7 +172,7 @@ public static Timestamp toJavaTimestamp(long epoch, int scale) { seconds--; fraction += 1000000000; } - return createTimestamp(seconds, fraction, TimeZone.getDefault(), false); + return createTimestamp(seconds, fraction, sessionTimezone, useSessionTimezone); } /** diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java index 236abe553..e2bba45ab 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java 
+++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java @@ -11,6 +11,7 @@ import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeType; import net.snowflake.client.jdbc.SnowflakeUtil; @@ -65,7 +66,7 @@ public Timestamp toTimestamp(int index, TimeZone tz) throws SFException { private Timestamp getTimestamp(int index, TimeZone tz) throws SFException { long val = bigIntVector.getDataBuffer().getLong(index * BigIntVector.TYPE_WIDTH); int scale = context.getScale(columnIndex); - return getTimestamp(val, scale); + return getTimestamp(val, scale, sessionTimeZone, useSessionTimezone); } @Override @@ -90,8 +91,25 @@ public boolean toBoolean(int index) throws SFException { SnowflakeUtil.BOOLEAN_STR, val); } + /** + * Use {@link #getTimestamp(long, int, TimeZone, boolean)} + * + * @param val epoch + * @param scale scale + * @return Timestamp value without timezone take into account + * @throws SFException + */ + @Deprecated public static Timestamp getTimestamp(long val, int scale) throws SFException { Timestamp ts = ArrowResultUtil.toJavaTimestamp(val, scale); return ResultUtil.adjustTimestamp(ts); } + + @SnowflakeJdbcInternalApi + public static Timestamp getTimestamp( + long epoch, int scale, TimeZone sessionTimeZone, boolean useSessionTimezone) + throws SFException { + return ResultUtil.adjustTimestamp( + ArrowResultUtil.toJavaTimestamp(epoch, scale, sessionTimeZone, useSessionTimezone)); + } } diff --git a/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java index a07e583ea..14bea858a 100644 --- a/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java +++ 
b/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java @@ -92,7 +92,8 @@ private Timestamp convertTimestampLtz(Object obj, int scale) throws SFException false); } } else if (obj instanceof Long) { - return BigIntToTimestampLTZConverter.getTimestamp((long) obj, scale); + return BigIntToTimestampLTZConverter.getTimestamp( + (long) obj, scale, sessionTimeZone, useSessionTimezone); } throw new SFException( ErrorCode.INVALID_VALUE_CONVERT, diff --git a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java index 2e9dbd82d..ae7a492a0 100644 --- a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java @@ -1,5 +1,6 @@ package net.snowflake.client.core.arrow; +import java.util.List; import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeType; @@ -22,6 +23,10 @@ public Object toObject(int index) throws SFException { @Override public String toString(int index) throws SFException { - return vector.getObject(index).toString(); + List object = vector.getObject(index); + if (object == null) { + return null; + } + return object.toString(); } } diff --git a/src/main/java/net/snowflake/client/core/bind/BindUploader.java b/src/main/java/net/snowflake/client/core/bind/BindUploader.java index 2332f0150..6b901da44 100644 --- a/src/main/java/net/snowflake/client/core/bind/BindUploader.java +++ b/src/main/java/net/snowflake/client/core/bind/BindUploader.java @@ -159,6 +159,7 @@ private synchronized String synchronizedTimestampFormat(String o, String type) { int nano = times.right; Timestamp v1 = new Timestamp(sec * 1000); + ZoneOffset offsetId; // For timestamp_ntz, use UTC timezone. For timestamp_ltz, use the local timezone to minimise // the gap. 
if ("TIMESTAMP_LTZ".equals(type)) { @@ -166,10 +167,11 @@ private synchronized String synchronizedTimestampFormat(String o, String type) { cal.setTimeZone(tz); cal.clear(); timestampFormat.setCalendar(cal); + offsetId = ZoneId.systemDefault().getRules().getOffset(Instant.ofEpochMilli(v1.getTime())); + } else { + offsetId = ZoneOffset.UTC; } - ZoneOffset offsetId = ZoneId.systemDefault().getRules().getOffset(Instant.now()); - return timestampFormat.format(v1) + String.format("%09d", nano) + " " + offsetId; } diff --git a/src/main/java/net/snowflake/client/core/json/Converters.java b/src/main/java/net/snowflake/client/core/json/Converters.java index 584e0d12e..afe663f90 100644 --- a/src/main/java/net/snowflake/client/core/json/Converters.java +++ b/src/main/java/net/snowflake/client/core/json/Converters.java @@ -13,8 +13,8 @@ import java.util.TimeZone; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SfTimestampUtil; import net.snowflake.client.core.SnowflakeJdbcInternalApi; -import net.snowflake.client.core.SqlInputTimestampUtil; import net.snowflake.client.core.arrow.StructuredTypeDateTimeConverter; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; @@ -224,7 +224,7 @@ public Converter timestampFromStringConverter( TimeZone sessionTimezone) { return value -> { Timestamp result = - SqlInputTimestampUtil.getTimestampFromType( + SfTimestampUtil.getTimestampFromType( columnSubType, (String) value, session, sessionTimezone, tz); if (result != null) { return result; diff --git a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java index 103f90555..3516966e6 100644 --- a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java +++ b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java @@ -212,7 +212,11 @@ private static List initConverters( break; 
case MAP: - converters.add(new MapConverter((MapVector) vector, i, context)); + if (vector instanceof MapVector) { + converters.add(new MapConverter((MapVector) vector, i, context)); + } else { + converters.add(new VarCharConverter(vector, i, context)); + } break; case VECTOR: diff --git a/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java b/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java new file mode 100644 index 000000000..db1c85e7b --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.jdbc; + +import com.fasterxml.jackson.annotation.JsonInclude; +import java.util.List; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public class BindingParameterMetadata { + private String type; + private String name; + private Integer length; + private Integer byteLength; + private Integer precision; + private Integer scale; + + private boolean nullable = true; + private List fields; + + public BindingParameterMetadata(String type) { + this.type = type; + } + + public BindingParameterMetadata(String type, String name) { + this.type = type; + this.name = name; + } + + public BindingParameterMetadata( + String type, + String name, + Integer length, + Integer byteLength, + Integer precision, + Integer scale, + Boolean nullable) { + this.type = type; + this.name = name; + this.length = length; + this.byteLength = byteLength; + this.precision = precision; + this.scale = scale; + this.nullable = nullable; + } + + public BindingParameterMetadata() {} + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getLength() { + return length; + } + + public void setLength(Integer length) { + this.length = 
length; + } + + public Integer getByteLength() { + return byteLength; + } + + public void setByteLength(Integer byteLength) { + this.byteLength = byteLength; + } + + public Integer getPrecision() { + return precision; + } + + public void setPrecision(Integer precision) { + this.precision = precision; + } + + public Integer getScale() { + return scale; + } + + public void setScale(Integer scale) { + this.scale = scale; + } + + public Boolean isNullable() { + return nullable; + } + + public void setNullable(Boolean nullable) { + this.nullable = nullable; + } + + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public static class BindingParameterMetadataBuilder { + private BindingParameterMetadata bindingParameterMetadata; + + private BindingParameterMetadataBuilder() { + bindingParameterMetadata = new BindingParameterMetadata(); + } + + public BindingParameterMetadataBuilder withType(String type) { + bindingParameterMetadata.type = type; + return this; + } + + public BindingParameterMetadataBuilder withName(String name) { + bindingParameterMetadata.name = name; + return this; + } + + public BindingParameterMetadataBuilder withLength(Integer length) { + bindingParameterMetadata.length = length; + return this; + } + + public BindingParameterMetadataBuilder withByteLength(Integer byteLength) { + bindingParameterMetadata.byteLength = byteLength; + return this; + } + + public BindingParameterMetadataBuilder withPrecision(Integer precision) { + bindingParameterMetadata.precision = precision; + return this; + } + + public BindingParameterMetadataBuilder withScale(Integer scale) { + bindingParameterMetadata.scale = scale; + return this; + } + + public BindingParameterMetadataBuilder withNullable(Boolean nullable) { + bindingParameterMetadata.nullable = nullable; + return this; + } + + public BindingParameterMetadataBuilder withFields(List fields) { + bindingParameterMetadata.fields = fields; + return 
this; + } + + public static BindingParameterMetadataBuilder bindingParameterMetadata() { + return new BindingParameterMetadataBuilder(); + } + + public BindingParameterMetadata build() { + return bindingParameterMetadata; + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java index 39f94235b..3ee556bb4 100644 --- a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java +++ b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java @@ -11,6 +11,8 @@ import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.log.ArgSupplier; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; import net.snowflake.common.core.SqlState; import org.apache.http.Header; @@ -21,6 +23,8 @@ import org.apache.http.impl.client.CloseableHttpClient; public class DefaultResultStreamProvider implements ResultStreamProvider { + private static final SFLogger logger = + SFLoggerFactory.getLogger(DefaultResultStreamProvider.class); // SSE-C algorithm header private static final String SSE_C_ALGORITHM = "x-amz-server-side-encryption-customer-algorithm"; @@ -53,16 +57,15 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception * means failure. 
*/ if (response == null || response.getStatusLine().getStatusCode() != 200) { - SnowflakeResultSetSerializableV1.logger.error( - "Error fetching chunk from: {}", context.getResultChunk().getScrubbedUrl()); + logger.error("Error fetching chunk from: {}", context.getResultChunk().getScrubbedUrl()); - SnowflakeUtil.logResponseDetails(response, SnowflakeResultSetSerializableV1.logger); + SnowflakeUtil.logResponseDetails(response, logger); throw new SnowflakeSQLException( SqlState.IO_ERROR, ErrorCode.NETWORK_ERROR.getMessageCode(), "Error encountered when downloading a result chunk: HTTP " - + "status=" + + "status: " + ((response != null) ? response.getStatusLine().getStatusCode() : "null response")); } @@ -72,7 +75,7 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception // read the chunk data inputStream = detectContentEncodingAndGetInputStream(response, entity.getContent()); } catch (Exception ex) { - SnowflakeResultSetSerializableV1.logger.error("Failed to decompress data: {}", response); + logger.error("Failed to decompress data: {}", response); throw new SnowflakeSQLLoggedException( context.getSession(), @@ -82,7 +85,7 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception } // trace the response if requested - SnowflakeResultSetSerializableV1.logger.debug("Json response: {}", response); + logger.debug("Json response: {}", response); return inputStream; } @@ -94,8 +97,7 @@ private HttpResponse getResultChunk(ChunkDownloadContext context) throws Excepti if (context.getChunkHeadersMap() != null && context.getChunkHeadersMap().size() != 0) { for (Map.Entry entry : context.getChunkHeadersMap().entrySet()) { - SnowflakeResultSetSerializableV1.logger.debug( - "Adding header key={}, value={}", entry.getKey(), entry.getValue()); + logger.debug("Adding header key: {}", entry.getKey()); httpRequest.addHeader(entry.getKey(), entry.getValue()); } } @@ -103,11 +105,11 @@ private HttpResponse 
getResultChunk(ChunkDownloadContext context) throws Excepti else if (context.getQrmk() != null) { httpRequest.addHeader(SSE_C_ALGORITHM, SSE_C_AES); httpRequest.addHeader(SSE_C_KEY, context.getQrmk()); - SnowflakeResultSetSerializableV1.logger.debug("Adding SSE-C headers", false); + logger.debug("Adding SSE-C headers", false); } - SnowflakeResultSetSerializableV1.logger.debug( - "Thread {} Fetching result #chunk{}: {}", + logger.debug( + "Thread {} Fetching result chunk#{}: {}", Thread.currentThread().getId(), context.getChunkIndex(), context.getResultChunk().getScrubbedUrl()); @@ -133,8 +135,8 @@ else if (context.getQrmk() != null) { true, // no retry on http request new ExecTimeTelemetryData()); - SnowflakeResultSetSerializableV1.logger.debug( - "Thread {} Call #chunk{} returned for URL: {}, response={}", + logger.debug( + "Thread {} Call chunk#{} returned for URL: {}, response: {}", Thread.currentThread().getId(), context.getChunkIndex(), (ArgSupplier) () -> SecretDetector.maskSASToken(context.getResultChunk().getUrl()), diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java index 7ada3a803..6bb62c82f 100644 --- a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java +++ b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java @@ -8,15 +8,19 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLNonTransientConnectionException; import java.sql.Statement; import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.logging.Level; import net.snowflake.client.config.SFClientConfig; import net.snowflake.client.config.SFClientConfigParser; +import net.snowflake.client.core.Constants; import 
net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFBaseStatement; @@ -136,13 +140,16 @@ private void setClientConfig() throws SnowflakeSQLLoggedException { String clientConfigFilePath = (String) connectionPropertiesMap.getOrDefault(SFSessionProperty.CLIENT_CONFIG_FILE, null); - SFClientConfig sfClientConfig; - try { - sfClientConfig = SFClientConfigParser.loadSFClientConfig(clientConfigFilePath); - } catch (IOException e) { - throw new SnowflakeSQLLoggedException(sfSession, ErrorCode.INTERNAL_ERROR, e.getMessage()); + SFClientConfig sfClientConfig = sfSession.getSfClientConfig(); + if (sfClientConfig == null) { + try { + sfClientConfig = SFClientConfigParser.loadSFClientConfig(clientConfigFilePath); + } catch (IOException e) { + throw new SnowflakeSQLLoggedException( + sfSession, ErrorCode.INTERNAL_ERROR, e.getMessage(), e.getCause()); + } + sfSession.setSfClientConfig(sfClientConfig); } - sfSession.setSfClientConfig(sfClientConfig); } /** @@ -181,6 +188,7 @@ && systemGetProperty("java.util.logging.config.file") == null) { if (logLevel != null && logPattern != null) { try { + logger.info("Setting logger with log level {} and log pattern {}", logLevel, logPattern); JDK14Logger.instantiateLogger(logLevel, logPattern); } catch (IOException ex) { throw new SnowflakeSQLLoggedException( @@ -188,13 +196,10 @@ && systemGetProperty("java.util.logging.config.file") == null) { } if (sfClientConfig != null) { logger.debug( - String.format( - "SF Client config found at location: %s.", sfClientConfig.getConfigFilePath())); + "SF Client config found at location: {}.", sfClientConfig.getConfigFilePath()); } logger.debug( - String.format( - "Instantiating JDK14Logger with level: %s , output path: %s", - logLevel, logPattern)); + "Instantiating JDK14Logger with level: {}, output path: {}", logLevel, logPattern); } } } @@ -206,25 +211,98 @@ private String constructLogPattern(String logPathFromConfig) throws 
SnowflakeSQL String logPattern = "%t/snowflake_jdbc%u.log"; // java.tmpdir + Path logPath; if (logPathFromConfig != null && !logPathFromConfig.isEmpty()) { - Path path = Paths.get(logPathFromConfig, "jdbc"); - if (!Files.exists(path)) { + // Get log path from configuration + logPath = Paths.get(logPathFromConfig); + if (!Files.exists(logPath)) { try { - Files.createDirectories(path); + Files.createDirectories(logPath); } catch (IOException ex) { throw new SnowflakeSQLLoggedException( sfSession, ErrorCode.INTERNAL_ERROR, String.format( - "Un-able to create log path mentioned in configfile %s ,%s", + "Unable to create log path mentioned in configfile %s ,%s", logPathFromConfig, ex.getMessage())); } } - logPattern = Paths.get(path.toString(), "snowflake_jdbc%u.log").toString(); + } else { + // Get log path from home directory + String homePath = systemGetProperty("user.home"); + if (homePath == null || homePath.isEmpty()) { + throw new SnowflakeSQLLoggedException( + sfSession, + ErrorCode.INTERNAL_ERROR, + String.format( + "Log path not set in configfile %s and home directory not set.", + logPathFromConfig)); + } + logPath = Paths.get(homePath); } + + Path path = createLogPathSubDirectory(logPath); + + logPattern = Paths.get(path.toString(), "snowflake_jdbc%u.log").toString(); return logPattern; } + private Path createLogPathSubDirectory(Path logPath) throws SnowflakeSQLLoggedException { + Path path = Paths.get(logPath.toString(), "jdbc"); + if (!Files.exists(path)) { + createLogFolder(path); + } else { + checkLogFolderPermissions(path); + } + return path; + } + + private void createLogFolder(Path path) throws SnowflakeSQLLoggedException { + try { + if (Constants.getOS() == Constants.OS.WINDOWS) { + Files.createDirectories(path); + } else { + Files.createDirectories( + path, + PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwx------"))); + } + } catch (IOException ex) { + throw new SnowflakeSQLLoggedException( + sfSession, + 
ErrorCode.INTERNAL_ERROR, + String.format( + "Unable to create jdbc subfolder in configfile %s ,%s", + path.toString(), ex.getMessage(), ex.getCause())); + } + } + + private void checkLogFolderPermissions(Path path) throws SnowflakeSQLLoggedException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + try { + Set folderPermissions = Files.getPosixFilePermissions(path); + if (folderPermissions.contains(PosixFilePermission.GROUP_WRITE) + || folderPermissions.contains(PosixFilePermission.GROUP_READ) + || folderPermissions.contains(PosixFilePermission.GROUP_EXECUTE) + || folderPermissions.contains(PosixFilePermission.OTHERS_WRITE) + || folderPermissions.contains(PosixFilePermission.OTHERS_READ) + || folderPermissions.contains(PosixFilePermission.OTHERS_EXECUTE)) { + logger.warn( + "Access permission for the logs directory '{}' is currently {} and is potentially " + + "accessible to users other than the owner of the logs directory.", + path.toString(), + folderPermissions.toString()); + } + } catch (IOException ex) { + throw new SnowflakeSQLLoggedException( + sfSession, + ErrorCode.INTERNAL_ERROR, + String.format( + "Unable to get permissions of log directory %s ,%s", + path.toString(), ex.getMessage(), ex.getCause())); + } + } + } + private void initSessionProperties(SnowflakeConnectString conStr, String appID, String appVersion) throws SFException { Map properties = mergeProperties(conStr); diff --git a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java index cf019d62e..d38011c0e 100644 --- a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java @@ -3,7 +3,9 @@ */ package net.snowflake.client.jdbc; +import java.util.ArrayList; import java.util.List; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; public class FieldMetadata { @@ -43,6 +45,11 @@ public FieldMetadata( this.fields = fields; } + @SnowflakeJdbcInternalApi 
+ public FieldMetadata() { + this.fields = new ArrayList<>(); + } + public String getName() { return name; } diff --git a/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java b/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java index 2930188eb..14fb7dbdc 100644 --- a/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java +++ b/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import net.snowflake.client.core.FileUtil; /** * An {@link OutputStream} that starts buffering to a byte array, but switches to file buffering @@ -129,6 +130,7 @@ public ByteSource asByteSource() { private synchronized InputStream openInputStream() throws IOException { if (file != null) { + FileUtil.logFileUsage(file, "Data buffering stream", false); return new FileInputStream(file); } else { return new ByteArrayInputStream(memory.getBuffer(), 0, memory.getCount()); diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java index fa7826664..5be46c5de 100644 --- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java +++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java @@ -6,6 +6,7 @@ import java.io.PrintWriter; import java.io.StringWriter; +import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLKeyException; @@ -17,6 +18,7 @@ import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFOCSPException; import net.snowflake.client.core.SessionUtil; +import net.snowflake.client.core.URLUtil; import net.snowflake.client.core.UUIDUtils; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.ArgSupplier; @@ -24,6 +26,7 @@ import net.snowflake.client.log.SFLoggerFactory; import 
net.snowflake.client.util.DecorrelatedJitterBackoff; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.SqlState; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpRequestBase; @@ -123,9 +126,32 @@ public static CloseableHttpResponse execute( boolean noRetry, ExecTimeTelemetryData execTimeData) throws SnowflakeSQLException { - CloseableHttpResponse response = null; + Stopwatch stopwatch = null; + + if (logger.isDebugEnabled()) { + stopwatch = new Stopwatch(); + stopwatch.start(); + } String requestInfoScrubbed = SecretDetector.maskSASToken(httpRequest.toString()); + String requestIdStr = URLUtil.getRequestIdLogStr(httpRequest.getURI()); + logger.debug( + "{}Executing rest request: {}, retry timeout: {}, socket timeout: {}, max retries: {}," + + " inject socket timeout: {}, canceling: {}, without cookies: {}, include retry parameters: {}," + + " include request guid: {}, retry http 403: {}, no retry: {}", + requestIdStr, + requestInfoScrubbed, + retryTimeout, + socketTimeout, + maxRetries, + injectSocketTimeout, + canceling, + withoutCookies, + includeRetryParameters, + includeRequestGuid, + retryHTTP403, + noRetry); + CloseableHttpResponse response = null; // time the client started attempting to submit request final long startTime = System.currentTimeMillis(); @@ -140,6 +166,10 @@ public static CloseableHttpResponse execute( // Used to indicate that this is a login/auth request and will be using the new retry strategy. boolean isLoginRequest = SessionUtil.isNewRetryStrategyRequest(httpRequest); + if (isLoginRequest) { + logger.debug("{}Request is a login/auth request. Using new retry strategy", requestIdStr); + } + // total elapsed time due to transient issues. 
long elapsedMilliForTransientIssues = 0; @@ -168,9 +198,14 @@ public static CloseableHttpResponse execute( // try request till we get a good response or retry timeout while (true) { - logger.debug("Retry count: {}", retryCount); - logger.debug("Attempting request: {}", requestInfoScrubbed); - + logger.debug( + "{}Retry count: {}, max retries: {}, retry timeout: {} s, backoff: {} ms. Attempting request: {}", + requestIdStr, + retryCount, + maxRetries, + retryTimeout, + backoffInMilli, + requestInfoScrubbed); try { // update start time startTimePerRequest = System.currentTimeMillis(); @@ -184,7 +219,8 @@ public static CloseableHttpResponse execute( if (injectSocketTimeout != 0 && retryCount == 0) { // test code path logger.debug( - "Injecting socket timeout by setting " + "socket timeout to {} millisecond ", + "{}Injecting socket timeout by setting socket timeout to {} ms", + requestIdStr, injectSocketTimeout); httpRequest.setConfig( HttpUtil.getDefaultRequestConfigWithSocketTimeout( @@ -203,6 +239,7 @@ public static CloseableHttpResponse execute( // If HTAP if ("true".equalsIgnoreCase(System.getenv("HTAP_SIMULATION")) && builder.getPathSegments().contains("query-request")) { + logger.debug("{}Setting htap simulation", requestIdStr); builder.setParameter("target", "htap_simulation"); } if (includeRetryParameters && retryCount > 0) { @@ -215,14 +252,18 @@ public static CloseableHttpResponse execute( // so that it can be renewed in time and pass it to the http request configuration. 
if (authTimeout > 0) { int requestSocketAndConnectTimeout = (int) authTimeout * 1000; + logger.debug( + "{}Setting auth timeout as the socket timeout: {} s", requestIdStr, authTimeout); httpRequest.setConfig( HttpUtil.getDefaultRequestConfigWithSocketAndConnectTimeout( requestSocketAndConnectTimeout, withoutCookies)); } if (includeRequestGuid) { + UUID guid = UUIDUtils.getUUID(); + logger.debug("{}Request {} guid: {}", requestIdStr, requestInfoScrubbed, guid.toString()); // Add request_guid for better tracing - builder.setParameter(SF_REQUEST_GUID, UUIDUtils.getUUID().toString()); + builder.setParameter(SF_REQUEST_GUID, guid.toString()); } httpRequest.setURI(builder.build()); @@ -233,7 +274,7 @@ public static CloseableHttpResponse execute( // if exception is caused by illegal state, e.g shutdown of http client // because of closing of connection, then fail immediately and stop retrying. throw new SnowflakeSQLLoggedException( - null, ErrorCode.INVALID_STATE, ex, /* session = */ ex.getMessage()); + null, ErrorCode.INVALID_STATE, ex, /* session= */ ex.getMessage()); } catch (SSLHandshakeException | SSLKeyException @@ -247,17 +288,20 @@ public static CloseableHttpResponse execute( } catch (Exception ex) { savedEx = ex; - // if the request took more than 5 min (socket timeout) log an error - if ((System.currentTimeMillis() - startTimePerRequest) - > HttpUtil.getSocketTimeout().toMillis()) { + // if the request took more than socket timeout log an error + long currentMillis = System.currentTimeMillis(); + if ((currentMillis - startTimePerRequest) > HttpUtil.getSocketTimeout().toMillis()) { logger.warn( - "HTTP request took longer than 5 min: {} sec", - (System.currentTimeMillis() - startTimePerRequest) / 1000); + "{}HTTP request took longer than socket timeout {} ms: {} ms", + requestIdStr, + HttpUtil.getSocketTimeout().toMillis(), + (currentMillis - startTimePerRequest)); } StringWriter sw = new StringWriter(); savedEx.printStackTrace(new PrintWriter(sw)); logger.debug( 
- "Exception encountered for: {}, {}, {}", + "{}Exception encountered for: {}, {}, {}", + requestIdStr, requestInfoScrubbed, ex.getLocalizedMessage(), (ArgSupplier) sw::toString); @@ -281,7 +325,11 @@ public static CloseableHttpResponse execute( || isNonRetryableHTTPCode(response, retryHTTP403)) { String msg = "Unknown cause"; if (response != null) { - logger.debug("HTTP response code: {}", response.getStatusLine().getStatusCode()); + logger.debug( + "{}HTTP response code for request {}: {}", + requestIdStr, + requestInfoScrubbed, + response.getStatusLine().getStatusCode()); msg = "StatusCode: " + response.getStatusLine().getStatusCode() @@ -295,13 +343,16 @@ public static CloseableHttpResponse execute( if (response == null || response.getStatusLine().getStatusCode() != 200) { logger.debug( - "Error response not retryable, " + msg + ", request: {}", requestInfoScrubbed); + "{}Error response not retryable, " + msg + ", request: {}", + requestIdStr, + requestInfoScrubbed); EventUtil.triggerBasicEvent( - Event.EventType.NETWORK_ERROR, msg + ", Request: " + httpRequest.toString(), false); + Event.EventType.NETWORK_ERROR, msg + ", Request: " + httpRequest, false); } breakRetryReason = "status code does not need retry"; if (noRetry) { - logger.debug("HTTP retry disabled for this request. noRetry: {}", noRetry); + logger.debug( + "{}HTTP retry disabled for this request. 
noRetry: {}", requestIdStr, noRetry); breakRetryReason = "retry is disabled"; } @@ -311,16 +362,18 @@ public static CloseableHttpResponse execute( } else { if (response != null) { logger.debug( - "HTTP response not ok: status code: {}, request: {}", + "{}HTTP response not ok: status code: {}, request: {}", + requestIdStr, response.getStatusLine().getStatusCode(), requestInfoScrubbed); } else if (savedEx != null) { logger.debug( - "Null response for cause: {}, request: {}", + "{}Null response for cause: {}, request: {}", + requestIdStr, getRootCause(savedEx).getMessage(), requestInfoScrubbed); } else { - logger.debug("Null response for request: {}", requestInfoScrubbed); + logger.debug("{}Null response for request: {}", requestIdStr, requestInfoScrubbed); } // get the elapsed time for the last request @@ -331,7 +384,7 @@ public static CloseableHttpResponse execute( // check canceling flag if (canceling != null && canceling.get()) { - logger.debug("Stop retrying since canceling is requested", false); + logger.debug("{}Stop retrying since canceling is requested", requestIdStr); breakRetryReason = "canceling is requested"; break; } @@ -349,9 +402,10 @@ public static CloseableHttpResponse execute( if (elapsedMilliForTransientIssues > retryTimeoutInMilliseconds && retryCount >= MIN_RETRY_COUNT) { logger.error( - "Stop retrying since elapsed time due to network " + "{}Stop retrying since elapsed time due to network " + "issues has reached timeout. " - + "Elapsed: {}(ms), timeout: {}(ms)", + + "Elapsed: {} ms, timeout: {} ms", + requestIdStr, elapsedMilliForTransientIssues, retryTimeoutInMilliseconds); @@ -362,7 +416,10 @@ public static CloseableHttpResponse execute( if (maxRetries > 0 && retryCount > maxRetries) { // check for max retries. logger.error( - "Stop retrying as max retries have been reached! max retry count: {}", maxRetries); + "{}Stop retrying as max retries have been reached for request: {}! 
Max retry count: {}", + requestIdStr, + requestInfoScrubbed, + maxRetries); breakRetryReason = "max retries reached"; breakRetryEventName = "HttpRequestRetryLimitExceeded"; } @@ -433,30 +490,24 @@ public static CloseableHttpResponse execute( // sleep for backoff - elapsed amount of time if (backoffInMilli > elapsedMilliForLastCall) { try { - logger.debug("sleeping in {}(ms)", backoffInMilli); + logger.debug( + "{}Retry request {}: sleeping for {} ms", + requestIdStr, + requestInfoScrubbed, + backoffInMilli); Thread.sleep(backoffInMilli); - elapsedMilliForTransientIssues += backoffInMilli; - if (isLoginRequest) { - long jitteredBackoffInMilli = backoff.getJitterForLogin(backoffInMilli); - backoffInMilli = - (long) - backoff.chooseRandom( - jitteredBackoffInMilli + backoffInMilli, - Math.pow(2, retryCount) + jitteredBackoffInMilli); - } else { - backoffInMilli = backoff.nextSleepTime(backoffInMilli); - } - if (retryTimeoutInMilliseconds > 0 - && (elapsedMilliForTransientIssues + backoffInMilli) > retryTimeoutInMilliseconds) { - // If the timeout will be reached before the next backoff, just use the remaining - // time. - backoffInMilli = - Math.min( - backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues); - } } catch (InterruptedException ex1) { - logger.debug("Backoff sleep before retrying login got interrupted", false); + logger.debug("{}Backoff sleep before retrying login got interrupted", requestIdStr); } + elapsedMilliForTransientIssues += backoffInMilli; + backoffInMilli = + getNewBackoffInMilli( + backoffInMilli, + isLoginRequest, + backoff, + retryCount, + retryTimeoutInMilliseconds, + elapsedMilliForTransientIssues); } retryCount++; @@ -504,15 +555,18 @@ public static CloseableHttpResponse execute( if (response == null) { if (savedEx != null) { logger.error( - "Returning null response: cause: {}, request: {}", + "{}Returning null response. 
Cause: {}, request: {}", + requestIdStr, getRootCause(savedEx), requestInfoScrubbed); } else { - logger.error("Returning null response for request: {}", requestInfoScrubbed); + logger.error( + "{}Returning null response for request: {}", requestIdStr, requestInfoScrubbed); } } else if (response.getStatusLine().getStatusCode() != 200) { logger.error( - "Error response: HTTP Response code: {}, request: {}", + "{}Error response: HTTP Response code: {}, request: {}", + requestIdStr, response.getStatusLine().getStatusCode(), requestInfoScrubbed); } @@ -554,9 +608,58 @@ public static CloseableHttpResponse execute( } } + if (logger.isDebugEnabled() && stopwatch != null) { + stopwatch.stop(); + } + logger.debug( + "{}Execution of request {} took {} ms with total of {} retries", + requestIdStr, + requestInfoScrubbed, + stopwatch == null ? "n/a" : stopwatch.elapsedMillis(), + retryCount); return response; } + static long getNewBackoffInMilli( + long previousBackoffInMilli, + boolean isLoginRequest, + DecorrelatedJitterBackoff decorrelatedJitterBackoff, + int retryCount, + long retryTimeoutInMilliseconds, + long elapsedMilliForTransientIssues) { + long backoffInMilli; + if (isLoginRequest) { + long jitteredBackoffInMilli = + decorrelatedJitterBackoff.getJitterForLogin(previousBackoffInMilli); + backoffInMilli = + (long) + decorrelatedJitterBackoff.chooseRandom( + jitteredBackoffInMilli + previousBackoffInMilli, + Math.pow(2, retryCount) + jitteredBackoffInMilli); + } else { + backoffInMilli = decorrelatedJitterBackoff.nextSleepTime(previousBackoffInMilli); + } + + backoffInMilli = Math.min(maxBackoffInMilli, Math.max(previousBackoffInMilli, backoffInMilli)); + + if (retryTimeoutInMilliseconds > 0 + && (elapsedMilliForTransientIssues + backoffInMilli) > retryTimeoutInMilliseconds) { + // If the timeout will be reached before the next backoff, just use the remaining + // time (but cannot be negative) - this is the only place when backoff is not in range + // min-max. 
+ backoffInMilli = + Math.max( + 0, + Math.min( + backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues)); + logger.debug( + "We are approaching retry timeout {}ms, setting backoff to {}ms", + retryTimeoutInMilliseconds, + backoffInMilli); + } + return backoffInMilli; + } + static boolean isNonRetryableHTTPCode(CloseableHttpResponse response, boolean retryHTTP403) { return response != null && (response.getStatusLine().getStatusCode() < 500 diff --git a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java index c50bf4900..0bafbf12d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java @@ -21,11 +21,15 @@ import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFSession; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.SqlState; /** SFAsyncResultSet implementation. 
Note: For Snowflake internal use */ public class SFAsyncResultSet extends SnowflakeBaseResultSet implements SnowflakeResultSet, ResultSet { + private static final SFLogger logger = SFLoggerFactory.getLogger(SFAsyncResultSet.class); + private ResultSet resultSetForNext = new SnowflakeResultSetV1.EmptyResultSet(); private boolean resultSetForNextInitialized = false; private String queryID; @@ -367,7 +371,7 @@ public boolean isBeforeFirst() throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -375,7 +379,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index 692c7e412..d191b646c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -6,6 +6,7 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -38,7 +39,6 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; -import net.snowflake.client.core.ArrowSqlInput; import net.snowflake.client.core.ColumnTypeHelper; import net.snowflake.client.core.JsonSqlInput; import net.snowflake.client.core.ObjectMapperFactory; @@ -52,7 +52,7 @@ /** Base 
class for query result set and metadata result set */ public abstract class SnowflakeBaseResultSet implements ResultSet { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBaseResultSet.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBaseResultSet.class); private final int resultSetType; private final int resultSetConcurrency; private final int resultSetHoldability; @@ -150,7 +150,7 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException { @Override public InputStream getAsciiStream(int columnIndex) throws SQLException { - logger.debug("public InputStream getAsciiStream(int columnIndex)", false); + logger.trace("InputStream getAsciiStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -160,33 +160,33 @@ public InputStream getAsciiStream(int columnIndex) throws SQLException { @Deprecated @Override public InputStream getUnicodeStream(int columnIndex) throws SQLException { - logger.debug("public InputStream getUnicodeStream(int columnIndex)", false); + logger.trace("InputStream getUnicodeStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public InputStream getBinaryStream(int columnIndex) throws SQLException { - logger.debug("public InputStream getBinaryStream(int columnIndex)", false); + logger.trace("InputStream getBinaryStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public String getString(String columnLabel) throws SQLException { - logger.debug("public String getString(String columnLabel)", false); + logger.trace("String getString(String columnLabel)", false); return getString(findColumn(columnLabel)); } @Override public boolean getBoolean(String columnLabel) throws SQLException { - logger.debug("public boolean getBoolean(String columnLabel)", false); + logger.trace("boolean getBoolean(String columnLabel)", false); return 
getBoolean(findColumn(columnLabel)); } @Override public byte getByte(String columnLabel) throws SQLException { - logger.debug("public byte getByte(String columnLabel)", false); + logger.trace("byte getByte(String columnLabel)", false); raiseSQLExceptionIfResultSetIsClosed(); return getByte(findColumn(columnLabel)); @@ -194,35 +194,35 @@ public byte getByte(String columnLabel) throws SQLException { @Override public short getShort(String columnLabel) throws SQLException { - logger.debug("public short getShort(String columnLabel)", false); + logger.trace("short getShort(String columnLabel)", false); return getShort(findColumn(columnLabel)); } @Override public int getInt(String columnLabel) throws SQLException { - logger.debug("public int getInt(String columnLabel)", false); + logger.trace("int getInt(String columnLabel)", false); return getInt(findColumn(columnLabel)); } @Override public long getLong(String columnLabel) throws SQLException { - logger.debug("public long getLong(String columnLabel)", false); + logger.trace("long getLong(String columnLabel)", false); return getLong(findColumn(columnLabel)); } @Override public float getFloat(String columnLabel) throws SQLException { - logger.debug("public float getFloat(String columnLabel)", false); + logger.trace("float getFloat(String columnLabel)", false); return getFloat(findColumn(columnLabel)); } @Override public double getDouble(String columnLabel) throws SQLException { - logger.debug("public double getDouble(String columnLabel)", false); + logger.trace("double getDouble(String columnLabel)", false); return getDouble(findColumn(columnLabel)); } @@ -233,42 +233,42 @@ public double getDouble(String columnLabel) throws SQLException { @Deprecated @Override public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(String columnLabel, " + "int scale)", false); + logger.trace("BigDecimal getBigDecimal(String columnLabel, " + "int scale)", false); 
return getBigDecimal(findColumn(columnLabel), scale); } @Override public byte[] getBytes(String columnLabel) throws SQLException { - logger.debug("public byte[] getBytes(String columnLabel)", false); + logger.trace("byte[] getBytes(String columnLabel)", false); return getBytes(findColumn(columnLabel)); } @Override public Date getDate(String columnLabel) throws SQLException { - logger.debug("public Date getDate(String columnLabel)", false); + logger.trace("Date getDate(String columnLabel)", false); return getDate(findColumn(columnLabel)); } @Override public Time getTime(String columnLabel) throws SQLException { - logger.debug("public Time getTime(String columnLabel)", false); + logger.trace("Time getTime(String columnLabel)", false); return getTime(findColumn(columnLabel)); } @Override public Timestamp getTimestamp(String columnLabel) throws SQLException { - logger.debug("public Timestamp getTimestamp(String columnLabel)", false); + logger.trace("Timestamp getTimestamp(String columnLabel)", false); return getTimestamp(findColumn(columnLabel)); } @Override public InputStream getAsciiStream(String columnLabel) throws SQLException { - logger.debug("public InputStream getAsciiStream(String columnLabel)", false); + logger.trace("InputStream getAsciiStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -278,55 +278,55 @@ public InputStream getAsciiStream(String columnLabel) throws SQLException { @Deprecated @Override public InputStream getUnicodeStream(String columnLabel) throws SQLException { - logger.debug("public InputStream getUnicodeStream(String columnLabel)", false); + logger.trace("InputStream getUnicodeStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public InputStream getBinaryStream(String columnLabel) throws SQLException { - logger.debug("public InputStream getBinaryStream(String columnLabel)", false); + logger.trace("InputStream 
getBinaryStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public SQLWarning getWarnings() throws SQLException { - logger.debug("public SQLWarning getWarnings()", false); + logger.trace("SQLWarning getWarnings()", false); raiseSQLExceptionIfResultSetIsClosed(); return null; } @Override public void clearWarnings() throws SQLException { - logger.debug("public void clearWarnings()", false); + logger.trace("void clearWarnings()", false); raiseSQLExceptionIfResultSetIsClosed(); } @Override public String getCursorName() throws SQLException { - logger.debug("public String getCursorName()", false); + logger.trace("String getCursorName()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public ResultSetMetaData getMetaData() throws SQLException { - logger.debug("public ResultSetMetaData getMetaData()", false); + logger.trace("ResultSetMetaData getMetaData()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetMetaData; } @Override public Object getObject(String columnLabel) throws SQLException { - logger.debug("public Object getObject(String columnLabel)", false); + logger.trace("Object getObject(String columnLabel)", false); return getObject(findColumn(columnLabel)); } @Override public int findColumn(String columnLabel) throws SQLException { - logger.debug("public int findColumn(String columnLabel)", false); + logger.trace("int findColumn(String columnLabel)", false); raiseSQLExceptionIfResultSetIsClosed(); int columnIndex = resultSetMetaData.getColumnIndex(columnLabel); @@ -340,7 +340,7 @@ public int findColumn(String columnLabel) throws SQLException { @Override public Reader getCharacterStream(int columnIndex) throws SQLException { - logger.debug("public Reader getCharacterStream(int columnIndex)", false); + logger.trace("Reader getCharacterStream(int columnIndex)", false); raiseSQLExceptionIfResultSetIsClosed(); String streamData = getString(columnIndex); 
return (streamData == null) ? null : new StringReader(streamData); @@ -348,76 +348,76 @@ public Reader getCharacterStream(int columnIndex) throws SQLException { @Override public Reader getCharacterStream(String columnLabel) throws SQLException { - logger.debug("public Reader getCharacterStream(String columnLabel)", false); + logger.trace("Reader getCharacterStream(String columnLabel)", false); return getCharacterStream(findColumn(columnLabel)); } @Override public BigDecimal getBigDecimal(String columnLabel) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(String columnLabel)", false); + logger.trace("BigDecimal getBigDecimal(String columnLabel)", false); return getBigDecimal(findColumn(columnLabel)); } @Override public void beforeFirst() throws SQLException { - logger.debug("public void beforeFirst()", false); + logger.trace("void beforeFirst()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void afterLast() throws SQLException { - logger.debug("public void afterLast()", false); + logger.trace("void afterLast()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean first() throws SQLException { - logger.debug("public boolean first()", false); + logger.trace("boolean first()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean last() throws SQLException { - logger.debug("public boolean last()", false); + logger.trace("boolean last()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean absolute(int row) throws SQLException { - logger.debug("public boolean absolute(int row)", false); + logger.trace("boolean absolute(int row)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean relative(int rows) throws SQLException { - logger.debug("public boolean relative(int rows)", false); + logger.trace("boolean relative(int 
rows)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean previous() throws SQLException { - logger.debug("public boolean previous()", false); + logger.trace("boolean previous()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public int getFetchDirection() throws SQLException { - logger.debug("public int getFetchDirection()", false); + logger.trace("int getFetchDirection()", false); raiseSQLExceptionIfResultSetIsClosed(); return ResultSet.FETCH_FORWARD; } @Override public void setFetchDirection(int direction) throws SQLException { - logger.debug("public void setFetchDirection(int direction)", false); + logger.trace("void setFetchDirection(int direction)", false); raiseSQLExceptionIfResultSetIsClosed(); if (direction != ResultSet.FETCH_FORWARD) { @@ -427,14 +427,14 @@ public void setFetchDirection(int direction) throws SQLException { @Override public int getFetchSize() throws SQLException { - logger.debug("public int getFetchSize()", false); + logger.trace("int getFetchSize()", false); raiseSQLExceptionIfResultSetIsClosed(); return this.fetchSize; } @Override public void setFetchSize(int rows) throws SQLException { - logger.debug("public void setFetchSize(int rows)", false); + logger.trace("void setFetchSize(int rows)", false); raiseSQLExceptionIfResultSetIsClosed(); this.fetchSize = rows; @@ -442,140 +442,140 @@ public void setFetchSize(int rows) throws SQLException { @Override public int getType() throws SQLException { - logger.debug("public int getType()", false); + logger.trace("int getType()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetType; } @Override public int getConcurrency() throws SQLException { - logger.debug("public int getConcurrency()", false); + logger.trace("int getConcurrency()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetConcurrency; } @Override public boolean rowUpdated() throws SQLException { - logger.debug("public 
boolean rowUpdated()", false); + logger.trace("boolean rowUpdated()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean rowInserted() throws SQLException { - logger.debug("public boolean rowInserted()", false); + logger.trace("boolean rowInserted()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean rowDeleted() throws SQLException { - logger.debug("public boolean rowDeleted()", false); + logger.trace("boolean rowDeleted()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNull(int columnIndex) throws SQLException { - logger.debug("public void updateNull(int columnIndex)", false); + logger.trace("void updateNull(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBoolean(int columnIndex, boolean x) throws SQLException { - logger.debug("public void updateBoolean(int columnIndex, boolean x)", false); + logger.trace("void updateBoolean(int columnIndex, boolean x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateByte(int columnIndex, byte x) throws SQLException { - logger.debug("public void updateByte(int columnIndex, byte x)", false); + logger.trace("void updateByte(int columnIndex, byte x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateShort(int columnIndex, short x) throws SQLException { - logger.debug("public void updateShort(int columnIndex, short x)", false); + logger.trace("void updateShort(int columnIndex, short x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateInt(int columnIndex, int x) throws SQLException { - logger.debug("public void updateInt(int columnIndex, int x)", false); + logger.trace("void updateInt(int columnIndex, int x)", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateLong(int columnIndex, long x) throws SQLException { - logger.debug("public void updateLong(int columnIndex, long x)", false); + logger.trace("void updateLong(int columnIndex, long x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateFloat(int columnIndex, float x) throws SQLException { - logger.debug("public void updateFloat(int columnIndex, float x)", false); + logger.trace("void updateFloat(int columnIndex, float x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateDouble(int columnIndex, double x) throws SQLException { - logger.debug("public void updateDouble(int columnIndex, double x)", false); + logger.trace("void updateDouble(int columnIndex, double x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { - logger.debug("public void updateBigDecimal(int columnIndex, BigDecimal x)", false); + logger.trace("void updateBigDecimal(int columnIndex, BigDecimal x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateString(int columnIndex, String x) throws SQLException { - logger.debug("public void updateString(int columnIndex, String x)", false); + logger.trace("void updateString(int columnIndex, String x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBytes(int columnIndex, byte[] x) throws SQLException { - logger.debug("public void updateBytes(int columnIndex, byte[] x)", false); + logger.trace("void updateBytes(int columnIndex, byte[] x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateDate(int columnIndex, Date x) throws SQLException { - logger.debug("public void updateDate(int columnIndex, Date x)", 
false); + logger.trace("void updateDate(int columnIndex, Date x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTime(int columnIndex, Time x) throws SQLException { - logger.debug("public void updateTime(int columnIndex, Time x)", false); + logger.trace("void updateTime(int columnIndex, Time x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { - logger.debug("public void updateTimestamp(int columnIndex, Timestamp x)", false); + logger.trace("void updateTimestamp(int columnIndex, Timestamp x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(int columnIndex, " + "InputStream x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -583,7 +583,7 @@ public void updateAsciiStream(int columnIndex, InputStream x, int length) throws @Override public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(int columnIndex, " + "InputStream x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -591,7 +591,7 @@ public void updateBinaryStream(int columnIndex, InputStream x, int length) throw @Override public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(int columnIndex, " + "Reader x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -599,7 +599,7 @@ public void updateCharacterStream(int columnIndex, Reader x, int length) throws @Override public void updateObject(int columnIndex, Object x, int 
scaleOrLength) throws SQLException { - logger.debug( + logger.trace( "public void updateObject(int columnIndex, Object x, " + "int scaleOrLength)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -607,112 +607,112 @@ public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQ @Override public void updateObject(int columnIndex, Object x) throws SQLException { - logger.debug("public void updateObject(int columnIndex, Object x)", false); + logger.trace("void updateObject(int columnIndex, Object x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNull(String columnLabel) throws SQLException { - logger.debug("public void updateNull(String columnLabel)", false); + logger.trace("void updateNull(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBoolean(String columnLabel, boolean x) throws SQLException { - logger.debug("public void updateBoolean(String columnLabel, boolean x)", false); + logger.trace("void updateBoolean(String columnLabel, boolean x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateByte(String columnLabel, byte x) throws SQLException { - logger.debug("public void updateByte(String columnLabel, byte x)", false); + logger.trace("void updateByte(String columnLabel, byte x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateShort(String columnLabel, short x) throws SQLException { - logger.debug("public void updateShort(String columnLabel, short x)", false); + logger.trace("void updateShort(String columnLabel, short x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateInt(String columnLabel, int x) throws SQLException { - logger.debug("public void updateInt(String columnLabel, int x)", false); + logger.trace("void 
updateInt(String columnLabel, int x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateLong(String columnLabel, long x) throws SQLException { - logger.debug("public void updateLong(String columnLabel, long x)", false); + logger.trace("void updateLong(String columnLabel, long x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateFloat(String columnLabel, float x) throws SQLException { - logger.debug("public void updateFloat(String columnLabel, float x)", false); + logger.trace("void updateFloat(String columnLabel, float x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateDouble(String columnLabel, double x) throws SQLException { - logger.debug("public void updateDouble(String columnLabel, double x)", false); + logger.trace("void updateDouble(String columnLabel, double x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { - logger.debug("public void updateBigDecimal(String columnLabel, " + "BigDecimal x)", false); + logger.trace("void updateBigDecimal(String columnLabel, " + "BigDecimal x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateString(String columnLabel, String x) throws SQLException { - logger.debug("public void updateString(String columnLabel, String x)", false); + logger.trace("void updateString(String columnLabel, String x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBytes(String columnLabel, byte[] x) throws SQLException { - logger.debug("public void updateBytes(String columnLabel, byte[] x)", false); + logger.trace("void updateBytes(String columnLabel, byte[] x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void 
updateDate(String columnLabel, Date x) throws SQLException { - logger.debug("public void updateDate(String columnLabel, Date x)", false); + logger.trace("void updateDate(String columnLabel, Date x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTime(String columnLabel, Time x) throws SQLException { - logger.debug("public void updateTime(String columnLabel, Time x)", false); + logger.trace("void updateTime(String columnLabel, Time x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { - logger.debug("public void updateTimestamp(String columnLabel, Timestamp x)", false); + logger.trace("void updateTimestamp(String columnLabel, Timestamp x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(String columnLabel, " + "InputStream x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -721,7 +721,7 @@ public void updateAsciiStream(String columnLabel, InputStream x, int length) thr @Override public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(String columnLabel, " + "InputStream x, int length)", false); @@ -731,7 +731,7 @@ public void updateBinaryStream(String columnLabel, InputStream x, int length) @Override public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(String columnLabel, " + "Reader reader,int length)", false); @@ -740,7 +740,7 @@ public void updateCharacterStream(String columnLabel, Reader reader, int length) @Override public void 
updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { - logger.debug( + logger.trace( "public void updateObject(String columnLabel, Object x, " + "int scaleOrLength)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -748,91 +748,91 @@ public void updateObject(String columnLabel, Object x, int scaleOrLength) throws @Override public void updateObject(String columnLabel, Object x) throws SQLException { - logger.debug("public void updateObject(String columnLabel, Object x)", false); + logger.trace("void updateObject(String columnLabel, Object x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void insertRow() throws SQLException { - logger.debug("public void insertRow()", false); + logger.trace("void insertRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRow() throws SQLException { - logger.debug("public void updateRow()", false); + logger.trace("void updateRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void deleteRow() throws SQLException { - logger.debug("public void deleteRow()", false); + logger.trace("void deleteRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void refreshRow() throws SQLException { - logger.debug("public void refreshRow()", false); + logger.trace("void refreshRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void cancelRowUpdates() throws SQLException { - logger.debug("public void cancelRowUpdates()", false); + logger.trace("void cancelRowUpdates()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void moveToInsertRow() throws SQLException { - logger.debug("public void moveToInsertRow()", false); + logger.trace("void moveToInsertRow()", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void moveToCurrentRow() throws SQLException { - logger.debug("public void moveToCurrentRow()", false); + logger.trace("void moveToCurrentRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Statement getStatement() throws SQLException { - logger.debug("public Statement getStatement()", false); + logger.trace("Statement getStatement()", false); raiseSQLExceptionIfResultSetIsClosed(); return statement; } @Override public Object getObject(int columnIndex, Map> map) throws SQLException { - logger.debug("public Object getObject(int columnIndex, Map> map)", false); + logger.trace("Object getObject(int columnIndex, Map> map)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Ref getRef(int columnIndex) throws SQLException { - logger.debug("public Ref getRef(int columnIndex)", false); + logger.trace("Ref getRef(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Blob getBlob(int columnIndex) throws SQLException { - logger.debug("public Blob getBlob(int columnIndex)", false); + logger.trace("Blob getBlob(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Clob getClob(int columnIndex) throws SQLException { - logger.debug("public Clob getClob(int columnIndex)", false); + logger.trace("Clob getClob(int columnIndex)", false); String columnValue = getString(columnIndex); return columnValue == null ? 
null : new SnowflakeClob(columnValue); @@ -840,14 +840,14 @@ public Clob getClob(int columnIndex) throws SQLException { @Override public Array getArray(int columnIndex) throws SQLException { - logger.debug("public Array getArray(int columnIndex)", false); + logger.trace("Array getArray(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Object getObject(String columnLabel, Map> map) throws SQLException { - logger.debug( + logger.trace( "public Object getObject(String columnLabel, " + "Map> map)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -855,21 +855,21 @@ public Object getObject(String columnLabel, Map> map) throws SQ @Override public Ref getRef(String columnLabel) throws SQLException { - logger.debug("public Ref getRef(String columnLabel)", false); + logger.trace("Ref getRef(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Blob getBlob(String columnLabel) throws SQLException { - logger.debug("public Blob getBlob(String columnLabel)", false); + logger.trace("Blob getBlob(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Clob getClob(String columnLabel) throws SQLException { - logger.debug("public Clob getClob(String columnLabel)", false); + logger.trace("Clob getClob(String columnLabel)", false); String columnValue = getString(columnLabel); return columnValue == null ? 
null : new SnowflakeClob(columnValue); @@ -877,258 +877,258 @@ public Clob getClob(String columnLabel) throws SQLException { @Override public Array getArray(String columnLabel) throws SQLException { - logger.debug("public Array getArray(String columnLabel)", false); + logger.trace("Array getArray(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { - logger.debug("public Date getDate(int columnIndex, Calendar cal)", false); + logger.trace("Date getDate(int columnIndex, Calendar cal)", false); return getDate(columnIndex, cal.getTimeZone()); } @Override public Date getDate(String columnLabel, Calendar cal) throws SQLException { - logger.debug("public Date getDate(String columnLabel, Calendar cal)", false); + logger.trace("Date getDate(String columnLabel, Calendar cal)", false); return getDate(findColumn(columnLabel), cal.getTimeZone()); } @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { - logger.debug("public Time getTime(int columnIndex, Calendar cal)", false); + logger.trace("Time getTime(int columnIndex, Calendar cal)", false); return getTime(columnIndex); } @Override public Time getTime(String columnLabel, Calendar cal) throws SQLException { - logger.debug("public Time getTime(String columnLabel, Calendar cal)", false); + logger.trace("Time getTime(String columnLabel, Calendar cal)", false); return getTime(columnLabel); } @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - logger.debug("public Timestamp getTimestamp(int columnIndex, Calendar cal)", false); + logger.trace("Timestamp getTimestamp(int columnIndex, Calendar cal)", false); return getTimestamp(columnIndex, cal.getTimeZone()); } @Override public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { - logger.debug("public Timestamp getTimestamp(String columnLabel, " + "Calendar 
cal)", false); + logger.trace("Timestamp getTimestamp(String columnLabel, " + "Calendar cal)", false); return getTimestamp(findColumn(columnLabel), cal.getTimeZone()); } @Override public URL getURL(int columnIndex) throws SQLException { - logger.debug("public URL getURL(int columnIndex)", false); + logger.trace("URL getURL(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public URL getURL(String columnLabel) throws SQLException { - logger.debug("public URL getURL(String columnLabel)", false); + logger.trace("URL getURL(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRef(int columnIndex, Ref x) throws SQLException { - logger.debug("public void updateRef(int columnIndex, Ref x)", false); + logger.trace("void updateRef(int columnIndex, Ref x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRef(String columnLabel, Ref x) throws SQLException { - logger.debug("public void updateRef(String columnLabel, Ref x)", false); + logger.trace("void updateRef(String columnLabel, Ref x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBlob(int columnIndex, Blob x) throws SQLException { - logger.debug("public void updateBlob(int columnIndex, Blob x)", false); + logger.trace("void updateBlob(int columnIndex, Blob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBlob(String columnLabel, Blob x) throws SQLException { - logger.debug("public void updateBlob(String columnLabel, Blob x)", false); + logger.trace("void updateBlob(String columnLabel, Blob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(int columnIndex, Clob x) throws SQLException { - logger.debug("public void updateClob(int columnIndex, Clob x)", false); + 
logger.trace("void updateClob(int columnIndex, Clob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(String columnLabel, Clob x) throws SQLException { - logger.debug("public void updateClob(String columnLabel, Clob x)", false); + logger.trace("void updateClob(String columnLabel, Clob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateArray(int columnIndex, Array x) throws SQLException { - logger.debug("public void updateArray(int columnIndex, Array x)", false); + logger.trace("void updateArray(int columnIndex, Array x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateArray(String columnLabel, Array x) throws SQLException { - logger.debug("public void updateArray(String columnLabel, Array x)", false); + logger.trace("void updateArray(String columnLabel, Array x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public RowId getRowId(int columnIndex) throws SQLException { - logger.debug("public RowId getRowId(int columnIndex)", false); + logger.trace("RowId getRowId(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public RowId getRowId(String columnLabel) throws SQLException { - logger.debug("public RowId getRowId(String columnLabel)", false); + logger.trace("RowId getRowId(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRowId(int columnIndex, RowId x) throws SQLException { - logger.debug("public void updateRowId(int columnIndex, RowId x)", false); + logger.trace("void updateRowId(int columnIndex, RowId x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRowId(String columnLabel, RowId x) throws SQLException { - logger.debug("public void updateRowId(String columnLabel, RowId x)", 
false); + logger.trace("void updateRowId(String columnLabel, RowId x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public int getHoldability() throws SQLException { - logger.debug("public int getHoldability()", false); + logger.trace("int getHoldability()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetHoldability; } @Override public void updateNString(int columnIndex, String nString) throws SQLException { - logger.debug("public void updateNString(int columnIndex, String nString)", false); + logger.trace("void updateNString(int columnIndex, String nString)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNString(String columnLabel, String nString) throws SQLException { - logger.debug("public void updateNString(String columnLabel, String nString)", false); + logger.trace("void updateNString(String columnLabel, String nString)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(int columnIndex, NClob nClob) throws SQLException { - logger.debug("public void updateNClob(int columnIndex, NClob nClob)", false); + logger.trace("void updateNClob(int columnIndex, NClob nClob)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(String columnLabel, NClob nClob) throws SQLException { - logger.debug("public void updateNClob(String columnLabel, NClob nClob)", false); + logger.trace("void updateNClob(String columnLabel, NClob nClob)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public NClob getNClob(int columnIndex) throws SQLException { - logger.debug("public NClob getNClob(int columnIndex)", false); + logger.trace("NClob getNClob(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public NClob getNClob(String columnLabel) throws SQLException { - 
logger.debug("public NClob getNClob(String columnLabel)", false); + logger.trace("NClob getNClob(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public SQLXML getSQLXML(int columnIndex) throws SQLException { - logger.debug("public SQLXML getSQLXML(int columnIndex)", false); + logger.trace("SQLXML getSQLXML(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public SQLXML getSQLXML(String columnLabel) throws SQLException { - logger.debug("public SQLXML getSQLXML(String columnLabel)", false); + logger.trace("SQLXML getSQLXML(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { - logger.debug("public void updateSQLXML(int columnIndex, SQLXML xmlObject)", false); + logger.trace("void updateSQLXML(int columnIndex, SQLXML xmlObject)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { - logger.debug("public void updateSQLXML(String columnLabel, SQLXML xmlObject)", false); + logger.trace("void updateSQLXML(String columnLabel, SQLXML xmlObject)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public String getNString(int columnIndex) throws SQLException { - logger.debug("public String getNString(int columnIndex)", false); + logger.trace("String getNString(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public String getNString(String columnLabel) throws SQLException { - logger.debug("public String getNString(String columnLabel)", false); + logger.trace("String getNString(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Reader getNCharacterStream(int columnIndex) 
throws SQLException { - logger.debug("public Reader getNCharacterStream(int columnIndex)", false); + logger.trace("Reader getNCharacterStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Reader getNCharacterStream(String columnLabel) throws SQLException { - logger.debug("public Reader getNCharacterStream(String columnLabel)", false); + logger.trace("Reader getNCharacterStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNCharacterStream(int columnIndex, " + "Reader x, long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1137,7 +1137,7 @@ public void updateNCharacterStream(int columnIndex, Reader x, long length) throw @Override public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNCharacterStream(String columnLabel, " + "Reader reader,long length)", false); @@ -1146,7 +1146,7 @@ public void updateNCharacterStream(String columnLabel, Reader reader, long lengt @Override public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(int columnIndex, " + "InputStream x, long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1154,7 +1154,7 @@ public void updateAsciiStream(int columnIndex, InputStream x, long length) throw @Override public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(int columnIndex, " + "InputStream x, long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1162,7 +1162,7 @@ 
public void updateBinaryStream(int columnIndex, InputStream x, long length) thro @Override public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(int columnIndex, Reader x, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1171,7 +1171,7 @@ public void updateCharacterStream(int columnIndex, Reader x, long length) throws @Override public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(String columnLabel, " + "InputStream x, long length)", false); @@ -1181,7 +1181,7 @@ public void updateAsciiStream(String columnLabel, InputStream x, long length) @Override public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(String columnLabel, " + "InputStream x, long length)", false); @@ -1191,7 +1191,7 @@ public void updateBinaryStream(String columnLabel, InputStream x, long length) @Override public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(String columnLabel, " + "Reader reader,long length)", false); @@ -1201,7 +1201,7 @@ public void updateCharacterStream(String columnLabel, Reader reader, long length @Override public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateBlob(int columnIndex, InputStream " + "inputStream, long length)", false); @@ -1211,7 +1211,7 @@ public void updateBlob(int columnIndex, InputStream inputStream, long length) @Override public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { - logger.debug( + logger.trace( "public void 
updateBlob(String columnLabel, " + "InputStream inputStream,long length)", false); @@ -1220,14 +1220,14 @@ public void updateBlob(String columnLabel, InputStream inputStream, long length) @Override public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { - logger.debug("public void updateClob(int columnIndex, Reader reader, " + "long length)", false); + logger.trace("void updateClob(int columnIndex, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateClob(String columnLabel, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1235,7 +1235,7 @@ public void updateClob(String columnLabel, Reader reader, long length) throws SQ @Override public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNClob(int columnIndex, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1243,7 +1243,7 @@ public void updateNClob(int columnIndex, Reader reader, long length) throws SQLE @Override public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNClob(String columnLabel, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1251,14 +1251,14 @@ public void updateNClob(String columnLabel, Reader reader, long length) throws S @Override public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { - logger.debug("public void updateNCharacterStream(int columnIndex, Reader x)", false); + logger.trace("void updateNCharacterStream(int columnIndex, Reader x)", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { - logger.debug( + logger.trace( "public void updateNCharacterStream(String columnLabel, " + "Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1266,42 +1266,42 @@ public void updateNCharacterStream(String columnLabel, Reader reader) throws SQL @Override public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { - logger.debug("public void updateAsciiStream(int columnIndex, InputStream x)", false); + logger.trace("void updateAsciiStream(int columnIndex, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { - logger.debug("public void updateBinaryStream(int columnIndex, InputStream x)", false); + logger.trace("void updateBinaryStream(int columnIndex, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { - logger.debug("public void updateCharacterStream(int columnIndex, Reader x)", false); + logger.trace("void updateCharacterStream(int columnIndex, Reader x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { - logger.debug("public void updateAsciiStream(String columnLabel, InputStream x)", false); + logger.trace("void updateAsciiStream(String columnLabel, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { - logger.debug("public void updateBinaryStream(String columnLabel, InputStream x)", false); + logger.trace("void 
updateBinaryStream(String columnLabel, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(String columnLabel, " + "Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1309,52 +1309,54 @@ public void updateCharacterStream(String columnLabel, Reader reader) throws SQLE @Override public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { - logger.debug("public void updateBlob(int columnIndex, InputStream inputStream)", false); + logger.trace("void updateBlob(int columnIndex, InputStream inputStream)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { - logger.debug("public void updateBlob(String columnLabel, InputStream " + "inputStream)", false); + logger.trace("void updateBlob(String columnLabel, InputStream " + "inputStream)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(int columnIndex, Reader reader) throws SQLException { - logger.debug("public void updateClob(int columnIndex, Reader reader)", false); + logger.trace("void updateClob(int columnIndex, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(String columnLabel, Reader reader) throws SQLException { - logger.debug("public void updateClob(String columnLabel, Reader reader)", false); + logger.trace("void updateClob(String columnLabel, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(int columnIndex, Reader reader) throws SQLException { - logger.debug("public void updateNClob(int columnIndex, Reader reader)", 
false); + logger.trace("void updateNClob(int columnIndex, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(String columnLabel, Reader reader) throws SQLException { - logger.debug("public void updateNClob(String columnLabel, Reader reader)", false); + logger.trace("void updateNClob(String columnLabel, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public T getObject(int columnIndex, Class type) throws SQLException { - logger.debug("public T getObject(int columnIndex,Class type)", false); + logger.trace(" T getObject(int columnIndex,Class type)", false); if (resultSetMetaData.isStructuredTypeColumn(columnIndex)) { if (SQLData.class.isAssignableFrom(type)) { - SQLInput sqlInput = (SQLInput) getObject(columnIndex); + SQLInput sqlInput = + SnowflakeUtil.mapSFExceptionToSQLException( + () -> (SQLInput) sfBaseResultSet.getObject(columnIndex)); if (sqlInput == null) { return null; } else { @@ -1366,12 +1368,17 @@ public T getObject(int columnIndex, Class type) throws SQLException { Object object = getObject(columnIndex); if (object == null) { return null; - } else if (object instanceof JsonSqlInput) { - JsonNode jsonNode = ((JsonSqlInput) object).getInput(); - return (T) - OBJECT_MAPPER.convertValue(jsonNode, new TypeReference>() {}); + } else if (object instanceof Map) { + throw new SQLException( + "Arrow native struct couldn't be converted to String. 
To map to SqlData the method getObject(int columnIndex, Class type) should be used"); } else { - return (T) ((ArrowSqlInput) object).getInput(); + try { + return (T) + OBJECT_MAPPER.readValue( + (String) object, new TypeReference>() {}); + } catch (JsonProcessingException e) { + throw new SQLException("Value couldn't be converted to Map"); + } } } } @@ -1409,7 +1416,7 @@ public T getObject(int columnIndex, Class type) throws SQLException { } public List getList(int columnIndex, Class type) throws SQLException { - logger.debug("public List getList(int columnIndex, Class type)", false); + logger.trace(" List getList(int columnIndex, Class type)", false); if (!resultSetMetaData.isStructuredTypeColumn(columnIndex)) { throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -1418,7 +1425,7 @@ public List getList(int columnIndex, Class type) throws SQLException { } public T[] getArray(int columnIndex, Class type) throws SQLException { - logger.debug("public T[] getArray(int columnIndex, Class type)", false); + logger.trace(" T[] getArray(int columnIndex, Class type)", false); if (!resultSetMetaData.isStructuredTypeColumn(columnIndex)) { throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -1571,7 +1578,7 @@ public T[] getArray(int columnIndex, Class type) throws SQLException { } public Map getMap(int columnIndex, Class type) throws SQLException { - logger.debug("public Map getMap(int columnIndex, Class type)", false); + logger.trace(" Map getMap(int columnIndex, Class type)", false); if (!resultSetMetaData.isStructuredTypeColumn(columnIndex)) { throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -1585,7 +1592,8 @@ public Map getMap(int columnIndex, Class type) throws SQLExcep int columnType = ColumnTypeHelper.getColumnType(valueFieldMetadata.getType(), session); int scale = valueFieldMetadata.getScale(); TimeZone tz = sfBaseResultSet.getSessionTimeZone(); - Object object = getObject(columnIndex); + Object object = + 
SnowflakeUtil.mapSFExceptionToSQLException(() -> sfBaseResultSet.getObject(columnIndex)); if (object == null) { return null; } @@ -1734,14 +1742,14 @@ public Map getMap(int columnIndex, Class type) throws SQLExcep @Override public T getObject(String columnLabel, Class type) throws SQLException { - logger.debug("public T getObject(String columnLabel,Class type)", false); + logger.trace(" T getObject(String columnLabel,Class type)", false); return getObject(findColumn(columnLabel), type); } @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -1752,7 +1760,7 @@ public T unwrap(Class iface) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java index f1281d593..354f84c72 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java @@ -13,6 +13,7 @@ import java.util.Properties; import java.util.logging.Logger; import javax.sql.DataSource; +import net.snowflake.client.core.SFSessionProperty; import net.snowflake.client.log.ArgSupplier; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -22,6 +23,7 @@ public class SnowflakeBasicDataSource implements DataSource, Serializable { private static final long serialversionUID = 1L; private static final String AUTHENTICATOR_SNOWFLAKE_JWT = "SNOWFLAKE_JWT"; private static final String AUTHENTICATOR_OAUTH = "OAUTH"; + private static final String 
AUTHENTICATOR_USERNAME_PASSWORD_MFA = "USERNAME_PASSWORD_MFA"; private String url; private String serverName; @@ -36,7 +38,7 @@ public class SnowflakeBasicDataSource implements DataSource, Serializable { private Properties properties = new Properties(); - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBasicDataSource.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBasicDataSource.class); static { try { @@ -88,12 +90,12 @@ public Connection getConnection() throws SQLException { public Connection getConnection(String username, String password) throws SQLException { if (!AUTHENTICATOR_OAUTH.equalsIgnoreCase( authenticator)) { // For OAuth, no username is required - properties.put("user", username); + properties.put(SFSessionProperty.USER.getPropertyKey(), username); } // The driver needs password for OAUTH as part of SNOW-533673 feature request. if (!AUTHENTICATOR_SNOWFLAKE_JWT.equalsIgnoreCase(authenticator)) { - properties.put("password", password); + properties.put(SFSessionProperty.PASSWORD.getPropertyKey(), password); } try { @@ -119,7 +121,8 @@ public void setLogWriter(PrintWriter out) throws SQLException { @Override public int getLoginTimeout() throws SQLException { try { - return Integer.parseInt(properties.getProperty("loginTimeout")); + return Integer.parseInt( + properties.getProperty(SFSessionProperty.LOGIN_TIMEOUT.getPropertyKey())); } catch (NumberFormatException e) { return 0; } @@ -127,7 +130,7 @@ public int getLoginTimeout() throws SQLException { @Override public void setLoginTimeout(int seconds) throws SQLException { - properties.put("loginTimeout", Integer.toString(seconds)); + properties.put(SFSessionProperty.LOGIN_TIMEOUT.getPropertyKey(), Integer.toString(seconds)); } @Override @@ -150,19 +153,19 @@ public void setUrl(String url) { } public void setDatabaseName(String databaseName) { - properties.put("db", databaseName); + properties.put(SFSessionProperty.DATABASE.getPropertyKey(), 
databaseName); } public void setSchema(String schema) { - properties.put("schema", schema); + properties.put(SFSessionProperty.SCHEMA.getPropertyKey(), schema); } public void setWarehouse(String warehouse) { - properties.put("warehouse", warehouse); + properties.put(SFSessionProperty.WAREHOUSE.getPropertyKey(), warehouse); } public void setRole(String role) { - properties.put("role", role); + properties.put(SFSessionProperty.ROLE.getPropertyKey(), role); } public void setUser(String user) { @@ -182,7 +185,7 @@ public void setPortNumber(int portNumber) { } public void setAccount(String account) { - this.properties.put("account", account); + this.properties.put(SFSessionProperty.ACCOUNT.getPropertyKey(), account); } public void setSsl(boolean ssl) { @@ -191,12 +194,12 @@ public void setSsl(boolean ssl) { public void setAuthenticator(String authenticator) { this.authenticator = authenticator; - this.properties.put("authenticator", authenticator); + this.properties.put(SFSessionProperty.AUTHENTICATOR.getPropertyKey(), authenticator); } public void setOauthToken(String oauthToken) { this.setAuthenticator(AUTHENTICATOR_OAUTH); - this.properties.put("token", oauthToken); + this.properties.put(SFSessionProperty.TOKEN.getPropertyKey(), oauthToken); } public String getUrl() { @@ -217,18 +220,155 @@ public String getUrl() { public void setPrivateKey(PrivateKey privateKey) { this.setAuthenticator(AUTHENTICATOR_SNOWFLAKE_JWT); - this.properties.put("privateKey", privateKey); + this.properties.put(SFSessionProperty.PRIVATE_KEY.getPropertyKey(), privateKey); } public void setPrivateKeyFile(String location, String password) { this.setAuthenticator(AUTHENTICATOR_SNOWFLAKE_JWT); - this.properties.put("private_key_file", location); + this.properties.put(SFSessionProperty.PRIVATE_KEY_FILE.getPropertyKey(), location); if (!Strings.isNullOrEmpty(password)) { - this.properties.put("private_key_file_pwd", password); + 
this.properties.put(SFSessionProperty.PRIVATE_KEY_FILE_PWD.getPropertyKey(), password); } } public void setTracing(String tracing) { - this.properties.put("tracing", tracing); + this.properties.put(SFSessionProperty.TRACING.getPropertyKey(), tracing); + } + + protected Properties getProperties() { + return this.properties; + } + + public void setAllowUnderscoresInHost(boolean allowUnderscoresInHost) { + this.properties.put( + SFSessionProperty.ALLOW_UNDERSCORES_IN_HOST.getPropertyKey(), + String.valueOf(allowUnderscoresInHost)); + } + + public void setDisableGcsDefaultCredentials(boolean isGcsDefaultCredentialsDisabled) { + this.properties.put( + SFSessionProperty.DISABLE_GCS_DEFAULT_CREDENTIALS.getPropertyKey(), + String.valueOf(isGcsDefaultCredentialsDisabled)); + } + + public void setDisableSamlURLCheck(boolean disableSamlURLCheck) { + this.properties.put( + SFSessionProperty.DISABLE_SAML_URL_CHECK.getPropertyKey(), + String.valueOf(disableSamlURLCheck)); + } + + public void setPasscode(String passcode) { + this.setAuthenticator(AUTHENTICATOR_USERNAME_PASSWORD_MFA); + this.properties.put(SFSessionProperty.PASSCODE.getPropertyKey(), passcode); + } + + public void setPasscodeInPassword(boolean isPasscodeInPassword) { + this.properties.put( + SFSessionProperty.PASSCODE_IN_PASSWORD.getPropertyKey(), + String.valueOf(isPasscodeInPassword)); + if (isPasscodeInPassword) { + this.setAuthenticator(AUTHENTICATOR_USERNAME_PASSWORD_MFA); + } + } + + public void setDisableSocksProxy(boolean ignoreJvmSocksProxy) { + this.properties.put( + SFSessionProperty.DISABLE_SOCKS_PROXY.getPropertyKey(), + String.valueOf(ignoreJvmSocksProxy)); + } + + public void setNonProxyHosts(String nonProxyHosts) { + this.properties.put(SFSessionProperty.NON_PROXY_HOSTS.getPropertyKey(), nonProxyHosts); + } + + public void setProxyHost(String proxyHost) { + this.properties.put(SFSessionProperty.PROXY_HOST.getPropertyKey(), proxyHost); + } + + public void setProxyPassword(String proxyPassword) { + 
this.properties.put(SFSessionProperty.PROXY_PASSWORD.getPropertyKey(), proxyPassword); + } + + public void setProxyPort(int proxyPort) { + this.properties.put(SFSessionProperty.PROXY_PORT.getPropertyKey(), Integer.toString(proxyPort)); + } + + public void setProxyProtocol(String proxyProtocol) { + this.properties.put(SFSessionProperty.PROXY_PROTOCOL.getPropertyKey(), proxyProtocol); + } + + public void setProxyUser(String proxyUser) { + this.properties.put(SFSessionProperty.PROXY_USER.getPropertyKey(), proxyUser); + } + + public void setUseProxy(boolean useProxy) { + this.properties.put(SFSessionProperty.USE_PROXY.getPropertyKey(), String.valueOf(useProxy)); + } + + public void setNetworkTimeout(int networkTimeoutSeconds) { + this.properties.put( + SFSessionProperty.NETWORK_TIMEOUT.getPropertyKey(), + Integer.toString(networkTimeoutSeconds)); + } + + public void setQueryTimeout(int queryTimeoutSeconds) { + this.properties.put( + SFSessionProperty.QUERY_TIMEOUT.getPropertyKey(), Integer.toString(queryTimeoutSeconds)); + } + + public void setApplication(String application) { + this.properties.put(SFSessionProperty.APPLICATION.getPropertyKey(), application); + } + + public void setClientConfigFile(String clientConfigFile) { + this.properties.put(SFSessionProperty.CLIENT_CONFIG_FILE.getPropertyKey(), clientConfigFile); + } + + public void setEnablePatternSearch(boolean enablePatternSearch) { + this.properties.put( + SFSessionProperty.ENABLE_PATTERN_SEARCH.getPropertyKey(), + String.valueOf(enablePatternSearch)); + } + + public void setEnablePutGet(boolean enablePutGet) { + this.properties.put( + SFSessionProperty.ENABLE_PUT_GET.getPropertyKey(), String.valueOf(enablePutGet)); + } + + public void setArrowTreatDecimalAsInt(boolean treatDecimalAsInt) { + this.properties.put( + SFSessionProperty.JDBC_ARROW_TREAT_DECIMAL_AS_INT.getPropertyKey(), + String.valueOf(treatDecimalAsInt)); + } + + public void setMaxHttpRetries(int maxHttpRetries) { + this.properties.put( + 
SFSessionProperty.MAX_HTTP_RETRIES.getPropertyKey(), Integer.toString(maxHttpRetries)); + } + + public void setOcspFailOpen(boolean ocspFailOpen) { + this.properties.put( + SFSessionProperty.OCSP_FAIL_OPEN.getPropertyKey(), String.valueOf(ocspFailOpen)); + } + + public void setPutGetMaxRetries(int putGetMaxRetries) { + this.properties.put( + SFSessionProperty.PUT_GET_MAX_RETRIES.getPropertyKey(), Integer.toString(putGetMaxRetries)); + } + + public void setStringsQuotedForColumnDef(boolean stringsQuotedForColumnDef) { + this.properties.put( + SFSessionProperty.STRINGS_QUOTED.getPropertyKey(), + String.valueOf(stringsQuotedForColumnDef)); + } + + public void setEnableDiagnostics(boolean enableDiagnostics) { + this.properties.put( + SFSessionProperty.ENABLE_DIAGNOSTICS.getPropertyKey(), String.valueOf(enableDiagnostics)); + } + + public void setDiagnosticsAllowlistFile(String diagnosticsAllowlistFile) { + this.properties.put( + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE.getPropertyKey(), diagnosticsAllowlistFile); } } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java index 3c2386b78..930e70039 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java @@ -18,9 +18,13 @@ import java.sql.Timestamp; import java.util.Calendar; import java.util.Map; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; final class SnowflakeCallableStatementV1 extends SnowflakePreparedStatementV1 implements CallableStatement, SnowflakeCallableStatement { + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeCallableStatementV1.class); /** * Construct SnowflakePreparedStatementV1 diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java index ff9eb9003..8f29f5702 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java @@ -124,6 +124,10 @@ public class SnowflakeChunkDownloader implements ChunkDownloader { /** Arrow memory allocator for the current resultSet */ private RootAllocator rootAllocator; + private final String queryId; + + private final int firstChunkRowCount; + static long getCurrentMemoryUsage() { synchronized (currentMemoryUsage) { return currentMemoryUsage.longValue(); @@ -191,7 +195,7 @@ public Thread newThread(final Runnable r) { thread.setUncaughtExceptionHandler( new Thread.UncaughtExceptionHandler() { public void uncaughtException(Thread t, Throwable e) { - logger.error("uncaughtException in thread: " + t + " {}", e); + logger.error("Uncaught Exception in thread {}: {}", t, e); } }); @@ -211,6 +215,8 @@ public void uncaughtException(Thread t, Throwable e) { */ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerializable) throws SnowflakeSQLException { + this.queryId = resultSetSerializable.getQueryId(); + this.firstChunkRowCount = resultSetSerializable.getFirstChunkRowCount(); this.snowflakeConnectionString = resultSetSerializable.getSnowflakeConnectString(); this.ocspMode = resultSetSerializable.getOCSPMode(); this.ocspModeAndProxyKey = resultSetSerializable.getHttpClientKey(); @@ -221,7 +227,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial this.maxHttpRetries = resultSetSerializable.getMaxHttpRetries(); this.prefetchSlots = resultSetSerializable.getResultPrefetchThreads() * 2; this.queryResultFormat = resultSetSerializable.getQueryResultFormat(); - logger.debug("qrmk = {}", this.qrmk); + logger.debug("qrmk: {}", this.qrmk); this.chunkHeadersMap = resultSetSerializable.getChunkHeadersMap(); // session may be null. 
Its only use is for in-band telemetry in this class this.session = @@ -288,8 +294,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial } logger.debug( - "add chunk, url={} rowCount={} uncompressedSize={} " - + "neededChunkMemory={}, chunkResultFormat={}", + "Add chunk: url: {} rowCount: {} uncompressedSize: {} neededChunkMemory: {}, chunkResultFormat: {}", chunk.getScrubbedUrl(), chunk.getRowCount(), chunk.getUncompressedSize(), @@ -305,7 +310,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial resultSetSerializable.getChunkFileCount()); logger.debug( - "#chunks: {} #threads:{} #slots:{} -> pool:{}", + "#chunks: {} #threads: {} #slots: {} -> pool: {}", resultSetSerializable.getChunkFileCount(), resultSetSerializable.getResultPrefetchThreads(), prefetchSlots, @@ -396,7 +401,7 @@ private void startNextDownloaders() throws SnowflakeSQLException { neededChunkMemory); logger.debug( - "submit chunk #{} for downloading, url={}", + "Submit chunk #{} for downloading, url: {}", this.nextChunkToDownload, nextChunk.getScrubbedUrl()); @@ -432,7 +437,9 @@ private void startNextDownloaders() throws SnowflakeSQLException { authTimeout, socketTimeout, maxHttpRetries, - this.session)); + this.session, + chunks.size(), + queryId)); downloaderFutures.put(nextChunkToDownload, downloaderFuture); // increment next chunk to download nextChunkToDownload++; @@ -442,7 +449,7 @@ private void startNextDownloaders() throws SnowflakeSQLException { continue; } else { // cancel the reserved memory - logger.debug("cancel the reserved memory.", false); + logger.debug("Cancel the reserved memory.", false); curMem = currentMemoryUsage.addAndGet(-neededChunkMemory); if (getPrefetchMemRetry > prefetchMaxRetry) { logger.debug( @@ -468,7 +475,7 @@ private void startNextDownloaders() throws SnowflakeSQLException { getPrefetchMemRetry++; if (logger.isDebugEnabled()) { logger.debug( - "Thread {} waiting for {}s: currentMemoryUsage in MB: {}, 
neededChunkMemory in MB:" + "Thread {} waiting for {} s: currentMemoryUsage in MB: {}, neededChunkMemory in MB:" + " {}, nextChunkToDownload: {}, nextChunkToConsume: {}, retry: {}", (ArgSupplier) () -> Thread.currentThread().getId(), waitingTime / 1000.0, @@ -507,7 +514,7 @@ private void releaseCurrentMemoryUsage(int chunkId, Optional optionalRelea // has to be before reusing the memory long curMem = currentMemoryUsage.addAndGet(-releaseSize); logger.debug( - "Thread {}: currentMemoryUsage in MB: {}, released in MB: {}, " + "Thread {} - currentMemoryUsage in MB: {}, released in MB: {}, " + "chunk: {}, optionalReleaseSize: {}, JVMFreeMem: {}", (ArgSupplier) () -> Thread.currentThread().getId(), (ArgSupplier) () -> curMem / MB, @@ -549,7 +556,7 @@ public SnowflakeResultChunk getNextChunkToConsume() int prevChunk = this.nextChunkToConsume - 1; // free the chunk data for previous chunk - logger.debug("free chunk data for chunk #{}", prevChunk); + logger.debug("Free chunk data for chunk #{}", prevChunk); long chunkMemUsage = chunks.get(prevChunk).computeNeededChunkMemory(); @@ -573,7 +580,7 @@ public SnowflakeResultChunk getNextChunkToConsume() // if no more chunks, return null if (this.nextChunkToConsume >= this.chunks.size()) { - logger.debug("no more chunk", false); + logger.debug("No more chunk", false); return null; } @@ -591,7 +598,7 @@ public SnowflakeResultChunk getNextChunkToConsume() SnowflakeResultChunk currentChunk = this.chunks.get(nextChunkToConsume); if (currentChunk.getDownloadState() == DownloadState.SUCCESS) { - logger.debug("chunk #{} is ready to consume", nextChunkToConsume); + logger.debug("Chunk #{} is ready to consume", nextChunkToConsume); nextChunkToConsume++; if (nextChunkToConsume == this.chunks.size()) { // make sure to release the last chunk @@ -602,15 +609,15 @@ public SnowflakeResultChunk getNextChunkToConsume() // the chunk we want to consume is not ready yet, wait for it currentChunk.getLock().lock(); try { - logger.debug("#chunk{} is 
not ready to consume", nextChunkToConsume); - logger.debug("consumer get lock to check chunk state", false); + logger.debug("Chunk#{} is not ready to consume", nextChunkToConsume); + logger.debug("Consumer get lock to check chunk state", false); waitForChunkReady(currentChunk); // downloader thread encountered an error if (currentChunk.getDownloadState() == DownloadState.FAILURE) { releaseAllChunkMemoryUsage(); - logger.error("downloader encountered error: {}", currentChunk.getDownloadError()); + logger.error("Downloader encountered error: {}", currentChunk.getDownloadError()); if (currentChunk .getDownloadError() @@ -625,14 +632,14 @@ public SnowflakeResultChunk getNextChunkToConsume() currentChunk.getDownloadError()); } - logger.debug("#chunk{} is ready to consume", nextChunkToConsume); + logger.debug("Chunk#{} is ready to consume", nextChunkToConsume); nextChunkToConsume++; // next chunk to consume is ready for consumption return currentChunk; } finally { - logger.debug("consumer free lock", false); + logger.debug("Consumer free lock", false); boolean terminateDownloader = (currentChunk.getDownloadState() == DownloadState.FAILURE); // release the unlock always @@ -662,7 +669,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru long startTime = System.currentTimeMillis(); while (true) { logger.debug( - "Thread {} is waiting for #chunk{} to be ready, current" + "chunk state is: {}, retry={}", + "Thread {} is waiting for chunk#{} to be ready, current chunk state is: {}, retry: {}", Thread.currentThread().getId(), nextChunkToConsume, currentChunk.getDownloadState(), @@ -677,8 +684,8 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru .await(downloadedConditionTimeoutInSeconds, TimeUnit.SECONDS)) { // if the current chunk has not condition change over the timeout (which is rare) logger.debug( - "Thread {} is timeout for waiting #chunk{} to be ready, current" - + " chunk state is: {}, retry={}, 
scrubbedUrl={}", + "Thread {} is timeout for waiting chunk#{} to be ready, current" + + " chunk state is: {}, retry: {}, scrubbedUrl: {}", Thread.currentThread().getId(), nextChunkToConsume, currentChunk.getDownloadState(), @@ -688,7 +695,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru currentChunk.setDownloadState(DownloadState.FAILURE); currentChunk.setDownloadError( String.format( - "Timeout waiting for the download of #chunk%d(Total chunks: %d) retry=%d scrubbedUrl=%s", + "Timeout waiting for the download of chunk#%d(Total chunks: %d) retry: %d scrubbedUrl: %s", nextChunkToConsume, this.chunks.size(), retry, currentChunk.getScrubbedUrl())); break; } @@ -699,7 +706,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru retry++; // timeout or failed logger.debug( - "Since downloadState is {} Thread {} decides to retry {} time(s) for #chunk{}", + "Since downloadState is {} Thread {} decides to retry {} time(s) for chunk#{}", currentChunk.getDownloadState(), Thread.currentThread().getId(), retry, @@ -733,7 +740,9 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru authTimeout, socketTimeout, maxHttpRetries, - session)); + session, + chunks.size(), + queryId)); downloaderFutures.put(nextChunkToConsume, downloaderFuture); // Only when prefetch fails due to internal memory limitation, nextChunkToDownload // equals nextChunkToConsume. 
In that case we need to increment nextChunkToDownload @@ -750,14 +759,14 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru } } if (currentChunk.getDownloadState() == DownloadState.SUCCESS) { - logger.debug("ready to consume #chunk{}, succeed retry={}", nextChunkToConsume, retry); + logger.debug("Ready to consume chunk#{}, succeed retry={}", nextChunkToConsume, retry); } else if (retry >= maxHttpRetries) { // stop retrying and report failure currentChunk.setDownloadState(DownloadState.FAILURE); currentChunk.setDownloadError( String.format( - "Max retry reached for the download of #chunk%d " - + "(Total chunks: %d) retry=%d, error=%s", + "Max retry reached for the download of chunk#%d " + + "(Total chunks: %d) retry: %d, error: %s", nextChunkToConsume, this.chunks.size(), retry, @@ -814,9 +823,8 @@ public DownloaderMetrics terminate() throws InterruptedException { logger.debug("Executor did not terminate in the specified time.", false); List droppedTasks = executor.shutdownNow(); // optional ** logger.debug( - "Executor was abruptly shut down. " - + droppedTasks.size() - + " tasks will not be executed."); // optional ** + "Executor was abruptly shut down. {} tasks will not be executed.", + droppedTasks.size()); // optional ** } } // Normal flow will never hit here. 
This is only for testing purposes @@ -825,15 +833,32 @@ public DownloaderMetrics terminate() throws InterruptedException { throw (InterruptedException) SnowflakeChunkDownloader.injectedDownloaderException; } } - logger.debug( - "Total milliseconds waiting for chunks: {}, " - + "Total memory used: {}, total download time: {} millisec, " - + "total parsing time: {} milliseconds, total chunks: {}", - numberMillisWaitingForChunks, - Runtime.getRuntime().totalMemory(), + + long totalUncompressedSize = + chunks.stream() + .reduce(0L, (acc, chunk) -> acc + chunk.getUncompressedSize(), Long::sum); + long rowsInChunks = + chunks.stream().reduce(0L, (acc, chunk) -> acc + chunk.getRowCount(), Long::sum); + long chunksSize = chunks.size(); + + logger.info( + "Completed processing {} {} chunks for query {} in {} ms. Download took {} ms (average: {} ms)," + + " parsing took {} ms (average: {} ms). Chunks uncompressed size: {} MB (average: {} MB)," + + " rows in chunks: {} (total: {}, average in chunk: {}), total memory used: {} MB", + chunksSize, + queryResultFormat == QueryResultFormat.ARROW ? 
"ARROW" : "JSON", + queryId, + totalMillisParsingChunks.get() + totalMillisDownloadingChunks.get(), totalMillisDownloadingChunks.get(), - totalMillisParsingChunks.get(), - chunks.size()); + totalMillisDownloadingChunks.get() / chunksSize, + totalMillisParsingChunks, + totalMillisParsingChunks.get() / chunksSize, + totalUncompressedSize / MB, + totalUncompressedSize / MB / chunksSize, + rowsInChunks, + firstChunkRowCount + rowsInChunks, + rowsInChunks / chunksSize, + Runtime.getRuntime().totalMemory() / MB); return new DownloaderMetrics( numberMillisWaitingForChunks, @@ -884,6 +909,8 @@ private void addParsingTime(long parsingTime) { * mainly for logging purpose * @param chunkHeadersMap contains headers needed to be added when downloading from s3 * @param networkTimeoutInMilli network timeout + * @param totalChunks used to log the information of total chunks + * @param queryId used to log the queryId to which the chunk belongs to * @return A callable responsible for downloading chunk */ private static Callable getDownloadChunkCallable( @@ -896,7 +923,9 @@ private static Callable getDownloadChunkCallable( final int authTimeout, final int socketTimeout, final int maxHttpRetries, - final SFBaseSession session) { + final SFBaseSession session, + final int totalChunks, + final String queryId) { ChunkDownloadContext downloadContext = new ChunkDownloadContext( downloader, @@ -934,7 +963,7 @@ private void downloadAndParseChunk(InputStream inputStream) throws SnowflakeSQLE } } catch (Exception ex) { logger.debug( - "Thread {} Exception when parsing result #chunk{}: {}", + "Thread {} Exception when parsing result chunk#{}: {}", Thread.currentThread().getId(), chunkIndex, ex.getLocalizedMessage()); @@ -948,7 +977,7 @@ private void downloadAndParseChunk(InputStream inputStream) throws SnowflakeSQLE } finally { // close the buffer reader will close underlying stream logger.debug( - "Thread {} close input stream for #chunk{}", + "Thread {} close input stream for chunk#{}", 
Thread.currentThread().getId(), chunkIndex); try { @@ -979,7 +1008,7 @@ public Void call() { } logger.debug( - "Downloading #chunk{}, url={}, Thread {}", + "Downloading chunk#{}, url: {}, Thread {}", chunkIndex, resultChunk.getUrl(), Thread.currentThread().getId()); @@ -998,51 +1027,78 @@ public Void call() { InputStream is = downloader.getResultStreamProvider().getInputStream(downloadContext); logger.debug( - "Thread {} start downloading #chunk{}", Thread.currentThread().getId(), chunkIndex); + "Thread {} start downloading chunk#{}", Thread.currentThread().getId(), chunkIndex); downloadAndParseChunk(is); logger.debug( - "Thread {} finish downloading #chunk{}", Thread.currentThread().getId(), chunkIndex); + "Thread {} finish downloading chunk#{}", Thread.currentThread().getId(), chunkIndex); downloader.downloaderFutures.remove(chunkIndex); - logger.debug( - "Finished preparing chunk data for {}, " - + "total download time={}ms, total parse time={}ms", - resultChunk.getScrubbedUrl(), - resultChunk.getDownloadTime(), - resultChunk.getParseTime()); + if (chunkIndex % 5 == 0) { + logger.info( + "Processed {} chunk#{} in {} ms ({} out of {}) for query {}. Download took {} ms, " + + "parsing took {} ms. Chunk uncompressed size: {} kB, cols: {}, rows: {}, scrubbed URL: {}", + downloader.queryResultFormat == QueryResultFormat.ARROW ? "ARROW" : "JSON", + chunkIndex, + resultChunk.getTotalTime(), + chunkIndex + 1, + totalChunks, + queryId, + resultChunk.getDownloadTime(), + resultChunk.getParseTime(), + resultChunk.getUncompressedSize() / 1024, + resultChunk.colCount, + resultChunk.rowCount, + resultChunk.getScrubbedUrl()); + } else { + logger.debug( + "Processed {} chunk#{} in {} ms ({} out of {}) for query {}. Download took {} ms, " + + "parsing took {} ms. Chunk uncompressed size: {} kB, cols: {}, rows: {}, scrubbed URL: {}", + downloader.queryResultFormat == QueryResultFormat.ARROW ? 
"ARROW" : "JSON", + chunkIndex, + resultChunk.getTotalTime(), + chunkIndex + 1, + totalChunks, + queryId, + resultChunk.getDownloadTime(), + resultChunk.getParseTime(), + resultChunk.getUncompressedSize() / 1024, + resultChunk.colCount, + resultChunk.rowCount, + resultChunk.getScrubbedUrl()); + } resultChunk.getLock().lock(); try { - logger.debug("get lock to change the chunk to be ready to consume", false); + logger.debug("Get lock to change the chunk to be ready to consume", false); - logger.debug("wake up consumer if it is waiting for a chunk to be " + "ready", false); + logger.debug("Wake up consumer if it is waiting for a chunk to be ready", false); resultChunk.setDownloadState(DownloadState.SUCCESS); resultChunk.getDownloadCondition().signal(); } finally { - logger.debug("Downloaded #chunk{}, free lock", chunkIndex); + logger.debug("Downloaded chunk#{}, free lock", chunkIndex); resultChunk.getLock().unlock(); } } catch (Throwable th) { resultChunk.getLock().lock(); try { - logger.debug("get lock to set chunk download error", false); + logger.debug("Get lock to set chunk download error", false); resultChunk.setDownloadState(DownloadState.FAILURE); downloader.releaseCurrentMemoryUsage(chunkIndex, Optional.empty()); StringWriter errors = new StringWriter(); th.printStackTrace(new PrintWriter(errors)); resultChunk.setDownloadError(errors.toString()); - logger.debug("wake up consumer if it is waiting for a chunk to be ready", false); + logger.debug("Wake up consumer if it is waiting for a chunk to be ready", false); resultChunk.getDownloadCondition().signal(); } finally { - logger.debug("Failed to download #chunk{}, free lock", chunkIndex); + logger.debug("Failed to download chunk#{}, free lock", chunkIndex); resultChunk.getLock().unlock(); } logger.debug( - "Thread {} Exception encountered ({}:{}) fetching #chunk{} from: {}, Error {}", + "Thread {} Exception encountered ({}:{}) fetching chunk#{} from: {}, Error {}", Thread.currentThread().getId(), 
th.getClass().getName(), th.getLocalizedMessage(), @@ -1078,7 +1134,7 @@ private void parseJsonToChunkV2(InputStream jsonInputStream, SnowflakeResultChun ByteBuffer bBuf = null; int len; logger.debug( - "Thread {} start to read inputstream for #chunk{}", + "Thread {} start to read inputstream for chunk#{}", Thread.currentThread().getId(), chunkIndex); while ((len = jsonInputStream.read(buf)) != -1) { @@ -1103,7 +1159,7 @@ private void parseJsonToChunkV2(InputStream jsonInputStream, SnowflakeResultChun } } logger.debug( - "Thread {} finish reading inputstream for #chunk{}", + "Thread {} finish reading inputstream for chunk#{}", Thread.currentThread().getId(), chunkIndex); if (prevBuffer != null) { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java new file mode 100644 index 000000000..10f06dafa --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java @@ -0,0 +1,74 @@ +package net.snowflake.client.jdbc; + +import static java.lang.annotation.ElementType.FIELD; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target({FIELD}) +@Retention(RetentionPolicy.RUNTIME) +public @interface SnowflakeColumn { + + /** + * (Optional) The name for a column in database, + * + *

The default value is empty string. Provided name can override SqlData field name + */ + String name() default ""; + + /** + * (Optional) The snowflake type for a column + * + *

The default value is empty string Provided type can override default type + */ + String type() default ""; + + /** + * (Optional) The snowflake nullable flag for a column + * + *

The default value is true Provided value can override default nullable value + */ + boolean nullable() default true; + + /** + * (Optional) The length for a column of SQL type {@code varchar} or {@code binary}, or of similar + * database-native type. + * + *

Applies only to columns of exact varchar and binary type. + * + *

The default value {@code -1} indicates that a provider-determined length should be inferred. + */ + int length() default -1; + /** + * (Optional) The length for a column of SQL type {@code binary}, or of similar database-native + * type. + * + *

Applies only to columns of exact varchar and binary type. + * + *

The default value {@code -1} indicates that a provider-determined byteLength should be + * inferred. + */ + int byteLength() default -1; + + /** + * (Optional) The precision for a column of SQL type {@code decimal} or {@code numeric}, or of + * similar database-native type. + * + *

Applies only to columns of exact numeric type. + * + *

The default value {@code -1} indicates that a provider-determined precision should be + * inferred. + */ + int precision() default -1; + + /** + * (Optional) The scale for a column of SQL type {@code decimal} or {@code numeric}, or of similar + * database-native type. + * + *

Applies only to columns of exact numeric type. + * + *

The default value {@code 0} indicates that a provider-determined scale should be inferred. + */ + int scale() default -1; +} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java index 9f182772e..9f1cd272e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java @@ -28,6 +28,7 @@ public class SnowflakeColumnMetadata implements Serializable { private String columnSrcDatabase; private boolean isAutoIncrement; + private int dimension; // vector type contains dimension @SnowflakeJdbcInternalApi public SnowflakeColumnMetadata( @@ -44,7 +45,8 @@ public SnowflakeColumnMetadata( String columnSrcDatabase, String columnSrcSchema, String columnSrcTable, - boolean isAutoIncrement) { + boolean isAutoIncrement, + int dimension) { this.name = name; this.type = type; this.nullable = nullable; @@ -59,11 +61,12 @@ public SnowflakeColumnMetadata( this.columnSrcSchema = columnSrcSchema; this.columnSrcTable = columnSrcTable; this.isAutoIncrement = isAutoIncrement; + this.dimension = dimension; } /** * @deprecated Use {@link SnowflakeColumnMetadata#SnowflakeColumnMetadata(String, int, boolean, - * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean)} + * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean, int)} * instead */ @Deprecated @@ -194,6 +197,11 @@ public void setAutoIncrement(boolean autoIncrement) { isAutoIncrement = autoIncrement; } + @SnowflakeJdbcInternalApi + public int getDimension() { + return dimension; + } + public String toString() { StringBuilder sBuilder = new StringBuilder(); @@ -209,6 +217,7 @@ public String toString() { sBuilder.append(",schema=").append(columnSrcSchema); sBuilder.append(",table=").append(columnSrcTable); sBuilder.append((",isAutoIncrement=")).append(isAutoIncrement); + 
sBuilder.append((",dimension=")).append(dimension); return sBuilder.toString(); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java index 9d7da8071..ea456be6c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java @@ -21,14 +21,13 @@ public class SnowflakeConnectString implements Serializable { private static final long serialVersionUID = 1L; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeConnectString.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeConnectString.class); private final String scheme; private final String host; private final int port; private final Map parameters; private final String account; - private static SnowflakeConnectString INVALID_CONNECT_STRING = new SnowflakeConnectString("", "", -1, Collections.emptyMap(), ""); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java index 0da7d44c8..473aa2041 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java @@ -18,6 +18,7 @@ import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverPropertyInfo; +import java.sql.JDBCType; import java.sql.NClob; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -43,9 +44,11 @@ import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; +import net.snowflake.client.core.SfSqlArray; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.log.SFLoggerUtil; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.SqlState; /** 
Snowflake connection implementation */ @@ -136,11 +139,19 @@ public SnowflakeConnectionV1(String url, Properties info, boolean fakeConnection private void initConnectionWithImpl( SFConnectionHandler sfConnectionHandler, String url, Properties info) throws SQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + logger.info("Initializing new connection"); this.sfConnectionHandler = sfConnectionHandler; sfConnectionHandler.initializeConnection(url, info); this.sfSession = sfConnectionHandler.getSFSession(); missingProperties = sfSession.checkProperties(); this.showStatementParameters = sfSession.getPreparedStatementLogging(); + stopwatch.stop(); + logger.info( + "Connection initialized successfully in {} ms. Session id: {}", + stopwatch.elapsedMillis(), + sfSession.getSessionId()); } public List returnMissingProperties() { @@ -219,9 +230,19 @@ public String[] getChildQueryIds(String queryID) throws SQLException { */ @Override public void close() throws SQLException { - logger.debug(" public void close()", false); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + String sessionId = null; + + if (sfSession != null) { + sessionId = sfSession.getSessionId(); + logger.info("Closing connection with session id: {}", sessionId); + } else { + logger.debug("Closing connection without associated session"); + } if (isClosed) { + logger.debug("Connection is already closed"); // No exception is raised even if the connection is closed. 
return; } @@ -233,6 +254,9 @@ public void close() throws SQLException { sfSession = null; } // make sure to close all created statements + if (!openStatements.isEmpty()) { + logger.debug("Closing {} opened statements", openStatements.size()); + } for (Statement stmt : openStatements) { if (stmt != null && !stmt.isClosed()) { if (stmt.isWrapperFor(SnowflakeStatementV1.class)) { @@ -242,12 +266,20 @@ public void close() throws SQLException { } } } + if (!openStatements.isEmpty()) { + logger.debug("Statements closed successfully"); + } openStatements.clear(); } catch (SFException ex) { throw new SnowflakeSQLLoggedException( sfSession, ex.getSqlState(), ex.getVendorCode(), ex.getCause(), ex.getParams()); } + stopwatch.stop(); + logger.info( + "Connection with session id: {} closed successfully in {} ms", + sessionId, + stopwatch.elapsedMillis()); } public String getSessionID() throws SQLException { @@ -259,7 +291,7 @@ public String getSessionID() throws SQLException { @Override public boolean isClosed() throws SQLException { - logger.debug(" public boolean isClosed()", false); + logger.trace("boolean isClosed()", false); return isClosed; } @@ -272,14 +304,14 @@ public boolean isClosed() throws SQLException { */ @Override public DatabaseMetaData getMetaData() throws SQLException { - logger.debug(" public DatabaseMetaData getMetaData()", false); + logger.trace("DatabaseMetaData getMetaData()", false); raiseSQLExceptionIfConnectionIsClosed(); return new SnowflakeDatabaseMetaData(this); } @Override public CallableStatement prepareCall(String sql) throws SQLException { - logger.debug(" public CallableStatement prepareCall(String sql)", false); + logger.trace("CallableStatement prepareCall(String sql)", false); raiseSQLExceptionIfConnectionIsClosed(); CallableStatement stmt = prepareCall(sql, false); openStatements.add(stmt); @@ -287,7 +319,7 @@ public CallableStatement prepareCall(String sql) throws SQLException { } public CallableStatement prepareCall(String sql, boolean 
skipParsing) throws SQLException { - logger.debug(" public CallableStatement prepareCall(String sql, boolean skipParsing)", false); + logger.trace("CallableStatement prepareCall(String sql, boolean skipParsing)", false); raiseSQLExceptionIfConnectionIsClosed(); CallableStatement stmt = new SnowflakeCallableStatementV1( @@ -304,9 +336,8 @@ public CallableStatement prepareCall(String sql, boolean skipParsing) throws SQL @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - logger.debug( - " public CallableStatement prepareCall(String sql," - + " int resultSetType,int resultSetConcurrency", + logger.trace( + "CallableStatement prepareCall(String sql," + " int resultSetType,int resultSetConcurrency", false); CallableStatement stmt = prepareCall(sql, resultSetType, resultSetConcurrency, ResultSet.CLOSE_CURSORS_AT_COMMIT); @@ -318,8 +349,7 @@ public CallableStatement prepareCall(String sql, int resultSetType, int resultSe public CallableStatement prepareCall( String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug( - " public CallableStatement prepareCall(String sql, int " + "resultSetType,", false); + logger.trace("CallableStatement prepareCall(String sql, int " + "resultSetType,", false); CallableStatement stmt = new SnowflakeCallableStatementV1( this, sql, false, resultSetType, resultSetConcurrency, resultSetHoldability); @@ -329,21 +359,21 @@ public CallableStatement prepareCall( @Override public String nativeSQL(String sql) throws SQLException { - logger.debug("public String nativeSQL(String sql)", false); + logger.trace("String nativeSQL(String sql)", false); raiseSQLExceptionIfConnectionIsClosed(); return sql; } @Override public boolean getAutoCommit() throws SQLException { - logger.debug("boolean getAutoCommit()", false); + logger.trace("boolean getAutoCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return 
sfSession.getAutoCommit(); } @Override public void setAutoCommit(boolean isAutoCommit) throws SQLException { - logger.debug("void setAutoCommit(boolean isAutoCommit)", false); + logger.trace("void setAutoCommit(boolean isAutoCommit)", false); boolean currentAutoCommit = this.getAutoCommit(); if (isAutoCommit != currentAutoCommit) { sfSession.setAutoCommit(isAutoCommit); @@ -355,33 +385,33 @@ public void setAutoCommit(boolean isAutoCommit) throws SQLException { @Override public void commit() throws SQLException { - logger.debug("void commit()", false); + logger.trace("void commit()", false); this.executeImmediate("commit"); } @Override public void rollback() throws SQLException { - logger.debug("void rollback()", false); + logger.trace("void rollback()", false); this.executeImmediate("rollback"); } @Override public void rollback(Savepoint savepoint) throws SQLException { - logger.debug("void rollback(Savepoint savepoint)", false); + logger.trace("void rollback(Savepoint savepoint)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); } @Override public boolean isReadOnly() throws SQLException { - logger.debug("boolean isReadOnly()", false); + logger.trace("boolean isReadOnly()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public void setReadOnly(boolean readOnly) throws SQLException { - logger.debug("void setReadOnly(boolean readOnly)", false); + logger.trace("void setReadOnly(boolean readOnly)", false); raiseSQLExceptionIfConnectionIsClosed(); if (readOnly) { logger.debug("setReadOnly not supported.", false); @@ -396,7 +426,7 @@ public String getCatalog() throws SQLException { @Override public void setCatalog(String catalog) throws SQLException { - logger.debug("void setCatalog(String catalog)", false); + logger.trace("void setCatalog(String catalog)", false); // switch db by running "use db" this.executeImmediate("use database \"" + catalog + "\""); @@ -404,7 +434,7 @@ public void setCatalog(String catalog) 
throws SQLException { @Override public int getTransactionIsolation() throws SQLException { - logger.debug("int getTransactionIsolation()", false); + logger.trace("int getTransactionIsolation()", false); raiseSQLExceptionIfConnectionIsClosed(); return this.transactionIsolation; } @@ -417,7 +447,7 @@ public int getTransactionIsolation() throws SQLException { */ @Override public void setTransactionIsolation(int level) throws SQLException { - logger.debug("void setTransactionIsolation(int level), level = {}", level); + logger.trace("void setTransactionIsolation(int level), level = {}", level); raiseSQLExceptionIfConnectionIsClosed(); if (level == Connection.TRANSACTION_NONE || level == Connection.TRANSACTION_READ_COMMITTED) { this.transactionIsolation = level; @@ -431,14 +461,14 @@ public void setTransactionIsolation(int level) throws SQLException { @Override public SQLWarning getWarnings() throws SQLException { - logger.debug("SQLWarning getWarnings()", false); + logger.trace("SQLWarning getWarnings()", false); raiseSQLExceptionIfConnectionIsClosed(); return sqlWarnings; } @Override public void clearWarnings() throws SQLException { - logger.debug("void clearWarnings()", false); + logger.trace("void clearWarnings()", false); raiseSQLExceptionIfConnectionIsClosed(); sfSession.clearSqlWarnings(); sqlWarnings = null; @@ -447,7 +477,7 @@ public void clearWarnings() throws SQLException { @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - logger.debug( + logger.trace( "Statement createStatement(int resultSetType, " + "int resultSetConcurrency)", false); Statement stmt = @@ -459,7 +489,7 @@ public Statement createStatement(int resultSetType, int resultSetConcurrency) @Override public Statement createStatement( int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug( + logger.trace( "Statement createStatement(int resultSetType, " + "int resultSetConcurrency, int 
resultSetHoldability", false); @@ -472,7 +502,7 @@ public Statement createStatement( @Override public PreparedStatement prepareStatement(String sql) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql)", false); + logger.trace("PreparedStatement prepareStatement(String sql)", false); raiseSQLExceptionIfConnectionIsClosed(); PreparedStatement stmt = prepareStatement(sql, false); openStatements.add(stmt); @@ -481,7 +511,7 @@ public PreparedStatement prepareStatement(String sql) throws SQLException { @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug( + logger.trace( "PreparedStatement prepareStatement(String sql, " + "int autoGeneratedKeys)", false); if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) { @@ -493,14 +523,14 @@ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) thr @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, " + "int[] columnIndexes)", false); + logger.trace("PreparedStatement prepareStatement(String sql, " + "int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); } @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { - logger.debug( + logger.trace( "PreparedStatement prepareStatement(String sql, " + "String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); @@ -509,7 +539,7 @@ public PreparedStatement prepareStatement(String sql, String[] columnNames) thro @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, " + "int resultSetType,", false); + logger.trace("PreparedStatement prepareStatement(String sql, " + "int 
resultSetType,", false); PreparedStatement stmt = prepareStatement( @@ -522,7 +552,7 @@ public PreparedStatement prepareStatement(String sql, int resultSetType, int res public PreparedStatement prepareStatement( String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, " + "int resultSetType,", false); + logger.trace("PreparedStatement prepareStatement(String sql, " + "int resultSetType,", false); PreparedStatement stmt = new SnowflakePreparedStatementV1( @@ -532,7 +562,7 @@ public PreparedStatement prepareStatement( } public PreparedStatement prepareStatement(String sql, boolean skipParsing) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, boolean skipParsing)", false); + logger.trace("PreparedStatement prepareStatement(String sql, boolean skipParsing)", false); raiseSQLExceptionIfConnectionIsClosed(); PreparedStatement stmt = new SnowflakePreparedStatementV1( @@ -654,7 +684,7 @@ private void raiseSetClientInfoException(Map failedPro @Override public Properties getClientInfo() throws SQLException { - logger.debug("Properties getClientInfo()", false); + logger.trace("Properties getClientInfo()", false); raiseSQLExceptionIfConnectionIsClosed(); // sfSession must not be null if the connection is not closed. return sfSession.getClientInfo(); @@ -673,7 +703,7 @@ public void setClientInfo(Properties properties) throws SQLClientInfoException { @Override public String getClientInfo(String name) throws SQLException { - logger.debug("String getClientInfo(String name)", false); + logger.trace("String getClientInfo(String name)", false); raiseSQLExceptionIfConnectionIsClosed(); // sfSession must not be null if the connection is not closed. 
@@ -682,14 +712,13 @@ public String getClientInfo(String name) throws SQLException { @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { - logger.debug("Array createArrayOf(String typeName, Object[] " + "elements)", false); - - throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); + logger.trace("Array createArrayOf(String typeName, Object[] " + "elements)", false); + return new SfSqlArray(JDBCType.valueOf(typeName).getVendorTypeNumber(), elements); } @Override public Struct createStruct(String typeName, Object[] attributes) throws SQLException { - logger.debug("Struct createStruct(String typeName, Object[] " + "attributes)", false); + logger.trace("Struct createStruct(String typeName, Object[] " + "attributes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); } @@ -702,7 +731,7 @@ public String getSchema() throws SQLException { @Override public void setSchema(String schema) throws SQLException { - logger.debug("void setSchema(String schema)", false); + logger.trace("void setSchema(String schema)", false); String databaseName = getCatalog(); @@ -716,14 +745,14 @@ public void setSchema(String schema) throws SQLException { @Override public void abort(Executor executor) throws SQLException { - logger.debug("void abort(Executor executor)", false); + logger.trace("void abort(Executor executor)", false); close(); } @Override public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { - logger.debug("void setNetworkTimeout(Executor executor, int " + "milliseconds)", false); + logger.trace("void setNetworkTimeout(Executor executor, int " + "milliseconds)", false); raiseSQLExceptionIfConnectionIsClosed(); networkTimeoutInMilli = milliseconds; @@ -731,14 +760,14 @@ public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLExc @Override public int getNetworkTimeout() throws SQLException { - logger.debug("int getNetworkTimeout()", false); + 
logger.trace("int getNetworkTimeout()", false); raiseSQLExceptionIfConnectionIsClosed(); return networkTimeoutInMilli; } @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -746,7 +775,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug(" T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -872,7 +901,7 @@ private void uploadStreamInternal( boolean compressData) throws SQLException { logger.debug( - "upload data from stream: stageName={}" + ", destPrefix={}, destFileName={}", + "Upload data from stream: stageName={}" + ", destPrefix={}, destFileName={}", stageName, destPrefix, destFileName); @@ -941,7 +970,7 @@ public InputStream downloadStream(String stageName, String sourceFileName, boole throws SQLException { logger.debug( - "download data to stream: stageName={}" + ", sourceFileName={}", stageName, sourceFileName); + "Download data to stream: stageName={}" + ", sourceFileName={}", stageName, sourceFileName); if (Strings.isNullOrEmpty(stageName)) { throw new SnowflakeSQLLoggedException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index 34df34067..acfb3e4f7 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -35,6 +35,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; import net.snowflake.client.core.ObjectMapperFactory; @@ -52,7 +53,7 @@ public 
class SnowflakeDatabaseMetaData implements DatabaseMetaData { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDatabaseMetaData.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDatabaseMetaData.class); private static final ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); @@ -94,27 +95,57 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { // These are keywords not in SQL2003 standard private static final String notSQL2003Keywords = - "ACCOUNT,DATABASE,SCHEMA,VIEW,ISSUE,DATE_PART,EXTRACT," - + "POSITION,TRY_CAST,BIT,DATETIME,NUMBERC,OBJECT,BYTEINT,STRING,TEXT," - + "TIMESTAMPLTZ,TIMESTAMPNTZ,TIMESTAMPTZ,TIMESTAMP_LTZ,TIMESTAMP_NTZ,TIMESTAMP_TZ,TINYINT," - + "VARBINARY,VARIANT,ACCOUNTS,ACTION,ACTIVATE,ASC,AUTOINCREMENT,BEFORE," - + "BUILTIN,BYTE,CACHE,CHANGE,CLEAREPCACHE,CLONE,CLUSTER,CLUSTERS,COLUMNS,COMMENT," - + "COMPRESSION,CONSTRAINTS,COPY,CP,CREDENTIALS,D,DATA,DATABASES,DEFERRABLE," - + "DEFERRED,DELIMITED,DESC,DIRECTORY,DISABLE,DUAL,ENABLE,ENFORCED," - + "EXCLUSIVE,EXPLAIN,EXPORTED,FAIL,FIELDS,FILE,FILES,FIRST,FN,FORCE,FORMAT," - + "FORMATS,FUNCTIONS,GRANTS,GSINSTANCE,GSINSTANCES,HELP,HIBERNATE,HINTS," - + "HISTORY,IDENTIFIED,IMMUTABLE,IMPORTED,INCIDENT,INCIDENTS,INFO,INITIALLY," - + "ISSUES,KEEP,KEY,KEYS,LAST,LIMIT,LIST,LOAD,LOCATION,LOCK,LOCKS,LS,MANAGE,MAP,MATCHED," - + "MATERIALIZED,MODIFY,MONITOR,MONITORS,NAME,NETWORK,NEXT,NORELY,NOTIFY,NOVALIDATE,NULLS,OBJECTS," - + "OFFSET,OJ,OPERATE,OPERATION,OPTION,OWNERSHIP,PARAMETERS,PARTIAL," - + "PERCENT,PLAN,PLUS,POLICIES,POLICY,POOL,PRESERVE,PRIVILEGES,PUBLIC,PURGE,PUT,QUIESCE," - + "READ,RECLUSTER,REFERENCE,RELY,REMOVE,RENAME,REPLACE,REPLACE_FAIL,RESOURCE," - + "RESTART,RESTORE,RESTRICT,RESUME,REWRITE,RM,ROLE,ROLES,RULE,SAMPLE,SCHEMAS,SEMI," - + "SEQUENCE,SEQUENCES,SERVER,SERVERS,SESSION,SETLOGLEVEL,SETS,SFC,SHARE,SHARED,SHARES,SHOW,SHUTDOWN,SIMPLE,SORT," - + 
"STAGE,STAGES,STATEMENT,STATISTICS,STOP,STORED,STRICT,STRUCT,SUSPEND,SUSPEND_IMMEDIATE,SWAP,SWITCH,T," - + "TABLES,TEMP,TEMPORARY,TRANSACTION,TRANSACTIONS,TRANSIENT,TRIGGERS,TRUNCATE,TS,TYPE,UNDROP,UNLOCK,UNSET," - + "UPGRADE,USAGE,USE,USERS,UTC,UTCTIMESTAMP,VALIDATE,VARIABLES,VERSION,VIEWS,VOLATILE,VOLUME," - + "VOLUMES,WAREHOUSE,WAREHOUSES,WARN,WORK,WRITE,ZONE,INCREMENT,MINUS,REGEXP,RLIKE"; + String.join( + ",", + "ACCOUNT", + "ASOF", + "BIT", + "BYTEINT", + "CONNECTION", + "DATABASE", + "DATETIME", + "DATE_PART", + "FIXED", + "FOLLOWING", + "GSCLUSTER", + "GSPACKAGE", + "IDENTIFIER", + "ILIKE", + "INCREMENT", + "ISSUE", + "LONG", + "MAP", + "MATCH_CONDITION", + "MINUS", + "NUMBER", + "OBJECT", + "ORGANIZATION", + "QUALIFY", + "REFERENCE", + "REGEXP", + "RLIKE", + "SAMPLE", + "SCHEMA", + "STRING", + "TEXT", + "TIMESTAMPLTZ", + "TIMESTAMPNTZ", + "TIMESTAMPTZ", + "TIMESTAMP_LTZ", + "TIMESTAMP_NTZ", + "TIMESTAMP_TZ", + "TINYINT", + "TRANSIT", + "TRY_CAST", + "VARIANT", + "VECTOR", + "VIEW"); + + private static final String MAX_VARCHAR_BINARY_SIZE_PARAM_NAME = + "VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"; + + // Defaults to 16MB + private static final int DEFAULT_MAX_LOB_SIZE = 16777216; private final Connection connection; @@ -138,7 +169,7 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { private boolean isPatternMatchingEnabled = true; SnowflakeDatabaseMetaData(Connection connection) throws SQLException { - logger.debug("public SnowflakeDatabaseMetaData(SnowflakeConnection connection)", false); + logger.trace("SnowflakeDatabaseMetaData(SnowflakeConnection connection)", false); this.connection = connection; this.session = connection.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); @@ -249,21 +280,21 @@ private boolean isSchemaNameWildcardPattern(String inputString) { @Override public boolean allProceduresAreCallable() throws SQLException { - logger.debug("public boolean allProceduresAreCallable()", false); + logger.trace("boolean 
allProceduresAreCallable()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean allTablesAreSelectable() throws SQLException { - logger.debug("public boolean allTablesAreSelectable()", false); + logger.trace("boolean allTablesAreSelectable()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getURL() throws SQLException { - logger.debug("public String getURL()", false); + logger.trace("String getURL()", false); raiseSQLExceptionIfConnectionIsClosed(); String url = session.getUrl(); return url.startsWith("http://") @@ -273,14 +304,14 @@ public String getURL() throws SQLException { @Override public String getUserName() throws SQLException { - logger.debug("public String getUserName()", false); + logger.trace("String getUserName()", false); raiseSQLExceptionIfConnectionIsClosed(); return session.getUser(); } @Override public boolean isReadOnly() throws SQLException { - logger.debug("public boolean isReadOnly()", false); + logger.trace("boolean isReadOnly()", false); raiseSQLExceptionIfConnectionIsClosed(); // no read only mode is supported. 
return false; @@ -288,56 +319,56 @@ public boolean isReadOnly() throws SQLException { @Override public boolean nullsAreSortedHigh() throws SQLException { - logger.debug("public boolean nullsAreSortedHigh()", false); + logger.trace("boolean nullsAreSortedHigh()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean nullsAreSortedLow() throws SQLException { - logger.debug("public boolean nullsAreSortedLow()", false); + logger.trace("boolean nullsAreSortedLow()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean nullsAreSortedAtStart() throws SQLException { - logger.debug("public boolean nullsAreSortedAtStart()", false); + logger.trace("boolean nullsAreSortedAtStart()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean nullsAreSortedAtEnd() throws SQLException { - logger.debug("public boolean nullsAreSortedAtEnd()", false); + logger.trace("boolean nullsAreSortedAtEnd()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public String getDatabaseProductName() throws SQLException { - logger.debug("public String getDatabaseProductName()", false); + logger.trace("String getDatabaseProductName()", false); raiseSQLExceptionIfConnectionIsClosed(); return DatabaseProductName; } @Override public String getDatabaseProductVersion() throws SQLException { - logger.debug("public String getDatabaseProductVersion()", false); + logger.trace("String getDatabaseProductVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection.unwrap(SnowflakeConnectionV1.class).getDatabaseVersion(); } @Override public String getDriverName() throws SQLException { - logger.debug("public String getDriverName()", false); + logger.trace("String getDriverName()", false); raiseSQLExceptionIfConnectionIsClosed(); return DriverName; } @Override public String getDriverVersion() throws SQLException { - logger.debug("public String getDriverVersion()", 
false); + logger.trace("String getDriverVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return SnowflakeDriver.majorVersion + "." @@ -348,705 +379,708 @@ public String getDriverVersion() throws SQLException { @Override public int getDriverMajorVersion() { - logger.debug("public int getDriverMajorVersion()", false); + logger.trace("int getDriverMajorVersion()", false); return SnowflakeDriver.majorVersion; } @Override public int getDriverMinorVersion() { - logger.debug("public int getDriverMinorVersion()", false); + logger.trace("int getDriverMinorVersion()", false); return SnowflakeDriver.minorVersion; } @Override public boolean usesLocalFiles() throws SQLException { - logger.debug("public boolean usesLocalFiles()", false); + logger.trace("boolean usesLocalFiles()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean usesLocalFilePerTable() throws SQLException { - logger.debug("public boolean usesLocalFilePerTable()", false); + logger.trace("boolean usesLocalFilePerTable()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMixedCaseIdentifiers() throws SQLException { - logger.debug("public boolean supportsMixedCaseIdentifiers()", false); + logger.trace("boolean supportsMixedCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesUpperCaseIdentifiers() throws SQLException { - logger.debug("public boolean storesUpperCaseIdentifiers()", false); + logger.trace("boolean storesUpperCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean storesLowerCaseIdentifiers() throws SQLException { - logger.debug("public boolean storesLowerCaseIdentifiers()", false); + logger.trace("boolean storesLowerCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesMixedCaseIdentifiers() throws SQLException { - 
logger.debug("public boolean storesMixedCaseIdentifiers()", false); + logger.trace("boolean storesMixedCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean supportsMixedCaseQuotedIdentifiers()", false); + logger.trace("boolean supportsMixedCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean storesUpperCaseQuotedIdentifiers()", false); + logger.trace("boolean storesUpperCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean storesLowerCaseQuotedIdentifiers()", false); + logger.trace("boolean storesLowerCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean storesMixedCaseQuotedIdentifiers()", false); + logger.trace("boolean storesMixedCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getIdentifierQuoteString() throws SQLException { - logger.debug("public String getIdentifierQuoteString()", false); + logger.trace("String getIdentifierQuoteString()", false); raiseSQLExceptionIfConnectionIsClosed(); return "\""; } @Override public String getSQLKeywords() throws SQLException { - logger.debug("public String getSQLKeywords()", false); + logger.trace("String getSQLKeywords()", false); raiseSQLExceptionIfConnectionIsClosed(); return notSQL2003Keywords; } @Override public String getNumericFunctions() throws SQLException { - logger.debug("public String getNumericFunctions()", false); + logger.trace("String 
getNumericFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return NumericFunctionsSupported; } @Override public String getStringFunctions() throws SQLException { - logger.debug("public String getStringFunctions()", false); + logger.trace("String getStringFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return StringFunctionsSupported; } @Override public String getSystemFunctions() throws SQLException { - logger.debug("public String getSystemFunctions()", false); + logger.trace("String getSystemFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return SystemFunctionsSupported; } @Override public String getTimeDateFunctions() throws SQLException { - logger.debug("public String getTimeDateFunctions()", false); + logger.trace("String getTimeDateFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return DateAndTimeFunctionsSupported; } @Override public String getSearchStringEscape() throws SQLException { - logger.debug("public String getSearchStringEscape()", false); + logger.trace("String getSearchStringEscape()", false); raiseSQLExceptionIfConnectionIsClosed(); return Character.toString(SEARCH_STRING_ESCAPE); } @Override public String getExtraNameCharacters() throws SQLException { - logger.debug("public String getExtraNameCharacters()", false); + logger.trace("String getExtraNameCharacters()", false); raiseSQLExceptionIfConnectionIsClosed(); return "$"; } @Override public boolean supportsAlterTableWithAddColumn() throws SQLException { - logger.debug("public boolean supportsAlterTableWithAddColumn()", false); + logger.trace("boolean supportsAlterTableWithAddColumn()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsAlterTableWithDropColumn() throws SQLException { - logger.debug("public boolean supportsAlterTableWithDropColumn()", false); + logger.trace("boolean supportsAlterTableWithDropColumn()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override 
public boolean supportsColumnAliasing() throws SQLException { - logger.debug("public boolean supportsColumnAliasing()", false); + logger.trace("boolean supportsColumnAliasing()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean nullPlusNonNullIsNull() throws SQLException { - logger.debug("public boolean nullPlusNonNullIsNull()", false); + logger.trace("boolean nullPlusNonNullIsNull()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsConvert() throws SQLException { - logger.debug("public boolean supportsConvert()", false); + logger.trace("boolean supportsConvert()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsConvert(int fromType, int toType) throws SQLException { - logger.debug("public boolean supportsConvert(int fromType, int toType)", false); + logger.trace("boolean supportsConvert(int fromType, int toType)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsTableCorrelationNames() throws SQLException { - logger.debug("public boolean supportsTableCorrelationNames()", false); + logger.trace("boolean supportsTableCorrelationNames()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsDifferentTableCorrelationNames() throws SQLException { - logger.debug("public boolean supportsDifferentTableCorrelationNames()", false); + logger.trace("boolean supportsDifferentTableCorrelationNames()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsExpressionsInOrderBy() throws SQLException { - logger.debug("public boolean supportsExpressionsInOrderBy()", false); + logger.trace("boolean supportsExpressionsInOrderBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsOrderByUnrelated() throws SQLException { - logger.debug("public boolean 
supportsOrderByUnrelated()", false); + logger.trace("boolean supportsOrderByUnrelated()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsGroupBy() throws SQLException { - logger.debug("public boolean supportsGroupBy()", false); + logger.trace("boolean supportsGroupBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsGroupByUnrelated() throws SQLException { - logger.debug("public boolean supportsGroupByUnrelated()", false); + logger.trace("boolean supportsGroupByUnrelated()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsGroupByBeyondSelect() throws SQLException { - logger.debug("public boolean supportsGroupByBeyondSelect()", false); + logger.trace("boolean supportsGroupByBeyondSelect()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsLikeEscapeClause() throws SQLException { - logger.debug("public boolean supportsLikeEscapeClause()", false); + logger.trace("boolean supportsLikeEscapeClause()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMultipleResultSets() throws SQLException { - logger.debug("public boolean supportsMultipleResultSets()", false); + logger.trace("boolean supportsMultipleResultSets()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMultipleTransactions() throws SQLException { - logger.debug("public boolean supportsMultipleTransactions()", false); + logger.trace("boolean supportsMultipleTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsNonNullableColumns() throws SQLException { - logger.debug("public boolean supportsNonNullableColumns()", false); + logger.trace("boolean supportsNonNullableColumns()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public 
boolean supportsMinimumSQLGrammar() throws SQLException { - logger.debug("public boolean supportsMinimumSQLGrammar()", false); + logger.trace("boolean supportsMinimumSQLGrammar()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCoreSQLGrammar() throws SQLException { - logger.debug("public boolean supportsCoreSQLGrammar()", false); + logger.trace("boolean supportsCoreSQLGrammar()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsExtendedSQLGrammar() throws SQLException { - logger.debug("public boolean supportsExtendedSQLGrammar()", false); + logger.trace("boolean supportsExtendedSQLGrammar()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsANSI92EntryLevelSQL() throws SQLException { - logger.debug("public boolean supportsANSI92EntryLevelSQL()", false); + logger.trace("boolean supportsANSI92EntryLevelSQL()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsANSI92IntermediateSQL() throws SQLException { - logger.debug("public boolean supportsANSI92IntermediateSQL()", false); + logger.trace("boolean supportsANSI92IntermediateSQL()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsANSI92FullSQL() throws SQLException { - logger.debug("public boolean supportsANSI92FullSQL()", false); + logger.trace("boolean supportsANSI92FullSQL()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsIntegrityEnhancementFacility() throws SQLException { - logger.debug("public boolean supportsIntegrityEnhancementFacility()", false); + logger.trace("boolean supportsIntegrityEnhancementFacility()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOuterJoins() throws SQLException { - logger.debug("public boolean supportsOuterJoins()", false); + 
logger.trace("boolean supportsOuterJoins()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsFullOuterJoins() throws SQLException { - logger.debug("public boolean supportsFullOuterJoins()", false); + logger.trace("boolean supportsFullOuterJoins()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsLimitedOuterJoins() throws SQLException { - logger.debug("public boolean supportsLimitedOuterJoins()", false); + logger.trace("boolean supportsLimitedOuterJoins()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getSchemaTerm() throws SQLException { - logger.debug("public String getSchemaTerm()", false); + logger.trace("String getSchemaTerm()", false); raiseSQLExceptionIfConnectionIsClosed(); return "schema"; } @Override public String getProcedureTerm() throws SQLException { - logger.debug("public String getProcedureTerm()", false); + logger.trace("String getProcedureTerm()", false); raiseSQLExceptionIfConnectionIsClosed(); return "procedure"; } @Override public String getCatalogTerm() throws SQLException { - logger.debug("public String getCatalogTerm()", false); + logger.trace("String getCatalogTerm()", false); raiseSQLExceptionIfConnectionIsClosed(); return "database"; } @Override public boolean isCatalogAtStart() throws SQLException { - logger.debug("public boolean isCatalogAtStart()", false); + logger.trace("boolean isCatalogAtStart()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getCatalogSeparator() throws SQLException { - logger.debug("public String getCatalogSeparator()", false); + logger.trace("String getCatalogSeparator()", false); raiseSQLExceptionIfConnectionIsClosed(); return "."; } @Override public boolean supportsSchemasInDataManipulation() throws SQLException { - logger.debug("public boolean supportsSchemasInDataManipulation()", false); + logger.trace("boolean 
supportsSchemasInDataManipulation()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSchemasInProcedureCalls() throws SQLException { - logger.debug("public boolean supportsSchemasInProcedureCalls()", false); + logger.trace("boolean supportsSchemasInProcedureCalls()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsSchemasInTableDefinitions() throws SQLException { - logger.debug("public boolean supportsSchemasInTableDefinitions()", false); + logger.trace("boolean supportsSchemasInTableDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSchemasInIndexDefinitions() throws SQLException { - logger.debug("public boolean supportsSchemasInIndexDefinitions()", false); + logger.trace("boolean supportsSchemasInIndexDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { - logger.debug("public boolean supportsSchemasInPrivilegeDefinitions()", false); + logger.trace("boolean supportsSchemasInPrivilegeDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCatalogsInDataManipulation() throws SQLException { - logger.debug("public boolean supportsCatalogsInDataManipulation()", false); + logger.trace("boolean supportsCatalogsInDataManipulation()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsCatalogsInProcedureCalls() throws SQLException { - logger.debug("public boolean supportsCatalogsInProcedureCalls()", false); + logger.trace("boolean supportsCatalogsInProcedureCalls()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCatalogsInTableDefinitions() throws SQLException { - logger.debug("public boolean supportsCatalogsInTableDefinitions()", false); + 
logger.trace("boolean supportsCatalogsInTableDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsCatalogsInIndexDefinitions() throws SQLException { - logger.debug("public boolean supportsCatalogsInIndexDefinitions()", false); + logger.trace("boolean supportsCatalogsInIndexDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { - logger.debug("public boolean supportsCatalogsInPrivilegeDefinitions()", false); + logger.trace("boolean supportsCatalogsInPrivilegeDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsPositionedDelete() throws SQLException { - logger.debug("public boolean supportsPositionedDelete()", false); + logger.trace("boolean supportsPositionedDelete()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsPositionedUpdate() throws SQLException { - logger.debug("public boolean supportsPositionedUpdate()", false); + logger.trace("boolean supportsPositionedUpdate()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsSelectForUpdate() throws SQLException { - logger.debug("public boolean supportsSelectForUpdate()", false); + logger.trace("boolean supportsSelectForUpdate()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsStoredProcedures() throws SQLException { - logger.debug("public boolean supportsStoredProcedures()", false); + logger.trace("boolean supportsStoredProcedures()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInComparisons() throws SQLException { - logger.debug("public boolean supportsSubqueriesInComparisons()", false); + logger.trace("boolean supportsSubqueriesInComparisons()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInExists() throws SQLException { - logger.debug("public boolean supportsSubqueriesInExists()", false); + logger.trace("boolean supportsSubqueriesInExists()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInIns() throws SQLException { - logger.debug("public boolean supportsSubqueriesInIns()", false); + logger.trace("boolean supportsSubqueriesInIns()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInQuantifieds() throws SQLException { - logger.debug("public boolean supportsSubqueriesInQuantifieds()", false); + logger.trace("boolean supportsSubqueriesInQuantifieds()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCorrelatedSubqueries() throws SQLException { - logger.debug("public boolean supportsCorrelatedSubqueries()", false); + logger.trace("boolean supportsCorrelatedSubqueries()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsUnion() throws SQLException { - logger.debug("public boolean supportsUnion()", false); + logger.trace("boolean supportsUnion()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsUnionAll() throws SQLException { - logger.debug("public boolean supportsUnionAll()", false); + logger.trace("boolean supportsUnionAll()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsOpenCursorsAcrossCommit() throws SQLException { - logger.debug("public boolean supportsOpenCursorsAcrossCommit()", false); + logger.trace("boolean supportsOpenCursorsAcrossCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOpenCursorsAcrossRollback() throws SQLException { - logger.debug("public boolean 
supportsOpenCursorsAcrossRollback()", false); + logger.trace("boolean supportsOpenCursorsAcrossRollback()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOpenStatementsAcrossCommit() throws SQLException { - logger.debug("public boolean supportsOpenStatementsAcrossCommit()", false); + logger.trace("boolean supportsOpenStatementsAcrossCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOpenStatementsAcrossRollback() throws SQLException { - logger.debug("public boolean supportsOpenStatementsAcrossRollback()", false); + logger.trace("boolean supportsOpenStatementsAcrossRollback()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public int getMaxBinaryLiteralLength() throws SQLException { - logger.debug("public int getMaxBinaryLiteralLength()", false); + logger.trace("int getMaxBinaryLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); - return 8388608; + return getMaxCharLiteralLength() / 2; // hex instead of octal, thus divided by 2 } @Override public int getMaxCharLiteralLength() throws SQLException { - logger.debug("public int getMaxCharLiteralLength()", false); + logger.trace("int getMaxCharLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); - return 16777216; + Optional maxLiteralLengthFromSession = + Optional.ofNullable( + (Integer) session.getOtherParameter(MAX_VARCHAR_BINARY_SIZE_PARAM_NAME)); + return maxLiteralLengthFromSession.orElse(DEFAULT_MAX_LOB_SIZE); } @Override public int getMaxColumnNameLength() throws SQLException { - logger.debug("public int getMaxColumnNameLength()", false); + logger.trace("int getMaxColumnNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxColumnsInGroupBy() throws SQLException { - logger.debug("public int getMaxColumnsInGroupBy()", false); + logger.trace("int getMaxColumnsInGroupBy()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInIndex() throws SQLException { - logger.debug("public int getMaxColumnsInIndex()", false); + logger.trace("int getMaxColumnsInIndex()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInOrderBy() throws SQLException { - logger.debug("public int getMaxColumnsInOrderBy()", false); + logger.trace("int getMaxColumnsInOrderBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInSelect() throws SQLException { - logger.debug("public int getMaxColumnsInSelect()", false); + logger.trace("int getMaxColumnsInSelect()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInTable() throws SQLException { - logger.debug("public int getMaxColumnsInTable()", false); + logger.trace("int getMaxColumnsInTable()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxConnections() throws SQLException { - logger.debug("public int getMaxConnections()", false); + logger.trace("int getMaxConnections()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxCursorNameLength() throws SQLException { - logger.debug("public int getMaxCursorNameLength()", false); + logger.trace("int getMaxCursorNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxIndexLength() throws SQLException { - logger.debug("public int getMaxIndexLength()", false); + logger.trace("int getMaxIndexLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxSchemaNameLength() throws SQLException { - logger.debug("public int getMaxSchemaNameLength()", false); + logger.trace("int getMaxSchemaNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxProcedureNameLength() throws SQLException { - 
logger.debug("public int getMaxProcedureNameLength()", false); + logger.trace("int getMaxProcedureNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxCatalogNameLength() throws SQLException { - logger.debug("public int getMaxCatalogNameLength()", false); + logger.trace("int getMaxCatalogNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxRowSize() throws SQLException { - logger.debug("public int getMaxRowSize()", false); + logger.trace("int getMaxRowSize()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { - logger.debug("public boolean doesMaxRowSizeIncludeBlobs()", false); + logger.trace("boolean doesMaxRowSizeIncludeBlobs()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public int getMaxStatementLength() throws SQLException { - logger.debug("public int getMaxStatementLength()", false); + logger.trace("int getMaxStatementLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxStatements() throws SQLException { - logger.debug("public int getMaxStatements()", false); + logger.trace("int getMaxStatements()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxTableNameLength() throws SQLException { - logger.debug("public int getMaxTableNameLength()", false); + logger.trace("int getMaxTableNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxTablesInSelect() throws SQLException { - logger.debug("public int getMaxTablesInSelect()", false); + logger.trace("int getMaxTablesInSelect()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxUserNameLength() throws SQLException { - logger.debug("public int getMaxUserNameLength()", false); + logger.trace("int getMaxUserNameLength()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getDefaultTransactionIsolation() throws SQLException { - logger.debug("public int getDefaultTransactionIsolation()", false); + logger.trace("int getDefaultTransactionIsolation()", false); raiseSQLExceptionIfConnectionIsClosed(); return Connection.TRANSACTION_READ_COMMITTED; } @Override public boolean supportsTransactions() throws SQLException { - logger.debug("public boolean supportsTransactions()", false); + logger.trace("boolean supportsTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsTransactionIsolationLevel(int level) throws SQLException { - logger.debug("public boolean supportsTransactionIsolationLevel(int level)", false); + logger.trace("boolean supportsTransactionIsolationLevel(int level)", false); raiseSQLExceptionIfConnectionIsClosed(); return (level == Connection.TRANSACTION_NONE) || (level == Connection.TRANSACTION_READ_COMMITTED); @@ -1054,29 +1088,28 @@ public boolean supportsTransactionIsolationLevel(int level) throws SQLException @Override public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { - logger.debug( - "public boolean " + "supportsDataDefinitionAndDataManipulationTransactions()", false); + logger.trace("boolean supportsDataDefinitionAndDataManipulationTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsDataManipulationTransactionsOnly() throws SQLException { - logger.debug("public boolean supportsDataManipulationTransactionsOnly()", false); + logger.trace("boolean supportsDataManipulationTransactionsOnly()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean dataDefinitionCausesTransactionCommit() throws SQLException { - logger.debug("public boolean dataDefinitionCausesTransactionCommit()", false); + logger.trace("boolean dataDefinitionCausesTransactionCommit()", 
false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean dataDefinitionIgnoredInTransactions() throws SQLException { - logger.debug("public boolean dataDefinitionIgnoredInTransactions()", false); + logger.trace("boolean dataDefinitionIgnoredInTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @@ -1087,7 +1120,7 @@ public ResultSet getProcedures( throws SQLException { raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); - logger.debug( + logger.trace( "public ResultSet getProcedures(String catalog, " + "String schemaPattern,String procedureNamePattern)", false); @@ -1109,7 +1142,7 @@ public ResultSet getProcedures( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_PROCEDURES, resultSet, statement) { public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -1125,7 +1158,7 @@ public boolean next() throws SQLException { || compiledProcedurePattern.matcher(procedureName).matches()) && (compiledSchemaPattern == null || compiledSchemaPattern.matcher(schemaName).matches())) { - logger.debug("Found a matched function:" + schemaName + "." + procedureName); + logger.trace("Found a matched function:" + schemaName + "." 
+ procedureName); nextRow[0] = catalogName; nextRow[1] = schemaName; @@ -1149,7 +1182,7 @@ public ResultSet getProcedureColumns( final String procedureNamePattern, final String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getProcedureColumns(String catalog, " + "String schemaPattern,String procedureNamePattern," + "String columnNamePattern)", @@ -1325,9 +1358,9 @@ else if (i == 0) { typeName.substring(typeName.indexOf('(') + 1, typeName.indexOf(')'))); nextRow[16] = char_octet_len; } else if (type == Types.CHAR || type == Types.VARCHAR) { - nextRow[16] = 16777216; + nextRow[16] = getMaxCharLiteralLength(); } else if (type == Types.BINARY || type == Types.VARBINARY) { - nextRow[16] = 8388608; + nextRow[16] = getMaxBinaryLiteralLength(); } } else { nextRow[16] = null; @@ -1407,7 +1440,7 @@ private String getFirstResultSetCommand( showProcedureCommand += " in schema \"" + catalogEscaped + "\".\"" + schemaPattern + "\""; } } - logger.debug("sql command to get column metadata: {}", showProcedureCommand); + logger.debug("Sql command to get column metadata: {}", showProcedureCommand); return showProcedureCommand; } @@ -1441,7 +1474,7 @@ public ResultSet getTables( final String tableNamePattern, final String[] types) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getTables(String catalog={}, String " + "schemaPattern={}, String tableNamePattern={}, String[] types={})", originalCatalog, @@ -1525,7 +1558,7 @@ public ResultSet getTables( } } - logger.debug("sql command to get table metadata: {}", showTablesCommand); + logger.debug("Sql command to get table metadata: {}", showTablesCommand); resultSet = executeAndReturnEmptyResultIfNotFound(statement, showTablesCommand, GET_TABLES); sendInBandTelemetryMetadataMetrics( @@ -1539,7 +1572,7 @@ public ResultSet getTables( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_TABLES, resultSet, statement) { @Override public boolean next() throws SQLException { - 
logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -1589,14 +1622,14 @@ public boolean next() throws SQLException { @Override public ResultSet getSchemas() throws SQLException { - logger.debug("public ResultSet getSchemas()", false); + logger.trace("ResultSet getSchemas()", false); return getSchemas(null, null); } @Override public ResultSet getCatalogs() throws SQLException { - logger.debug("public ResultSet getCatalogs()", false); + logger.trace("ResultSet getCatalogs()", false); raiseSQLExceptionIfConnectionIsClosed(); String showDB = "show /* JDBC:DatabaseMetaData.getCatalogs() */ databases in account"; @@ -1606,7 +1639,7 @@ public ResultSet getCatalogs() throws SQLException { GET_CATALOGS, statement.executeQuery(showDB), statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show databases result @@ -1624,7 +1657,7 @@ public boolean next() throws SQLException { @Override public ResultSet getTableTypes() throws SQLException { - logger.debug("public ResultSet getTableTypes()", false); + logger.trace("ResultSet getTableTypes()", false); raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); @@ -1654,7 +1687,7 @@ public ResultSet getColumns( final String columnNamePattern, final boolean extendedSet) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getColumns(String catalog={}, String schemaPattern={}, " + "String tableNamePattern={}, String columnNamePattern={}, boolean extendedSet={}", originalCatalog, @@ -1716,7 +1749,7 @@ public ResultSet getColumns( } } - logger.debug("sql command to get column metadata: {}", showColumnsCommand); + logger.debug("Sql command to get column metadata: {}", showColumnsCommand); ResultSet resultSet = 
executeAndReturnEmptyResultIfNotFound( @@ -1736,7 +1769,7 @@ public ResultSet getColumns( String currentTableName = null; public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -1789,13 +1822,13 @@ public boolean next() throws SQLException { "error parsing data type: " + dataTypeStr); } - logger.debug("data type string: {}", dataTypeStr); + logger.debug("Data type string: {}", dataTypeStr); SnowflakeColumnMetadata columnMetadata = SnowflakeUtil.extractColumnMetadata( jsonNode, session.isJdbcTreatDecimalAsInt(), session); - logger.debug("nullable: {}", columnMetadata.isNullable()); + logger.debug("Nullable: {}", columnMetadata.isNullable()); // SNOW-16881: add catalog name nextRow[0] = catalogName; @@ -1842,6 +1875,9 @@ public boolean next() throws SQLException { || columnMetadata.getType() == Types.TIME || columnMetadata.getType() == Types.TIMESTAMP) { columnSize = columnMetadata.getPrecision(); + } else if (columnMetadata.getType() == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + // For VECTOR Snowflake type we consider dimension as the column size + columnSize = columnMetadata.getDimension(); } nextRow[6] = columnSize; @@ -1850,7 +1886,7 @@ public boolean next() throws SQLException { nextRow[9] = null; nextRow[10] = (columnMetadata.isNullable() ? 
columnNullable : columnNoNulls); - logger.debug("returning nullable: {}", nextRow[10]); + logger.debug("Returning nullable: {}", nextRow[10]); nextRow[11] = comment; nextRow[12] = defaultValue; @@ -1886,7 +1922,7 @@ public boolean next() throws SQLException { @Override public ResultSet getColumnPrivileges( String catalog, String schema, String table, String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getColumnPrivileges(String catalog, " + "String schema,String table, String columnNamePattern)", false); @@ -1921,7 +1957,7 @@ public ResultSet getColumnPrivileges( public ResultSet getTablePrivileges( String originalCatalog, String originalSchemaPattern, final String tableNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getTablePrivileges(String catalog, " + "String schemaPattern,String tableNamePattern)", false); @@ -1982,7 +2018,7 @@ public ResultSet getTablePrivileges( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_TABLE_PRIVILEGES, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); while (showObjectResultSet.next()) { @@ -2021,7 +2057,7 @@ public boolean next() throws SQLException { public ResultSet getBestRowIdentifier( String catalog, String schema, String table, int scope, boolean nullable) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getBestRowIdentifier(String catalog, " + "String schema,String table, int scope,boolean nullable)", false); @@ -2031,7 +2067,7 @@ public ResultSet getBestRowIdentifier( @Override public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getVersionColumns(String catalog, " + "String schema, String table)", false); @@ -2041,7 +2077,7 @@ public ResultSet getVersionColumns(String catalog, String 
schema, String table) @Override public ResultSet getPrimaryKeys(String originalCatalog, String originalSchema, final String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getPrimaryKeys(String catalog={}, " + "String schema={}, String table={})", originalCatalog, @@ -2097,7 +2133,7 @@ public ResultSet getPrimaryKeys(String originalCatalog, String originalSchema, f final String schemaIn = schema; final String tableIn = table; - logger.debug("sql command to get primary key metadata: {}", showPKCommand); + logger.debug("Sql command to get primary key metadata: {}", showPKCommand); ResultSet resultSet = executeAndReturnEmptyResultIfNotFound(statement, showPKCommand, GET_PRIMARY_KEYS); sendInBandTelemetryMetadataMetrics( @@ -2106,7 +2142,7 @@ public ResultSet getPrimaryKeys(String originalCatalog, String originalSchema, f return new SnowflakeDatabaseMetaDataQueryResultSet(GET_PRIMARY_KEYS, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); while (showObjectResultSet.next()) { @@ -2292,7 +2328,7 @@ private ResultSet getForeignKeys( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_FOREIGN_KEYS, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); while (showObjectResultSet.next()) { @@ -2538,7 +2574,7 @@ private short getForeignKeyConstraintProperty(String property_name, String prope @Override public ResultSet getImportedKeys(String originalCatalog, String originalSchema, String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getImportedKeys(String catalog={}, " + "String schema={}, String table={})", originalCatalog, @@ -2556,7 +2592,7 @@ public ResultSet getImportedKeys(String originalCatalog, String originalSchema, @Override public ResultSet 
getExportedKeys(String catalog, String schema, String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getExportedKeys(String catalog={}, " + "String schema={}, String table={})", catalog, @@ -2579,7 +2615,7 @@ public ResultSet getCrossReference( String foreignSchema, String foreignTable) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getCrossReference(String parentCatalog={}, " + "String parentSchema={}, String parentTable={}, " + "String foreignCatalog={}, String foreignSchema={}, " @@ -2607,7 +2643,7 @@ public ResultSet getCrossReference( @Override public ResultSet getTypeInfo() throws SQLException { - logger.debug("public ResultSet getTypeInfo()", false); + logger.trace("ResultSet getTypeInfo()", false); raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); @@ -2832,7 +2868,7 @@ public ResultSet getTypeInfo() throws SQLException { */ public ResultSet getStreams( String originalCatalog, String originalSchemaPattern, String streamName) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getStreams(String catalog={}, String schemaPattern={}" + "String streamName={}", originalCatalog, @@ -2876,7 +2912,7 @@ public ResultSet getStreams( } } - logger.debug("sql command to get stream metadata: {}", showStreamsCommand); + logger.debug("Sql command to get stream metadata: {}", showStreamsCommand); ResultSet resultSet = executeAndReturnEmptyResultIfNotFound(statement, showStreamsCommand, GET_STREAMS); @@ -2886,7 +2922,7 @@ public ResultSet getStreams( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_STREAMS, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()"); + logger.trace("boolean next()"); incrementRow(); // iterate throw the show streams result until we find an entry @@ -2935,7 +2971,7 @@ public boolean next() throws SQLException { public ResultSet getIndexInfo( String catalog, String 
schema, String table, boolean unique, boolean approximate) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getIndexInfo(String catalog, String schema, " + "String table,boolean unique, boolean approximate)", false); @@ -2982,14 +3018,14 @@ public ResultSet getIndexInfo( @Override public boolean supportsResultSetType(int type) throws SQLException { - logger.debug("public boolean supportsResultSetType(int type)", false); + logger.trace("boolean supportsResultSetType(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return (type == ResultSet.TYPE_FORWARD_ONLY); } @Override public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { - logger.debug( + logger.trace( "public boolean supportsResultSetConcurrency(int type, " + "int concurrency)", false); raiseSQLExceptionIfConnectionIsClosed(); return (type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY); @@ -2997,70 +3033,70 @@ public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQ @Override public boolean ownUpdatesAreVisible(int type) throws SQLException { - logger.debug("public boolean ownUpdatesAreVisible(int type)", false); + logger.trace("boolean ownUpdatesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean ownDeletesAreVisible(int type) throws SQLException { - logger.debug("public boolean ownDeletesAreVisible(int type)", false); + logger.trace("boolean ownDeletesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean ownInsertsAreVisible(int type) throws SQLException { - logger.debug("public boolean ownInsertsAreVisible(int type)", false); + logger.trace("boolean ownInsertsAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean othersUpdatesAreVisible(int type) throws SQLException { - logger.debug("public boolean 
othersUpdatesAreVisible(int type)", false); + logger.trace("boolean othersUpdatesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean othersDeletesAreVisible(int type) throws SQLException { - logger.debug("public boolean othersDeletesAreVisible(int type)", false); + logger.trace("boolean othersDeletesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean othersInsertsAreVisible(int type) throws SQLException { - logger.debug("public boolean othersInsertsAreVisible(int type)", false); + logger.trace("boolean othersInsertsAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean updatesAreDetected(int type) throws SQLException { - logger.debug("public boolean updatesAreDetected(int type)", false); + logger.trace("boolean updatesAreDetected(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean deletesAreDetected(int type) throws SQLException { - logger.debug("public boolean deletesAreDetected(int type)", false); + logger.trace("boolean deletesAreDetected(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean insertsAreDetected(int type) throws SQLException { - logger.debug("public boolean insertsAreDetected(int type)", false); + logger.trace("boolean insertsAreDetected(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsBatchUpdates() throws SQLException { - logger.debug("public boolean supportsBatchUpdates()", false); + logger.trace("boolean supportsBatchUpdates()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @@ -3069,7 +3105,7 @@ public boolean supportsBatchUpdates() throws SQLException { public ResultSet getUDTs( String catalog, String schemaPattern, String typeNamePattern, int[] types) throws SQLException { - logger.debug( + 
logger.trace( "public ResultSet getUDTs(String catalog, " + "String schemaPattern,String typeNamePattern, int[] types)", false); @@ -3100,35 +3136,35 @@ public ResultSet getUDTs( @Override public Connection getConnection() throws SQLException { - logger.debug("public Connection getConnection()", false); + logger.trace("Connection getConnection()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection; } @Override public boolean supportsSavepoints() throws SQLException { - logger.debug("public boolean supportsSavepoints()", false); + logger.trace("boolean supportsSavepoints()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsNamedParameters() throws SQLException { - logger.debug("public boolean supportsNamedParameters()", false); + logger.trace("boolean supportsNamedParameters()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMultipleOpenResults() throws SQLException { - logger.debug("public boolean supportsMultipleOpenResults()", false); + logger.trace("boolean supportsMultipleOpenResults()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsGetGeneratedKeys() throws SQLException { - logger.debug("public boolean supportsGetGeneratedKeys()", false); + logger.trace("boolean supportsGetGeneratedKeys()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @@ -3136,7 +3172,7 @@ public boolean supportsGetGeneratedKeys() throws SQLException { @Override public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getSuperTypes(String catalog, " + "String schemaPattern,String typeNamePattern)", false); @@ -3147,7 +3183,7 @@ public ResultSet getSuperTypes(String catalog, String schemaPattern, String type @Override public ResultSet getSuperTables(String catalog, String schemaPattern, String 
tableNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getSuperTables(String catalog, " + "String schemaPattern,String tableNamePattern)", false); @@ -3159,7 +3195,7 @@ public ResultSet getSuperTables(String catalog, String schemaPattern, String tab public ResultSet getAttributes( String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getAttributes(String catalog, String " + "schemaPattern," + "String typeNamePattern,String attributeNamePattern)", @@ -3170,75 +3206,75 @@ public ResultSet getAttributes( @Override public boolean supportsResultSetHoldability(int holdability) throws SQLException { - logger.debug("public boolean supportsResultSetHoldability(int holdability)", false); + logger.trace("boolean supportsResultSetHoldability(int holdability)", false); raiseSQLExceptionIfConnectionIsClosed(); return holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT; } @Override public int getResultSetHoldability() throws SQLException { - logger.debug("public int getResultSetHoldability()", false); + logger.trace("int getResultSetHoldability()", false); return ResultSet.CLOSE_CURSORS_AT_COMMIT; } @Override public int getDatabaseMajorVersion() throws SQLException { - logger.debug("public int getDatabaseMajorVersion()", false); + logger.trace("int getDatabaseMajorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection.unwrap(SnowflakeConnectionV1.class).getDatabaseMajorVersion(); } @Override public int getDatabaseMinorVersion() throws SQLException { - logger.debug("public int getDatabaseMinorVersion()", false); + logger.trace("int getDatabaseMinorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection.unwrap(SnowflakeConnectionV1.class).getDatabaseMinorVersion(); } @Override public int getJDBCMajorVersion() throws SQLException { - logger.debug("public int getJDBCMajorVersion()", false); + 
logger.trace("int getJDBCMajorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return Integer.parseInt(JDBCVersion.split("\\.", 2)[0]); } @Override public int getJDBCMinorVersion() throws SQLException { - logger.debug("public int getJDBCMinorVersion()", false); + logger.trace("int getJDBCMinorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return Integer.parseInt(JDBCVersion.split("\\.", 2)[1]); } @Override public int getSQLStateType() throws SQLException { - logger.debug("public int getSQLStateType()", false); + logger.trace("int getSQLStateType()", false); return sqlStateSQL; } @Override public boolean locatorsUpdateCopy() { - logger.debug("public boolean locatorsUpdateCopy()", false); + logger.trace("boolean locatorsUpdateCopy()", false); return false; } @Override public boolean supportsStatementPooling() throws SQLException { - logger.debug("public boolean supportsStatementPooling()", false); + logger.trace("boolean supportsStatementPooling()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public RowIdLifetime getRowIdLifetime() throws SQLException { - logger.debug("public RowIdLifetime getRowIdLifetime()", false); + logger.trace("RowIdLifetime getRowIdLifetime()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public ResultSet getSchemas(String originalCatalog, String originalSchema) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getSchemas(String catalog={}, String " + "schemaPattern={})", originalCatalog, originalSchema); @@ -3270,7 +3306,7 @@ public ResultSet getSchemas(String originalCatalog, String originalSchema) throw showSchemas += " in database \"" + escapeSqlQuotes(catalog) + "\""; } - logger.debug("sql command to get schemas metadata: {}", showSchemas); + logger.debug("Sql command to get schemas metadata: {}", showSchemas); ResultSet resultSet = executeAndReturnEmptyResultIfNotFound(statement, showSchemas, GET_SCHEMAS); @@ -3278,7 
+3314,7 @@ public ResultSet getSchemas(String originalCatalog, String originalSchema) throw resultSet, "getSchemas", originalCatalog, originalSchema, "none", "none"); return new SnowflakeDatabaseMetaDataQueryResultSet(GET_SCHEMAS, resultSet, statement) { public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -3302,21 +3338,21 @@ public boolean next() throws SQLException { @Override public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { - logger.debug("public boolean supportsStoredFunctionsUsingCallSyntax()", false); + logger.trace("boolean supportsStoredFunctionsUsingCallSyntax()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean autoCommitFailureClosesAllResultSets() throws SQLException { - logger.debug("public boolean autoCommitFailureClosesAllResultSets()", false); + logger.trace("boolean autoCommitFailureClosesAllResultSets()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public ResultSet getClientInfoProperties() throws SQLException { - logger.debug("public ResultSet getClientInfoProperties()", false); + logger.trace("ResultSet getClientInfoProperties()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -3327,7 +3363,7 @@ public ResultSet getFunctions( throws SQLException { raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); - logger.debug( + logger.trace( "public ResultSet getFunctions(String catalog={}, String schemaPattern={}, " + "String functionNamePattern={}", catalog, @@ -3349,7 +3385,7 @@ public ResultSet getFunctions( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_FUNCTIONS, resultSet, statement) { public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean 
next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -3422,7 +3458,7 @@ private List parseColumns(String retType, String args) { public ResultSet getFunctionColumns( String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getFunctionColumns(String catalog, " + "String schemaPattern,String functionNamePattern," + "String columnNamePattern)", @@ -3544,9 +3580,9 @@ public ResultSet getFunctionColumns( typeName.substring(typeName.indexOf('(') + 1, typeName.indexOf(')'))); nextRow[13] = char_octet_len; } else if (type == Types.CHAR || type == Types.VARCHAR) { - nextRow[13] = 16777216; + nextRow[13] = getMaxCharLiteralLength(); } else if (type == Types.BINARY || type == Types.VARBINARY) { - nextRow[13] = 8388608; + nextRow[13] = getMaxBinaryLiteralLength(); } } else { nextRow[13] = null; @@ -3580,7 +3616,7 @@ public ResultSet getFunctionColumns( public ResultSet getPseudoColumns( String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getPseudoColumns(String catalog, " + "String schemaPattern,String tableNamePattern," + "String columnNamePattern)", @@ -3591,7 +3627,7 @@ public ResultSet getPseudoColumns( // @Override public boolean generatedKeyAlwaysReturned() throws SQLException { - logger.debug("public boolean generatedKeyAlwaysReturned()", false); + logger.trace("boolean generatedKeyAlwaysReturned()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -3599,7 +3635,7 @@ public boolean generatedKeyAlwaysReturned() throws SQLException { // unchecked @Override public T unwrap(Class iface) throws SQLException { - logger.debug(" T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -3610,7 +3646,7 @@ public T 
unwrap(Class iface) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java index 456eed7d7..ce93e49e8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java @@ -31,7 +31,7 @@ class SnowflakeDatabaseMetaDataResultSet extends SnowflakeBaseResultSet { private String queryId; - static final SFLogger logger = + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDatabaseMetaDataResultSet.class); /** @@ -132,7 +132,7 @@ public boolean isClosed() throws SQLException { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // no exception is raised even after the result set is closed. @@ -158,41 +158,41 @@ public void close() throws SQLException { try { getStatement().close(); // should close both result set and statement. 
} catch (SQLException ex) { - logger.debug("failed to close", ex); + logger.debug("Failed to close", ex); } } @Override public boolean isFirst() throws SQLException { - logger.debug("public boolean isFirst()", false); + logger.trace("boolean isFirst()", false); raiseSQLExceptionIfResultSetIsClosed(); return row == 0; } @Override public boolean isBeforeFirst() throws SQLException { - logger.debug("public boolean isBeforeFirst()", false); + logger.trace("boolean isBeforeFirst()", false); raiseSQLExceptionIfResultSetIsClosed(); return row == -1; } @Override public boolean isLast() throws SQLException { - logger.debug("public boolean isLast()", false); + logger.trace("boolean isLast()", false); raiseSQLExceptionIfResultSetIsClosed(); return !isBeforeFirst() && row == rows.length - 1; } @Override public boolean isAfterLast() throws SQLException { - logger.debug("public boolean isAfterLast()", false); + logger.trace("boolean isAfterLast()", false); raiseSQLExceptionIfResultSetIsClosed(); return row == rows.length; } @Override public int getRow() throws SQLException { - logger.debug("public int getRow()", false); + logger.trace("int getRow()", false); raiseSQLExceptionIfResultSetIsClosed(); return row; } @@ -260,7 +260,7 @@ static ResultSet getEmptyResultSet(DBMetadataResultSetMetadata metadataType, Sta } Object getObjectInternal(int columnIndex) throws SQLException { - logger.debug("public Object getObjectInternal(int columnIndex)", false); + logger.trace("Object getObjectInternal(int columnIndex)", false); raiseSQLExceptionIfResultSetIsClosed(); if (nextRow == null) { @@ -280,14 +280,14 @@ Object getObjectInternal(int columnIndex) throws SQLException { @Override public boolean wasNull() throws SQLException { - logger.debug("public boolean wasNull() returning {}", wasNull); + logger.trace("boolean wasNull() returning {}", wasNull); raiseSQLExceptionIfResultSetIsClosed(); return wasNull; } @Override public String getString(int columnIndex) throws SQLException { - 
logger.debug("public String getString(int columnIndex)", false); + logger.trace("String getString(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -297,7 +297,7 @@ public String getString(int columnIndex) throws SQLException { @Override public boolean getBoolean(int columnIndex) throws SQLException { - logger.debug("public boolean getBoolean(int columnIndex)", false); + logger.trace("boolean getBoolean(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -324,7 +324,7 @@ public boolean getBoolean(int columnIndex) throws SQLException { @Override public byte getByte(int columnIndex) throws SQLException { - logger.debug("public byte getByte(int columnIndex)", false); + logger.trace("byte getByte(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -341,7 +341,7 @@ public byte getByte(int columnIndex) throws SQLException { @Override public short getShort(int columnIndex) throws SQLException { - logger.debug("public short getShort(int columnIndex)", false); + logger.trace("short getShort(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -359,7 +359,7 @@ public short getShort(int columnIndex) throws SQLException { @Override public int getInt(int columnIndex) throws SQLException { - logger.debug("public int getInt(int columnIndex)", false); + logger.trace("int getInt(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -377,7 +377,7 @@ public int getInt(int columnIndex) throws SQLException { @Override public long getLong(int columnIndex) throws SQLException { - logger.debug("public long getLong(int columnIndex)", false); + logger.trace("long getLong(int columnIndex)", false); // Column index starts from 1, not 0. 
Object obj = getObjectInternal(columnIndex); @@ -402,7 +402,7 @@ public long getLong(int columnIndex) throws SQLException { @Override public float getFloat(int columnIndex) throws SQLException { - logger.debug("public float getFloat(int columnIndex)", false); + logger.trace("float getFloat(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -420,7 +420,7 @@ public float getFloat(int columnIndex) throws SQLException { @Override public double getDouble(int columnIndex) throws SQLException { - logger.debug("public double getDouble(int columnIndex)", false); + logger.trace("double getDouble(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -447,7 +447,7 @@ public String getQueryID() { @Deprecated @Override public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex, int scale)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex, int scale)", false); BigDecimal value; @@ -471,7 +471,7 @@ public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException @Override public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex)", false); BigDecimal value = null; @@ -493,7 +493,7 @@ public BigDecimal getBigDecimal(int columnIndex) throws SQLException { @Override public Object getObject(int columnIndex) throws SQLException { - logger.debug("public Object getObject(int columnIndex)", false); + logger.trace("Object getObject(int columnIndex)", false); int type = resultSetMetaData.getColumnType(columnIndex); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 4aaf6013e..6d02f333d 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -14,7 +14,13 @@ import java.sql.SQLFeatureNotSupportedException; import java.util.List; import java.util.Properties; +import net.snowflake.client.config.ConnectionParameters; +import net.snowflake.client.config.SFConnectionConfigParser; import net.snowflake.client.core.SecurityUtil; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ResourceBundleManager; import net.snowflake.common.core.SqlState; @@ -26,10 +32,12 @@ * loading */ public class SnowflakeDriver implements Driver { + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDriver.class); + public static final String AUTO_CONNECTION_STRING_PREFIX = "jdbc:snowflake:auto"; static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.15.2"; + public static String implementVersion = "3.17.1"; static int majorVersion = 0; static int minorVersion = 0; @@ -57,6 +65,9 @@ public class SnowflakeDriver implements Driver { initializeClientVersionFromManifest(); SecurityUtil.addBouncyCastleProvider(); + + // Telemetry OOB is disabled + TelemetryService.disableOOBTelemetry(); } /** try to initialize Arrow support if fails, JDBC is going to use the legacy format */ @@ -200,18 +211,52 @@ public boolean acceptsURL(String url) { */ @Override public Connection connect(String url, Properties info) throws SQLException { - if (url == null) { + ConnectionParameters connectionParameters = + overrideByFileConnectionParametersIfAutoConfiguration(url, info); + + if (connectionParameters.getUrl() == null) { // expected return format per the JDBC spec for java.sql.Driver#connect() throw new 
SnowflakeSQLException("Unable to connect to url of 'null'."); } - if (!SnowflakeConnectString.hasSupportedPrefix(url)) { + if (!SnowflakeConnectString.hasSupportedPrefix(connectionParameters.getUrl())) { return null; // expected return format per the JDBC spec for java.sql.Driver#connect() } - SnowflakeConnectString conStr = SnowflakeConnectString.parse(url, info); + SnowflakeConnectString conStr = + SnowflakeConnectString.parse( + connectionParameters.getUrl(), connectionParameters.getParams()); if (!conStr.isValid()) { throw new SnowflakeSQLException("Connection string is invalid. Unable to parse."); } - return new SnowflakeConnectionV1(url, info); + return new SnowflakeConnectionV1( + connectionParameters.getUrl(), connectionParameters.getParams()); + } + + private static ConnectionParameters overrideByFileConnectionParametersIfAutoConfiguration( + String url, Properties info) throws SnowflakeSQLException { + if (url != null && url.contains(AUTO_CONNECTION_STRING_PREFIX)) { + // Connect using connection configuration file + ConnectionParameters connectionParameters = + SFConnectionConfigParser.buildConnectionParameters(); + if (connectionParameters == null) { + throw new SnowflakeSQLException( + "Unavailable connection configuration parameters expected for auto configuration using file"); + } + return connectionParameters; + } else { + return new ConnectionParameters(url, info); + } + } + + /** + * Connect method using connection configuration file + * + * @return connection + * @throws SQLException if failed to create a snowflake connection + */ + @SnowflakeJdbcInternalApi + public Connection connect() throws SQLException { + logger.debug("Execute internal method connect() without parameters"); + return connect(AUTO_CONNECTION_STRING_PREFIX, null); } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java index 9a0f874bb..751b47d19 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java @@ -15,6 +15,7 @@ import com.google.common.io.ByteStreams; import com.google.common.io.CountingOutputStream; import java.io.File; +import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; @@ -47,6 +48,7 @@ import java.util.concurrent.TimeUnit; import java.util.zip.GZIPOutputStream; import net.snowflake.client.core.ExecTimeTelemetryData; +import net.snowflake.client.core.FileUtil; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.ObjectMapperFactory; @@ -73,6 +75,8 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.filefilter.WildcardFileFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for uploading/downloading files @@ -80,7 +84,8 @@ * @author jhuang */ public class SnowflakeFileTransferAgent extends SFBaseFileTransferAgent { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeFileTransferAgent.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeFileTransferAgent.class); static final StorageClientFactory storageFactory = StorageClientFactory.getFactory(); @@ -95,6 +100,7 @@ public class SnowflakeFileTransferAgent extends SFBaseFileTransferAgent { private static final String localFSFileSep = systemGetProperty("file.separator"); private static final int DEFAULT_PARALLEL = 10; + private static final Logger log = LoggerFactory.getLogger(SnowflakeFileTransferAgent.class); private final String command; @@ -202,7 +208,7 @@ static List getEncryptionMaterial( List encryptionMaterial = new ArrayList<>(); JsonNode rootNode = jsonNode.path("data").path("encryptionMaterial"); if (commandType == CommandType.UPLOAD) { - 
logger.debug("initEncryptionMaterial: UPLOAD", false); + logger.debug("InitEncryptionMaterial: UPLOAD", false); RemoteStoreFileEncryptionMaterial encMat = null; if (!rootNode.isMissingNode() && !rootNode.isNull()) { @@ -211,7 +217,7 @@ static List getEncryptionMaterial( encryptionMaterial.add(encMat); } else { - logger.debug("initEncryptionMaterial: DOWNLOAD", false); + logger.debug("InitEncryptionMaterial: DOWNLOAD", false); if (!rootNode.isMissingNode() && !rootNode.isNull()) { encryptionMaterial = @@ -231,7 +237,7 @@ private static List getPresignedUrls(CommandType commandType, JsonNode j List presignedUrls = new ArrayList<>(); JsonNode rootNode = jsonNode.path("data").path("presignedUrls"); if (commandType == CommandType.DOWNLOAD) { - logger.debug("initEncryptionMaterial: DOWNLOAD", false); + logger.debug("InitEncryptionMaterial: DOWNLOAD", false); if (!rootNode.isMissingNode() && !rootNode.isNull()) { presignedUrls = Arrays.asList(mapper.readValue(rootNode.toString(), String[].class)); @@ -541,7 +547,7 @@ public static Callable getUploadFileCallable( return new Callable() { public Void call() throws Exception { - logger.debug("Entering getUploadFileCallable...", false); + logger.trace("Entering getUploadFileCallable...", false); // make sure initialize context for the telemetry service for this thread TelemetryService.getInstance().updateContext(session.getSnowflakeConnectionString()); @@ -561,6 +567,7 @@ public Void call() throws Exception { SnowflakeFileTransferAgent.injectedFileTransferException; } + FileUtil.logFileUsage(srcFilePath, "Get file to upload", false); uploadStream = new FileInputStream(srcFilePath); } catch (FileNotFoundException ex) { metadata.resultStatus = ResultStatus.ERROR; @@ -585,7 +592,7 @@ public Void call() throws Exception { String digest = null; - logger.debug("Dest file name={}", false); + logger.debug("Dest file name: {}", false); // Temp file that needs to be cleaned up when upload was successful FileBackedOutputStream 
fileBackedOutputStream = null; @@ -637,7 +644,7 @@ public Void call() throws Exception { logger.debug( "Started copying file from: {} to {}:{} destName: {} " - + "auto compressed? {} size={}", + + "auto compressed? {} size: {}", srcFilePath, stage.getStageType().name(), stage.getLocation(), @@ -705,7 +712,7 @@ public Void call() throws Exception { try { fileBackedOutputStream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } if (inputStream == null) { @@ -713,7 +720,7 @@ public Void call() throws Exception { } } - logger.debug("filePath: {}", srcFilePath); + logger.debug("FilePath: {}", srcFilePath); // set dest size metadata.destFileSize = uploadSize; @@ -863,7 +870,7 @@ public Void call() throws Exception { throw ex; } - logger.debug("filePath: {}", srcFilePath); + logger.debug("FilePath: {}", srcFilePath); File destFile = new File(localLocation + localFSFileSep + destFileName); long downloadSize = destFile.length(); @@ -1053,9 +1060,9 @@ private void parseCommand() throws SnowflakeSQLException { logger.debug("Command type: {}", commandType); if (commandType == CommandType.UPLOAD) { - logger.debug("autoCompress: {}, source compression: {}", autoCompress, sourceCompression); + logger.debug("Auto compress: {}, source compression: {}", autoCompress, sourceCompression); } else { - logger.debug("local download location: {}", localLocation); + logger.debug("Local download location: {}", localLocation); } logger.debug("Source files: {}", String.join(",", sourceFiles)); @@ -1215,7 +1222,7 @@ private void verifyLocalFilePath(String localFilePathFromGS) throws SnowflakeSQL + ", expected: " + localFilePath); } else if (localFilePath.isEmpty()) { - logger.debug("fail to parse local file path from command: {}", command); + logger.debug("Fail to parse local file path from command: {}", command); } else { logger.trace("local file path from GS matches local parsing: {}", 
localFilePath); } @@ -1306,7 +1313,7 @@ private static JsonNode parseCommandInGS(SFStatement statement, String command) } JsonNode jsonNode = (JsonNode) result; - logger.debug("response: {}", jsonNode.toString()); + logger.debug("Response: {}", jsonNode.toString()); SnowflakeUtil.checkErrorAndThrowException(jsonNode); return jsonNode; @@ -1513,7 +1520,7 @@ public boolean execute() throws SQLException { filterExistingFiles(); - logger.debug("filtering done"); + logger.debug("Filtering done"); } synchronized (canceled) { @@ -1531,9 +1538,9 @@ public boolean execute() throws SQLException { boolean created = dir.mkdirs(); if (created) { - logger.debug("directory created: {}", localLocation); + logger.debug("Directory created: {}", localLocation); } else { - logger.debug("directory not created {}", localLocation); + logger.debug("Directory not created {}", localLocation); } } @@ -1544,19 +1551,19 @@ public boolean execute() throws SQLException { // separate files to big files list and small files list // big files will be uploaded in serial, while small files will be // uploaded concurrently. 
- logger.debug("start segregate files by size"); + logger.debug("Start segregate files by size"); segregateFilesBySize(); if (bigSourceFiles != null) { - logger.debug("start uploading big files"); + logger.debug("Start uploading big files"); uploadFiles(bigSourceFiles, 1); - logger.debug("end uploading big files"); + logger.debug("End uploading big files"); } if (smallSourceFiles != null) { - logger.debug("start uploading small files"); + logger.debug("Start uploading small files"); uploadFiles(smallSourceFiles, parallel); - logger.debug("end uploading small files"); + logger.debug("End uploading small files"); } } @@ -1717,7 +1724,7 @@ private void downloadFiles() throws SnowflakeSQLException { presignedUrl, queryID)); - logger.debug("submitted download job for: {}", srcFile); + logger.debug("Submitted download job for: {}", srcFile); } threadExecutor.shutdown(); @@ -1813,7 +1820,7 @@ private void uploadFiles(Set fileList, int parallel) throws SnowflakeSQL encryptionMaterial.get(0), queryID)); - logger.debug("submitted copy job for: {}", srcFile); + logger.debug("Submitted copy job for: {}", srcFile); } // shut down the thread executor @@ -1935,7 +1942,7 @@ static Set expandFileNames(String[] filePathList, String queryId) // For each location, list files and match against the patterns for (Map.Entry> entry : locationToFilePatterns.entrySet()) { try { - java.io.File dir = new java.io.File(entry.getKey()); + File dir = new File(entry.getKey()); logger.debug( "Listing files under: {} with patterns: {}", @@ -1947,11 +1954,15 @@ static Set expandFileNames(String[] filePathList, String queryId) && injectedFileTransferException instanceof Exception) { throw (Exception) SnowflakeFileTransferAgent.injectedFileTransferException; } - // The following currently ignore sub directories - for (Object file : - FileUtils.listFiles(dir, new WildcardFileFilter(entry.getValue()), null)) { - result.add(((java.io.File) file).getCanonicalPath()); + File[] filesMatchingPattern = + 
dir.listFiles((FileFilter) new WildcardFileFilter(entry.getValue())); + if (filesMatchingPattern != null) { + for (File file : filesMatchingPattern) { + result.add(file.getCanonicalPath()); + } + } else { + logger.debug("No files under {} matching pattern {}", entry.getKey(), entry.getValue()); } } catch (Exception ex) { throw new SnowflakeSQLException( @@ -1971,7 +1982,7 @@ static Set expandFileNames(String[] filePathList, String queryId) logger.debug("Expanded file paths: "); for (String filePath : result) { - logger.debug("file: {}", filePath); + logger.debug("File: {}", filePath); } return result; @@ -1991,7 +2002,7 @@ private static boolean pushFileToLocal( stageLocation = stageLocation.replace("~", systemGetProperty("user.home")); try { logger.debug( - "Copy file. srcFile={}, destination={}, destFileName={}", + "Copy file. srcFile: {}, destination: {}, destFileName: {}", filePath, stageLocation, destFileName); @@ -2026,7 +2037,7 @@ private static boolean pullFileFromLocal( throws SQLException { try { logger.debug( - "Copy file. srcFile={}, destination={}, destFileName={}", + "Copy file. srcFile: {}, destination: {}, destFileName: {}", sourceLocation + localFSFileSep + filePath, destLocation, destFileName); @@ -2076,7 +2087,7 @@ private static void pushFileToRemoteStore( } logger.debug( - "upload object. location={}, key={}, srcFile={}, encryption={}", + "Upload object. Location: {}, key: {}, srcFile: {}, encryption: {}", remoteLocation.location, destFileName, srcFile, @@ -2142,7 +2153,7 @@ private static void pushFileToRemoteStore( * @throws Exception if error occurs while data upload. 
*/ public static void uploadWithoutConnection(SnowflakeFileTransferConfig config) throws Exception { - logger.debug("Entering uploadWithoutConnection..."); + logger.trace("Entering uploadWithoutConnection..."); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) config.getSnowflakeFileTransferMetadata(); @@ -2287,7 +2298,7 @@ public static void uploadWithoutConnection(SnowflakeFileTransferConfig config) t try { fileBackedOutputStream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } } @@ -2327,7 +2338,7 @@ private static void pushFileToRemoteStoreWithPresignedUrl( } logger.debug( - "upload object. location={}, key={}, srcFile={}, encryption={}", + "Upload object. Location: {}, key: {}, srcFile: {}, encryption: {}", remoteLocation.location, destFileName, srcFile, @@ -2416,7 +2427,7 @@ private static void pullFileFromRemoteStore( } logger.debug( - "Download object. location={}, key={}, srcFile={}, encryption={}", + "Download object. Location: {}, key: {}, srcFile: {}, encryption: {}", remoteLocation.location, stageFilePath, filePath, @@ -2503,7 +2514,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { if (stageInfo.getStageType() == StageInfo.StageType.S3 || stageInfo.getStageType() == StageInfo.StageType.AZURE || stageInfo.getStageType() == StageInfo.StageType.GCS) { - logger.debug("check existing files on remote storage for the common prefix"); + logger.debug("Check existing files on remote storage for the common prefix"); remoteLocation storeLocation = extractLocationAndPath(stageInfo.getLocation()); @@ -2511,7 +2522,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { int retryCount = 0; - logger.debug("start dragging object summaries from remote storage"); + logger.debug("Start dragging object summaries from remote storage"); do { try { // Normal flow will never hit here. 
This is only for testing purposes @@ -2525,7 +2536,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { storageClient.listObjects( storeLocation.location, SnowflakeUtil.concatFilePathNames(storeLocation.path, greatestCommonPrefix, "/")); - logger.debug("received object summaries from remote storage"); + logger.debug("Received object summaries from remote storage"); } catch (Exception ex) { logger.debug("Listing objects for filtering encountered exception: {}", ex.getMessage()); @@ -2639,7 +2650,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { try { stream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } } @@ -2670,7 +2681,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { fileBackedOutputStream.reset(); } } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } IOUtils.closeQuietly(stageFileStream); } @@ -2678,12 +2689,12 @@ private void filterExistingFiles() throws SnowflakeSQLException { // continue if digest is different so that we will process the file if (!stageFileHashText.equals(localFileHashText)) { logger.debug( - "digest diff between local and stage, will {} {}", + "Digest diff between local and stage, will {} {}", commandType.name().toLowerCase(), mappedSrcFile); continue; } else { - logger.debug("digest matches between local and stage, will skip {}", mappedSrcFile); + logger.debug("Digest matches between local and stage, will skip {}", mappedSrcFile); // skip the file given that the check sum is the same b/w source // and destination @@ -2706,7 +2717,7 @@ private void compareAndSkipRemoteFiles( throws SnowflakeSQLException { for (StorageObjectSummary obj : objectSummaries) { logger.debug( - "Existing object: key={} size={} md5={}", obj.getKey(), obj.getSize(), obj.getMD5()); + "Existing object: key: {} size: {} 
md5: {}", obj.getKey(), obj.getSize(), obj.getMD5()); int idxOfLastFileSep = obj.getKey().lastIndexOf("/"); String objFileName = obj.getKey().substring(idxOfLastFileSep + 1); @@ -2770,7 +2781,7 @@ private void compareAndSkipRemoteFiles( // log it logger.debug( "File returned from listing but found missing {} when getting its" - + " metadata. Location={}, key={}", + + " metadata. Location: {}, key: {}", obj.getLocation(), obj.getKey()); @@ -2831,7 +2842,7 @@ private void compareAndSkipRemoteFiles( try { stream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } } @@ -2845,7 +2856,7 @@ private void compareAndSkipRemoteFiles( (objDigest == null && !hashText.equals(obj.getMD5()))) // ETag/MD5 mismatch { logger.debug( - "digest diff between remote store and local, will {} {}, " + "Digest diff between remote store and local, will {} {}, " + "local digest: {}, remote store md5: {}", commandType.name().toLowerCase(), mappedSrcFile, @@ -2864,7 +2875,7 @@ private void compareAndSkipRemoteFiles( } logger.debug( - "digest same between remote store and local, will not upload {} {}", + "Digest same between remote store and local, will not upload {} {}", commandType.name().toLowerCase(), mappedSrcFile); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java index e4535ad32..ee3dc3ec8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java @@ -3,6 +3,7 @@ import java.math.BigInteger; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.Map; public interface SnowflakePreparedStatement { /** @@ -26,4 +27,13 @@ public interface SnowflakePreparedStatement { * @throws SQLException */ void setBigInteger(int parameterIndex, BigInteger x) throws SQLException; + 
+ /** + * Sets the designated parameter to the given Map instance. + * + * @param parameterIndex + * @param map + * @throws SQLException + */ + void setMap(int parameterIndex, Map map, int type) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java index 2c01f0d04..000d4634d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java @@ -4,6 +4,7 @@ package net.snowflake.client.jdbc; +import com.fasterxml.jackson.core.JsonProcessingException; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; @@ -20,6 +21,7 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.RowId; +import java.sql.SQLData; import java.sql.SQLException; import java.sql.SQLXML; import java.sql.Time; @@ -36,11 +38,15 @@ import java.util.Set; import java.util.TimeZone; import net.snowflake.client.core.ExecTimeTelemetryData; +import net.snowflake.client.core.FieldSchemaCreator; +import net.snowflake.client.core.JsonSqlOutput; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFPreparedStatementMetaData; +import net.snowflake.client.core.SfSqlArray; +import net.snowflake.client.core.SfTimestampUtil; import net.snowflake.client.core.StmtUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -50,7 +56,8 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1 implements PreparedStatement, SnowflakePreparedStatement { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class); + private static final SFLogger logger = + 
SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class); /** Error code returned when describing a statement that is binding table name */ private static final Integer ERROR_CODE_TABLE_BIND_VARIABLE_NOT_SET = 2128; /** Error code when preparing statement with binding object names */ @@ -146,11 +153,12 @@ public ResultSet executeQuery() throws SQLException { if (showStatementParameters) { logger.info("executeQuery()", false); } else { - logger.debug("executeQuery()", false); + logger.trace("executeQuery()", false); } ResultSet rs = executeQueryInternal(sql, false, parameterBindings, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {}", execTimeData.getLogString()); return rs; } @@ -167,11 +175,12 @@ public ResultSet executeAsyncQuery() throws SQLException { if (showStatementParameters) { logger.info("executeAsyncQuery()", false); } else { - logger.debug("executeAsyncQuery()", false); + logger.trace("executeAsyncQuery()", false); } ResultSet rs = executeQueryInternal(sql, true, parameterBindings, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. 
{}", execTimeData.getLogString()); return rs; } @@ -179,20 +188,20 @@ public ResultSet executeAsyncQuery() throws SQLException { public long executeLargeUpdate() throws SQLException { ExecTimeTelemetryData execTimeTelemetryData = new ExecTimeTelemetryData("long PreparedStatement.executeLargeUpdate()", this.batchID); - logger.debug("executeLargeUpdate()", false); + logger.trace("executeLargeUpdate()", false); long updates = executeUpdateInternal(sql, parameterBindings, true, execTimeTelemetryData); return updates; } @Override public int executeUpdate() throws SQLException { - logger.debug("executeUpdate()", false); + logger.trace("executeUpdate()", false); return (int) executeLargeUpdate(); } @Override public void setNull(int parameterIndex, int sqlType) throws SQLException { - logger.debug( + logger.trace( "setNull(parameterIndex: {}, sqlType: {})", parameterIndex, SnowflakeType.JavaSQLType.find(sqlType)); @@ -204,7 +213,7 @@ public void setNull(int parameterIndex, int sqlType) throws SQLException { @Override public void setBoolean(int parameterIndex, boolean x) throws SQLException { - logger.debug("setBoolean(parameterIndex: {}, boolean x)", parameterIndex); + logger.trace("setBoolean(parameterIndex: {}, boolean x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( SnowflakeUtil.javaTypeToSFTypeString(Types.BOOLEAN, connection.getSFBaseSession()), @@ -214,7 +223,7 @@ public void setBoolean(int parameterIndex, boolean x) throws SQLException { @Override public void setByte(int parameterIndex, byte x) throws SQLException { - logger.debug("setByte(parameterIndex: {}, byte x)", parameterIndex); + logger.trace("setByte(parameterIndex: {}, byte x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( SnowflakeUtil.javaTypeToSFTypeString(Types.TINYINT, connection.getSFBaseSession()), @@ -224,7 +233,7 @@ public void setByte(int parameterIndex, byte x) throws SQLException { @Override public void setShort(int parameterIndex, short 
x) throws SQLException { - logger.debug("setShort(parameterIndex: {}, short x)", parameterIndex); + logger.trace("setShort(parameterIndex: {}, short x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -235,7 +244,7 @@ public void setShort(int parameterIndex, short x) throws SQLException { @Override public void setInt(int parameterIndex, int x) throws SQLException { - logger.debug("setInt(parameterIndex: {}, int x)", parameterIndex); + logger.trace("setInt(parameterIndex: {}, int x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -246,7 +255,7 @@ public void setInt(int parameterIndex, int x) throws SQLException { @Override public void setLong(int parameterIndex, long x) throws SQLException { - logger.debug("setLong(parameterIndex: {}, long x)", parameterIndex); + logger.trace("setLong(parameterIndex: {}, long x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -257,7 +266,7 @@ public void setLong(int parameterIndex, long x) throws SQLException { @Override public void setBigInteger(int parameterIndex, BigInteger x) throws SQLException { - logger.debug("setBigInteger(parameterIndex: {}, BigInteger x)", parameterIndex); + logger.trace("setBigInteger(parameterIndex: {}, BigInteger x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -268,7 +277,7 @@ public void setBigInteger(int parameterIndex, BigInteger x) throws SQLException @Override public void setFloat(int parameterIndex, float x) throws SQLException { - logger.debug("setFloat(parameterIndex: {}, float x)", parameterIndex); + logger.trace("setFloat(parameterIndex: {}, float x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -279,7 +288,7 @@ public void setFloat(int parameterIndex, float x) throws SQLException { @Override public void setDouble(int parameterIndex, double x) throws SQLException { - logger.debug("setDouble(parameterIndex: {}, double x)", parameterIndex); + 
logger.trace("setDouble(parameterIndex: {}, double x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -290,7 +299,7 @@ public void setDouble(int parameterIndex, double x) throws SQLException { @Override public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - logger.debug("setBigDecimal(parameterIndex: {}, BigDecimal x)", parameterIndex); + logger.trace("setBigDecimal(parameterIndex: {}, BigDecimal x)", parameterIndex); if (x == null) { setNull(parameterIndex, Types.DECIMAL); @@ -305,7 +314,7 @@ public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException @Override public void setString(int parameterIndex, String x) throws SQLException { - logger.debug("setString(parameterIndex: {}, String x)", parameterIndex); + logger.trace("setString(parameterIndex: {}, String x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -315,7 +324,7 @@ public void setString(int parameterIndex, String x) throws SQLException { @Override public void setBytes(int parameterIndex, byte[] x) throws SQLException { - logger.debug("setBytes(parameterIndex: {}, byte[] x)", parameterIndex); + logger.trace("setBytes(parameterIndex: {}, byte[] x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -324,9 +333,23 @@ public void setBytes(int parameterIndex, byte[] x) throws SQLException { parameterBindings.put(String.valueOf(parameterIndex), binding); } + private void setObjectInternal(int parameterIndex, SQLData sqlData) throws SQLException { + logger.debug("setObjectInternal(parameterIndex: {}, SqlData sqlData)", parameterIndex); + + JsonSqlOutput stream = new JsonSqlOutput(sqlData, connection.getSFBaseSession()); + sqlData.writeSQL(stream); + ParameterBindingDTO binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.STRUCT, connection.getSFBaseSession()), + stream.getJsonString(), + stream.getSchema()); + 
parameterBindings.put(String.valueOf(parameterIndex), binding); + } + @Override public void setDate(int parameterIndex, Date x) throws SQLException { - logger.debug("setDate(parameterIndex: {}, Date x)", parameterIndex); + logger.trace("setDate(parameterIndex: {}, Date x)", parameterIndex); if (x == null) { setNull(parameterIndex, Types.DATE); @@ -345,18 +368,13 @@ public void setDate(int parameterIndex, Date x) throws SQLException { @Override public void setTime(int parameterIndex, Time x) throws SQLException { - logger.debug("setTime(parameterIndex: {}, Time x)", parameterIndex); + logger.trace("setTime(parameterIndex: {}, Time x)", parameterIndex); if (x == null) { setNull(parameterIndex, Types.TIME); } else { // Convert to nanoseconds since midnight using the input time mod 24 hours. - final long MS_IN_DAY = 86400 * 1000; - long msSinceEpoch = x.getTime(); - // Use % + % instead of just % to get the nonnegative remainder. - // TODO(mkember): Change to use Math.floorMod when Client is on Java 8. 
- long msSinceMidnight = (msSinceEpoch % MS_IN_DAY + MS_IN_DAY) % MS_IN_DAY; - long nanosSinceMidnight = msSinceMidnight * 1000 * 1000; + long nanosSinceMidnight = SfTimestampUtil.getTimeInNanoseconds(x); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -369,7 +387,7 @@ public void setTime(int parameterIndex, Time x) throws SQLException { @Override public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - logger.debug("setTimestamp(parameterIndex: {}, Timestamp x)", parameterIndex); + logger.trace("setTimestamp(parameterIndex: {}, Timestamp x)", parameterIndex); setTimestampWithType(parameterIndex, x, Types.TIMESTAMP); } @@ -436,7 +454,7 @@ public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQ || targetSqlType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ) { setTimestampWithType(parameterIndex, (Timestamp) x, targetSqlType); } else { - logger.debug( + logger.trace( "setObject(parameterIndex: {}, Object x, sqlType: {})", parameterIndex, SnowflakeType.JavaSQLType.find(targetSqlType)); @@ -479,6 +497,8 @@ public void setObject(int parameterIndex, Object x) throws SQLException { setBoolean(parameterIndex, (Boolean) x); } else if (x instanceof byte[]) { setBytes(parameterIndex, (byte[]) x); + } else if (x instanceof SQLData) { + setObjectInternal(parameterIndex, (SQLData) x); } else { throw new SnowflakeSQLLoggedException( connection.getSFBaseSession(), @@ -492,17 +512,18 @@ public void setObject(int parameterIndex, Object x) throws SQLException { public boolean execute() throws SQLException { ExecTimeTelemetryData execTimeData = new ExecTimeTelemetryData("boolean PreparedStatement.execute(String)", this.batchID); - logger.debug("execute: {}", sql); + logger.debug("Execute: {}", sql); boolean success = executeInternal(sql, parameterBindings, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. 
{}", execTimeData.getLogString()); return success; } @Override public void addBatch() throws SQLException { - logger.debug("addBatch()", false); + logger.trace("addBatch()", false); raiseSQLExceptionIfStatementIsClosed(); @@ -600,13 +621,65 @@ public void setClob(int parameterIndex, Clob x) throws SQLException { } @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); + public void setArray(int parameterIndex, Array array) throws SQLException { + if (array instanceof SfSqlArray) { + SfSqlArray sfArray = (SfSqlArray) array; + ParameterBindingDTO binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.ARRAY, connection.getSFBaseSession()), + sfArray.getJsonString(), + sfArray.getSchema()); + parameterBindings.put(String.valueOf(parameterIndex), binding); + } else { + SfSqlArray sfArray = new SfSqlArray(Types.INTEGER, array); + ParameterBindingDTO binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.ARRAY, connection.getSFBaseSession()), + sfArray.getJsonString(), + sfArray.getSchema()); + parameterBindings.put(String.valueOf(parameterIndex), binding); + } + } + + @Override + public void setMap(int parameterIndex, Map map, int type) throws SQLException { + BindingParameterMetadata valueTypeSchema; + if (Types.STRUCT == type) { + SQLData sqlData = (SQLData) map.values().stream().findFirst().orElse(null); + JsonSqlOutput stream = new JsonSqlOutput(sqlData, connection.getSFBaseSession()); + sqlData.writeSQL(stream); + valueTypeSchema = stream.getSchema(); + } else { + valueTypeSchema = FieldSchemaCreator.buildBindingSchemaForType(type, false); + } + + BindingParameterMetadata schema = + BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType("map") + .withFields( + Arrays.asList( + FieldSchemaCreator.buildBindingSchemaForType(Types.VARCHAR, 
false), + valueTypeSchema)) + .build(); + ParameterBindingDTO binding = null; + try { + binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.STRUCT, connection.getSFBaseSession()), + SnowflakeUtil.mapJson(map), + schema); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + parameterBindings.put(String.valueOf(parameterIndex), binding); } @Override public ResultSetMetaData getMetaData() throws SQLException { - logger.debug("getMetaData()", false); + logger.trace("getMetaData()", false); raiseSQLExceptionIfStatementIsClosed(); @@ -616,7 +689,7 @@ public ResultSetMetaData getMetaData() throws SQLException { @Override public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - logger.debug("setDate(int parameterIndex, Date x, Calendar cal)", false); + logger.trace("setDate(int parameterIndex, Date x, Calendar cal)", false); raiseSQLExceptionIfStatementIsClosed(); if (x == null) { @@ -639,14 +712,14 @@ public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLExceptio @Override public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - logger.debug("setTime(int parameterIndex, Time x, Calendar cal)", false); + logger.trace("setTime(int parameterIndex, Time x, Calendar cal)", false); raiseSQLExceptionIfStatementIsClosed(); setTime(parameterIndex, x); } @Override public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - logger.debug("setTimestamp(int parameterIndex, Timestamp x, Calendar cal)", false); + logger.trace("setTimestamp(int parameterIndex, Timestamp x, Calendar cal)", false); raiseSQLExceptionIfStatementIsClosed(); // convert the time from being in UTC to be in local time zone @@ -687,7 +760,7 @@ public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws S @Override public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - 
logger.debug("setNull(int parameterIndex, int sqlType, String typeName)", false); + logger.trace("setNull(int parameterIndex, int sqlType, String typeName)", false); setNull(parameterIndex, sqlType); } @@ -748,7 +821,7 @@ public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException @Override public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - logger.debug( + logger.trace( "setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength)", false); raiseSQLExceptionIfStatementIsClosed(); @@ -847,13 +920,13 @@ public void clearBatch() throws SQLException { @Override public int[] executeBatch() throws SQLException { - logger.debug("executeBatch()", false); + logger.trace("executeBatch()", false); return executeBatchInternalWithArrayBind(false).intArr; } @Override public long[] executeLargeBatch() throws SQLException { - logger.debug("executeLargeBatch()", false); + logger.trace("executeLargeBatch()", false); return executeBatchInternalWithArrayBind(true).longArr; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java index 5cf9e1c40..7e91d9bab 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java @@ -144,4 +144,8 @@ public DownloadState getDownloadState() { public void setDownloadState(DownloadState downloadState) { this.downloadState = downloadState; } + + long getTotalTime() { + return downloadTime + parseTime; + } } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java index dcc5250b5..7de89e6f5 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java @@ -13,4 +13,22 @@ public 
interface SnowflakeResultSetMetaData { int getInternalColumnType(int column) throws SQLException; List getColumnFields(int column) throws SQLException; + + /** + * Get vector dimension + * + * @param column column index + * @return vector dimension when the column is vector type or 0 when it is not vector type + * @throws SQLException when cannot get column dimension + */ + int getDimension(int column) throws SQLException; + + /** + * Get vector dimension + * + * @param columnName column name + * @return vector dimension when the column is vector type or 0 when it is not vector type + * @throws SQLException when cannot get column dimension + */ + int getDimension(String columnName) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java index 5c67cde9a..b8cdb236b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java @@ -22,7 +22,8 @@ public enum QueryType { SYNC }; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeResultSetMetaDataV1.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeResultSetMetaDataV1.class); private SFResultSetMetaData resultSetMetaData; private String queryId; @@ -84,9 +85,19 @@ public List getColumnFields(int column) throws SQLException { () -> resultSetMetaData.getColumnFields(column)); } + @Override + public int getDimension(int column) throws SQLException { + return resultSetMetaData.getDimension(column); + } + + @Override + public int getDimension(String columnName) throws SQLException { + return resultSetMetaData.getDimension(getColumnIndex(columnName) + 1); + } + @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if 
(!iface.isInstance(this)) { throw new SQLException( @@ -97,7 +108,7 @@ public T unwrap(Class iface) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -161,7 +172,7 @@ public boolean isDefinitelyWritable(int column) throws SQLException { @Override public String getColumnClassName(int column) throws SQLException { - logger.debug("public String getColumnClassName(int column)", false); + logger.trace("String getColumnClassName(int column)", false); int type = this.getColumnType(column); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java index 5a7821e0b..f82505665 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java @@ -74,7 +74,8 @@ public class SnowflakeResultSetSerializableV1 implements SnowflakeResultSetSerializable, Serializable { private static final long serialVersionUID = 1L; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeResultSetSerializableV1.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeResultSetSerializableV1.class); static final ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); private static final long LOW_MAX_MEMORY = GB; @@ -544,7 +545,7 @@ public static SnowflakeResultSetSerializableV1 create( ResultStreamProvider resultStreamProvider) throws SnowflakeSQLException { SnowflakeResultSetSerializableV1 resultSetSerializable = new SnowflakeResultSetSerializableV1(); - logger.debug("Entering create()", false); + logger.trace("Entering create()", false); SnowflakeUtil.checkErrorAndThrowException(rootNode); @@ -581,7 +582,7 @@ public static 
SnowflakeResultSetSerializableV1 create( resultSetSerializable.possibleSession = Optional.ofNullable(sfSession); - logger.debug("query id: {}", resultSetSerializable.queryId); + logger.debug("Query id: {}", resultSetSerializable.queryId); Optional queryResultFormat = QueryResultFormat.lookupByName(rootNode.path("data").path("queryResultFormat").asText()); @@ -670,7 +671,7 @@ public static SnowflakeResultSetSerializableV1 create( resultSetSerializable.sendResultTime = sendResultTimeNode.longValue(); } - logger.debug("result version={}", resultSetSerializable.resultVersion); + logger.debug("Result version: {}", resultSetSerializable.resultVersion); // Bind parameter metadata JsonNode bindData = rootNode.path("data").path("metaDataOfBinds"); @@ -756,7 +757,7 @@ private void setupFieldsFromParameters() { this.dateFormatter = SnowflakeDateTimeFormat.fromSqlFormat(sqlDateFormat); logger.debug( - "sql date format: {}, java date format: {}", + "Sql date format: {}, java date format: {}", sqlDateFormat, (ArgSupplier) () -> this.dateFormatter.toSimpleDateTimePattern()); @@ -766,7 +767,7 @@ private void setupFieldsFromParameters() { this.timeFormatter = SnowflakeDateTimeFormat.fromSqlFormat(sqlTimeFormat); logger.debug( - "sql time format: {}, java time format: {}", + "Sql time format: {}, java time format: {}", sqlTimeFormat, (ArgSupplier) () -> this.timeFormatter.toSimpleDateTimePattern()); @@ -803,7 +804,7 @@ private void parseChunkFiles(JsonNode rootNode, SFBaseStatement sfStatement) { // Determine the prefetch thread count and memoryLimit if (this.chunkFileCount > 0) { - logger.debug("#chunks={}, initialize chunk downloader", this.chunkFileCount); + logger.debug("#chunks: {}, initialize chunk downloader", this.chunkFileCount); adjustMemorySettings(sfStatement); @@ -816,7 +817,7 @@ private void parseChunkFiles(JsonNode rootNode, SFBaseStatement sfStatement) { Map.Entry chunkHeader = chunkHeadersIter.next(); logger.debug( - "add header key={}, value={}", + "Add header key: 
{}, value: {}", chunkHeader.getKey(), chunkHeader.getValue().asText()); this.chunkHeadersMap.put(chunkHeader.getKey(), chunkHeader.getValue().asText()); @@ -835,7 +836,7 @@ private void parseChunkFiles(JsonNode rootNode, SFBaseStatement sfStatement) { new ChunkFileMetadata(url, rowCount, compressedSize, uncompressedSize)); logger.debug( - "add chunk, url={} rowCount={} " + "compressedSize={} uncompressedSize={}", + "Add chunk, url: {} rowCount: {} " + "compressedSize: {} uncompressedSize: {}", url, rowCount, compressedSize, @@ -855,8 +856,8 @@ private void adjustMemorySettings(SFBaseStatement sfStatement) { this.memoryLimit = sfStatement.getConservativeMemoryLimit(); int chunkSize = (int) this.parameters.get(CLIENT_RESULT_CHUNK_SIZE); logger.debug( - "enable conservative memory usage with prefetchThreads = {} and memoryLimit = {} and " - + "resultChunkSize = {}", + "Enable conservative memory usage with prefetchThreads: {} and memoryLimit: {} and " + + "resultChunkSize: {}", this.resultPrefetchThreads, this.memoryLimit, chunkSize); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java index 5135e3ca5..49c8c8546 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java @@ -28,13 +28,18 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; +import net.snowflake.client.core.ArrowSqlInput; +import net.snowflake.client.core.JsonSqlInput; import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; /** Snowflake ResultSet implementation */ public class SnowflakeResultSetV1 extends SnowflakeBaseResultSet implements SnowflakeResultSet, ResultSet { + private static final SFLogger logger = 
SFLoggerFactory.getLogger(SnowflakeResultSetV1.class); /** * Constructor takes an inputstream from the API response that we get from executing a SQL @@ -263,11 +268,17 @@ public ResultSetMetaData getMetaData() throws SQLException { public Object getObject(int columnIndex) throws SQLException { raiseSQLExceptionIfResultSetIsClosed(); - try { - return sfBaseResultSet.getObject(columnIndex); - } catch (SFException ex) { - throw new SnowflakeSQLException( - ex.getCause(), ex.getSqlState(), ex.getVendorCode(), ex.getParams()); + Object object = + SnowflakeUtil.mapSFExceptionToSQLException(() -> sfBaseResultSet.getObject(columnIndex)); + if (object == null) { + return null; + } else if (object instanceof JsonSqlInput) { + return ((JsonSqlInput) object).getText(); + } else if (object instanceof ArrowSqlInput) { + throw new SQLException( + "Arrow native struct couldn't be converted to String. To map to SqlData the method getObject(int columnIndex, Class type) should be used"); + } else { + return object; } } @@ -351,7 +362,7 @@ public boolean isBeforeFirst() throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -359,7 +370,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java index 00e8a3b64..a88829ec6 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java @@ -14,7 +14,7 
@@ * @author jhuang */ public class SnowflakeSQLException extends SQLException { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeSQLException.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeSQLException.class); private static final long serialVersionUID = 1L; @@ -44,7 +44,7 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState, int // log user error from GS at fine level logger.debug( - "Snowflake exception: {}, sqlState:{}, vendorCode:{}, queryId:{}", + "Snowflake exception: {}, sqlState: {}, vendorCode: {}, queryId: {}", reason, sqlState, vendorCode, @@ -54,7 +54,7 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState, int public SnowflakeSQLException(String reason, String sqlState) { super(reason, sqlState); // log user error from GS at fine level - logger.debug("Snowflake exception: {}, sqlState:{}", reason, sqlState); + logger.debug("Snowflake exception: {}, sqlState: {}", reason, sqlState); } /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int)} */ @@ -70,7 +70,7 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode) { vendorCode); this.queryId = queryId; logger.debug( - "Snowflake exception: {}, sqlState:{}, vendorCode:{}", + "Snowflake exception: {}, sqlState: {}, vendorCode: {}", errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode)), sqlState, vendorCode); @@ -89,7 +89,7 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode, Ob vendorCode); this.queryId = queryId; logger.debug( - "Snowflake exception: {}, sqlState:{}, vendorCode:{}", + "Snowflake exception: {}, sqlState: {}, vendorCode: {}", errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode), params), sqlState, vendorCode); @@ -172,6 +172,10 @@ public SnowflakeSQLException(String reason) { super(reason); } + public SnowflakeSQLException(Throwable ex, String message) { + 
super(message, ex); + } + public String getQueryId() { return queryId; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java index d9d741a8c..cdc33322d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java @@ -25,6 +25,8 @@ import net.snowflake.client.jdbc.telemetry.TelemetryUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryEvent; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.LoginInfoDTO; import net.snowflake.common.core.SqlState; @@ -36,7 +38,8 @@ * exception with OOB telemetry. */ public class SnowflakeSQLLoggedException extends SnowflakeSQLException { - + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeSQLLoggedException.class); private static final ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); /** diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java index 992cf123d..ede179a9c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java @@ -11,7 +11,8 @@ public class SnowflakeSimulatedUploadFailure extends RuntimeException { private static final long serialVersionUID = 1L; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeSimulatedUploadFailure.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeSimulatedUploadFailure.class); public SnowflakeSimulatedUploadFailure() { super(); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java index a4c9100c3..5016c175b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java @@ -35,7 +35,7 @@ /** Snowflake statement */ class SnowflakeStatementV1 implements Statement, SnowflakeStatement { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeStatementV1.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeStatementV1.class); private static final String NOOP_MESSAGE = "This is a dummy SnowflakeStatement, " + "no member function should be called for it."; @@ -68,9 +68,6 @@ class SnowflakeStatementV1 implements Statement, SnowflakeStatement { // timeout in seconds private int queryTimeout = 0; - // max field size limited to 16MB - private final int maxFieldSize = 16777216; - SFBaseStatement sfBaseStatement; private boolean poolable; @@ -101,7 +98,7 @@ class SnowflakeStatementV1 implements Statement, SnowflakeStatement { int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug(" public SnowflakeStatement(SnowflakeConnectionV1 conn)", false); + logger.trace("SnowflakeStatement(SnowflakeConnectionV1 conn)", false); this.connection = connection; @@ -155,6 +152,7 @@ public ResultSet executeQuery(String sql) throws SQLException { ResultSet rs = executeQueryInternal(sql, false, null, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {}", execTimeData.getLogString()); return rs; } @@ -172,6 +170,7 @@ public ResultSet executeAsyncQuery(String sql) throws SQLException { ResultSet rs = executeQueryInternal(sql, true, null, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. 
{} {}", queryID, execTimeData.getLogString()); return rs; } @@ -206,6 +205,7 @@ public long executeLargeUpdate(String sql) throws SQLException { long res = executeUpdateInternal(sql, null, true, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {} {}", queryID, execTimeData.getLogString()); return res; } @@ -337,7 +337,7 @@ boolean executeInternal( raiseSQLExceptionIfStatementIsClosed(); connection.injectedDelay(); - logger.debug("execute: {}", sql); + logger.debug("Execute: {}", sql); String trimmedSql = sql.trim(); @@ -428,12 +428,13 @@ public boolean execute(String sql) throws SQLException { boolean res = executeInternal(sql, null, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {} {}", queryID, execTimeData.getLogString()); return res; } @Override public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("execute(String sql, int autoGeneratedKeys)", false); + logger.trace("execute(String sql, int autoGeneratedKeys)", false); if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) { return execute(sql); @@ -444,14 +445,14 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { @Override public boolean execute(String sql, int[] columnIndexes) throws SQLException { - logger.debug("execute(String sql, int[] columnIndexes)", false); + logger.trace("execute(String sql, int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public boolean execute(String sql, String[] columnNames) throws SQLException { - logger.debug("execute(String sql, String[] columnNames)", false); + logger.trace("execute(String sql, String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @@ -465,7 +466,7 @@ public boolean execute(String sql, String[] columnNames) throws SQLException { */ 
@Override public int[] executeBatch() throws SQLException { - logger.debug("int[] executeBatch()", false); + logger.trace("int[] executeBatch()", false); return executeBatchInternal(false).intArr; } @@ -478,7 +479,7 @@ public int[] executeBatch() throws SQLException { */ @Override public long[] executeLargeBatch() throws SQLException { - logger.debug("executeBatch()", false); + logger.trace("executeBatch()", false); return executeBatchInternal(true).longArr; } @@ -560,14 +561,14 @@ VariableTypeArray executeBatchInternal(boolean isLong) throws SQLException { @Override public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("executeUpdate(String sql, int autoGeneratedKeys)", false); + logger.trace("executeUpdate(String sql, int autoGeneratedKeys)", false); return (int) this.executeLargeUpdate(sql, autoGeneratedKeys); } @Override public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("executeUpdate(String sql, int autoGeneratedKeys)", false); + logger.trace("executeUpdate(String sql, int autoGeneratedKeys)", false); if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) { return executeLargeUpdate(sql); @@ -578,84 +579,84 @@ public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLExce @Override public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - logger.debug("executeUpdate(String sql, int[] columnIndexes)", false); + logger.trace("executeUpdate(String sql, int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { - logger.debug("executeLargeUpdate(String sql, int[] columnIndexes)", false); + logger.trace("executeLargeUpdate(String sql, int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public int 
executeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug("executeUpdate(String sql, String[] columnNames)", false); + logger.trace("executeUpdate(String sql, String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug("executeUpdate(String sql, String[] columnNames)", false); + logger.trace("executeUpdate(String sql, String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public Connection getConnection() throws SQLException { - logger.debug("getConnection()", false); + logger.trace("getConnection()", false); raiseSQLExceptionIfStatementIsClosed(); return connection; } @Override public int getFetchDirection() throws SQLException { - logger.debug("getFetchDirection()", false); + logger.trace("getFetchDirection()", false); raiseSQLExceptionIfStatementIsClosed(); return ResultSet.FETCH_FORWARD; } @Override public int getFetchSize() throws SQLException { - logger.debug("getFetchSize()", false); + logger.trace("getFetchSize()", false); raiseSQLExceptionIfStatementIsClosed(); return fetchSize; } @Override public ResultSet getGeneratedKeys() throws SQLException { - logger.debug("getGeneratedKeys()", false); + logger.trace("getGeneratedKeys()", false); raiseSQLExceptionIfStatementIsClosed(); return new SnowflakeResultSetV1.EmptyResultSet(); } @Override public int getMaxFieldSize() throws SQLException { - logger.debug("getMaxFieldSize()", false); + logger.trace("getMaxFieldSize()", false); raiseSQLExceptionIfStatementIsClosed(); - return maxFieldSize; + return connection.getMetaData().getMaxCharLiteralLength(); } @Override public int getMaxRows() throws SQLException { - logger.debug("getMaxRows()", false); + logger.trace("getMaxRows()", false); raiseSQLExceptionIfStatementIsClosed(); return maxRows; } 
@Override public boolean getMoreResults() throws SQLException { - logger.debug("getMoreResults()", false); + logger.trace("getMoreResults()", false); return getMoreResults(Statement.CLOSE_CURRENT_RESULT); } @Override public boolean getMoreResults(int current) throws SQLException { - logger.debug("getMoreResults(int current)", false); + logger.trace("getMoreResults(int current)", false); raiseSQLExceptionIfStatementIsClosed(); // clean up the current result set, if it exists @@ -704,48 +705,48 @@ public boolean getMoreResults(int current) throws SQLException { @Override public int getQueryTimeout() throws SQLException { - logger.debug("getQueryTimeout()", false); + logger.trace("getQueryTimeout()", false); raiseSQLExceptionIfStatementIsClosed(); return this.queryTimeout; } @Override public ResultSet getResultSet() throws SQLException { - logger.debug("getResultSet()", false); + logger.trace("getResultSet()", false); raiseSQLExceptionIfStatementIsClosed(); return resultSet; } @Override public int getResultSetConcurrency() throws SQLException { - logger.debug("getResultSetConcurrency()", false); + logger.trace("getResultSetConcurrency()", false); raiseSQLExceptionIfStatementIsClosed(); return resultSetConcurrency; } @Override public int getResultSetHoldability() throws SQLException { - logger.debug("getResultSetHoldability()", false); + logger.trace("getResultSetHoldability()", false); raiseSQLExceptionIfStatementIsClosed(); return resultSetHoldability; } @Override public int getResultSetType() throws SQLException { - logger.debug("getResultSetType()", false); + logger.trace("getResultSetType()", false); raiseSQLExceptionIfStatementIsClosed(); return this.resultSetType; } @Override public int getUpdateCount() throws SQLException { - logger.debug("getUpdateCount()", false); + logger.trace("getUpdateCount()", false); return (int) getUpdateCountIfDML(); } @Override public long getLargeUpdateCount() throws SQLException { - logger.debug("getLargeUpdateCount()", false); + 
logger.trace("getLargeUpdateCount()", false); return getUpdateCountIfDML(); } @@ -756,34 +757,34 @@ private long getUpdateCountIfDML() throws SQLException { @Override public SQLWarning getWarnings() throws SQLException { - logger.debug("getWarnings()", false); + logger.trace("getWarnings()", false); raiseSQLExceptionIfStatementIsClosed(); return sqlWarnings; } @Override public boolean isClosed() throws SQLException { - logger.debug("isClosed()", false); + logger.trace("isClosed()", false); return isClosed; // no exception } @Override public boolean isPoolable() throws SQLException { - logger.debug("isPoolable()", false); + logger.trace("isPoolable()", false); raiseSQLExceptionIfStatementIsClosed(); return poolable; } @Override public void setCursorName(String name) throws SQLException { - logger.debug("setCursorName(String name)", false); + logger.trace("setCursorName(String name)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public void setEscapeProcessing(boolean enable) throws SQLException { - logger.debug("setEscapeProcessing(boolean enable)", false); + logger.trace("setEscapeProcessing(boolean enable)", false); // NOTE: We could raise an exception here, because not implemented // but it may break the existing applications. For now returning nothing. // we should revisit. 
@@ -792,7 +793,7 @@ public void setEscapeProcessing(boolean enable) throws SQLException { @Override public void setFetchDirection(int direction) throws SQLException { - logger.debug("setFetchDirection(int direction)", false); + logger.trace("setFetchDirection(int direction)", false); raiseSQLExceptionIfStatementIsClosed(); if (direction != ResultSet.FETCH_FORWARD) { throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); @@ -801,21 +802,21 @@ public void setFetchDirection(int direction) throws SQLException { @Override public void setFetchSize(int rows) throws SQLException { - logger.debug("setFetchSize(int rows), rows={}", rows); + logger.trace("setFetchSize(int rows), rows={}", rows); raiseSQLExceptionIfStatementIsClosed(); fetchSize = rows; } @Override public void setMaxFieldSize(int max) throws SQLException { - logger.debug("setMaxFieldSize(int max)", false); + logger.trace("setMaxFieldSize(int max)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public void setMaxRows(int max) throws SQLException { - logger.debug("setMaxRows(int max)", false); + logger.trace("setMaxRows(int max)", false); raiseSQLExceptionIfStatementIsClosed(); @@ -832,7 +833,7 @@ public void setMaxRows(int max) throws SQLException { @Override public void setPoolable(boolean poolable) throws SQLException { - logger.debug("setPoolable(boolean poolable)", false); + logger.trace("setPoolable(boolean poolable)", false); raiseSQLExceptionIfStatementIsClosed(); if (poolable) { @@ -849,7 +850,7 @@ public void setPoolable(boolean poolable) throws SQLException { * @throws SQLException if any SQL error occurs. 
*/ public void setParameter(String name, Object value) throws SQLException { - logger.debug("setParameter", false); + logger.trace("setParameter", false); try { if (this.sfBaseStatement != null) { @@ -867,7 +868,7 @@ public void setBatchID(String batchID) { @Override public void setQueryTimeout(int seconds) throws SQLException { - logger.debug("setQueryTimeout(int seconds)", false); + logger.trace("setQueryTimeout(int seconds)", false); raiseSQLExceptionIfStatementIsClosed(); this.queryTimeout = seconds; @@ -883,7 +884,7 @@ public void setQueryTimeout(int seconds) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("isWrapperFor(Class iface)", false); + logger.trace("isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -891,7 +892,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("unwrap(Class iface)", false); + logger.trace("unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -902,13 +903,13 @@ public T unwrap(Class iface) throws SQLException { @Override public void closeOnCompletion() throws SQLException { - logger.debug("closeOnCompletion()", false); + logger.trace("closeOnCompletion()", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public boolean isCloseOnCompletion() throws SQLException { - logger.debug("isCloseOnCompletion()", false); + logger.trace("isCloseOnCompletion()", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @@ -918,7 +919,7 @@ public void close() throws SQLException { } public void close(boolean removeClosedStatementFromConnection) throws SQLException { - logger.debug("close()", false); + logger.trace("close()", false); // No exception is raised even if the statement is closed. 
if (resultSet != null) { @@ -947,7 +948,7 @@ public void close(boolean removeClosedStatementFromConnection) throws SQLExcepti @Override public void cancel() throws SQLException { - logger.debug("cancel()", false); + logger.trace("cancel()", false); raiseSQLExceptionIfStatementIsClosed(); try { @@ -959,14 +960,14 @@ public void cancel() throws SQLException { @Override public void clearWarnings() throws SQLException { - logger.debug("clearWarnings()", false); + logger.trace("clearWarnings()", false); raiseSQLExceptionIfStatementIsClosed(); sqlWarnings = null; } @Override public void addBatch(String sql) throws SQLException { - logger.debug("addBatch(String sql)", false); + logger.trace("addBatch(String sql)", false); raiseSQLExceptionIfStatementIsClosed(); @@ -975,7 +976,7 @@ public void addBatch(String sql) throws SQLException { @Override public void clearBatch() throws SQLException { - logger.debug("clearBatch()", false); + logger.trace("clearBatch()", false); raiseSQLExceptionIfStatementIsClosed(); @@ -983,7 +984,7 @@ public void clearBatch() throws SQLException { } private void executeSetProperty(final String sql) { - logger.debug("setting property", false); + logger.trace("setting property", false); // tokenize the sql String[] tokens = sql.split("\\s+"); @@ -1260,7 +1261,7 @@ public void setQueryTimeout(int seconds) throws SQLException {} @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("isWrapperFor(Class iface)", false); + logger.trace("isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -1268,7 +1269,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("unwrap(Class iface)", false); + logger.trace("unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java index beccc79b2..ea958c551 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java @@ -447,6 +447,9 @@ public static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session case Types.STRUCT: return OBJECT; + case Types.ARRAY: + return ARRAY; + case Types.NULL: return ANY; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index fecba4bba..efe8ffbf8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -6,7 +6,9 @@ import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.google.common.base.Strings; import java.io.BufferedReader; @@ -15,6 +17,7 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.lang.reflect.Field; +import java.lang.reflect.Method; import java.sql.SQLException; import java.sql.Time; import java.sql.Types; @@ -33,8 +36,10 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import net.snowflake.client.core.Constants; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; +import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSessionProperty; @@ -55,6 +60,7 @@ public class SnowflakeUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeUtil.class); + private static final ObjectMapper OBJECT_MAPPER = 
ObjectMapperFactory.getObjectMapper(); /** Additional data types not covered by standard JDBC */ public static final int EXTRA_TYPES_TIMESTAMP_LTZ = 50000; @@ -86,6 +92,10 @@ public class SnowflakeUtil { public static final String BYTE_STR = "byte"; public static final String BYTES_STR = "byte array"; + public static String mapJson(Object ob) throws JsonProcessingException { + return OBJECT_MAPPER.writeValueAsString(ob); + } + public static void checkErrorAndThrowExceptionIncludingReauth(JsonNode rootNode) throws SnowflakeSQLException { checkErrorAndThrowExceptionSub(rootNode, true); @@ -171,6 +181,15 @@ public static SnowflakeColumnMetadata extractColumnMetadata( int precision = colNode.path("precision").asInt(); int scale = colNode.path("scale").asInt(); int length = colNode.path("length").asInt(); + int dimension = + colNode + .path("dimension") + .asInt(); // vector dimension when checking columns via connection.getMetadata + int vectorDimension = + colNode + .path("vectorDimension") + .asInt(); // dimension when checking columns via resultSet.getMetadata + int finalVectorDimension = dimension > 0 ? dimension : vectorDimension; boolean fixed = colNode.path("fixed").asBoolean(); JsonNode udtOutputType = colNode.path("outputType"); JsonNode extColTypeNameNode = colNode.path("extTypeName"); @@ -213,7 +232,8 @@ public static SnowflakeColumnMetadata extractColumnMetadata( colSrcDatabase, colSrcSchema, colSrcTable, - isAutoIncrement); + isAutoIncrement, + finalVectorDimension); } static ColumnTypeInfo getSnowflakeType( @@ -303,8 +323,9 @@ static ColumnTypeInfo getSnowflakeType( break; case ARRAY: + int columnType = isStructuredType ? 
Types.ARRAY : Types.VARCHAR; columnTypeInfo = - new ColumnTypeInfo(Types.ARRAY, defaultIfNull(extColTypeName, "ARRAY"), baseType); + new ColumnTypeInfo(columnType, defaultIfNull(extColTypeName, "ARRAY"), baseType); break; case MAP: @@ -374,7 +395,13 @@ static List createFieldsMetadata( throws SnowflakeSQLLoggedException { List fields = new ArrayList<>(); for (JsonNode node : fieldsJson) { - String colName = node.path("name").asText(); + String colName; + if (!node.path("fieldType").isEmpty()) { + colName = node.path("fieldName").asText(); + node = node.path("fieldType"); + } else { + colName = node.path("name").asText(); + } int scale = node.path("scale").asInt(); int precision = node.path("precision").asInt(); String internalColTypeName = node.path("type").asText(); @@ -437,7 +464,8 @@ static String javaTypeToSFTypeString(int javaType, SFBaseSession session) return SnowflakeType.javaTypeToSFType(javaType, session).name(); } - static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session) + @SnowflakeJdbcInternalApi + public static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session) throws SnowflakeSQLException { return SnowflakeType.javaTypeToSFType(javaType, session); } @@ -548,7 +576,9 @@ static List describeFixedViewColumns( "", // database "", // schema "", - false)); // isAutoincrement + false, // isAutoincrement + 0 // dimension + )); } return rowType; @@ -696,6 +726,18 @@ public static void systemSetEnv(String key, String value) { field.setAccessible(true); Map writableEnv = (Map) field.get(env); writableEnv.put(key, value); + + // To an environment variable is set on Windows, it uses a different map to store the values + // when the system.getenv(VAR_NAME) is used its required to update in this additional place. 
+ if (Constants.getOS() == Constants.OS.WINDOWS) { + Class pe = Class.forName("java.lang.ProcessEnvironment"); + Method getenv = pe.getDeclaredMethod("getenv", String.class); + getenv.setAccessible(true); + Field props = pe.getDeclaredField("theCaseInsensitiveEnvironment"); + props.setAccessible(true); + Map writableEnvForGet = (Map) props.get(null); + writableEnvForGet.put(key, value); + } } catch (Exception e) { System.out.println("Failed to set value"); logger.error( diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java index ec7f0c7ca..49b3542fd 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java @@ -10,13 +10,19 @@ import java.util.Properties; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.HttpProtocol; +import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFSessionProperty; import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.log.SFLoggerUtil; @SnowflakeJdbcInternalApi public class S3HttpUtil { + private static final SFLogger logger = SFLoggerFactory.getLogger(HttpUtil.class); + /** * A static function to set S3 proxy params when there is a valid session * @@ -30,11 +36,28 @@ public static void setProxyForS3(HttpClientSettingsKey key, ClientConfiguration clientConfig.setProxyHost(key.getProxyHost()); clientConfig.setProxyPort(key.getProxyPort()); clientConfig.setNonProxyHosts(key.getNonProxyHosts()); + String logMessage = + "Setting S3 proxy. 
Host: " + + key.getProxyHost() + + ", port: " + + key.getProxyPort() + + ", protocol: " + + key.getProxyHttpProtocol() + + ", non-proxy hosts: " + + key.getNonProxyHosts(); if (!Strings.isNullOrEmpty(key.getProxyUser()) && !Strings.isNullOrEmpty(key.getProxyPassword())) { + logMessage += + ", user: " + + key.getProxyUser() + + ", password is " + + SFLoggerUtil.isVariableProvided(key.getProxyPassword()); clientConfig.setProxyUsername(key.getProxyUser()); clientConfig.setProxyPassword(key.getProxyPassword()); } + logger.debug(logMessage); + } else { + logger.debug("Omitting S3 proxy setup"); } } @@ -84,11 +107,26 @@ public static void setSessionlessProxyForS3( clientConfig.setProxyPort(proxyPort); clientConfig.setNonProxyHosts(nonProxyHosts); clientConfig.setProxyProtocol(protocolEnum); + String logMessage = + "Setting sessionless S3 proxy. Host: " + + proxyHost + + ", port: " + + proxyPort + + ", non-proxy hosts: " + + nonProxyHosts + + ", protocol: " + + proxyProtocol; if (!Strings.isNullOrEmpty(proxyUser) && !Strings.isNullOrEmpty(proxyPassword)) { + logMessage += ", user: " + proxyUser + " with password provided"; clientConfig.setProxyUsername(proxyUser); clientConfig.setProxyPassword(proxyPassword); } + logger.debug(logMessage); + } else { + logger.debug("Omitting sessionless S3 proxy setup as proxy is disabled"); } + } else { + logger.debug("Omitting sessionless S3 proxy setup"); } } } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java index 8977d154b..889a1d9e0 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java @@ -56,6 +56,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SFPair; +import net.snowflake.client.util.Stopwatch; import 
net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; @@ -92,6 +93,9 @@ private SnowflakeAzureClient() {} public static SnowflakeAzureClient createSnowflakeAzureClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFBaseSession sfSession) throws SnowflakeSQLException { + logger.info( + "Initializing Snowflake Azure client with encryption: {}", + encMat != null ? "true" : "false"); SnowflakeAzureClient azureClient = new SnowflakeAzureClient(); azureClient.setupAzureClient(stage, encMat, sfSession); @@ -208,6 +212,7 @@ public int getEncryptionKeySize() { */ @Override public void renew(Map stageCredentials) throws SnowflakeSQLException { + logger.debug("Renewing the Azure client"); stageInfo.setCredentials(stageCredentials); setupAzureClient(stageInfo, encMat, session); } @@ -320,10 +325,14 @@ public void download( String presignedUrl, String queryId) throws SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + String localFilePath = localLocation + localFileSep + destFileName; + logger.info( + "Staring download of file from Azure stage path: {} to {}", stageFilePath, localFilePath); int retryCount = 0; do { try { - String localFilePath = localLocation + localFileSep + destFileName; File localFile = new File(localFilePath); CloudBlobContainer container = azStorageClient.getContainerReference(remoteStorageLocation); CloudBlob blob = container.getBlockBlobReference(stageFilePath); @@ -332,6 +341,8 @@ public void download( transferOptions.setConcurrentRequestCount(parallelism); blob.downloadToFile(localFilePath, null, transferOptions, opContext); + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); // Pull object metadata from Azure blob.downloadAttributes(null, transferOptions, opContext); @@ -345,6 +356,7 @@ public void download( String iv = encryptionData.getValue(); if (this.isEncrypting() && 
this.getEncryptionKeySize() <= 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -357,10 +369,27 @@ public void download( // Decrypt file try { EncryptionProvider.decrypt(localFile, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "Azure file {} downloaded to {}. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + remoteStorageLocation, + localFile.getAbsolutePath(), + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); } catch (Exception ex) { logger.error("Error decrypting file", ex); throw ex; } + } else { + logger.info( + "Azure file {} downloaded to {}. It took {} ms with {} retries", + remoteStorageLocation, + localFile.getAbsolutePath(), + downloadMillis, + retryCount); } return; @@ -403,6 +432,10 @@ public InputStream downloadToStream( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + "Staring download of file from Azure stage path: {} to input stream", stageFilePath); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); int retryCount = 0; do { @@ -412,7 +445,8 @@ public InputStream downloadToStream( CloudBlob blob = container.getBlockBlobReference(stageFilePath); InputStream stream = blob.openInputStream(null, null, opContext); - + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); Map userDefinedMetadata = blob.getMetadata(); AbstractMap.SimpleEntry encryptionData = @@ -423,6 +457,7 @@ public InputStream downloadToStream( String iv = encryptionData.getValue(); if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -433,8 +468,18 @@ public InputStream downloadToStream( } try { - - return EncryptionProvider.decryptStream(stream, key, iv, encMat); + InputStream is = EncryptionProvider.decryptStream(stream, key, 
iv, encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "Azure file {} downloaded to input stream. It took {} ms " + + "(download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); + return is; } catch (Exception ex) { logger.error("Error in decrypting file", ex); @@ -442,6 +487,11 @@ public InputStream downloadToStream( } } else { + logger.info( + "Azure file {} downloaded to input stream. Download took {} ms with {} retries", + stageFilePath, + downloadMillis, + retryCount); return stream; } @@ -493,6 +543,9 @@ public void upload( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "Azure", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); final List toClose = new ArrayList<>(); long originalContentLength = meta.getContentLength(); @@ -512,9 +565,10 @@ public void upload( } int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); do { try { - logger.debug("Starting upload", false); InputStream fileInputStream = uploadStreamInfo.left; CloudBlobContainer container = azStorageClient.getContainerReference(remoteStorageLocation); CloudBlockBlob blob = container.getBlockBlobReference(destFileName); @@ -531,7 +585,22 @@ public void upload( null, transferOptions, opContext); - logger.debug("Upload successful", false); + stopwatch.stop(); + + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to Azure location: {}. It took {} ms with {} retries", + remoteStorageLocation, + stopwatch.elapsedMillis(), + retryCount); + } else { + logger.info( + "Uploaded file {} to Azure location: {}. 
It took {} ms with {} retries", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis(), + retryCount); + } blob.uploadMetadata(null, transferOptions, opContext); diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java index 61c31b3ab..506293023 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java @@ -58,6 +58,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SFPair; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; @@ -106,6 +107,8 @@ private SnowflakeGCSClient() {} public static SnowflakeGCSClient createSnowflakeGCSClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFSession session) throws SnowflakeSQLException { + logger.debug( + "Initializing Snowflake GCS client with encryption: {}", encMat != null ? 
"true" : "false"); SnowflakeGCSClient sfGcsClient = new SnowflakeGCSClient(); sfGcsClient.setupGCSClient(stage, encMat, session); @@ -165,6 +168,7 @@ public boolean requirePresignedUrl() { @Override public void renew(Map stageCredentials) throws SnowflakeSQLException { + logger.debug("Renewing the Snowflake GCS client"); stageInfo.setCredentials(stageCredentials); setupGCSClient(stageInfo, encMat, session); } @@ -249,14 +253,18 @@ public void download( String presignedUrl, String queryId) throws SnowflakeSQLException { - int retryCount = 0; String localFilePath = localLocation + localFileSep + destFileName; + logger.info( + "Staring download of file from GCS stage path: {} to {}", stageFilePath, localFilePath); + int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); File localFile = new File(localFilePath); - do { try { String key = null; String iv = null; + long downloadMillis = 0; if (!Strings.isNullOrEmpty(presignedUrl)) { logger.debug("Starting download with presigned URL", false); URIBuilder uriBuilder = new URIBuilder(presignedUrl); @@ -269,7 +277,7 @@ public void download( CloseableHttpClient httpClient = HttpUtil.getHttpClientWithoutDecompression(session.getHttpClientKey()); - // Put the file on storage using the presigned url + // Get the file on storage using the presigned url HttpResponse response = RestRequest.execute( httpClient, @@ -315,6 +323,8 @@ public void download( } } } + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); logger.debug("Download successful", false); } catch (IOException ex) { logger.debug("Download unsuccessful {}", ex); @@ -340,6 +350,8 @@ public void download( logger.debug("Starting download without presigned URL", false); blob.downloadTo( localFile.toPath(), Blob.BlobSourceOption.shouldReturnRawInputStream(true)); + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); logger.debug("Download successful", false); // Get the user-defined BLOB metadata @@ -370,7 +382,18 @@ public 
void download( // Decrypt file try { + stopwatch.start(); EncryptionProvider.decrypt(localFile, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "GCS file {} downloaded to {}. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); } catch (Exception ex) { logger.error("Error decrypting file", ex); throw new SnowflakeSQLLoggedException( @@ -380,6 +403,13 @@ public void download( SqlState.INTERNAL_ERROR, "Cannot decrypt file"); } + } else { + logger.info( + "GCS file {} downloaded to {}. It took {} ms with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis, + retryCount); } return; } catch (Exception ex) { @@ -421,8 +451,12 @@ public InputStream downloadToStream( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info("Staring download of file from GCS stage path: {} to input stream", stageFilePath); int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); InputStream inputStream = null; + long downloadMillis = 0; do { try { String key = null; @@ -478,6 +512,8 @@ public InputStream downloadToStream( } } } + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); logger.debug("Download successful", false); } catch (IOException ex) { logger.debug("Download unsuccessful {}", ex); @@ -509,9 +545,12 @@ public InputStream downloadToStream( key = encryptionData.getKey(); iv = encryptionData.getValue(); } + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); } if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLException( queryId, @@ -523,7 +562,17 @@ public InputStream downloadToStream( // Decrypt file try { if (inputStream != null) { + inputStream = 
EncryptionProvider.decryptStream(inputStream, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "GCS file {} downloaded to stream. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); return inputStream; } } catch (Exception ex) { @@ -535,7 +584,15 @@ public InputStream downloadToStream( SqlState.INTERNAL_ERROR, "Cannot decrypt file"); } + } else { + logger.info( + "GCS file {} downloaded to stream. Download took {} ms with {} retries", + stageFilePath, + downloadMillis, + retryCount); } + + return inputStream; } catch (Exception ex) { logger.debug("Download unsuccessful {}", ex); handleStorageException(ex, ++retryCount, "download", session, command, queryId); @@ -584,6 +641,9 @@ public void uploadWithPresignedUrlWithoutConnection( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "GCS", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); final List toClose = new ArrayList<>(); long originalContentLength = meta.getContentLength(); @@ -601,7 +661,8 @@ public void uploadWithPresignedUrlWithoutConnection( if (!(meta instanceof CommonObjectMetadata)) { throw new IllegalArgumentException("Unexpected metadata object type"); } - + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); if (Strings.isNullOrEmpty(presignedUrl) || "null".equalsIgnoreCase(presignedUrl)) { logger.debug("Starting upload with downscoped token"); uploadWithDownScopedToken( @@ -611,7 +672,7 @@ public void uploadWithPresignedUrlWithoutConnection( meta.getUserMetadata(), uploadStreamInfo.left, queryId); - logger.debug("Upload successfully with downscoped token"); + logger.debug("Upload successful with downscoped token"); } else { logger.debug("Starting upload with presigned url"); @@ -627,6 +688,20 @@ public void 
uploadWithPresignedUrlWithoutConnection( queryId); logger.debug("Upload successfully with presigned url"); } + stopwatch.stop(); + + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to GCS location: {}. It took {} ms", + remoteStorageLocation, + stopwatch.elapsedMillis()); + } else { + logger.info( + "Uploaded file {} to GCS location: {}. It took {} ms", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis()); + } // close any open streams in the "toClose" list and return for (FileInputStream is : toClose) { @@ -668,6 +743,9 @@ public void upload( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "GCS", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); final List toClose = new ArrayList<>(); long originalContentLength = meta.getContentLength(); @@ -686,6 +764,8 @@ public void upload( throw new IllegalArgumentException("Unexpected metadata object type"); } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); if (!Strings.isNullOrEmpty(presignedUrl)) { logger.debug("Starting upload with downscope token", false); uploadWithPresignedUrl( @@ -698,7 +778,20 @@ public void upload( presignedUrl, session.getHttpClientKey(), queryId); + stopwatch.stop(); logger.debug("Upload successful", false); + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to GCS location: {}. It took {} ms", + remoteStorageLocation, + stopwatch.elapsedMillis()); + } else { + logger.info( + "Uploaded file {} to GCS location: {}. 
It took {} ms", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis()); + } // close any open streams in the "toClose" list and return for (FileInputStream is : toClose) { @@ -722,7 +815,20 @@ public void upload( uploadStreamInfo.left, queryId); + stopwatch.stop(); logger.debug("Upload successful", false); + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to GCS location: {}. It took {} ms", + remoteStorageLocation, + stopwatch.elapsedMillis()); + } else { + logger.info( + "Uploaded file {} to GCS location: {}. It took {} ms", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis()); + } // close any open streams in the "toClose" list and return for (FileInputStream is : toClose) { diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java index 190493b69..5b405a15f 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java @@ -67,6 +67,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SFPair; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; @@ -116,6 +117,10 @@ public SnowflakeS3Client( SFBaseSession session, boolean useS3RegionalUrl) throws SnowflakeSQLException { + logger.debug( + "Initializing Snowflake S3 client with encryption: {}, client side encrypted: {}", + encMat != null, + isClientSideEncrypted); this.session = session; this.isUseS3RegionalUrl = useS3RegionalUrl; setupSnowflakeS3Client( @@ -217,9 +222,10 @@ private void setupSnowflakeS3Client( } else { if (region != null) { if (this.isUseS3RegionalUrl) { + String 
domainSuffixForRegionalUrl = getDomainSuffixForRegionalUrl(region.getName()); amazonS3Builder.withEndpointConfiguration( new AwsClientBuilder.EndpointConfiguration( - "s3." + region.getName() + ".amazonaws.com", region.getName())); + "s3." + region.getName() + "." + domainSuffixForRegionalUrl, region.getName())); } else { amazonS3Builder.withRegion(region.getName()); } @@ -230,6 +236,10 @@ private void setupSnowflakeS3Client( amazonClient = (AmazonS3) amazonS3Builder.build(); } + static String getDomainSuffixForRegionalUrl(String regionName) { + return regionName.toLowerCase().startsWith("cn-") ? "amazonaws.com.cn" : "amazonaws.com"; + } + // Returns the Max number of retry attempts @Override public int getMaxRetries() { @@ -274,6 +284,7 @@ public int getEncryptionKeySize() { */ @Override public void renew(Map stageCredentials) throws SnowflakeSQLException { + logger.debug("Renewing the Snowflake S3 client"); // We renew the client with fresh credentials and with its original parameters setupSnowflakeS3Client( stageCredentials, @@ -288,6 +299,7 @@ public void renew(Map stageCredentials) throws SnowflakeSQLException { @Override public void shutdown() { + logger.debug("Shutting down the Snowflake S3 client"); amazonClient.shutdown(); } @@ -335,14 +347,18 @@ public void download( String presignedUrl, String queryId) throws SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + String localFilePath = localLocation + localFileSep + destFileName; + logger.info( + "Staring download of file from S3 stage path: {} to {}", stageFilePath, localFilePath); TransferManager tx = null; int retryCount = 0; do { try { - File localFile = new File(localLocation + localFileSep + destFileName); + File localFile = new File(localFilePath); - logger.debug( - "Creating executor service for transfer" + "manager with {} threads", parallelism); + logger.debug("Creating executor service for transfer manager with {} threads", parallelism); // download files from 
s3 tx = @@ -369,7 +385,11 @@ public ExecutorService newExecutor() { myDownload.waitForCompletion(); + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); + if (this.isEncrypting() && this.getEncryptionKeySize() < 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -382,10 +402,27 @@ public ExecutorService newExecutor() { // Decrypt file try { EncryptionProvider.decrypt(localFile, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "S3 file {} downloaded to {}. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); } catch (Exception ex) { logger.error("Error decrypting file", ex); throw ex; } + } else { + logger.info( + "S3 file {} downloaded to {}. It took {} ms with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis, + retryCount); } return; @@ -433,21 +470,24 @@ public InputStream downloadToStream( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info("Staring download of file from S3 stage path: {} to input stream", stageFilePath); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); int retryCount = 0; do { try { S3Object file = amazonClient.getObject(remoteStorageLocation, stageFilePath); - ObjectMetadata meta = amazonClient.getObjectMetadata(remoteStorageLocation, stageFilePath); - InputStream stream = file.getObjectContent(); - + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); Map metaMap = meta.getUserMetadata(); String key = metaMap.get(AMZ_KEY); String iv = metaMap.get(AMZ_IV); if (this.isEncrypting() && this.getEncryptionKeySize() < 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -458,16 +498,31 @@ public InputStream 
downloadToStream( } try { - - return EncryptionProvider.decryptStream(stream, key, iv, encMat); + InputStream is = EncryptionProvider.decryptStream(stream, key, iv, encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "S3 file {} downloaded to input stream. It took {} ms " + + "(download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); + return is; } catch (Exception ex) { logger.error("Error in decrypting file", ex); throw ex; } } else { - return stream; + logger.info( + "S3 file {} downloaded to input stream. Download took {} ms with {} retries", + stageFilePath, + downloadMillis, + retryCount); } + return stream; } catch (Exception ex) { handleS3Exception(ex, ++retryCount, "download", session, command, this, queryId); } @@ -515,6 +570,10 @@ public void upload( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "S3", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); + final long originalContentLength = meta.getContentLength(); final List toClose = new ArrayList<>(); SFPair uploadStreamInfo = @@ -537,9 +596,10 @@ public void upload( TransferManager tx = null; int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); do { try { - logger.debug( "Creating executor service for transfer" + "manager with {} threads", parallelism); @@ -576,11 +636,28 @@ public ExecutorService newExecutor() { } myUpload.waitForCompletion(); + stopwatch.stop(); + long uploadMillis = stopwatch.elapsedMillis(); // get out for (FileInputStream is : toClose) { IOUtils.closeQuietly(is); } + + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to S3 location: {}. It took {} ms with {} retries", + destFileName, + uploadMillis, + retryCount); + } else { + logger.info( + "Uploaded file {} to S3 location: {}. 
It took {} ms with {} retries", + srcFile.getAbsolutePath(), + destFileName, + uploadMillis, + retryCount); + } return; } catch (Exception ex) { @@ -635,7 +712,7 @@ private SFPair createUploadStream( String queryId) throws SnowflakeSQLException { logger.debug( - "createUploadStream({}, {}, {}, {}, {}, {}, {}) " + "keySize={}", + "createUploadStream({}, {}, {}, {}, {}, {}, {}) " + "keySize: {}", this, srcFile, uploadFromStream, diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java index ef97c9508..ac7de73a6 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java @@ -53,7 +53,7 @@ public static StorageClientFactory getFactory() { public SnowflakeStorageClient createClient( StageInfo stage, int parallel, RemoteStoreFileEncryptionMaterial encMat, SFSession session) throws SnowflakeSQLException { - logger.debug("createClient client type={}", stage.getStageType().name()); + logger.debug("Creating storage client. Client type: {}", stage.getStageType().name()); switch (stage.getStageType()) { case S3: @@ -113,7 +113,7 @@ private SnowflakeS3Client createS3Client( throws SnowflakeSQLException { final int S3_TRANSFER_MAX_RETRIES = 3; - logger.debug("createS3Client encryption={}", (encMat == null ? "no" : "yes")); + logger.debug("Creating S3 client with encryption: {}", (encMat == null ? 
"no" : "yes")); SnowflakeS3Client s3Client; @@ -130,8 +130,8 @@ private SnowflakeS3Client createS3Client( clientConfig.setProxyPassword(""); logger.debug( - "s3 client configuration: maxConnection={}, connectionTimeout={}, " - + "socketTimeout={}, maxErrorRetry={}", + "S3 client configuration: maxConnection: {}, connectionTimeout: {}, " + + "socketTimeout: {}, maxErrorRetry: {}", clientConfig.getMaxConnections(), clientConfig.getConnectionTimeout(), clientConfig.getSocketTimeout(), @@ -153,7 +153,7 @@ private SnowflakeS3Client createS3Client( logger.debug("Exception creating s3 client", ex); throw ex; } - logger.debug("s3 client created", false); + logger.debug("S3 Storage client created", false); return s3Client; } @@ -195,7 +195,7 @@ public StorageObjectMetadata createStorageMetadataObj(StageInfo.StageType stageT private SnowflakeAzureClient createAzureClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFBaseSession session) throws SnowflakeSQLException { - logger.debug("createAzureClient encryption={}", (encMat == null ? "no" : "yes")); + logger.debug("Creating Azure client with encryption: {}", (encMat == null ? "no" : "yes")); SnowflakeAzureClient azureClient; @@ -220,7 +220,7 @@ private SnowflakeAzureClient createAzureClient( private SnowflakeGCSClient createGCSClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFSession session) throws SnowflakeSQLException { - logger.debug("createGCSClient encryption={}", (encMat == null ? "no" : "yes")); + logger.debug("Creating GCS client with encryption: {}", (encMat == null ? 
"no" : "yes")); SnowflakeGCSClient gcsClient; diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java new file mode 100644 index 000000000..34098d9d0 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java @@ -0,0 +1,36 @@ +package net.snowflake.client.jdbc.cloud.storage; + +import java.io.File; +import java.io.InputStream; +import net.snowflake.client.jdbc.FileBackedOutputStream; + +class StorageHelper { + static String getStartUploadLog( + String serviceName, + boolean uploadFromStream, + InputStream inputStream, + FileBackedOutputStream fileBackedOutputStream, + File srcFile, + String destFileName) { + if (uploadFromStream && fileBackedOutputStream != null) { + File file = fileBackedOutputStream.getFile(); + String fileBackedOutputStreamType = + file == null ? "byte stream" : ("file: " + file.getAbsolutePath()); + return "Starting upload from stream (" + + fileBackedOutputStreamType + + ") to " + + serviceName + + " location: " + + destFileName; + } else if (uploadFromStream && inputStream != null) { + return "Starting upload from input stream to " + serviceName + " location: " + destFileName; + } else { + return "Starting upload from file " + + srcFile.getAbsolutePath() + + " to " + + serviceName + + " location: " + + destFileName; + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java new file mode 100644 index 000000000..1fcbb1fb2 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java @@ -0,0 +1,61 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.Proxy; +import java.net.URL; +import java.security.KeyManagementException; +import 
java.security.NoSuchAlgorithmException; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.TrustManager; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class CertificateDiagnosticCheck extends DiagnosticCheck { + + private static final String SECURE_SOCKET_PROTOCOL = "TLS"; + + private static final SFLogger logger = + SFLoggerFactory.getLogger(CertificateDiagnosticCheck.class); + + public CertificateDiagnosticCheck(ProxyConfig proxyConfig) { + super("SSL/TLS Certificate Test", proxyConfig); + } + + @Override + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + String port = Integer.toString(snowflakeEndpoint.getPort()); + if (snowflakeEndpoint.isSslEnabled()) { + String urlString = "https://" + hostname + ":" + port; + try { + SSLContext sslContext = SSLContext.getInstance(SECURE_SOCKET_PROTOCOL); + sslContext.init(null, new TrustManager[] {new DiagnosticTrustManager()}, null); + HttpsURLConnection.setDefaultSSLSocketFactory(sslContext.getSocketFactory()); + Proxy proxy = this.proxyConf.getProxy(snowflakeEndpoint); + new URL(urlString).openConnection(proxy).connect(); + } catch (NoSuchAlgorithmException e) { + logger.error( + "None of the security provider's implementation of SSLContextSpi supports " + + SECURE_SOCKET_PROTOCOL, + e); + } catch (KeyManagementException e) { + logger.error("Failed to initialize SSLContext", e); + } catch (MalformedURLException e) { + logger.error("Failed to create new URL object: " + urlString, e); + } catch (IOException e) { + logger.error("Failed to open a connection to: " + urlString, e); + } catch (Exception e) { + logger.error( + "Unexpected error occurred when trying to retrieve certificate from: " + hostname, e); + } finally { + HttpsURLConnection.setDefaultSSLSocketFactory( + (SSLSocketFactory) SSLSocketFactory.getDefault()); + } + 
} else { + logger.info("Host " + hostname + ":" + port + " is not secure. Skipping certificate check."); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java new file mode 100644 index 000000000..14aa2ff0a --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java @@ -0,0 +1,22 @@ +package net.snowflake.client.jdbc.diagnostic; + +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +abstract class DiagnosticCheck { + protected final String name; + protected final ProxyConfig proxyConf; + private static final SFLogger logger = SFLoggerFactory.getLogger(DiagnosticCheck.class); + + abstract void doCheck(SnowflakeEndpoint snowflakeEndpoint); + + final void run(SnowflakeEndpoint snowflakeEndpoint) { + logger.info("JDBC Diagnostics - {}: hostname: {}", this.name, snowflakeEndpoint.getHost()); + doCheck(snowflakeEndpoint); + } + + protected DiagnosticCheck(String name, ProxyConfig proxyConf) { + this.name = name; + this.proxyConf = proxyConf; + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java new file mode 100644 index 000000000..7f146ac1b --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java @@ -0,0 +1,190 @@ +package net.snowflake.client.jdbc.diagnostic; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.File; +import java.io.IOException; +import java.net.Proxy; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import net.snowflake.client.core.SFSessionProperty; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import 
net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class DiagnosticContext { + + private static final SFLogger logger = SFLoggerFactory.getLogger(DiagnosticContext.class); + private static final String JAVAX_NET_DEBUG = "javax.net.debug"; + + private static final String JAVAX_TRUSTSTORE = "javax.net.ssl.trustStore"; + private ProxyConfig proxyConf; + + private List endpoints = new ArrayList<>(); + + private final DiagnosticCheck[] tests; + + public DiagnosticContext( + String allowListFile, Map connectionPropertiesMap) { + + createProxyConfiguration(connectionPropertiesMap); + + try { + JsonNode jsonNode = readAllowListJsonFile(allowListFile); + for (JsonNode objectNode : jsonNode) { + String type = objectNode.get("type").asText(); + String host = objectNode.get("host").asText(); + int port = objectNode.get("port").asInt(); + SnowflakeEndpoint e = new SnowflakeEndpoint(type, host, port); + endpoints.add(e); + } + + } catch (IOException e) { + logger.error("Failed to read allowlist file: ", e); + } catch (Exception e) { + logger.error("Failed to parse data in allowlist file: " + allowListFile, e); + } + + tests = + new DiagnosticCheck[] { + new DnsDiagnosticCheck(proxyConf), + new TcpDiagnosticCheck(proxyConf), + new CertificateDiagnosticCheck(proxyConf), + new HttpAndHttpsDiagnosticCheck(proxyConf) + }; + } + + /** This constructor is only used for testing */ + DiagnosticContext(Map connectionPropertiesMap) { + createProxyConfiguration(connectionPropertiesMap); + tests = null; + } + + private void createProxyConfiguration(Map connectionPropertiesMap) { + String proxyHost = (String) connectionPropertiesMap.get(SFSessionProperty.PROXY_HOST); + int proxyPort = + (connectionPropertiesMap.get(SFSessionProperty.PROXY_PORT) == null) + ? 
-1 + : Integer.parseInt((String) connectionPropertiesMap.get(SFSessionProperty.PROXY_PORT)); + String nonProxyHosts = (String) connectionPropertiesMap.get(SFSessionProperty.NON_PROXY_HOSTS); + proxyConf = new ProxyConfig(proxyHost, proxyPort, nonProxyHosts); + } + + public void runDiagnostics() { + + logEnvironmentInfo(); + + // Loop through endpoints and run diagnostic test on each one of them + for (DiagnosticCheck test : tests) { + for (SnowflakeEndpoint endpoint : endpoints) { + test.run(endpoint); + } + } + } + + private JsonNode readAllowListJsonFile(String jsonFilePath) throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + File allowListFile = new File(jsonFilePath); + + return objectMapper.readTree(allowListFile); + } + + public void logEnvironmentInfo() { + logger.info("Getting environment information"); + logger.info("Current truststore used: " + getTrustStoreLocation()); + logger.info("-Dnetworkaddress.cache.ttl: " + System.getProperty("networkaddress.cache.ttl")); + logger.info( + "-Dnetworkaddress.cache.negative.ttl: " + + System.getProperty("networkaddress.cache.negative.ttl")); + logger.info("-Djavax.net.debug: " + System.getProperty(JAVAX_NET_DEBUG)); + } + + private boolean isNullOrEmpty(String a) { + return a == null || a.isEmpty(); + } + + /** + * We determine the truststore in use based on the JSSE documentation: + * + *

1.) If the javax.net.ssl.trustStore property is defined, then the TrustManagerFactory + * attempts to find a file using the file name specified by that system property, and uses that + * file for the KeyStore parameter. If the javax.net.ssl.trustStorePassword system property is + * also defined, then its value is used to check the integrity of the data in the truststore + * before opening it. + * + *

If the javax.net.ssl.trustStore property is defined but the specified file does not exist, + * then a default TrustManager using an empty keystore is created. + * + *

2.) If the javax.net.ssl.trustStore system property was not specified, then: - if the file + * java-home/lib/security/jssecacerts exists, that file is used; - if the file + * java-home/lib/security/cacerts exists, that file is used; - if neither of these files exists, + * then the SSL cipher suite is anonymous, does not perform any authentication, and thus does not + * need a truststore. + */ + private String getTrustStoreLocation() { + String trustStore = System.getProperty(JAVAX_TRUSTSTORE); + String javaHome = System.getProperty("java.home"); + Path javaSecurityPath = FileSystems.getDefault().getPath(javaHome, "/lib/security"); + logger.info("JAVA_HOME: " + javaHome); + + if (isNullOrEmpty(trustStore)) { + logger.info("-D{} is null", JAVAX_TRUSTSTORE); + Path jssecacertsPath = + FileSystems.getDefault().getPath(javaSecurityPath.toString(), "jssecacerts"); + Path cacertsPath = FileSystems.getDefault().getPath(javaSecurityPath.toString(), "cacerts"); + + logger.info("Checking if jssecacerts or cacerts exist"); + if (Files.exists(jssecacertsPath)) { + logger.info(jssecacertsPath.toString() + " exists"); + trustStore = jssecacertsPath.toString(); + } else if (Files.exists(cacertsPath)) { + logger.info(cacertsPath.toString() + " exists"); + trustStore = cacertsPath.toString(); + } + } else { + logger.info("-D{} is set by user: {}", JAVAX_TRUSTSTORE, trustStore); + } + return trustStore; + } + + String getHttpProxyHost() { + return proxyConf.getHttpProxyHost(); + } + + int getHttpProxyPort() { + return proxyConf.getHttpProxyPort(); + } + + String getHttpsProxyHost() { + return proxyConf.getHttpsProxyHost(); + } + + int getHttpsProxyPort() { + return proxyConf.getHttpsProxyPort(); + } + + String getHttpNonProxyHosts() { + return proxyConf.getNonProxyHosts(); + } + + List getEndpoints() { + return endpoints; + } + + Proxy getProxy(SnowflakeEndpoint snowflakeEndpoint) { + return this.proxyConf.getProxy(snowflakeEndpoint); + } + + boolean isProxyEnabled() { + return 
proxyConf.isProxyEnabled(); + } + + boolean isProxyEnabledOnJvm() { + return proxyConf.isProxyEnabledOnJvm(); + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java new file mode 100644 index 000000000..cfd316f40 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java @@ -0,0 +1,78 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.net.Socket; +import java.security.cert.CertificateParsingException; +import java.security.cert.X509Certificate; +import javax.net.ssl.SSLEngine; +import javax.net.ssl.X509ExtendedTrustManager; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class DiagnosticTrustManager extends X509ExtendedTrustManager { + + private static final SFLogger logger = SFLoggerFactory.getLogger(DiagnosticTrustManager.class); + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType) { + printCertificates(certs); + } + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType, SSLEngine engine) { + printCertificates(certs); + } + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType, Socket sc) { + printCertificates(certs); + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) { + // do nothing + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType, Socket sc) { + // do nothing + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType, SSLEngine engine) { + // do nothing + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + // This implementation is not needed, so we're returning an empty array + return new X509Certificate[0]; + } + + private void printCertificates(X509Certificate[] chainCerts) { + logger.info("Printing 
certificate chain"); + StringBuilder sb = new StringBuilder(); + int i = 0; + for (X509Certificate x509Cert : chainCerts) { + try { + sb.append("\nCertificate[").append(i).append("]:").append("\n"); + sb.append("Subject: ").append(x509Cert.getSubjectDN()).append("\n"); + sb.append("Issuer: ").append(x509Cert.getIssuerDN()).append("\n"); + sb.append("Valid from: ").append(x509Cert.getNotBefore()).append("\n"); + sb.append("Not Valid After: ").append(x509Cert.getNotAfter()).append("\n"); + sb.append("Subject Alternative Names: ") + .append(x509Cert.getSubjectAlternativeNames()) + .append("\n"); + sb.append("Issuer Alternative Names: ") + .append(x509Cert.getIssuerAlternativeNames()) + .append("\n"); + sb.append("Serial: ").append(x509Cert.getSerialNumber().toString(16)).append("\n"); + logger.info(sb.toString()); + i++; + } catch (CertificateParsingException e) { + logger.error("Error parsing certificate", e); + } catch (Exception e) { + logger.error("Unexpected error occurred when parsing certificate", e); + } + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java new file mode 100644 index 000000000..259015283 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java @@ -0,0 +1,83 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.net.Inet4Address; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Hashtable; +import javax.naming.Context; +import javax.naming.NamingEnumeration; +import javax.naming.NamingException; +import javax.naming.directory.Attribute; +import javax.naming.directory.Attributes; +import javax.naming.directory.DirContext; +import javax.naming.spi.NamingManager; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class DnsDiagnosticCheck extends DiagnosticCheck { + + private static final SFLogger 
logger = SFLoggerFactory.getLogger(DnsDiagnosticCheck.class); + + private final String INITIAL_DNS_CONTEXT = "com.sun.jndi.dns.DnsContextFactory"; + + DnsDiagnosticCheck(ProxyConfig proxyConfig) { + super("DNS Lookup Test", proxyConfig); + } + + @Override + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + getCnameRecords(snowflakeEndpoint); + getArecords(snowflakeEndpoint); + } + + private void getCnameRecords(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + try { + Hashtable env = new Hashtable<>(); + env.put(Context.INITIAL_CONTEXT_FACTORY, INITIAL_DNS_CONTEXT); + DirContext dirCtx = (DirContext) NamingManager.getInitialContext(env); + Attributes attrs1 = dirCtx.getAttributes(snowflakeEndpoint.getHost(), new String[] {"CNAME"}); + NamingEnumeration attrs = attrs1.getAll(); + StringBuilder sb = new StringBuilder(); + sb.append("\nCNAME:\n"); + while (attrs.hasMore()) { + Attribute a = attrs.next(); + NamingEnumeration values = a.getAll(); + while (values.hasMore()) { + sb.append(values.next()); + sb.append("\n"); + } + } + logger.info(sb.toString()); + } catch (NamingException e) { + logger.error("Error occurred when getting CNAME record for host " + hostname, e); + } catch (Exception e) { + logger.error("Unexpected error occurred when getting CNAME record for host " + hostname, e); + } + } + + private void getArecords(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + try { + InetAddress[] addresses = InetAddress.getAllByName(hostname); + StringBuilder sb = new StringBuilder(); + sb.append("\nA Records:\n"); + for (InetAddress ip : addresses) { + if (ip instanceof Inet4Address) { + sb.append(ip.getHostAddress()); + sb.append("\n"); + } + // Check if this is a private link endpoint and if the ip address + // returned by the DNS query is a private IP address as expected. 
+ if (snowflakeEndpoint.isPrivateLink() && !ip.isSiteLocalAddress()) { + logger.error( + "Public IP address was returned for {}. Please review your DNS configurations.", + hostname); + } + } + logger.info(sb.toString()); + } catch (UnknownHostException e) { + logger.error("DNS query failed for host: " + snowflakeEndpoint.getHost(), e); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java new file mode 100644 index 000000000..90b499435 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java @@ -0,0 +1,57 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.Proxy; +import java.net.URL; +import java.util.List; +import java.util.Map; +import javax.net.ssl.HttpsURLConnection; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class HttpAndHttpsDiagnosticCheck extends DiagnosticCheck { + + private static final SFLogger logger = + SFLoggerFactory.getLogger(HttpAndHttpsDiagnosticCheck.class); + private final String HTTP_SCHEMA = "http://"; + private final String HTTPS_SCHEMA = "https://"; + + HttpAndHttpsDiagnosticCheck(ProxyConfig proxyConfig) { + super("HTTP/HTTPS Connection Test", proxyConfig); + } + + @Override + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + // We have to replace underscores with hyphens because the JDK doesn't allow underscores in the + // hostname + String hostname = snowflakeEndpoint.getHost().replace('_', '-'); + try { + Proxy proxy = this.proxyConf.getProxy(snowflakeEndpoint); + StringBuilder sb = new StringBuilder(); + String urlString = + (snowflakeEndpoint.isSslEnabled()) ? 
HTTPS_SCHEMA + hostname : HTTP_SCHEMA + hostname; + URL url = new URL(urlString); + HttpURLConnection con = + (snowflakeEndpoint.isSslEnabled()) + ? (HttpsURLConnection) url.openConnection(proxy) + : (HttpURLConnection) url.openConnection(proxy); + logger.info("Response from server: {} {}", con.getResponseCode(), con.getResponseMessage()); + sb.append("Headers:\n"); + + Map> headerFields = con.getHeaderFields(); + for (Map.Entry> header : headerFields.entrySet()) { + sb.append(header.getKey()).append(": ").append(header.getValue()).append("\n"); + } + + logger.info(sb.toString()); + + } catch (MalformedURLException e) { + logger.error( + "The URL format is incorrect, please check your allowlist JSON file for errors.", e); + } catch (IOException e) { + logger.error("Could not send an HTTP/HTTPS request to host " + hostname, e); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java new file mode 100644 index 000000000..7003bae46 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java @@ -0,0 +1,214 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.regex.Pattern; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +/** + * This class is used to represent the proxy configurations passed to the JDBC driver either as JVM + * arguments or connection parameters. The class determines which proxy settings take precedence and + * should be used by the diagnostic tests. We normalize configurations where empty strings for + * hostnames and -1 for ports represent the absence of a configuration. + * + *

The order of precedence is: + * + *

1.) Connection parameters (proxy configurations passed to the constructor) 2.) JVM arguments + * + *

The useProxy parameter is ignored. If the proxy is configured using the JVM and someone wants + * to bypass that at the connection-level then they would need to set the following connection + * parameters: proxyHost=127.0.0.1 proxyPort=8080 nonProxyHosts=* + * + *

i.e. bypass the proxy host when connecting to any host. + */ +class ProxyConfig { + private String proxyHost; + private int proxyPort; + private String nonProxyHosts; + private String jvmHttpProxyHost; + private String jvmHttpsProxyHost; + private int jvmHttpProxyPort; + private int jvmHttpsProxyPort; + private String jvmNonProxyHosts; + private String finalHttpProxyHost = ""; + private String finalHttpsProxyHost = ""; + private int finalHttpProxyPort = -1; + private int finalHttpsProxyPort = -1; + private String finalNonProxyHosts = ""; + private boolean isProxyEnabled = false; + + private boolean isProxyEnabledOnJvm = false; + + private final String JVM_HTTP_PROXY_HOST = "http.proxyHost"; + private final String JVM_HTTPS_PROXY_HOST = "https.proxyHost"; + private final String JVM_HTTP_PROXY_PORT = "http.proxyPort"; + private final String JVM_HTTPS_PROXY_PORT = "https.proxyPort"; + private final String JVM_HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts"; + + private static final SFLogger logger = SFLoggerFactory.getLogger(ProxyConfig.class); + + public String getHttpProxyHost() { + return finalHttpProxyHost; + } + + public String getHttpsProxyHost() { + return finalHttpsProxyHost; + } + + public int getHttpProxyPort() { + return finalHttpProxyPort; + } + + public int getHttpsProxyPort() { + return finalHttpsProxyPort; + } + + public String getNonProxyHosts() { + return finalNonProxyHosts; + } + + public void setProxyHost(String proxyHost) { + this.proxyHost = proxyHost; + } + + public void setProxyPort(int proxyPort) { + this.proxyPort = proxyPort; + } + + public void setNonProxyHosts(String nonProxyHosts) { + this.nonProxyHosts = nonProxyHosts; + } + + public ProxyConfig(String proxyHost, int proxyPort, String nonProxyHosts) { + jvmHttpProxyHost = + (System.getProperty(JVM_HTTP_PROXY_HOST) == null) + ? "" + : System.getProperty(JVM_HTTP_PROXY_HOST); + + jvmHttpsProxyHost = + (System.getProperty(JVM_HTTPS_PROXY_HOST) == null) + ? 
"" + : System.getProperty(JVM_HTTPS_PROXY_HOST); + + jvmHttpProxyPort = + (System.getProperty(JVM_HTTP_PROXY_PORT) == null) + ? -1 + : Integer.parseInt(System.getProperty(JVM_HTTP_PROXY_PORT)); + + jvmHttpsProxyPort = + (System.getProperty(JVM_HTTPS_PROXY_PORT) == null) + ? -1 + : Integer.parseInt(System.getProperty(JVM_HTTPS_PROXY_PORT)); + + jvmNonProxyHosts = + (System.getProperty(JVM_HTTP_NON_PROXY_HOSTS) == null) + ? "" + : System.getProperty(JVM_HTTP_NON_PROXY_HOSTS); + this.proxyHost = (proxyHost == null) ? "" : proxyHost; + this.proxyPort = proxyPort; + this.nonProxyHosts = (nonProxyHosts == null) ? "" : nonProxyHosts; + resolveProxyConfigurations(); + } + + public ProxyConfig() { + this(null, -1, null); + } + + public boolean isProxyEnabled() { + return isProxyEnabled; + } + + public boolean isProxyEnabledOnJvm() { + return isProxyEnabledOnJvm; + } + + /** + * This method reviews both the JVM and connection parameter configurations then concludes which + * settings to use 1.) Check if proxy settings were passed in the connection parameters, if so, + * then we use that right away. 2.) If connection parameters were not passed, then review JVM + * arguments and use those. 3.) If neither were set, then don't use any proxy settings (default). + */ + private void resolveProxyConfigurations() { + // Both proxyHost and proxyPort connection parameters must be present. 
+ StringBuilder sb = new StringBuilder(); + logger.info("Resolving proxy configurations"); + sb.append("Proxy Configurations picked up from "); + if (!proxyHost.isEmpty() && proxyPort != -1) { + finalHttpProxyHost = proxyHost; + finalHttpsProxyHost = proxyHost; + finalHttpProxyPort = proxyPort; + finalHttpsProxyPort = proxyPort; + finalNonProxyHosts = nonProxyHosts; + isProxyEnabled = true; + sb.append("connection parameters:\n"); + sb.append("proxyHost: ").append(proxyHost).append("\n"); + sb.append("proxyPort: ").append(proxyPort).append("\n"); + sb.append("nonProxyHosts: ").append(nonProxyHosts); + } else if ((!jvmHttpProxyHost.isEmpty() && jvmHttpProxyPort != -1) + || (!jvmHttpsProxyHost.isEmpty() && jvmHttpsProxyPort != -1)) { + finalHttpProxyHost = jvmHttpProxyHost; + finalHttpProxyPort = jvmHttpProxyPort; + finalHttpsProxyHost = jvmHttpsProxyHost; + finalHttpsProxyPort = jvmHttpsProxyPort; + finalNonProxyHosts = jvmNonProxyHosts; + isProxyEnabled = true; + isProxyEnabledOnJvm = true; + sb.append("JVM arguments:\n"); + sb.append("-D").append(JVM_HTTP_PROXY_HOST).append("=").append(jvmHttpProxyHost).append("\n"); + sb.append("-D").append(JVM_HTTP_PROXY_PORT).append("=").append(jvmHttpProxyPort).append("\n"); + sb.append("-D") + .append(JVM_HTTPS_PROXY_HOST) + .append("=") + .append(jvmHttpsProxyHost) + .append("\n"); + sb.append("-D") + .append(JVM_HTTPS_PROXY_PORT) + .append("=") + .append(jvmHttpsProxyPort) + .append("\n"); + } + logger.info(sb.toString()); + } + + protected boolean isBypassProxy(String hostname) { + String nonProxyHosts = getNonProxyHosts().replace(".", "\\.").replace("*", ".*"); + String[] nonProxyHostsArray = nonProxyHosts.split("\\|"); + for (String i : nonProxyHostsArray) { + if (Pattern.compile(i).matcher(hostname).matches()) { + return true; + } + } + return false; + } + + public Proxy getProxy(SnowflakeEndpoint endpoint) { + if (!isProxyEnabled || isBypassProxy(endpoint.getHost())) { + return Proxy.NO_PROXY; + } else if 
(endpoint.isSslEnabled()) { + return (isHttpsProxyEnabled()) + ? new Proxy( + Proxy.Type.HTTP, new InetSocketAddress(finalHttpsProxyHost, finalHttpsProxyPort)) + : Proxy.NO_PROXY; + } + return (isHttpProxyEnabled()) + ? new Proxy(Proxy.Type.HTTP, new InetSocketAddress(finalHttpProxyHost, finalHttpProxyPort)) + : Proxy.NO_PROXY; + } + + /* + Check that both http proxy host and http proxy port are set, + only then do we consider that http proxy is enabled. + */ + private boolean isHttpProxyEnabled() { + return (!finalHttpProxyHost.isEmpty() || finalHttpProxyPort != -1); + } + + /* + Check that both https proxy host and http proxy port are set, + only then do we consider that http proxy is enabled. + */ + private boolean isHttpsProxyEnabled() { + return (!finalHttpsProxyHost.isEmpty() || finalHttpsProxyPort != -1); + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java new file mode 100644 index 000000000..2a181c08a --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java @@ -0,0 +1,69 @@ +package net.snowflake.client.jdbc.diagnostic; + +import net.snowflake.client.core.PrivateLinkDetector; + +/* +The SnowflakeEndpoint class represents an endpoint as returned by the System$allowlist() SQL +function. 
Example: + +[{"type":"SNOWFLAKE_DEPLOYMENT","host":"snowhouse.snowflakecomputing.com","port":443},{"type":"SNOWFLAKE_DEPLOYMENT_REGIONLESS","host":"sfcogsops-snowhouse_aws_us_west_2.snowflakecomputing.com","port":443},{"type":"STAGE","host":"sfc-ds2-customer-stage.s3.amazonaws.com","port":443},{"type":"STAGE","host":"sfc-ds2-customer-stage.s3.us-west-2.amazonaws.com","port":443},{"type":"STAGE","host":"sfc-ds2-customer-stage.s3-us-west-2.amazonaws.com","port":443},{"type":"SNOWSQL_REPO","host":"sfc-repo.snowflakecomputing.com","port":443},{"type":"OUT_OF_BAND_TELEMETRY","host":"client-telemetry.snowflakecomputing.com","port":443},{"type":"OCSP_CACHE","host":"ocsp.snowflakecomputing.com","port":80},{"type":"DUO_SECURITY","host":"api-35a58de5.duosecurity.com","port":443},{"type":"CLIENT_FAILOVER","host":"sfcogsops-snowhouseprimary.snowflakecomputing.com","port":443},{"type":"OCSP_RESPONDER","host":"o.ss2.us","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.r2m02.amazontrust.com","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.sca1b.amazontrust.com","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.rootg2.amazontrust.com","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.rootca1.amazontrust.com","port":80},{"type":"SNOWSIGHT_DEPLOYMENT","host":"app.snowflake.com","port":443},{"type":"SNOWSIGHT_DEPLOYMENT","host":"apps-api.c1.us-west-2.aws.app.snowflake.com","port":443}] + + */ +class SnowflakeEndpoint { + private final String type; + private final String host; + private final int port; + private final boolean isSecure; + + public SnowflakeEndpoint(String type, String host, int port) { + this.type = type; + this.host = host; + this.port = port; + this.isSecure = (this.port == 443); + } + + public String getType() { + return this.type; + } + + public String getHost() { + return this.host; + } + + public boolean isSslEnabled() { + return this.isSecure; + } + + public int getPort() { + return this.port; + } + + public boolean isPrivateLink() { + return 
PrivateLinkDetector.isPrivateLink(host); + } + + @Override + public String toString() { + return this.host + ":" + this.port; + } + + @Override + public boolean equals(Object o) { + boolean isSnowflakeEndpoint = o instanceof SnowflakeEndpoint; + if (!isSnowflakeEndpoint) { + return false; + } + if (!((SnowflakeEndpoint) o).getHost().equals(this.host)) { + return false; + } + if (((SnowflakeEndpoint) o).getPort() != this.port) { + return false; + } + + if (!((SnowflakeEndpoint) o).getType().equals(this.type)) { + return false; + } + + return true; + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java new file mode 100644 index 000000000..c1538de7d --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java @@ -0,0 +1,45 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.Socket; +import java.net.SocketTimeoutException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class TcpDiagnosticCheck extends DiagnosticCheck { + + private static final SFLogger logger = SFLoggerFactory.getLogger(TcpDiagnosticCheck.class); + + TcpDiagnosticCheck(ProxyConfig proxyConfig) { + super("TCP Connection Test", proxyConfig); + } + + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + int connectTimeoutMillis = 60000; + int port = snowflakeEndpoint.getPort(); + Proxy proxy = proxyConf.getProxy(snowflakeEndpoint); + try (Socket socket = new Socket(proxy)) { + socket.bind(null); + logger.info( + "Establishing TCP connection: {} -> {}:{}", + socket.getLocalSocketAddress(), + snowflakeEndpoint.getHost(), + snowflakeEndpoint.getPort()); + socket.connect(new InetSocketAddress(hostname, port), connectTimeoutMillis); + logger.info( + 
"Established a TCP connection successfully: {} -> {}", + socket.getLocalSocketAddress(), + socket.getRemoteSocketAddress()); + } catch (SocketTimeoutException e) { + logger.error( + "Could not establish TCP connection within timeout of " + connectTimeoutMillis + "ms", e); + } catch (IOException e) { + logger.error("Error connecting to host " + hostname + ":" + port, e); + } catch (Exception e) { + logger.error("Unexpected error occurred when connecting to host " + hostname + ":" + port, e); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java b/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java index e92a57524..50efb9234 100644 --- a/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java +++ b/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java @@ -22,6 +22,7 @@ import net.snowflake.client.jdbc.telemetryOOB.TelemetryThreadPool; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.util.Stopwatch; import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; @@ -117,6 +118,11 @@ private TelemetryClient( this.logBatch = new LinkedList<>(); this.isClosed = false; this.forceFlushSize = flushSize; + logger.debug( + "Initializing telemetry client with telemetry url: {}, flush size: {}, auth type: {}", + telemetryUrl, + forceFlushSize, + authType); } /** @@ -131,6 +137,7 @@ public boolean isTelemetryEnabled() { /** Disable any use of the client to add/send metrics */ public void disableTelemetry() { + logger.debug("Disabling telemetry"); this.isTelemetryServiceAvailable = false; } @@ -146,7 +153,7 @@ public static Telemetry createTelemetry(Connection conn, int flushSize) { return createTelemetry( (SFSession) conn.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(), flushSize); } catch (SQLException ex) { - logger.debug("input connection is not 
a SnowflakeConnection", false); + logger.debug("Input connection is not a SnowflakeConnection", false); return null; } } @@ -243,7 +250,9 @@ public void addLogToBatch(TelemetryData log) { this.logBatch.add(log); } - if (this.logBatch.size() >= this.forceFlushSize) { + int logBatchSize = this.logBatch.size(); + if (logBatchSize >= this.forceFlushSize) { + logger.debug("Force flushing telemetry batch of size: {}", logBatchSize); this.sendBatchAsync(); } } @@ -312,7 +321,6 @@ public void postProcess(String queryId, String sqlState, int vendorCode, Throwab * @throws IOException if closed or uploading batch fails */ private boolean sendBatch() throws IOException { - if (isClosed) { throw new IOException("Telemetry connector is closed"); } @@ -331,6 +339,8 @@ private boolean sendBatch() throws IOException { } if (!tmpList.isEmpty()) { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); // session shared with JDBC String payload = logsToString(tmpList); @@ -369,11 +379,16 @@ private boolean sendBatch() throws IOException { this.session.getHttpClientSocketTimeout(), 0, this.session.getHttpClientKey()); + stopwatch.stop(); + logger.debug( + "Sending telemetry took {} ms. 
Batch size: {}", + stopwatch.elapsedMillis(), + tmpList.size()); } catch (SnowflakeSQLException e) { disableTelemetry(); // when got error like 404 or bad request, disable telemetry in this // telemetry instance logger.error( - "Telemetry request failed, " + "response: {}, exception: {}", response, e.getMessage()); + "Telemetry request failed, response: {}, exception: {}", response, e.getMessage()); return false; } } diff --git a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java index 8b1918bc1..ed360789e 100644 --- a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java +++ b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java @@ -11,10 +11,12 @@ import java.util.concurrent.atomic.AtomicInteger; import net.minidev.json.JSONArray; import net.minidev.json.JSONObject; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.SnowflakeConnectString; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; @@ -108,28 +110,38 @@ public void setNumOfRetryToTriggerTelemetry(int num) { public static void enable() { synchronized (enableLock) { + logger.debug("Enabling out-of-band telemetry", false); enabled = true; } } public static void disable() { synchronized (enableLock) { + logger.debug("Disabling out-of-band telemetry", false); enabled = false; } } public static void enableHTAP() { synchronized (enableHTAPLock) { + logger.debug("Enabling out-of-band HTAP telemetry"); htapEnabled = true; } } public static void disableHTAP() { synchronized (enableHTAPLock) { + logger.debug("Disabling out-of-band HTAP 
telemetry"); htapEnabled = false; } } + @SnowflakeJdbcInternalApi + public static void disableOOBTelemetry() { + disable(); + disableHTAP(); + } + public boolean isEnabled() { synchronized (enableLock) { return enabled; @@ -309,6 +321,7 @@ public void setURL(String url) { } public void setDeployment(TELEMETRY_SERVER_DEPLOYMENT deployment) { + logger.debug("Setting out-of-band telemetry sever deployment to {}", deployment); serverDeployment = deployment; } @@ -421,13 +434,13 @@ public void run() { if (!instance.isDeploymentEnabled()) { // skip the disabled deployment - logger.debug("skip the disabled deployment: ", instance.serverDeployment.name); + logger.debug("Skip the disabled deployment: ", instance.serverDeployment.name); return; } if (!instance.serverDeployment.url.matches(TELEMETRY_SERVER_URL_PATTERN)) { // skip the disabled deployment - logger.debug("ignore invalid url: ", instance.serverDeployment.url); + logger.debug("Ignore invalid url: ", instance.serverDeployment.url); return; } @@ -435,7 +448,10 @@ public void run() { } private void uploadPayload() { - logger.debugNoMask("Running telemetry uploader. The payload is: " + payloadLogStr); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + logger.debugNoMask( + "Running out-of-band telemetry uploader. 
The payload is: " + payloadLogStr); CloseableHttpResponse response = null; boolean success = true; @@ -450,13 +466,14 @@ private void uploadPayload() { int statusCode = response.getStatusLine().getStatusCode(); if (statusCode == 200) { - logger.debug("telemetry server request success: {}", response, true); + logger.debug("Out-of-band telemetry server request success: {}", response, true); instance.count(); } else if (statusCode == 429) { - logger.debug("telemetry server request hit server cap on response: {}", response); + logger.debug( + "Out-of-band telemetry server request hit server cap on response: {}", response); instance.serverFailureCnt.incrementAndGet(); } else { - logger.debug("telemetry server request error: {}", response, true); + logger.debug("Out-of-band telemetry server request error: {}", response, true); instance.lastClientError = response.toString(); instance.clientFailureCnt.incrementAndGet(); success = false; @@ -467,7 +484,7 @@ private void uploadPayload() { } catch (Exception e) { // exception from here is always captured logger.debug( - "Telemetry request failed, Exception" + "response: {}, exception: {}", + "Out-of-band telemetry request failed, Exception response: {}, exception: {}", response, e.getMessage()); String res = "null"; @@ -478,7 +495,16 @@ private void uploadPayload() { instance.clientFailureCnt.incrementAndGet(); success = false; } finally { - logger.debug("Telemetry request success={} " + "and clean the current queue", success); + stopwatch.stop(); + logger.debug( + "Out-of-band telemetry request success: {} and clean the current queue. It took {} ms." 
+ + " Total successful events: {}, total unsuccessful events: {} (client failures: {}, server failures: {})", + success, + stopwatch.elapsedMillis(), + instance.eventCnt, + instance.clientFailureCnt.get() + instance.serverFailureCnt.get(), + instance.clientFailureCnt, + instance.serverFailureCnt); } } } diff --git a/src/main/java/net/snowflake/client/loader/BufferStage.java b/src/main/java/net/snowflake/client/loader/BufferStage.java index 6f70ec050..d7690f532 100644 --- a/src/main/java/net/snowflake/client/loader/BufferStage.java +++ b/src/main/java/net/snowflake/client/loader/BufferStage.java @@ -24,7 +24,7 @@ * single processing stage. */ public class BufferStage { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(BufferStage.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(BufferStage.class); public enum State { CREATED, @@ -87,7 +87,7 @@ public enum State { private ArrayList _uploaders = new ArrayList<>(); BufferStage(StreamLoader loader, Operation op, long csvFileBucketSize, long csvFileSize) { - LOGGER.debug("Operation: {}", op); + logger.debug("Operation: {}", op); _state = State.CREATED; _loader = loader; @@ -145,7 +145,7 @@ private synchronized void openFile() { if (_loader._compressDataBeforePut) { fName += StreamLoader.FILE_SUFFIX; } - LOGGER.debug("openFile: {}", fName); + logger.debug("openFile: {}", fName); OutputStream fileStream = new FileOutputStream(fName); if (_loader._compressDataBeforePut) { @@ -173,7 +173,7 @@ private synchronized void openFile() { // not thread safe boolean stageData(final byte[] line) throws IOException { if (this._rowCount % 10000 == 0) { - LOGGER.debug("rowCount: {}, currentSize: {}", this._rowCount, _currentSize); + logger.debug("rowCount: {}, currentSize: {}", this._rowCount, _currentSize); } _outstream.write(line); _currentSize += line.length; @@ -191,7 +191,7 @@ boolean stageData(final byte[] line) throws IOException { } if (_currentSize >= this._csvFileSize) { - 
LOGGER.debug( + logger.debug( "name: {}, currentSize: {}, Threshold: {}," + " fileCount: {}, fileBucketSize: {}", _file.getAbsolutePath(), _currentSize, @@ -217,7 +217,7 @@ boolean stageData(final byte[] line) throws IOException { * @throws IOException raises an exception if IO error occurs */ void completeUploading() throws IOException { - LOGGER.debug( + logger.debug( "name: {}, currentSize: {}, Threshold: {}," + " fileCount: {}, fileBucketSize: {}", _file.getAbsolutePath(), _currentSize, diff --git a/src/main/java/net/snowflake/client/loader/FileUploader.java b/src/main/java/net/snowflake/client/loader/FileUploader.java index 3862bd0e5..05b6c84f5 100644 --- a/src/main/java/net/snowflake/client/loader/FileUploader.java +++ b/src/main/java/net/snowflake/client/loader/FileUploader.java @@ -14,7 +14,7 @@ /** Class responsible for uploading a single data file. */ public class FileUploader implements Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(PutQueue.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(PutQueue.class); private static final int RETRY = 6; private final Thread _thread; @@ -23,7 +23,7 @@ public class FileUploader implements Runnable { private final File _file; FileUploader(StreamLoader loader, String stage, File file) { - LOGGER.debug("", false); + logger.trace("Creating new FileUploader", false); _loader = loader; _thread = new Thread(this); _thread.setName("FileUploaderThread"); @@ -33,7 +33,7 @@ public class FileUploader implements Runnable { public synchronized void upload() { // throttle up will wait if too many files are uploading - LOGGER.debug("", false); + logger.trace("Creating new FileUploader", false); _loader.throttleUp(); _thread.start(); } @@ -66,7 +66,7 @@ public void run() { } if (attempt > 0) { - LOGGER.debug("Will retry PUT after {} seconds", Math.pow(2, attempt)); + logger.debug("Will retry PUT after {} seconds", Math.pow(2, attempt)); Thread.sleep(1000 * ((int) Math.pow(2, 
attempt))); } @@ -114,9 +114,9 @@ public void run() { Statement statement = _loader.getPutConnection().createStatement(); try { - LOGGER.debug("Put Statement start: {}", putStatement); + logger.debug("Put Statement start: {}", putStatement); statement.execute(putStatement); - LOGGER.debug("Put Statement end: {}", putStatement); + logger.debug("Put Statement end: {}", putStatement); ResultSet putResult = statement.getResultSet(); putResult.next(); @@ -137,13 +137,13 @@ public void run() { } else { // The log level should be WARNING for a single upload failure. if (message.startsWith("Simulated upload failure")) { - LOGGER.debug( + logger.debug( "Failed to upload a file:" + " status={}," + " filename={}," + " message={}", status, file, message); } else { - LOGGER.debug( + logger.debug( "Failed to upload a file:" + " status={}," + " filename={}," + " message={}", status, file, @@ -152,7 +152,7 @@ public void run() { } } catch (Throwable t) { // The log level for unknown error is set to SEVERE - LOGGER.error( + logger.error( String.format( "Failed to PUT on attempt: attempt=[%s], " + "Message=[%s]", attempt, t.getMessage()), @@ -161,7 +161,7 @@ public void run() { } } } catch (Throwable t) { - LOGGER.error("PUT exception", t); + logger.error("PUT exception", t); _loader.abort(new Loader.ConnectionError(t.getMessage(), t.getCause())); } finally { _loader.throttleDown(); @@ -169,11 +169,11 @@ public void run() { } public void join() { - LOGGER.debug("", false); + logger.trace("Joining threads", false); try { _thread.join(0); } catch (InterruptedException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); } } diff --git a/src/main/java/net/snowflake/client/loader/LoaderFactory.java b/src/main/java/net/snowflake/client/loader/LoaderFactory.java index 2d0dc8e99..1bd5ca1b0 100644 --- a/src/main/java/net/snowflake/client/loader/LoaderFactory.java +++ b/src/main/java/net/snowflake/client/loader/LoaderFactory.java @@ -10,13 +10,13 @@ import 
net.snowflake.client.log.SFLoggerFactory; public class LoaderFactory { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(LoaderFactory.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(LoaderFactory.class); public static Loader createLoader( Map properties, Connection uploadConnection, Connection processingConnection) { - LOGGER.debug("", false); + logger.debug("", false); StreamLoader loader = new StreamLoader(properties, uploadConnection, processingConnection); return loader; } diff --git a/src/main/java/net/snowflake/client/loader/LoadingError.java b/src/main/java/net/snowflake/client/loader/LoadingError.java index 9593458dc..e04a1ba29 100644 --- a/src/main/java/net/snowflake/client/loader/LoadingError.java +++ b/src/main/java/net/snowflake/client/loader/LoadingError.java @@ -13,7 +13,7 @@ /** Wrapper for data format errors returned by the COPY/validate command */ public class LoadingError { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(LoadingError.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(LoadingError.class); public enum ErrorProperty { ERROR, @@ -65,7 +65,7 @@ public LoadingError(ResultSet rs, BufferStage bs, StreamLoader loader) { try { _properties.put(p, rs.getString(p.name())); } catch (SQLException ex) { - LOGGER.error("Exception", ex); + logger.error("Exception", ex); } } } diff --git a/src/main/java/net/snowflake/client/loader/ProcessQueue.java b/src/main/java/net/snowflake/client/loader/ProcessQueue.java index 6a20184aa..4a4114661 100644 --- a/src/main/java/net/snowflake/client/loader/ProcessQueue.java +++ b/src/main/java/net/snowflake/client/loader/ProcessQueue.java @@ -20,14 +20,14 @@ * BufferStage class */ public class ProcessQueue implements Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(ProcessQueue.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(ProcessQueue.class); private final Thread _thread; 
private final StreamLoader _loader; public ProcessQueue(StreamLoader loader) { - LOGGER.debug("", false); + logger.debug("", false); _loader = loader; _thread = new Thread(this); @@ -73,10 +73,10 @@ public void run() { if (_loader.isAborted()) { if (!_loader._preserveStageFile) { currentCommand = "RM '" + remoteStage + "'"; - LOGGER.debug(currentCommand, true); + logger.debug(currentCommand, true); conn.createStatement().execute(currentCommand); } else { - LOGGER.debug( + logger.debug( "Error occurred. The remote stage is preserved for " + "further investigation: {}", remoteStage); @@ -97,7 +97,7 @@ public void run() { String lastErrorRow = ""; // Create temp table to load data (may have a subset of columns) - LOGGER.debug("Creating Temporary Table: name={}", stage.getId()); + logger.debug("Creating Temporary Table: name={}", stage.getId()); currentState = State.CREATE_TEMP_TABLE; List allColumns = getAllColumns(conn); @@ -124,7 +124,7 @@ public void run() { } // Load data there - LOGGER.debug( + logger.debug( "COPY data in the stage to table:" + " stage={}," + " name={}", remoteStage, stage.getId()); @@ -152,7 +152,7 @@ public void run() { } int errorRecordCount = toIntExact(parsed - loaded); - LOGGER.debug( + logger.debug( "errorRecordCount=[{}]," + " parsed=[{}]," + " loaded=[{}]", errorRecordCount, parsed, @@ -163,13 +163,13 @@ public void run() { if (loaded == stage.getRowCount()) { // successfully loaded everything - LOGGER.debug( + logger.debug( "COPY command successfully finished:" + " stage={}," + " name={}", remoteStage, stage.getId()); listener.addErrorCount(0); } else { - LOGGER.debug( + logger.debug( "Found errors in COPY command:" + " stage={}," + " name={}", remoteStage, stage.getId()); @@ -204,7 +204,7 @@ public void run() { dataError = loadError.getException(); } } - LOGGER.debug("errorCount: {}", errorCount); + logger.debug("errorCount: {}", errorCount); listener.addErrorCount(errorCount); if (listener.throwOnError()) { @@ -212,10 +212,10 @@ 
public void run() { _loader.abort(dataError); if (!_loader._preserveStageFile) { - LOGGER.debug("RM: {}", remoteStage); + logger.debug("RM: {}", remoteStage); conn.createStatement().execute("RM '" + remoteStage + "'"); } else { - LOGGER.error( + logger.error( "Error occurred. The remote stage is preserved for " + "further investigation: {}", remoteStage); @@ -320,7 +320,7 @@ public void run() { } currentCommand = loadStatement; - LOGGER.debug("Load Statement: {}", loadStatement); + logger.debug("Load Statement: {}", loadStatement); Statement s = conn.createStatement(); s.execute(loadStatement); @@ -357,13 +357,13 @@ public void run() { } } } catch (InterruptedException ex) { - LOGGER.error("Interrupted", ex); + logger.error("Interrupted", ex); break; } catch (Exception ex) { String msg = String.format("State: %s, %s, %s", currentState, currentCommand, ex.getMessage()); _loader.abort(new Loader.ConnectionError(msg, Utils.getCause(ex))); - LOGGER.error(msg, true); + logger.error(msg, true); if (stage == null || stage.isTerminate()) { break; } @@ -406,11 +406,11 @@ private String getOn(List keys, String L, String R) { } public void join() { - LOGGER.debug("", false); + logger.trace("Joining threads", false); try { _thread.join(0); } catch (InterruptedException ex) { - LOGGER.debug("Exception: ", ex); + logger.debug("Exception: ", ex); } } diff --git a/src/main/java/net/snowflake/client/loader/PutQueue.java b/src/main/java/net/snowflake/client/loader/PutQueue.java index 69f2b08e7..d11067016 100644 --- a/src/main/java/net/snowflake/client/loader/PutQueue.java +++ b/src/main/java/net/snowflake/client/loader/PutQueue.java @@ -13,14 +13,14 @@ * ProcessQueue. 
*/ public class PutQueue implements Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(PutQueue.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(PutQueue.class); private final Thread _thread; private final StreamLoader _loader; public PutQueue(StreamLoader loader) { - LOGGER.debug("", false); + logger.trace("Creating new PutQueue", false); _loader = loader; _thread = new Thread(this); _thread.setName("PutQueueThread"); @@ -60,7 +60,7 @@ public void run() { } } catch (InterruptedException | IOException ex) { - LOGGER.error("Exception: ", ex); + logger.error("Exception: ", ex); break; } finally { @@ -72,7 +72,7 @@ public void join() { try { _thread.join(0); } catch (InterruptedException ex) { - LOGGER.error("Exception: ", ex); + logger.error("Exception: ", ex); } } } diff --git a/src/main/java/net/snowflake/client/loader/StreamLoader.java b/src/main/java/net/snowflake/client/loader/StreamLoader.java index af96a8763..d680b0363 100644 --- a/src/main/java/net/snowflake/client/loader/StreamLoader.java +++ b/src/main/java/net/snowflake/client/loader/StreamLoader.java @@ -30,7 +30,7 @@ /** Stream Loader */ public class StreamLoader implements Loader, Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(StreamLoader.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(StreamLoader.class); private static final String SYSTEM_PARAMETER_PREFIX = "net.snowflake.client.loader."; @@ -333,7 +333,7 @@ private void initDateFormats() { /** Starts the loader */ @Override public void start() { - LOGGER.debug("Start Loading", false); + logger.debug("Start Loading", false); // validate parameters validateParameters(); @@ -351,10 +351,10 @@ public void start() { try { if (_startTransaction) { - LOGGER.debug("Begin Transaction", false); + logger.debug("Begin Transaction", false); _processConn.createStatement().execute("begin transaction"); } else { - LOGGER.debug("No Transaction started", 
false); + logger.debug("No Transaction started", false); } } catch (SQLException ex) { abort(new Loader.ConnectionError("Failed to start Transaction", Utils.getCause(ex))); @@ -366,7 +366,7 @@ public void start() { try { if (_before != null) { - LOGGER.debug("Running Execute Before SQL", false); + logger.debug("Running Execute Before SQL", false); _processConn.createStatement().execute(_before); } } catch (SQLException ex) { @@ -379,14 +379,14 @@ public void start() { } private void validateParameters() { - LOGGER.debug("Validate Parameters", false); + logger.debug("Validate Parameters", false); if (Operation.INSERT != this._op) { if (this._keys == null || this._keys.isEmpty()) { throw new ConnectionError("Updating operations require keys"); } } setPropertyBySystemProperty(); - LOGGER.debug( + logger.debug( "Database Name: {}, Schema Name: {}, Table Name: {}, " + "Remote Stage: {}, Columns: {}, Keys: {}, Operation: {}, " + "Start Transaction: {}, OneBatch: {}, Truncate Table: {}, " @@ -427,7 +427,7 @@ String getNoise() { public void abort(RuntimeException t) { synchronized (this) { // Abort once, keep first error. - LOGGER.debug("Exception received. Aborting...", t); + logger.debug("Exception received. Aborting...", t); if (_aborted.getAndSet(true)) { return; @@ -451,14 +451,14 @@ boolean isAborted() { @Override public void rollback() { - LOGGER.debug("Rollback", false); + logger.debug("Rollback", false); try { terminate(); - LOGGER.debug("Rollback", false); + logger.debug("Rollback", false); this._processConn.createStatement().execute("rollback"); } catch (SQLException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); } } @@ -478,7 +478,7 @@ public void submitRow(final Object[] row) { byte[] data = null; try { if (!_active.get()) { - LOGGER.debug("Inactive loader. Row ignored", false); + logger.debug("Inactive loader. 
Row ignored", false); return; } @@ -502,7 +502,7 @@ public void submitRow(final Object[] row) { if (_batchRowSize > 0 && _listener.getSubmittedRowCount() > 0 && (_listener.getSubmittedRowCount() % _batchRowSize) == 0) { - LOGGER.debug( + logger.debug( "Flushing Queue: Submitted Row Count: {}, Batch Row Size: {}", _listener.getSubmittedRowCount(), _batchRowSize); @@ -522,7 +522,7 @@ public void submitRow(final Object[] row) { /** Initializes queues */ private void initQueues() { - LOGGER.debug("Init Queues", false); + logger.debug("Init Queues", false); if (_active.getAndSet(true)) { // NOP if the loader is already active return; @@ -546,7 +546,7 @@ private void initQueues() { /** Flushes data by joining PUT and PROCESS queues */ private void flushQueues() { // Terminate data loading thread. - LOGGER.debug("Flush Queues", false); + logger.debug("Flush Queues", false); try { _queueData.put(new byte[0]); _thread.join(10000); @@ -556,7 +556,7 @@ private void flushQueues() { } } catch (Exception ex) { String msg = "Failed to join StreamLoader queue: " + ex.getMessage(); - LOGGER.error(msg, ex); + logger.error(msg, ex); throw new DataError(msg, Utils.getCause(ex)); } // Put last stage on queue @@ -597,7 +597,7 @@ private void truncateTargetTable() { // TODO: could be replaced with TRUNCATE? 
_processConn.createStatement().execute("DELETE FROM " + this.getFullTableName()); } catch (SQLException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); abort(new Loader.ConnectionError(Utils.getCause(ex))); } } @@ -615,7 +615,7 @@ public void run() { this.writeBytes(data); } } catch (Exception ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); abort(new Loader.ConnectionError(Utils.getCause(ex))); } } @@ -642,25 +642,25 @@ private byte[] createCSVRecord(final Object[] data) { */ @Override public void finish() throws Exception { - LOGGER.debug("Finish Loading", false); + logger.debug("Finish Loading", false); flushQueues(); if (_is_last_finish_call) { try { if (_after != null) { - LOGGER.debug("Running Execute After SQL", false); + logger.debug("Running Execute After SQL", false); _processConn.createStatement().execute(_after); } // Loader successfully completed. Commit and return. _processConn.createStatement().execute("commit"); - LOGGER.debug("Committed", false); + logger.debug("Committed", false); } catch (SQLException ex) { try { _processConn.createStatement().execute("rollback"); } catch (SQLException ex0) { - LOGGER.debug("Failed to rollback", false); + logger.debug("Failed to rollback", false); } - LOGGER.debug(String.format("Execute After SQL failed to run: %s", _after), ex); + logger.debug(String.format("Execute After SQL failed to run: %s", _after), ex); throw new Loader.ConnectionError(Utils.getCause(ex)); } } @@ -668,19 +668,19 @@ public void finish() throws Exception { @Override public void close() { - LOGGER.debug("Close Loader", false); + logger.debug("Close Loader", false); try { this._processConn.close(); this._putConn.close(); } catch (SQLException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); throw new ConnectionError(Utils.getCause(ex)); } } /** Set active to false (no-op if not active), add a stage with terminate flag onto the queue */ private void 
terminate() { - LOGGER.debug("Terminate Loader", false); + logger.debug("Terminate Loader", false); boolean active = _active.getAndSet(false); @@ -697,10 +697,10 @@ private void terminate() { try { queuePut(_stage); } catch (InterruptedException ex) { - LOGGER.error("Unknown Error", ex); + logger.error("Unknown Error", ex); } - LOGGER.debug("Snowflake loader terminating", false); + logger.debug("Snowflake loader terminating", false); } // If operation changes, existing stage needs to be scheduled for processing. @@ -712,14 +712,14 @@ public void resetOperation(Operation op) { return; } - LOGGER.debug("Operation is changing from {} to {}", _op, op); + logger.debug("Operation is changing from {} to {}", _op, op); _op = op; if (_stage != null) { try { queuePut(_stage); } catch (InterruptedException ex) { - LOGGER.error(_stage.getId(), ex); + logger.error(_stage.getId(), ex); } } @@ -802,27 +802,27 @@ BufferStage takeProcess() throws InterruptedException { void throttleUp() { int open = this._throttleCounter.incrementAndGet(); - LOGGER.debug("PUT Throttle Up: {}", open); + logger.debug("PUT Throttle Up: {}", open); if (open > 8) { - LOGGER.debug( + logger.debug( "Will retry scheduling file for upload after {} seconds", (Math.pow(2, open - 7))); try { Thread.sleep(1000 * ((int) Math.pow(2, open - 7))); } catch (InterruptedException ex) { - LOGGER.error("Exception occurs while waiting", ex); + logger.error("Exception occurs while waiting", ex); } } } void throttleDown() { int throttleLevel = this._throttleCounter.decrementAndGet(); - LOGGER.debug("PUT Throttle Down: {}", throttleLevel); + logger.debug("PUT Throttle Down: {}", throttleLevel); if (throttleLevel < 0) { - LOGGER.debug("Unbalanced throttle", false); + logger.debug("Unbalanced throttle", false); _throttleCounter.set(0); } - LOGGER.debug("Connector throttle {}", throttleLevel); + logger.debug("Connector throttle {}", throttleLevel); } private LoadResultListener _listener = diff --git 
a/src/main/java/net/snowflake/client/log/SFLoggerUtil.java b/src/main/java/net/snowflake/client/log/SFLoggerUtil.java index da42e0a0c..568802f3b 100644 --- a/src/main/java/net/snowflake/client/log/SFLoggerUtil.java +++ b/src/main/java/net/snowflake/client/log/SFLoggerUtil.java @@ -5,9 +5,14 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import com.google.common.base.Strings; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import org.apache.commons.logging.LogFactory; public class SFLoggerUtil { + private static final String NOT_PROVIDED_LOG = "not provided"; + private static final String PROVIDED_LOG = "provided"; + public static void initializeSnowflakeLogger() { String logger = systemGetProperty("net.snowflake.jdbc.loggerImpl"); SFLoggerFactory.LoggerImpl loggerImplementation = SFLoggerFactory.LoggerImpl.fromString(logger); @@ -29,4 +34,12 @@ public static void initializeSnowflakeLogger() { "org.apache.commons.logging.Log", "net.snowflake.client.log.JDK14JCLWrapper"); } } + + @SnowflakeJdbcInternalApi + public static String isVariableProvided(T variable) { + if (variable instanceof String) { + return (Strings.isNullOrEmpty((String) variable)) ? NOT_PROVIDED_LOG : PROVIDED_LOG; + } + return variable == null ? 
NOT_PROVIDED_LOG : PROVIDED_LOG; + } } diff --git a/src/main/java/net/snowflake/client/pooling/LogicalConnection.java b/src/main/java/net/snowflake/client/pooling/LogicalConnection.java index 76c2b328c..623f9bcf6 100644 --- a/src/main/java/net/snowflake/client/pooling/LogicalConnection.java +++ b/src/main/java/net/snowflake/client/pooling/LogicalConnection.java @@ -21,13 +21,18 @@ import java.util.Properties; import java.util.concurrent.Executor; import net.snowflake.client.jdbc.ErrorCode; +import net.snowflake.client.jdbc.SnowflakeConnectionV1; import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; /** * Logical connection is wrapper class on top of SnowflakeConnectionV1 Every method call will be * delegated to SnowflakeConnectionV1 except for close method */ class LogicalConnection implements Connection { + private static final SFLogger logger = SFLoggerFactory.getLogger(LogicalConnection.class); + /** physical connection to snowflake, instance SnowflakeConnectionV1 */ private final Connection physicalConnection; @@ -148,6 +153,8 @@ public void close() throws SQLException { if (isClosed) { return; } + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug("Closing logical connection with session id: {}", sfConnection.getSessionID()); pooledConnection.fireConnectionCloseEvent(); isClosed = true; } diff --git a/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java b/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java index 345274516..4d053c5e5 100644 --- a/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java +++ b/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java @@ -11,22 +11,35 @@ import javax.sql.ConnectionEventListener; import javax.sql.PooledConnection; import javax.sql.StatementEventListener; +import 
net.snowflake.client.jdbc.SnowflakeConnectionV1; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; /** Snowflake implementation of pooled connection */ public class SnowflakePooledConnection implements PooledConnection { + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakePooledConnection.class); + /** physical connection, an instance of SnowflakeConnectionV1 class */ private Connection physicalConnection; /** list of event listener registered to listen for connection event */ private final Set eventListeners; - SnowflakePooledConnection(Connection physicalConnection) { + SnowflakePooledConnection(Connection physicalConnection) throws SQLException { this.physicalConnection = physicalConnection; + + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug("Creating new pooled connection with session id: {}", sfConnection.getSessionID()); + this.eventListeners = new HashSet<>(); } @Override public Connection getConnection() throws SQLException { + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug( + "Creating new Logical Connection based on pooled connection with session id: {}", + sfConnection.getSessionID()); return new LogicalConnection(this); } @@ -55,6 +68,8 @@ public void addConnectionEventListener(ConnectionEventListener eventListener) { @Override public void close() throws SQLException { if (this.physicalConnection != null) { + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug("Closing pooled connection with session id: {}", sfConnection.getSessionID()); this.physicalConnection.close(); this.physicalConnection = null; } diff --git a/src/main/java/net/snowflake/client/util/SecretDetector.java b/src/main/java/net/snowflake/client/util/SecretDetector.java index 0d43ec725..454d7b7be 100644 --- 
a/src/main/java/net/snowflake/client/util/SecretDetector.java +++ b/src/main/java/net/snowflake/client/util/SecretDetector.java @@ -19,8 +19,6 @@ import net.minidev.json.JSONArray; import net.minidev.json.JSONObject; import net.minidev.json.JSONStyle; -import net.snowflake.client.log.SFLogger; -import net.snowflake.client.log.SFLoggerFactory; /** Search for credentials in sql and/or other text */ public class SecretDetector { @@ -72,13 +70,9 @@ public class SecretDetector { "(token|assertion content)" + "(['\"\\s:=]+)" + "([a-z0-9=/_\\-+]{8,})", Pattern.CASE_INSENSITIVE); - private static final int LOOK_AHEAD = 10; - // only attempt to find secrets in its leading 100Kb SNOW-30961 private static final int MAX_LENGTH = 100 * 1000; - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(SecretDetector.class); - private static String[] SENSITIVE_NAMES = { "access_key_id", "accesstoken", diff --git a/src/main/java/net/snowflake/client/util/Stopwatch.java b/src/main/java/net/snowflake/client/util/Stopwatch.java new file mode 100644 index 000000000..d891d51d9 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/Stopwatch.java @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +/** Stopwatch class used to calculate the time between start and stop. */ +@SnowflakeJdbcInternalApi +public class Stopwatch { + private boolean isStarted = false; + private long startTime; + private long elapsedTime; + + /** + * Starts the Stopwatch. + * + * @throws IllegalStateException when Stopwatch is already running. + */ + public void start() { + if (isStarted) { + throw new IllegalStateException("Stopwatch is already running"); + } + + isStarted = true; + startTime = System.nanoTime(); + } + + /** + * Stops the Stopwatch. + * + * @throws IllegalStateException when Stopwatch was not yet started or is already stopped. 
+ */ + public void stop() { + if (!isStarted) { + if (startTime == 0) { + throw new IllegalStateException("Stopwatch has not been started"); + } + throw new IllegalStateException("Stopwatch is already stopped"); + } + + isStarted = false; + elapsedTime = System.nanoTime() - startTime; + } + + /** Resets the instance to it's initial state. */ + public void reset() { + isStarted = false; + startTime = 0; + elapsedTime = 0; + } + + /** Restarts the instance. */ + public void restart() { + isStarted = true; + startTime = System.nanoTime(); + elapsedTime = 0; + } + + /** + * Get the elapsed time (in ms) between the stopTime and startTime. + * + * @return elapsed milliseconds between stopTime and startTime + * @throws IllegalStateException when Stopwatch has not been started yet + */ + public long elapsedMillis() { + return elapsedNanos() / 1_000_000; + } + + /** + * Get the elapsed time (in nanoseconds) between the stopTime and startTime. + * + * @return elapsed nanoseconds between stopTime and startTime + * @throws IllegalStateException when Stopwatch has not been started yet + */ + public long elapsedNanos() { + if (isStarted) { + return (System.nanoTime() - startTime); + } + if (startTime == 0) { + throw new IllegalStateException("Stopwatch has not been started"); + } + return elapsedTime; + } + + /** + * Get the instance status. 
+ * + * @return true if the stopwatch is running, false otherwise + */ + public boolean isStarted() { + return isStarted; + } +} diff --git a/src/main/java/net/snowflake/client/util/ThrowingBiCallable.java b/src/main/java/net/snowflake/client/util/ThrowingBiCallable.java new file mode 100644 index 000000000..e81020195 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/ThrowingBiCallable.java @@ -0,0 +1,9 @@ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface ThrowingBiCallable { + void apply(A a, B b) throws T; +} diff --git a/src/main/java/net/snowflake/client/util/ThrowingTriCallable.java b/src/main/java/net/snowflake/client/util/ThrowingTriCallable.java new file mode 100644 index 000000000..a21ce7a79 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/ThrowingTriCallable.java @@ -0,0 +1,9 @@ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface ThrowingTriCallable { + void apply(A a, B b, C c) throws T; +} diff --git a/src/main/java/net/snowflake/client/util/TimeMeasurement.java b/src/main/java/net/snowflake/client/util/TimeMeasurement.java new file mode 100644 index 000000000..390294236 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/TimeMeasurement.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeUtil; + +/** Class keeping the start and stop time in epoch microseconds. */ +@SnowflakeJdbcInternalApi +public class TimeMeasurement { + private long start; + private long end; + + /** Get the start time as epoch time in microseconds. 
*/ + public long getStart() { + return start; + } + + /** Set the start time as current epoch time in microseconds. */ + public void setStart() { + this.start = SnowflakeUtil.getEpochTimeInMicroSeconds(); + } + + /** Get the stop time as epoch time in microseconds. */ + public long getEnd() { + return end; + } + + /** Set the stop time as current epoch time in microseconds. */ + public void setEnd() { + this.end = SnowflakeUtil.getEpochTimeInMicroSeconds(); + } + + /** + * Get the microseconds between the stop and start time. + * + * @return difference between stop and start in microseconds. If one of the variables is not + * initialized, it returns -1 + */ + public long getTime() { + if (start == 0 || end == 0) { + return -1; + } + + return end - start; + } +} diff --git a/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties b/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties index 5d9973bb1..3b68fbf69 100644 --- a/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties +++ b/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties @@ -80,5 +80,6 @@ Error message={3}, Extended error info={4} 200058=Value is too large to be stored as integer at batch index {0}. Use executeLargeBatch() instead. 200059=Invalid Connect String: {0}. 200061=GCS operation failed: Operation={0}, Error code={1}, Message={2}, Reason={3} +200062=Authentication timed out. 200063=Invalid data - Cannot be parsed and converted to structured type. 
diff --git a/src/test/java/net/snowflake/client/AbstractDriverIT.java b/src/test/java/net/snowflake/client/AbstractDriverIT.java index b44cc31ef..4a3acea23 100644 --- a/src/test/java/net/snowflake/client/AbstractDriverIT.java +++ b/src/test/java/net/snowflake/client/AbstractDriverIT.java @@ -6,7 +6,9 @@ import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; +import java.net.URISyntaxException; import java.net.URL; +import java.nio.file.Paths; import java.sql.Connection; import java.sql.Date; import java.sql.DriverManager; @@ -385,7 +387,11 @@ public static String getFullPathFileInResource(String fileName) { ClassLoader classLoader = AbstractDriverIT.class.getClassLoader(); URL url = classLoader.getResource(fileName); if (url != null) { - return url.getFile(); + try { + return Paths.get(url.toURI()).toAbsolutePath().toString(); + } catch (URISyntaxException ex) { + throw new RuntimeException("Unable to get absolute path: " + fileName); + } } else { throw new RuntimeException("No file is found: " + fileName); } diff --git a/src/test/java/net/snowflake/client/RunningNotOnAWS.java b/src/test/java/net/snowflake/client/RunningNotOnAWS.java new file mode 100644 index 000000000..70f54ab8f --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnAWS.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnAWS implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("AWS"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAzure.java b/src/test/java/net/snowflake/client/RunningNotOnAzure.java new file mode 100644 index 000000000..e2a00966c --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnAzure.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnAzure implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("Azure"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGCP.java b/src/test/java/net/snowflake/client/RunningNotOnGCP.java new file mode 100644 index 000000000..7a5c7aafb --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnGCP.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnGCP implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("GCP"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java new file mode 100644 index 000000000..a99eaa3b7 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java @@ -0,0 +1,13 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningNotOnLinuxMac implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; + } + + public static boolean isNotRunningOnLinuxMac() { + return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnWin.java b/src/test/java/net/snowflake/client/RunningNotOnWin.java new file mode 100644 index 000000000..ce5cdf7d1 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnWin.java @@ -0,0 +1,9 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningNotOnWin implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + return Constants.getOS() != Constants.OS.WINDOWS; + } +} diff --git a/src/test/java/net/snowflake/client/RunningOnWin.java b/src/test/java/net/snowflake/client/RunningOnWin.java new file mode 100644 index 000000000..025ab1e04 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningOnWin.java @@ -0,0 +1,9 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningOnWin implements 
ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + return Constants.getOS() == Constants.OS.WINDOWS; + } +} diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index 1f782ec1f..76487bcb4 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -5,9 +5,12 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.List; @@ -66,13 +69,14 @@ public interface TestRunInterface { * System.getenv wrapper. If System.getenv raises an SecurityException, it is ignored and returns * null. * - *

This is replicated from SnowflakeUtil.systemGetEnv, because the old driver doesn't have that - * function for the tests to use it. Replace this function call with SnowflakeUtil.systemGetEnv - * when it is available. - * + * @deprecated This method should be replaced by SnowflakeUtil.systemGetEnv. + *

This is replicated from SnowflakeUtil.systemGetEnv, because the old driver doesn't have + * that function for the tests to use it. Replace this function call with + * SnowflakeUtil.systemGetEnv when it is available. * @param env the environment variable name. * @return the environment variable value if set, otherwise null. */ + @Deprecated public static String systemGetEnv(String env) { try { return System.getenv(env); @@ -127,4 +131,17 @@ public static void withRandomSchema( statement.execute("DROP SCHEMA " + customSchema); } } + + public interface MethodRaisesSQLException { + void run() throws SQLException; + } + + public static void expectSnowflakeLoggedFeatureNotSupportedException(MethodRaisesSQLException f) { + try { + f.run(); + fail("must raise exception"); + } catch (SQLException ex) { + assertEquals(ex.getClass().getSimpleName(), "SnowflakeLoggedFeatureNotSupportedException"); + } + } } diff --git a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java new file mode 100644 index 000000000..ecb5c0509 --- /dev/null +++ b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java @@ -0,0 +1,3 @@ +package net.snowflake.client.category; + +public interface TestCategoryDiagnostic {} diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index 225fff203..a00784f68 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -8,6 +8,7 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemSetEnv; import static net.snowflake.client.jdbc.SnowflakeUtil.systemUnsetEnv; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static 
org.mockito.Mockito.mockStatic; @@ -17,128 +18,127 @@ import java.nio.file.Path; import java.nio.file.Paths; import net.snowflake.client.jdbc.SnowflakeUtil; +import org.junit.After; import org.junit.Test; import org.mockito.MockedStatic; public class SFClientConfigParserTest { private static final String CONFIG_JSON = "{\"common\":{\"log_level\":\"info\",\"log_path\":\"/jdbc.log\"}}"; + private static final String CONFIG_JSON_WITH_UNKNOWN_PROPS = + "{\"common\":{\"log_level\":\"info\",\"log_path\":\"/jdbc.log\",\"unknown_inside\":\"/unknown\"},\"unknown_outside\":\"/unknown\"}"; - @Test - public void testloadSFClientConfigValidPath() { - Path configFilePath = Paths.get("config.json"); - try { - Files.write(configFilePath, CONFIG_JSON.getBytes()); - SFClientConfig actualConfig = - SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); - assertEquals("info", actualConfig.getCommonProps().getLogLevel()); - assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + private Path configFilePath; - Files.delete(configFilePath); - } catch (IOException e) { - fail("testloadSFClientConfigValidPath failed"); + @After + public void cleanup() throws IOException { + if (configFilePath != null) { + Files.deleteIfExists(configFilePath); } + + systemUnsetEnv(SF_CLIENT_CONFIG_ENV_NAME); + } + + @Test + public void testLoadSFClientConfigValidPath() throws IOException { + configFilePath = Paths.get("config.json"); + Files.write(configFilePath, CONFIG_JSON.getBytes()); + SFClientConfig actualConfig = + SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + assertEquals("config.json", actualConfig.getConfigFilePath()); } @Test - public void testloadSFClientConfigInValidPath() { + public void testLoadSFClientConfigValidPathWithUnknownProperties() throws IOException { + configFilePath = 
Paths.get("config.json"); + Files.write(configFilePath, CONFIG_JSON_WITH_UNKNOWN_PROPS.getBytes()); + SFClientConfig actualConfig = + SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + } + + @Test + public void testLoadSFClientConfigInValidPath() { String configFilePath = "InvalidPath"; SFClientConfig config = null; try { - SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); - fail("testloadSFClientConfigInValidPath"); // this will not be reached! + SFClientConfigParser.loadSFClientConfig(configFilePath); + fail("testLoadSFClientConfigInValidPath"); // this will not be reached! } catch (IOException e) { // do nothing } } @Test - public void testloadSFClientConfigInValidJson() { + public void testLoadSFClientConfigInValidJson() { try { String invalidJson = "invalidJson"; - Path configFilePath = Paths.get("config.json"); + configFilePath = Paths.get("config.json"); Files.write(configFilePath, invalidJson.getBytes()); SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); - fail("testloadSFClientConfigInValidJson"); + fail("testLoadSFClientConfigInValidJson"); } catch (IOException e) { // DO Nothing } } @Test - public void testloadSFClientConfigWithEnvVar() { - Path configFilePath = Paths.get("config.json"); - - try { - Files.write(configFilePath, CONFIG_JSON.getBytes()); - systemSetEnv(SF_CLIENT_CONFIG_ENV_NAME, "config.json"); - SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); - assertEquals("info", actualConfig.getCommonProps().getLogLevel()); - assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); - - Files.delete(configFilePath); - systemUnsetEnv(SF_CLIENT_CONFIG_ENV_NAME); - } catch (IOException e) { - fail("testloadSFClientConfigWithEnvVar failed"); - } + public void testLoadSFClientConfigWithEnvVar() throws IOException { + 
configFilePath = Paths.get("config.json"); + Files.write(configFilePath, CONFIG_JSON.getBytes()); + systemSetEnv(SF_CLIENT_CONFIG_ENV_NAME, "config.json"); + SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); } @Test - public void testloadSFClientConfigWithDriverLoaction() { + public void testLoadSFClientConfigWithDriverLocation() throws IOException { String configLocation = Paths.get(getConfigFilePathFromJDBCJarLocation(), SF_CLIENT_CONFIG_FILE_NAME).toString(); - Path configFilePath = Paths.get(configLocation); - - try { - Files.write(configFilePath, CONFIG_JSON.getBytes()); - SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); - assertEquals("info", actualConfig.getCommonProps().getLogLevel()); - assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); - - Files.delete(configFilePath); - } catch (IOException e) { - fail("testloadSFClientConfigWithClasspath failed"); - } + configFilePath = Paths.get(configLocation); + Files.write(configFilePath, CONFIG_JSON.getBytes()); + SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); } @Test - public void testloadSFClientConfigWithUserHome() { + public void testLoadSFClientConfigWithUserHome() throws IOException { String tmpDirectory = systemGetProperty("java.io.tmpdir"); try (MockedStatic mockedSnowflakeUtil = mockStatic(SnowflakeUtil.class)) { // mocking this as Jenkins/GH Action doesn't have write permissions on user.home directory. 
mockedSnowflakeUtil.when(() -> systemGetProperty("user.home")).thenReturn(tmpDirectory); - Path configFilePath = Paths.get(systemGetProperty("user.home"), SF_CLIENT_CONFIG_FILE_NAME); + configFilePath = Paths.get(systemGetProperty("user.home"), SF_CLIENT_CONFIG_FILE_NAME); Files.write(configFilePath, CONFIG_JSON.getBytes()); SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); assertEquals("info", actualConfig.getCommonProps().getLogLevel()); assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); - - Files.delete(configFilePath); - } catch (IOException e) { - e.printStackTrace(System.err); - fail("testloadSFClientConfigWithUserHome failed: " + e.getMessage()); } } @Test - public void testloadSFClientNoConditionsMatch() throws IOException { + public void testLoadSFClientNoConditionsMatch() throws IOException { SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); - assertTrue(actualConfig == null); + assertNull(actualConfig); } @Test - public void testgetConfigFileNameFromJDBCJarLocation() { + public void testGetConfigFileNameFromJDBCJarLocation() { String jdbcDirectoryPath = getConfigFilePathFromJDBCJarLocation(); assertTrue(jdbcDirectoryPath != null && !jdbcDirectoryPath.isEmpty()); } @Test - public void testconvertToWindowsPath() { + public void testConvertToWindowsPath() { String mockWindowsPath = "C:/Program Files/example.txt"; String resultWindowsPath = "C:\\Program Files\\example.txt"; - String[] testCases = new String[] {"", "file:\\", "\\\\", "/"}; + String[] testCases = new String[] {"", "file:\\", "\\\\", "/", "nested:\\"}; String mockCloudPrefix = "cloud://"; for (String testcase : testCases) { diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java new file mode 100644 index 000000000..07882fcb7 --- /dev/null +++ 
b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -0,0 +1,161 @@ +package net.snowflake.client.config; + +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assume.assumeFalse; + +import com.fasterxml.jackson.dataformat.toml.TomlMapper; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import net.snowflake.client.RunningNotOnLinuxMac; +import net.snowflake.client.core.Constants; +import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.jdbc.SnowflakeUtil; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +public class SFConnectionConfigParserTest { + + private Path tempPath = null; + private TomlMapper tomlMapper = new TomlMapper(); + + @Before + public void setUp() throws IOException { + tempPath = Files.createTempDirectory(".snowflake"); + } + + @After + public void close() throws IOException { + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY); + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + Files.walk(tempPath).map(Path::toFile).forEach(File::delete); + Files.delete(tempPath); + } + + @Test + public void testLoadSFConnectionConfigWrongConfigurationName() + throws SnowflakeSQLException, IOException { + 
SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "unknown"); + prepareConnectionConfigurationTomlFile(null, true); + ConnectionParameters connectionParameters = + SFConnectionConfigParser.buildConnectionParameters(); + assertNull(connectionParameters); + } + + @Test + public void testLoadSFConnectionConfigInValidPath() throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, Paths.get("unknownPath").toString()); + prepareConnectionConfigurationTomlFile(null, true); + assertNull(SFConnectionConfigParser.buildConnectionParameters()); + } + + @Test + public void testLoadSFConnectionConfigWithTokenFromFile() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true); + + ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals(tokenFile.toString(), data.getParams().get("token_file_path")); + assertEquals("testToken", data.getParams().get("token")); + } + + @Test + public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), false); + assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } + + private void prepareConnectionConfigurationTomlFile( + Map moreParameters, 
boolean onlyUserPermission) throws IOException { + Path path = Paths.get(tempPath.toString(), "connections.toml"); + Path filePath = createFilePathWithPermission(path, onlyUserPermission); + File file = filePath.toFile(); + + Map configuration = new HashMap(); + Map configurationParams = new HashMap(); + configurationParams.put("account", "snowaccount.us-west-2.aws"); + configurationParams.put("user", "user1"); + configurationParams.put("token", "testToken"); + configurationParams.put("port", "443"); + + if (moreParameters != null) { + moreParameters.forEach((k, v) -> configurationParams.put(k, v)); + } + configuration.put("default", configurationParams); + tomlMapper.writeValue(file, configuration); + + if (configurationParams.containsKey("token_file_path")) { + Path tokenFilePath = + createFilePathWithPermission( + Paths.get(configurationParams.get("token_file_path").toString()), onlyUserPermission); + Files.write(tokenFilePath, "token_from_file".getBytes()); + } + } + + private Path createFilePathWithPermission(Path path, boolean onlyUserPermission) + throws IOException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + FileAttribute> fileAttribute = + onlyUserPermission + ? 
PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-------")) + : PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrw----")); + return Files.createFile(path, fileAttribute); + } else { + return Files.createFile(path); + } + } + + @Test + public void testLoadSFConnectionConfigWithHostConfigured() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + Map extraparams = new HashMap(); + extraparams.put("host", "snowflake.reg.local"); + extraparams.put("account", null); + extraparams.put("port", "8082"); + prepareConnectionConfigurationTomlFile(extraparams, true); + ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals("jdbc:snowflake://snowflake.reg.local:8082", data.getUrl()); + } + + @Test + public void shouldThrowExceptionIfNoneOfHostAndAccountIsSet() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + Map extraparams = new HashMap(); + extraparams.put("host", null); + extraparams.put("account", null); + prepareConnectionConfigurationTomlFile(extraparams, true); + Assert.assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } +} diff --git a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java new file mode 100644 index 000000000..92ec8a624 --- /dev/null +++ b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java @@ -0,0 +1,86 @@ +package net.snowflake.client.config; + +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import 
java.nio.file.attribute.PosixFilePermissions; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnWin; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class SFPermissionsTest { + @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); + + @Parameterized.Parameters(name = "permission={0}") + public static Set> data() { + Map testConfigFilePermissions = + new HashMap() { + { + put("rwx------", false); + put("rw-------", false); + put("r-x------", false); + put("r--------", false); + put("rwxrwx---", true); + put("rwxrw----", true); + put("rwxr-x---", false); + put("rwxr-----", false); + put("rwx-wx---", true); + put("rwx-w----", true); + put("rwx--x---", false); + put("rwx---rwx", true); + put("rwx---rw-", true); + put("rwx---r-x", false); + put("rwx---r--", false); + put("rwx----wx", true); + put("rwx----w-", true); + put("rwx-----x", false); + } + }; + return testConfigFilePermissions.entrySet(); + } + + Path configFilePath = Paths.get("config.json"); + String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}"; + String permission; + Boolean isSucceed; + + public SFPermissionsTest(Map.Entry permission) { + this.permission = permission.getKey(); + this.isSucceed = permission.getValue(); + } + + @Before + public void createConfigFile() throws IOException { + Files.write(configFilePath, configJson.getBytes()); + } + + @After + public void cleanupConfigFile() throws IOException { + Files.deleteIfExists(configFilePath); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnWin.class) + public void testLogDirectoryPermissions() throws IOException { + // TODO: SNOW-1503722 Change to check for thrown exceptions + // Don't run on Windows 
+ Files.setPosixFilePermissions(configFilePath, PosixFilePermissions.fromString(permission)); + Boolean result = + SFClientConfigParser.checkGroupOthersWritePermissions(configFilePath.toString()); + if (isSucceed != result) { + fail("testLogDirectoryPermissions failed. Expected " + isSucceed); + } + } +} diff --git a/src/test/java/net/snowflake/client/core/EventHandlerTest.java b/src/test/java/net/snowflake/client/core/EventHandlerTest.java index 48b381330..eb930f7c6 100644 --- a/src/test/java/net/snowflake/client/core/EventHandlerTest.java +++ b/src/test/java/net/snowflake/client/core/EventHandlerTest.java @@ -62,12 +62,11 @@ public void testDumpLogBuffer() throws IOException { @Test public void testEventFlusher() { EventHandler handler = new EventHandler(2, 1000); - handler.startFlusher(); + assertEquals(0, handler.getBufferSize()); handler.triggerBasicEvent(Event.EventType.STATE_TRANSITION, "test event"); - assertEquals(handler.getBufferSize(), 1); + assertEquals(1, handler.getBufferSize()); handler.triggerBasicEvent(Event.EventType.STATE_TRANSITION, "test event 2"); // buffer should flush when max entries is reached - assertEquals(handler.getBufferSize(), 0); - handler.stopFlusher(); + assertEquals(0, handler.getBufferSize()); } } diff --git a/src/test/java/net/snowflake/client/core/EventTest.java b/src/test/java/net/snowflake/client/core/EventTest.java index 441eee25a..e9ee978e5 100644 --- a/src/test/java/net/snowflake/client/core/EventTest.java +++ b/src/test/java/net/snowflake/client/core/EventTest.java @@ -62,7 +62,7 @@ public void testWriteEventDumpLine() throws IOException { File dumpFile = new File( EventUtil.getDumpPathPrefix() - + "/" + + File.separator + "sf_event_" + EventUtil.getDumpFileId() + ".dmp.gz"); diff --git a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java new file mode 100644 index 000000000..f7ad06b46 --- /dev/null +++ 
b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java @@ -0,0 +1,84 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import net.minidev.json.JSONObject; +import net.minidev.json.parser.JSONParser; +import net.minidev.json.parser.ParseException; +import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import org.junit.Test; + +public class ExecTimeTelemetryDataTest { + + @Test + public void testExecTimeTelemetryData() throws ParseException { + ExecTimeTelemetryData execTimeTelemetryData = new ExecTimeTelemetryData(); + execTimeTelemetryData.sendData = true; + execTimeTelemetryData.setBindStart(); + execTimeTelemetryData.setOCSPStatus(true); + execTimeTelemetryData.setBindEnd(); + execTimeTelemetryData.setHttpClientStart(); + execTimeTelemetryData.setHttpClientEnd(); + execTimeTelemetryData.setGzipStart(); + execTimeTelemetryData.setGzipEnd(); + execTimeTelemetryData.setQueryEnd(); + execTimeTelemetryData.setQueryId("queryid"); + execTimeTelemetryData.setProcessResultChunkStart(); + execTimeTelemetryData.setProcessResultChunkEnd(); + execTimeTelemetryData.setResponseIOStreamStart(); + execTimeTelemetryData.setResponseIOStreamEnd(); + execTimeTelemetryData.setCreateResultSetStart(); + execTimeTelemetryData.setCreateResultSetEnd(); + execTimeTelemetryData.incrementRetryCount(); + execTimeTelemetryData.setRequestId("mockId"); + execTimeTelemetryData.addRetryLocation("retry"); + + String telemetry = execTimeTelemetryData.generateTelemetry(); + JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); + JSONObject json = (JSONObject) parser.parse(telemetry); + assertNotNull(json.get("BindStart")); + assertNotNull(json.get("BindEnd")); + assertEquals(json.get("ocspEnabled"), true); + assertNotNull(json.get("HttpClientStart")); + assertNotNull(json.get("HttpClientEnd")); + assertNotNull(json.get("GzipStart")); 
+ assertNotNull(json.get("GzipEnd")); + assertNotNull(json.get("QueryEnd")); + assertEquals(json.get("QueryID"), "queryid"); + assertNotNull(json.get("ProcessResultChunkStart")); + assertNotNull(json.get("ProcessResultChunkEnd")); + assertNotNull(json.get("ResponseIOStreamStart")); + assertNotNull(json.get("CreateResultSetStart")); + assertNotNull(json.get("CreateResultSetEnd")); + assertNotNull(json.get("ElapsedQueryTime")); + assertNotNull(json.get("ElapsedResultProcessTime")); + assertNull(json.get("QueryFunction")); + assertNull(json.get("BatchID")); + assertEquals(((Long) json.get("RetryCount")).intValue(), 1); + assertEquals(json.get("RequestID"), "mockId"); + assertEquals(json.get("RetryLocations"), "retry"); + assertEquals(json.get("Urgent"), true); + assertEquals(json.get("eventType"), "ExecutionTimeRecord"); + } + + @Test + public void testRetryLocation() throws ParseException { + TelemetryService.enableHTAP(); + ExecTimeTelemetryData execTimeTelemetryData = + new ExecTimeTelemetryData("queryFunction", "batchId"); + execTimeTelemetryData.addRetryLocation("hello"); + execTimeTelemetryData.addRetryLocation("world"); + execTimeTelemetryData.sendData = true; + String telemetry = execTimeTelemetryData.generateTelemetry(); + + JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); + JSONObject json = (JSONObject) parser.parse(telemetry); + assertEquals(json.get("QueryFunction"), "queryFunction"); + assertEquals(json.get("BatchID"), "batchId"); + assertNotNull(json.get("QueryStart")); + assertEquals(json.get("RetryLocations"), "hello, world"); + TelemetryService.disableHTAP(); + } +} diff --git a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java new file mode 100644 index 000000000..9a5af03b2 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java @@ -0,0 +1,97 @@ +package net.snowflake.client.core; + +import static 
org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class OCSPCacheServerTest { + + @Parameterized.Parameters( + name = "For host {0} cache server fetch url should be {1} and retry url {2}") + public static Object[][] data() { + return new Object[][] { + { + "bla-12345.global.snowflakecomputing.com", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry" + }, + { + "bla-12345.global.snowflakecomputing.cn", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry" + }, + { + "bla-12345.global.snowflakecomputing.xyz", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry" + }, + { + "bla-12345.GLOBAL.snowflakecomputing.xyz", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry" + }, + { + "bla-12345.snowflakecomputing.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry" + }, + { + "bla-12345.snowflakecomputing.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry" + }, + { + "bla-12345.snowflakecomputing.xyz", + "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd.snowflakecomputing.xyz/ocsp/retry" + }, + { + "bla-12345.SNOWFLAKEcomputing.xyz", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry" + }, + { + "s3.amazoncomaws.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry" + }, + { + "s3.amazoncomaws.COM", + "https://ocspssd.snowflakecomputing.COM/ocsp/fetch", + 
"https://ocspssd.snowflakecomputing.COM/ocsp/retry" + }, + { + "s3.amazoncomaws.com.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry" + }, + { + "S3.AMAZONCOMAWS.COM.CN", + "https://ocspssd.snowflakecomputing.CN/ocsp/fetch", + "https://ocspssd.snowflakecomputing.CN/ocsp/retry" + }, + }; + } + + private final String host; + private final String expectedFetchUrl; + private final String expectedRetryUrl; + + public OCSPCacheServerTest(String host, String expectedFetchUrl, String expectedRetryUrl) { + this.host = host; + this.expectedFetchUrl = expectedFetchUrl; + this.expectedRetryUrl = expectedRetryUrl; + } + + @Test + public void shouldChooseOcspCacheServerUrls() { + SFTrustManager.OCSPCacheServer ocspCacheServer = new SFTrustManager.OCSPCacheServer(); + ocspCacheServer.resetOCSPResponseCacheServer(host); + + assertEquals(expectedFetchUrl, ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER); + assertEquals(expectedRetryUrl, ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL); + } +} diff --git a/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java new file mode 100644 index 000000000..b3af68011 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java @@ -0,0 +1,42 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class PrivateLinkDetectorTest { + + @Parameterized.Parameters(name = "Host {0} is private link: {1}") + public static Object[][] data() { + return new Object[][] { + {"snowhouse.snowflakecomputing.com", false}, + {"snowhouse.privatelink.snowflakecomputing.com", true}, + {"snowhouse.PRIVATELINK.snowflakecomputing.com", true}, + {"snowhouse.snowflakecomputing.cn", false}, + 
{"snowhouse.privatelink.snowflakecomputing.cn", true}, + {"snowhouse.PRIVATELINK.snowflakecomputing.cn", true}, + {"snowhouse.snowflakecomputing.xyz", false}, + {"snowhouse.privatelink.snowflakecomputing.xyz", true}, + {"snowhouse.PRIVATELINK.snowflakecomputing.xyz", true}, + }; + } + + private final String host; + private final boolean expectedToBePrivateLink; + + public PrivateLinkDetectorTest(String host, boolean expectedToBePrivateLink) { + this.host = host; + this.expectedToBePrivateLink = expectedToBePrivateLink; + } + + @Test + public void shouldDetectPrivateLinkHost() { + assertEquals( + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), + expectedToBePrivateLink, + PrivateLinkDetector.isPrivateLink(host)); + } +} diff --git a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java index cd841b474..862dd1c40 100644 --- a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java +++ b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java @@ -6,6 +6,9 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import org.junit.Test; @@ -217,6 +220,11 @@ public void testSerializeRequestAndDeserializeResponseDataWithNullContext() thro qcc.deserializeQueryContextDTO(requestData); assertCacheDataWithContext(null); + + QueryContextCache mockQcc = spy(qcc); + mockQcc.deserializeQueryContextDTO(null); + verify(mockQcc).clearCache(); + verify(mockQcc, times(2)).logCacheEntries(); } private void assertCacheData() { diff --git a/src/test/java/net/snowflake/client/core/SFLoginInputTest.java b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java new file mode 100644 index 000000000..7d8a5b67b --- /dev/null +++ 
b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java @@ -0,0 +1,22 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class SFLoginInputTest { + + @Test + public void testGetHostFromServerUrlWithoutProtocolShouldNotThrow() throws SFException { + SFLoginInput sfLoginInput = new SFLoginInput(); + sfLoginInput.setServerUrl("host.com:443"); + assertEquals("host.com", sfLoginInput.getHostFromServerUrl()); + } + + @Test + public void testGetHostFromServerUrlWithProtocolShouldNotThrow() throws SFException { + SFLoginInput sfLoginInput = new SFLoginInput(); + sfLoginInput.setServerUrl("https://host.com"); + assertEquals("host.com", sfLoginInput.getHostFromServerUrl()); + } +} diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java index 13f3f8f09..f30cd88e1 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java @@ -20,9 +20,12 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLHandshakeException; import net.snowflake.client.category.TestCategoryCore; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; @@ -32,22 +35,37 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +@RunWith(Parameterized.class) @Category(TestCategoryCore.class) public class SFTrustManagerIT extends BaseJDBCTest { - private static final String[] TARGET_HOSTS = { - 
"storage.googleapis.com", - "ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch", - "sfcsupport.snowflakecomputing.com", - "sfcsupport.us-east-1.snowflakecomputing.com", - "sfcsupport.eu-central-1.snowflakecomputing.com", - "sfc-dev1-regression.s3.amazonaws.com", - "sfc-ds2-customer-stage.s3.amazonaws.com", - "snowflake.okta.com", - "sfcdev2.blob.core.windows.net" - }; + private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManagerIT.class); + + public SFTrustManagerIT(String host) { + this.host = host; + } + + @Parameterized.Parameters(name = "host={0}") + public static Object[][] data() { + return new Object[][] { + // this host generates many "SSLHandshake Certificate Revocation + // check failed. Could not retrieve OCSP Response." when running in parallel CI builds + // {"storage.googleapis.com"}, + {"ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"}, + {"sfcsupport.snowflakecomputing.com"}, + {"sfcsupport.us-east-1.snowflakecomputing.com"}, + {"sfcsupport.eu-central-1.snowflakecomputing.com"}, + {"sfc-dev1-regression.s3.amazonaws.com"}, + {"sfc-ds2-customer-stage.s3.amazonaws.com"}, + {"snowflake.okta.com"}, + {"sfcdev2.blob.core.windows.net"} + }; + } private boolean defaultState; + private final String host; @Before public void setUp() { @@ -83,15 +101,13 @@ public void tearDown() throws InterruptedException { public void testOcsp() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - null, // default OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + null, // default OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** @@ -104,15 +120,13 @@ public 
void testOcspWithFileCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ @@ -121,15 +135,13 @@ public void testOcspWithServerCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** @@ -141,15 +153,13 @@ public void testOcspWithoutServerCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + 
new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ @@ -159,7 +169,6 @@ public void testInvalidCacheFile() throws Throwable { SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); // a file under never exists. File ocspCacheFile = new File("NEVER_EXISTS", "NEVER_EXISTS"); - String host = TARGET_HOSTS[0]; HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -169,20 +178,38 @@ public void testInvalidCacheFile() throws Throwable { accessHost(host, client); } - private static void accessHost(String host, HttpClient client) throws IOException { - int statusCode = -1; - - HttpGet httpRequest = new HttpGet(String.format("https://%s:443/", host)); - HttpResponse response = client.execute(httpRequest); - statusCode = response.getStatusLine().getStatusCode(); + private static void accessHost(String host, HttpClient client) + throws IOException, InterruptedException { + HttpResponse response = executeWithRetries(host, client); await() .atMost(Duration.ofSeconds(10)) .until(() -> response.getStatusLine().getStatusCode(), not(equalTo(-1))); + assertThat( String.format("response code for %s", host), - statusCode, - anyOf(equalTo(200), equalTo(403), equalTo(400))); + response.getStatusLine().getStatusCode(), + anyOf(equalTo(200), equalTo(400), equalTo(403), equalTo(404), equalTo(513))); + } + + private static HttpResponse executeWithRetries(String host, HttpClient client) + throws IOException, InterruptedException { + // There is one host that causes SSLHandshakeException very often - let's retry + int maxRetries = host.equals("storage.googleapis.com") ? 
5 : 0; + int retries = 0; + HttpGet httpRequest = new HttpGet(String.format("https://%s:443/", host)); + while (true) { + try { + return client.execute(httpRequest); + } catch (SSLHandshakeException e) { + logger.warn("SSL handshake failed (host = {}, retries={}}", host, retries, e); + ++retries; + if (retries >= maxRetries) { + throw e; + } + Thread.sleep(retries * 1000); + } + } } /** diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java index a4326d5bd..6a55b2cd4 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java @@ -46,6 +46,18 @@ public void testBuildRetryURL() throws Exception { SFTrustManager.resetOCSPResponseCacherServerURL( "http://ocsp.snowflakecomputing.com:80/" + SFTrustManager.CACHE_FILE_NAME); assertThat(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN, nullValue()); + + // default OCSP Cache server URL in specific domain without port + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = null; + SFTrustManager.resetOCSPResponseCacherServerURL( + "http://ocsp.snowflakecomputing.cn/" + SFTrustManager.CACHE_FILE_NAME); + assertThat(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN, nullValue()); + + // default OCSP Cache server URL in specific domain with port + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = null; + SFTrustManager.resetOCSPResponseCacherServerURL( + "http://ocsp.snowflakecomputing.cn:80/" + SFTrustManager.CACHE_FILE_NAME); + assertThat(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN, nullValue()); } @Test @@ -65,6 +77,14 @@ public void testBuildNewRetryURL() { tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, equalTo("https://ocspssd.snowflakecomputing.com/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer("a1.snowflakecomputing.cn"); + assertThat( + 
tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, + equalTo("https://ocspssd.snowflakecomputing.cn/ocsp/fetch")); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, + equalTo("https://ocspssd.snowflakecomputing.cn/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer( "a1-12345.global.snowflakecomputing.com"); assertThat( @@ -74,6 +94,15 @@ public void testBuildNewRetryURL() { tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, equalTo("https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer( + "a1-12345.global.snowflakecomputing.cn"); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, + equalTo("https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch")); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, + equalTo("https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer("okta.snowflake.com"); assertThat( tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, @@ -90,6 +119,15 @@ public void testBuildNewRetryURL() { assertThat( tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, equalTo("https://ocspssd.us-east-1.privatelink.snowflakecomputing.com/ocsp/retry")); + + tManager.ocspCacheServer.resetOCSPResponseCacheServer( + "a1.us-east-1.privatelink.snowflakecomputing.cn"); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, + equalTo("https://ocspssd.us-east-1.privatelink.snowflakecomputing.cn/ocsp/fetch")); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, + equalTo("https://ocspssd.us-east-1.privatelink.snowflakecomputing.cn/ocsp/retry")); } finally { System.clearProperty("net.snowflake.jdbc.ocsp_activate_new_endpoint"); } diff --git a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java new file mode 100644 index 
000000000..346d43c34 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java @@ -0,0 +1,42 @@ +package net.snowflake.client.core; + +import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; +import static org.mockito.Mockito.mock; + +import java.sql.SQLData; +import org.junit.Test; + +public class SQLInputOutputTest { + + @Test + public void testBaseSQLUnSupportedException() { + BaseSqlInput sqlInput = new ArrowSqlInput(null, null, null, null); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readCharacterStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readAsciiStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readBinaryStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readRef); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readBlob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readClob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readArray); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readURL); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readNClob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readNString); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readSQLXML); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readRowId); + } + + @Test + public void testJsonSqlOutPutUnSupportedTest() { + JsonSqlOutput sqloutput = new JsonSqlOutput(mock(SQLData.class), mock(SFBaseSession.class)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeRef(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeBlob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeClob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeStruct(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> 
sqloutput.writeArray(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeURL(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeNString(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeNClob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeRowId(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeSQLXML(null)); + } +} diff --git a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java index dd6d5e7bd..f936ee616 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java @@ -465,4 +465,171 @@ public void testOktaAuthRetry() throws Throwable { SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); } } + + /** + * Tests the disableSamlURLCheck. If the disableSamlUrl is provided to the login input with true, + * the driver will skip checking the format of the saml URL response. 
This latest test will work + * with jdbc > 3.16.0 + * + * @throws Throwable + */ + @Test + public void testOktaDisableSamlUrlCheck() throws Throwable { + SFLoginInput loginInput = createOktaLoginInput(); + loginInput.setDisableSamlURLCheck(true); + Map connectionPropertiesMap = initConnectionPropertiesMap(); + try (MockedStatic mockedHttpUtil = mockStatic(HttpUtil.class)) { + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpPost.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"data\":{\"tokenUrl\":\"https://testauth.okta.com/api/v1/authn\"," + + "\"ssoUrl\":\"https://testauth.okta.com/app/snowflake/abcdefghijklmnopqrstuvwxyz/sso/saml\"," + + "\"proofKey\":null},\"code\":null,\"message\":null,\"success\":true}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeRequestWithoutCookies( + Mockito.any(HttpRequestBase.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(AtomicBoolean.class), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"expiresAt\":\"2023-10-13T19:18:09.000Z\",\"status\":\"SUCCESS\",\"sessionToken\":\"testsessiontoken\"}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpGet.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn("

"); + + SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); + } + } + + @Test + public void testInvalidOktaSamlFormat() throws Throwable { + SFLoginInput loginInput = createOktaLoginInput(); + Map connectionPropertiesMap = initConnectionPropertiesMap(); + try (MockedStatic mockedHttpUtil = mockStatic(HttpUtil.class)) { + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpPost.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"data\":{\"tokenUrl\":\"https://testauth.okta.com/api/v1/authn\"," + + "\"ssoUrl\":\"https://testauth.okta.com/app/snowflake/abcdefghijklmnopqrstuvwxyz/sso/saml\"," + + "\"proofKey\":null},\"code\":null,\"message\":null,\"success\":true}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeRequestWithoutCookies( + Mockito.any(HttpRequestBase.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(AtomicBoolean.class), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"expiresAt\":\"2023-10-13T19:18:09.000Z\",\"status\":\"SUCCESS\",\"sessionToken\":\"testsessiontoken\"}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpGet.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn("
"); + + SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); + fail("Should be failed because of the invalid form"); + } catch (SnowflakeSQLException ex) { + assertEquals((int) ErrorCode.NETWORK_ERROR.getMessageCode(), ex.getErrorCode()); + } + } + + @Test + public void testOktaWithInvalidHostName() throws Throwable { + SFLoginInput loginInput = createOktaLoginInput(); + Map connectionPropertiesMap = initConnectionPropertiesMap(); + try (MockedStatic mockedHttpUtil = mockStatic(HttpUtil.class)) { + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpPost.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"data\":{\"tokenUrl\":\"https://testauth.okta.com/api/v1/authn\"," + + "\"ssoUrl\":\"https://testauth.okta.com/app/snowflake/abcdefghijklmnopqrstuvwxyz/sso/saml\"," + + "\"proofKey\":null},\"code\":null,\"message\":null,\"success\":true}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeRequestWithoutCookies( + Mockito.any(HttpRequestBase.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(AtomicBoolean.class), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"expiresAt\":\"2023-10-13T19:18:09.000Z\",\"status\":\"SUCCESS\",\"sessionToken\":\"testsessiontoken\"}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpGet.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn("
"); + + SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); + fail("Should be failed because of the invalid form"); + } catch (SnowflakeSQLException ex) { + assertEquals((int) ErrorCode.IDP_INCORRECT_DESTINATION.getMessageCode(), ex.getErrorCode()); + } + } } diff --git a/src/test/java/net/snowflake/client/core/SessionUtilTest.java b/src/test/java/net/snowflake/client/core/SessionUtilTest.java index 5cb118c56..cab5fb68f 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilTest.java @@ -10,6 +10,7 @@ import static org.junit.Assert.assertTrue; import com.fasterxml.jackson.databind.node.BooleanNode; +import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; @@ -66,7 +67,7 @@ public void testParameterParsing() { parameterMap.put("other_parameter", BooleanNode.getTrue()); SFBaseSession session = new MockConnectionTest.MockSnowflakeConnectionImpl().getSFSession(); SessionUtil.updateSfDriverParamValues(parameterMap, session); - assert (((BooleanNode) session.getOtherParameter("other_parameter")).asBoolean()); + assertTrue(((BooleanNode) session.getOtherParameter("other_parameter")).asBoolean()); } @Test @@ -129,4 +130,42 @@ public void testIsLoginRequestInvalidURIPath() { } } } + + @Test + public void shouldDerivePrivateLinkOcspCacheServerUrlBasedOnHost() throws IOException { + resetOcspConfiguration(); + + SessionUtil.resetOCSPUrlIfNecessary("https://test.privatelink.snowflakecomputing.com"); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.com/ocsp_response_cache.json", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.com/retry/%s/%s", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); + + resetOcspConfiguration(); + + SessionUtil.resetOCSPUrlIfNecessary("https://test.privatelink.snowflakecomputing.cn"); + 
assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.cn/ocsp_response_cache.json", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.cn/retry/%s/%s", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); + + resetOcspConfiguration(); + + SessionUtil.resetOCSPUrlIfNecessary("https://test.privatelink.snowflakecomputing.xyz"); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.xyz/ocsp_response_cache.json", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.xyz/retry/%s/%s", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); + } + + private void resetOcspConfiguration() { + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = null; + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = null; + } } diff --git a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java index 7d2b22d33..752229fc9 100644 --- a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java @@ -48,7 +48,7 @@ public void shouldGetTimestampForDifferentType() { } private Timestamp getFromType(int type, String value, TimeZone explicitTimezone) { - return SqlInputTimestampUtil.getTimestampFromType( + return SfTimestampUtil.getTimestampFromType( type, value, mockSession, TimeZone.getTimeZone("GMT"), explicitTimezone); } } diff --git a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java new file mode 100644 index 000000000..f3ae88eee --- /dev/null +++ b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java @@ -0,0 +1,23 @@ +package net.snowflake.client.core.bind; + +import static org.junit.Assert.assertEquals; + 
+import net.snowflake.client.jdbc.telemetry.TelemetryField; +import org.junit.Test; + +public class BindExceptionTest { + + @Test + public void testBindExceptionType() { + assertEquals(BindException.Type.SERIALIZATION.field, TelemetryField.FAILED_BIND_SERIALIZATION); + assertEquals(BindException.Type.UPLOAD.field, TelemetryField.FAILED_BIND_UPLOAD); + assertEquals(BindException.Type.OTHER.field, TelemetryField.FAILED_BIND_OTHER); + } + + @Test + public void testBindExceptionConstructor() { + BindException exception = new BindException("testException", BindException.Type.SERIALIZATION); + assertEquals(exception.getMessage(), "testException"); + assertEquals(exception.type.field, TelemetryField.FAILED_BIND_SERIALIZATION); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java index b8bacc82b..a326dea12 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java @@ -35,6 +35,7 @@ import javax.xml.transform.Result; import javax.xml.transform.Source; import net.snowflake.client.AbstractDriverIT; +import net.snowflake.client.core.SFException; public class BaseJDBCTest extends AbstractDriverIT { // Test UUID unique per session @@ -44,6 +45,10 @@ protected interface MethodRaisesSQLException { void run() throws SQLException; } + protected interface MethodRaisesSFException { + void run() throws SFException; + } + protected interface MethodRaisesSQLClientInfoException { void run() throws SQLClientInfoException; } diff --git a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java new file mode 100644 index 000000000..a408e5d5a --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java @@ -0,0 +1,391 @@ +/* + * Copyright (c) 2012-2024 Snowflake 
Computing Inc. All right reserved. + */ +package net.snowflake.client.jdbc; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Connection; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.HashMap; +import java.util.Map; +import java.util.TimeZone; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; +import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +@Category(TestCategoryResultSet.class) +public class BindingAndInsertingStructuredTypesLatestIT extends BaseJDBCTest { + + @Parameterized.Parameters(name = "format={0}") + public static Object[][] data() { + return new Object[][] { + {ResultSetFormatType.JSON}, + {ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES}, + {ResultSetFormatType.NATIVE_ARROW} + }; + } + + private final ResultSetFormatType queryResultFormat; + + public 
BindingAndInsertingStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { + this.queryResultFormat = queryResultFormat; + } + + public Connection init() throws SQLException { + Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); + stmt.execute("alter session set IGNORE_CLIENT_VESRION_IN_STRUCTURED_TYPES_RESPONSE = true"); + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_BINDS = enable"); + stmt.execute("alter session set ENABLE_OBJECT_TYPED_BINDS = true"); + stmt.execute("alter session set enable_structured_types_in_fdn_tables=true"); + stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + stmt.execute( + "alter session set jdbc_query_result_format = '" + + queryResultFormat.sessionParameterTypeValue + + "'"); + if (queryResultFormat == ResultSetFormatType.NATIVE_ARROW) { + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); + stmt.execute("alter session set FORCE_ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); + } + } + return conn; + } + + @Before + public void setup() { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); + } + + @After + public void clean() { + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + } + + // TODO Structured types feature exists only on QA environments + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteObject() throws SQLException { + SimpleClass sc = new SimpleClass("text1", 2); + SimpleClass sc2 = new SimpleClass("text2", 3); + try (Connection connection = init()) { + Statement statement = connection.createStatement(); + statement.execute( + "CREATE 
OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); + try (SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_table select ?"); + SnowflakePreparedStatementV1 stmt3 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("SELECT ob FROM test_table where ob = ?"); ) { + + stmt.setObject(1, sc); + stmt.executeUpdate(); + + stmt.setObject(1, sc2); + stmt.executeUpdate(); + + stmt3.setObject(1, sc2); + + try (ResultSet resultSet = stmt3.executeQuery()) { + + resultSet.next(); + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("text2", object.getString()); + assertEquals(Integer.valueOf("3"), object.getIntValue()); + assertFalse(resultSet.next()); + } + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteNullObject() throws SQLException { + Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmtement2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_table select null"); + SnowflakePreparedStatementV1 statement3 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("SELECT * FROM test_table"); ) { + + statement.execute( + "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); + + stmtement2.executeUpdate(); + + try (ResultSet resultSet = statement3.executeQuery()) { + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(1)); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteObjectBindingNull() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + 
(SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_table select ?"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("SELECT * FROM test_table"); ) { + statement.execute( + "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); + stmt.setObject(1, null); + stmt.executeUpdate(); + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertNull(object); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteObjectAllTypes() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC)); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_all_types_object select ?"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("select * from test_all_types_object where ob=?"); ) { + + statement.execute( + " CREATE OR REPLACE TABLE test_all_types_object (" + + " ob OBJECT(string VARCHAR, " + + " b TINYINT, " + + " s SMALLINT, " + + " i INTEGER, " + + " l BIGINT, " + + " f FLOAT, " + + " d DOUBLE, " + + " bd NUMBER(38,2), " + + " bool BOOLEAN, " + + " timestampLtz TIMESTAMP_LTZ, " + + " timestampNtz TIMESTAMP_NTZ, " + + " timestampTz TIMESTAMP_TZ, " + + " date DATE," + + " time TIME, " + + " binary BINARY, " + + " simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + + " ) )"); + + AllTypesClass allTypeInstance = + new AllTypesClass( + "string", + "1".getBytes(StandardCharsets.UTF_8)[0], + Short.valueOf("2"), + Integer.valueOf(3), + Long.valueOf(4), + 1.1f, + 2.24, + new BigDecimal("999999999999999999999999999999999999.55"), + Boolean.TRUE, + Timestamp.valueOf(LocalDateTime.of(2021, 12, 
22, 9, 43, 44)), + toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("UTC"))), + toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("Asia/Tokyo"))), + Date.valueOf("2023-12-24"), + Time.valueOf("12:34:56"), + new byte[] {'a', 'b', 'c'}, + new SimpleClass("testString", 2)); + stmt.setObject(1, allTypeInstance); + stmt.executeUpdate(); + statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + + stmt2.setObject(1, allTypeInstance); + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); + assertEquals("string", object.getString()); + assertEquals(49, (long) object.getB()); + assertEquals(2, (long) object.getS()); + assertEquals(3, (long) object.getI()); + assertEquals(4, (long) object.getL()); + assertEquals(1.1, (double) object.getF(), 0.01); + assertEquals(2.24, (double) object.getD(), 0.01); + assertEquals(new BigDecimal("999999999999999999999999999999999999.55"), object.getBd()); + assertEquals(Boolean.TRUE, object.getBool()); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), object.getTimestampLtz()); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 9, 44, 44)), object.getTimestampNtz()); + assertEquals( + toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("Asia/Tokyo"))), + object.getTimestampTz()); + // TODO uncomment after merge SNOW-928973: Date field is returning one day less when getting + // through getString method + // assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), object.getDate()); + assertEquals(Time.valueOf(LocalTime.of(12, 34, 56)), object.getTime()); + assertArrayEquals(new byte[] {'a', 'b', 'c'}, object.getBinary()); + assertEquals("testString", object.getSimpleClass().getString()); + assertEquals(Integer.valueOf("2"), object.getSimpleClass().getIntValue()); + } + } + } + + public static Timestamp toTimestamp(ZonedDateTime dateTime) { + return 
new Timestamp(dateTime.toInstant().getEpochSecond() * 1000L); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteArray() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement( + "INSERT INTO array_of_integers (arrayInt) SELECT ?;"); ) { + + statement.execute(" CREATE OR REPLACE TABLE array_of_integers(arrayInt ARRAY(INTEGER))"); + + Array array = connection.createArrayOf("INTEGER", new Integer[] {1, 2, 3}); + stmt.setArray(1, array); + stmt.executeUpdate(); + + try (ResultSet resultSet = statement.executeQuery("SELECT * from array_of_integers"); ) { + resultSet.next(); + + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf(1), resultArray[0]); + assertEquals(Long.valueOf(2), resultArray[1]); + assertEquals(Long.valueOf(3), resultArray[2]); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteArrayNoBinds() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement( + "insert into array_of_integers select ([1, 2, 3]::array(integer));"); ) { + + statement.execute(" CREATE OR REPLACE TABLE array_of_integers(arrayInt ARRAY(INTEGER))"); + + stmt.executeUpdate(); + + try (ResultSet resultSet = statement.executeQuery("SELECT * from array_of_integers"); ) { + resultSet.next(); + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf(1), resultArray[0]); + assertEquals(Long.valueOf(2), resultArray[1]); + assertEquals(Long.valueOf(3), resultArray[2]); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnGithubAction.class) + public void testWriteMapOfSqlData() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("INSERT INTO map_of_objects (mapp) SELECT ?;"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("select * from map_of_objects where mapp=?"); ) { + + statement.execute( + " CREATE OR REPLACE TABLE map_of_objects(mapp MAP(VARCHAR, OBJECT(string VARCHAR, intValue INTEGER)))"); + + Map mapStruct = + Stream.of( + new Object[][] { + {"x", new SimpleClass("string1", 1)}, + {"y", new SimpleClass("string2", 2)}, + }) + .collect(Collectors.toMap(data -> (String) data[0], data -> (SimpleClass) data[1])); + + stmt.setMap(1, mapStruct, Types.STRUCT); + stmt.executeUpdate(); + + stmt2.setMap(1, mapStruct, Types.STRUCT); + + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteMapOfInteger() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("INSERT INTO map_of_objects (mapp) SELECT ?;"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("select * from map_of_objects where mapp=?"); ) { + + statement.execute(" CREATE OR REPLACE TABLE map_of_objects(mapp MAP(VARCHAR, INTEGER))"); + + Map mapStruct = new HashMap<>(); + mapStruct.put("x", 1); + mapStruct.put("y", 2); + + stmt.setMap(1, mapStruct, Types.INTEGER); + stmt.executeUpdate(); + + stmt2.setMap(1, mapStruct, Types.INTEGER); + + try (ResultSet 
resultSet = stmt2.executeQuery()) { + resultSet.next(); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Integer.class); + } + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java index 257759120..71c556686 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java @@ -6,6 +6,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.PreparedStatement; @@ -30,26 +31,34 @@ */ @Category(TestCategoryOthers.class) public class BindingDataLatestIT extends AbstractDriverIT { + TimeZone origTz = TimeZone.getDefault(); + TimeZone tokyoTz = TimeZone.getTimeZone("Asia/Tokyo"); + TimeZone australiaTz = TimeZone.getTimeZone("Australia/Sydney"); + Calendar tokyo = Calendar.getInstance(tokyoTz); + @Test public void testBindTimestampTZ() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table testBindTimestampTZ(" + "cola int, colb timestamp_tz)"); - statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_TZ"); - - long millSeconds = System.currentTimeMillis(); - Timestamp ts = new Timestamp(millSeconds); - PreparedStatement prepStatement = - connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)"); - prepStatement.setInt(1, 123); - prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("EST"))); - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select cola, colb from testBindTimestampTz"); - resultSet.next(); - assertThat("integer", resultSet.getInt(1), equalTo(123)); - assertThat("timestamp_tz", 
resultSet.getTimestamp(2), equalTo(ts)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table testBindTimestampTZ(cola int, colb timestamp_tz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_TZ"); + + long milliSeconds = System.currentTimeMillis(); + Timestamp ts = new Timestamp(milliSeconds); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)")) { + prepStatement.setInt(1, 123); + prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("EST"))); + prepStatement.execute(); + } + + try (ResultSet resultSet = + statement.executeQuery("select cola, colb from testBindTimestampTz")) { + assertTrue(resultSet.next()); + assertThat("integer", resultSet.getInt(1), equalTo(123)); + assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); + } + } } /** @@ -60,57 +69,52 @@ public void testBindTimestampTZ() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testTimestampBindingWithNTZType() throws SQLException { - try (Connection connection = getConnection()) { - TimeZone origTz = TimeZone.getDefault(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute( - "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); - statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); - TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo")); - Timestamp currT = new Timestamp(System.currentTimeMillis()); - - // insert using stage binding - PreparedStatement prepStatement = - 
connection.prepareStatement("insert into stageinsert values (?,?,?,?)"); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); - prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); - prepStatement.addBatch(); - prepStatement.executeBatch(); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); - - // insert using regular binging - prepStatement = connection.prepareStatement("insert into regularinsert values (?,?,?,?)"); - for (int i = 1; i <= 6; i++) { - prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); - prepStatement.addBatch(); - } - prepStatement.executeBatch(); + TimeZone.setDefault(tokyoTz); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute( + "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); + statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); + Timestamp currT = new Timestamp(System.currentTimeMillis()); + + // insert using regular binging + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into regularinsert values (?,?,?,?)")) { + prepStatement.setInt(1, 1); + prepStatement.setTimestamp(2, currT, tokyo); + prepStatement.setTimestamp(3, currT, tokyo); + prepStatement.setTimestamp(4, currT); + prepStatement.addBatch(); + prepStatement.executeBatch(); + } + // insert using stage binding + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + executePsStatementForTimestampTest(connection, 
"stageinsert", currT); + + // Compare the results + try (ResultSet rs1 = statement.executeQuery("select * from stageinsert"); + ResultSet rs2 = statement.executeQuery("select * from regularinsert")) { + assertTrue(rs1.next()); + assertTrue(rs2.next()); + + assertEquals(rs1.getInt(1), rs2.getInt(1)); - // Compare the results - ResultSet rs1 = statement.executeQuery("select * from stageinsert"); - ResultSet rs2 = statement.executeQuery("select * from regularinsert"); - rs1.next(); - rs2.next(); - - assertEquals(rs1.getInt(1), rs2.getInt(1)); - assertEquals(rs1.getString(2), rs2.getString(2)); - assertEquals(rs1.getString(3), rs2.getString(3)); - assertEquals(rs1.getString(4), rs2.getString(4)); - - statement.execute("drop table if exists stageinsert"); - statement.execute("drop table if exists regularinsert"); - TimeZone.setDefault(origTz); - statement.close(); - prepStatement.close(); + // Check tz type and ltz type columns have the same value. + assertEquals(rs1.getTimestamp(2), rs1.getTimestamp(3)); + + assertEquals(rs1.getTimestamp(2), rs2.getTimestamp(2)); + assertEquals(rs1.getTimestamp(3), rs2.getTimestamp(3)); + assertEquals(rs1.getTimestamp(4), rs2.getTimestamp(4)); + } + } finally { + statement.execute("drop table if exists stageinsert"); + statement.execute("drop table if exists regularinsert"); + TimeZone.setDefault(origTz); + } } } @@ -122,57 +126,182 @@ public void testTimestampBindingWithNTZType() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testTimestampBindingWithLTZType() throws SQLException { - try (Connection connection = getConnection()) { - TimeZone origTz = TimeZone.getDefault(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute( - "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, 
ntz0 timestamp_ntz)"); - statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_LTZ"); - statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); - TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo")); - Timestamp currT = new Timestamp(System.currentTimeMillis()); - - // insert using stage binding - PreparedStatement prepStatement = - connection.prepareStatement("insert into stageinsert values (?,?,?,?)"); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + TimeZone.setDefault(tokyoTz); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute( + "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_LTZ"); + statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); + Timestamp currT = new Timestamp(System.currentTimeMillis()); + + // insert using regular binging + executePsStatementForTimestampTest(connection, "regularinsert", currT); + + // insert using stage binding + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + executePsStatementForTimestampTest(connection, "stageinsert", currT); + + // Compare the results + try (ResultSet rs1 = statement.executeQuery("select * from stageinsert"); + ResultSet rs2 = statement.executeQuery("select * from regularinsert")) { + assertTrue(rs1.next()); + assertTrue(rs2.next()); + + assertEquals(rs1.getInt(1), rs2.getInt(1)); + + // Check that all the values are the same. 
+ assertEquals(rs1.getTimestamp(2), rs1.getTimestamp(3)); + assertEquals(rs1.getTimestamp(3), rs1.getTimestamp(4)); + + assertEquals(rs1.getTimestamp(2), rs2.getTimestamp(2)); + assertEquals(rs1.getTimestamp(3), rs2.getTimestamp(3)); + assertEquals(rs1.getTimestamp(4), rs2.getTimestamp(4)); + } + } finally { + statement.execute("drop table if exists stageinsert"); + statement.execute("drop table if exists regularinsert"); + TimeZone.setDefault(origTz); + } + } + } + + /** + * Test that stage binding and regular binding insert and return the same value for timestamp_ltz + * when the local timezone has the daylight saving. This test is added in version > 3.16.1 + * + *

When CLIENT_TIMESTAMP_TYPE_MAPPING setting is mismatched with target data type (e.g + * MAPPING=LTZ and insert to NTZ or MAPPING=NTZ and insert to TZ/LTZ there could be different + * result as the timezone offset is applied on client side and removed on server side. This only + * occurs around the boundary of daylight-savings and the difference from the source data would be + * one hour. Both regular binding and stage binding have such issue but they also behave + * diffently, for some data only regular binding gets the extra hour while sometime only stage + * binding does. The workaround is to use CLIENT_TIMESTAMP_TYPE_MAPPING=LTZ to insert LTZ/TZ data + * and use CLIENT_TIMESTAMP_TYPE_MAPPING=NTZ to insert NTZ data. + * + *

This test cannot run on the GitHub testing because of the "ALTER SESSION SET + * CLIENT_STAGE_ARRAY_BINDING_THRESHOLD" This command should be executed with the system admin. + * + * @throws SQLException + */ + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testTimestampBindingWithLTZTypeForDayLightSavingTimeZone() throws SQLException { + Calendar australia = Calendar.getInstance(australiaTz); + TimeZone.setDefault(australiaTz); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, ltz1 timestamp_ltz, ltz2 timestamp_ltz, tz0 timestamp_tz, tz1 timestamp_tz, tz2 timestamp_tz, ntz0 timestamp_ntz, ntz1 timestamp_ntz, ntz2 timestamp_ntz)"); + statement.execute( + "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, ltz1 timestamp_ltz, ltz2 timestamp_ltz, tz0 timestamp_tz, tz1 timestamp_tz, tz2 timestamp_tz, ntz0 timestamp_ntz, ntz1 timestamp_ntz, ntz2 timestamp_ntz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_LTZ"); + statement.execute("alter session set TIMEZONE='UTC'"); + + Timestamp ts1 = new Timestamp(1403049600000L); + Timestamp ts2 = new Timestamp(1388016000000L); + Timestamp ts3 = new Timestamp(System.currentTimeMillis()); + + // insert using regular binging + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into regularinsert values (?,?,?,?,?,?,?,?,?,?)")) { + prepStatement.setInt(1, 1); + prepStatement.setTimestamp(2, ts1); + prepStatement.setTimestamp(3, ts2); + prepStatement.setTimestamp(4, ts3); + + prepStatement.setTimestamp(5, ts1); + prepStatement.setTimestamp(6, ts2); + prepStatement.setTimestamp(7, ts3); + + prepStatement.setTimestamp(8, ts1, australia); + prepStatement.setTimestamp(9, ts2, australia); + prepStatement.setTimestamp(10, ts3, australia); + + 
prepStatement.addBatch(); + prepStatement.executeBatch(); + } + + // insert using stage binding + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into stageinsert values (?,?,?,?,?,?,?,?,?,?)")) { + prepStatement.setInt(1, 1); + prepStatement.setTimestamp(2, ts1); + prepStatement.setTimestamp(3, ts2); + prepStatement.setTimestamp(4, ts3); + + prepStatement.setTimestamp(5, ts1); + prepStatement.setTimestamp(6, ts2); + prepStatement.setTimestamp(7, ts3); + + prepStatement.setTimestamp(8, ts1); + prepStatement.setTimestamp(9, ts2); + prepStatement.setTimestamp(10, ts3); + + prepStatement.addBatch(); + prepStatement.executeBatch(); + } + + // Compare the results + try (ResultSet rs1 = statement.executeQuery("select * from stageinsert"); + ResultSet rs2 = statement.executeQuery("select * from regularinsert")) { + assertTrue(rs1.next()); + assertTrue(rs2.next()); + + assertEquals(rs1.getInt(1), rs2.getInt(1)); + assertEquals(rs1.getTimestamp(2), rs2.getTimestamp(2)); + assertEquals(rs1.getTimestamp(3), rs2.getTimestamp(3)); + assertEquals(rs1.getTimestamp(4), rs2.getTimestamp(4)); + assertEquals(rs1.getTimestamp(5), rs2.getTimestamp(5)); + assertEquals(rs1.getTimestamp(6), rs2.getTimestamp(6)); + assertEquals(rs1.getTimestamp(7), rs2.getTimestamp(7)); + assertEquals(rs1.getTimestamp(8), rs2.getTimestamp(8)); + assertEquals(rs1.getTimestamp(9), rs2.getTimestamp(9)); + assertEquals(rs1.getTimestamp(10), rs2.getTimestamp(10)); + + assertEquals(ts1.getTime(), rs1.getTimestamp(2).getTime()); + assertEquals(ts2.getTime(), rs1.getTimestamp(3).getTime()); + assertEquals(ts3.getTime(), rs1.getTimestamp(4).getTime()); + assertEquals(ts1.getTime(), rs1.getTimestamp(5).getTime()); + assertEquals(ts2.getTime(), rs1.getTimestamp(6).getTime()); + assertEquals(ts3.getTime(), rs1.getTimestamp(7).getTime()); + assertEquals(ts1.getTime(), rs1.getTimestamp(8).getTime()); + 
assertEquals(ts2.getTime(), rs1.getTimestamp(9).getTime()); + assertEquals(ts3.getTime(), rs1.getTimestamp(10).getTime()); + + assertEquals(ts1.getTime(), rs2.getTimestamp(2).getTime()); + assertEquals(ts2.getTime(), rs2.getTimestamp(3).getTime()); + assertEquals(ts3.getTime(), rs2.getTimestamp(4).getTime()); + assertEquals(ts1.getTime(), rs2.getTimestamp(5).getTime()); + assertEquals(ts2.getTime(), rs2.getTimestamp(6).getTime()); + assertEquals(ts3.getTime(), rs2.getTimestamp(7).getTime()); + assertEquals(ts1.getTime(), rs2.getTimestamp(8).getTime()); + assertEquals(ts2.getTime(), rs2.getTimestamp(9).getTime()); + assertEquals(ts3.getTime(), rs2.getTimestamp(10).getTime()); + } + } finally { + statement.execute("drop table if exists stageinsert"); + statement.execute("drop table if exists regularinsert"); + TimeZone.setDefault(origTz); + } + } + } + + public void executePsStatementForTimestampTest( + Connection connection, String tableName, Timestamp timestamp) throws SQLException { + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into " + tableName + " values (?,?,?,?)")) { prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); + prepStatement.setTimestamp(2, timestamp); + prepStatement.setTimestamp(3, timestamp); + prepStatement.setTimestamp(4, timestamp); prepStatement.addBatch(); prepStatement.executeBatch(); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); - - // insert using regular binging - prepStatement = connection.prepareStatement("insert into regularinsert values (?,?,?,?)"); - for (int i = 1; i <= 6; i++) { - prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); - prepStatement.addBatch(); - } - prepStatement.executeBatch(); - - // Compare the results - ResultSet rs1 = statement.executeQuery("select * 
from stageinsert"); - ResultSet rs2 = statement.executeQuery("select * from regularinsert"); - rs1.next(); - rs2.next(); - - assertEquals(rs1.getInt(1), rs2.getInt(1)); - assertEquals(rs1.getString(2), rs2.getString(2)); - assertEquals(rs1.getString(3), rs2.getString(3)); - assertEquals(rs1.getString(4), rs2.getString(4)); - - statement.execute("drop table if exists stageinsert"); - statement.execute("drop table if exists regularinsert"); - TimeZone.setDefault(origTz); - statement.close(); - prepStatement.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java index 7b6c758ce..ae6d1fac4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java @@ -41,8 +41,6 @@ public void testFeatureNotSupportedException() throws Throwable { expectFeatureNotSupportedException(connection::createBlob); expectFeatureNotSupportedException(connection::createNClob); expectFeatureNotSupportedException(connection::createSQLXML); - expectFeatureNotSupportedException( - () -> connection.createArrayOf("fakeType", new Object[] {})); expectFeatureNotSupportedException( () -> connection.createStruct("fakeType", new Object[] {})); } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 02ba5a983..0e7083e7e 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -40,6 +40,7 @@ import java.sql.Statement; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.List; @@ -47,6 +48,7 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import 
net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningNotOnAWS; import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; import net.snowflake.client.category.TestCategoryConnection; @@ -55,9 +57,12 @@ import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFSession; +import net.snowflake.client.core.SFSessionProperty; import net.snowflake.client.core.SecurityUtil; import net.snowflake.client.core.SessionUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.ClientAuthnParameter; import net.snowflake.common.core.SqlState; @@ -81,6 +86,7 @@ @Category(TestCategoryConnection.class) public class ConnectionLatestIT extends BaseJDBCTest { @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + private static final SFLogger logger = SFLoggerFactory.getLogger(ConnectionLatestIT.class); private boolean defaultState; @@ -427,8 +433,10 @@ public void testQueryStatusErrorMessageAndErrorCodeChangeOnAsyncQuery() throws S await() .atMost(Duration.ofSeconds(10)) .until(() -> sfResultSet.getStatusV2().getStatus(), equalTo(QueryStatus.RUNNING)); + + // it may take more time to finish the test when running in parallel in CI builds await() - .atMost(Duration.ofSeconds(50)) + .atMost(Duration.ofSeconds(360)) .until(() -> sfResultSet.getStatusV2().getStatus(), equalTo(QueryStatus.SUCCESS)); } } @@ -1167,7 +1175,13 @@ public void testReadOnly() throws Throwable { } } + /** + * Test case for the method testDownloadStreamWithFileNotFoundException. This test verifies that a + * SQLException is thrown when attempting to download a file that does not exist. It verifies that + * the error code is ErrorCode.S3_OPERATION_ERROR so only runs on AWS. 
+ */ @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnAWS.class) public void testDownloadStreamWithFileNotFoundException() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -1365,4 +1379,184 @@ public void testDataSourceOktaGenerates429StatusCode() throws Exception { thread.join(); } } + + /** Test added in JDBC driver version > 3.16.1 */ + @Test + public void testDataSourceSetters() { + Map params = getConnectionParameters(); + SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); + + ds.setTracing("all"); + ds.setApplication("application_name"); + ds.setAccount(params.get("account")); + ds.setAuthenticator("snowflake"); + ds.setArrowTreatDecimalAsInt(true); + ds.setAllowUnderscoresInHost(true); + ds.setClientConfigFile("/some/path/file.json"); + ds.setDisableGcsDefaultCredentials(false); + ds.setDisableSamlURLCheck(false); + ds.setDisableSocksProxy(false); + ds.setEnablePatternSearch(true); + ds.setDatabaseName("DB_NAME"); + ds.setEnablePutGet(false); + ds.setMaxHttpRetries(5); + ds.setNetworkTimeout(10); + ds.setOcspFailOpen(false); + ds.setProxyHost("proxyHost.com"); + ds.setProxyPort(8080); + ds.setProxyProtocol("http"); + ds.setProxyUser("proxyUser"); + ds.setProxyPassword("proxyPassword"); + ds.setPutGetMaxRetries(3); + ds.setStringsQuotedForColumnDef(true); + ds.setEnableDiagnostics(true); + ds.setDiagnosticsAllowlistFile("/some/path/allowlist.json"); + + Properties props = ds.getProperties(); + assertEquals(params.get("account"), props.get("account")); + assertEquals("snowflake", props.get("authenticator")); + assertEquals("all", props.get("tracing")); + assertEquals("application_name", props.get(SFSessionProperty.APPLICATION.getPropertyKey())); + assertEquals("snowflake", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + assertEquals( + "true", props.get(SFSessionProperty.JDBC_ARROW_TREAT_DECIMAL_AS_INT.getPropertyKey())); + 
assertEquals("true", props.get(SFSessionProperty.ALLOW_UNDERSCORES_IN_HOST.getPropertyKey())); + assertEquals( + "/some/path/file.json", props.get(SFSessionProperty.CLIENT_CONFIG_FILE.getPropertyKey())); + assertEquals( + "false", props.get(SFSessionProperty.DISABLE_GCS_DEFAULT_CREDENTIALS.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.DISABLE_SAML_URL_CHECK.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.DISABLE_SOCKS_PROXY.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.ENABLE_PATTERN_SEARCH.getPropertyKey())); + assertEquals("DB_NAME", props.get(SFSessionProperty.DATABASE.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.ENABLE_PUT_GET.getPropertyKey())); + assertEquals("5", props.get(SFSessionProperty.MAX_HTTP_RETRIES.getPropertyKey())); + assertEquals("10", props.get(SFSessionProperty.NETWORK_TIMEOUT.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.OCSP_FAIL_OPEN.getPropertyKey())); + assertEquals("proxyHost.com", props.get(SFSessionProperty.PROXY_HOST.getPropertyKey())); + assertEquals("8080", props.get(SFSessionProperty.PROXY_PORT.getPropertyKey())); + assertEquals("http", props.get(SFSessionProperty.PROXY_PROTOCOL.getPropertyKey())); + assertEquals("proxyUser", props.get(SFSessionProperty.PROXY_USER.getPropertyKey())); + assertEquals("proxyPassword", props.get(SFSessionProperty.PROXY_PASSWORD.getPropertyKey())); + assertEquals("3", props.get(SFSessionProperty.PUT_GET_MAX_RETRIES.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.STRINGS_QUOTED.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.ENABLE_DIAGNOSTICS.getPropertyKey())); + assertEquals( + "/some/path/allowlist.json", + props.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE.getPropertyKey())); + + ds.setOauthToken("a_token"); + assertEquals("OAUTH", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + assertEquals("a_token", 
props.get(SFSessionProperty.TOKEN.getPropertyKey())); + + ds.setPasscodeInPassword(true); + assertEquals("true", props.get(SFSessionProperty.PASSCODE_IN_PASSWORD.getPropertyKey())); + assertEquals( + "USERNAME_PASSWORD_MFA", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + + ds.setPrivateKeyFile("key.p8", "pwd"); + assertEquals("key.p8", props.get(SFSessionProperty.PRIVATE_KEY_FILE.getPropertyKey())); + assertEquals("pwd", props.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD.getPropertyKey())); + assertEquals("SNOWFLAKE_JWT", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + + ds.setPasscodeInPassword(false); + ds.setPasscode("a_passcode"); + assertEquals("false", props.get(SFSessionProperty.PASSCODE_IN_PASSWORD.getPropertyKey())); + assertEquals( + "USERNAME_PASSWORD_MFA", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + assertEquals("a_passcode", props.get(SFSessionProperty.PASSCODE.getPropertyKey())); + } + /** + * SNOW-1465374: For TIMESTAMP_LTZ we were returning timestamps without timezone when scale was + * set e.g. to 6 in Arrow format The problem wasn't visible when calling getString, but was + * visible when we called toString on passed getTimestamp since we returned {@link + * java.sql.Timestamp}, not {@link SnowflakeTimestampWithTimezone} + * + *

Timestamps before 1582-10-05 are always returned as {@link java.sql.Timestamp}, not {@link + * SnowflakeTimestampWithTimezone} {SnowflakeTimestampWithTimezone} + * + *

Added in > 3.16.1 + */ + @Test + public void shouldGetDifferentTimestampLtzConsistentBetweenFormats() throws Exception { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.executeUpdate( + "create or replace table DATETIMETZ_TYPE(timestamp_tzcol timestamp_ltz, timestamp_tzpcol timestamp_ltz(6), timestamptzcol timestampltz, timestampwtzcol timestamp with local time zone);"); + Arrays.asList( + "insert into DATETIMETZ_TYPE values('9999-12-31 23:59:59.999999999','9999-12-31 23:59:59.999999','9999-12-31 23:59:59.999999999','9999-12-31 23:59:59.999999999');", + "insert into DATETIMETZ_TYPE values('1582-01-01 00:00:00.000000001','1582-01-01 00:00:00.000001','1582-01-01 00:00:00.000000001','1582-01-01 00:00:00.000000001');", + "insert into DATETIMETZ_TYPE values('2000-06-18 18:29:30.123456789 +0100','2000-06-18 18:29:30.123456 +0100','2000-06-18 18:29:30.123456789 +0100','2000-06-18 18:29:30.123456789 +0100');", + "insert into DATETIMETZ_TYPE values(current_timestamp(),current_timestamp(),current_timestamp(),current_timestamp());", + "insert into DATETIMETZ_TYPE values('2000-06-18 18:29:30.12345 -0530','2000-06-18 18:29:30.123 -0530','2000-06-18 18:29:30.123456 -0530','2000-06-18 18:29:30.123 -0530');", + "insert into DATETIMETZ_TYPE values('2000-06-18 18:29:30','2000-06-18 18:29:30','2000-06-18 18:29:30','2000-06-18 18:29:30');", + "insert into DATETIMETZ_TYPE values('1582-10-04 00:00:00.000000001','1582-10-04 00:00:00.000001','1582-10-04 00:00:00.000000001','1582-10-04 00:00:00.000000001');", + "insert into DATETIMETZ_TYPE values('1582-10-05 00:00:00.000000001','1582-10-05 00:00:00.000001','1582-10-05 00:00:00.000000001','1582-10-05 00:00:00.000000001');", + "insert into DATETIMETZ_TYPE values('1583-10-05 00:00:00.000000001','1583-10-05 00:00:00.000001','1583-10-05 00:00:00.000000001','1583-10-05 00:00:00.000000001');") + .forEach( + insert -> { + try { + statement.executeUpdate(insert); + } catch 
(SQLException e) { + throw new RuntimeException(e); + } + }); + try (ResultSet arrowResultSet = statement.executeQuery("select * from DATETIMETZ_TYPE")) { + try (Connection jsonConnection = getConnection(); + Statement jsonStatement = jsonConnection.createStatement()) { + jsonStatement.execute("alter session set JDBC_QUERY_RESULT_FORMAT=JSON"); + try (ResultSet jsonResultSet = + jsonStatement.executeQuery("select * from DATETIMETZ_TYPE")) { + int rowIdx = 0; + while (arrowResultSet.next()) { + logger.debug("Checking row " + rowIdx); + assertTrue(jsonResultSet.next()); + for (int column = 1; column <= 4; ++column) { + logger.trace( + "JSON row[{}],column[{}] as string '{}', timestamp string '{}', as timestamp numeric '{}', tz offset={}, timestamp class {}", + rowIdx, + column, + jsonResultSet.getString(column), + jsonResultSet.getTimestamp(column), + jsonResultSet.getTimestamp(column).getTime(), + jsonResultSet.getTimestamp(column).getTimezoneOffset(), + jsonResultSet.getTimestamp(column).getClass()); + logger.trace( + "ARROW row[{}],column[{}] as string '{}', timestamp string '{}', as timestamp numeric '{}', tz offset={}, timestamp class {}", + rowIdx, + column, + arrowResultSet.getString(column), + arrowResultSet.getTimestamp(column), + arrowResultSet.getTimestamp(column).getTime(), + arrowResultSet.getTimestamp(column).getTimezoneOffset(), + arrowResultSet.getTimestamp(column).getClass()); + assertEquals( + "Expecting that string representation are the same for row " + + rowIdx + + " and column " + + column, + jsonResultSet.getString(column), + arrowResultSet.getString(column)); + assertEquals( + "Expecting that string representation (via toString) are the same for row " + + rowIdx + + " and column " + + column, + jsonResultSet.getTimestamp(column).toString(), + arrowResultSet.getTimestamp(column).toString()); + assertEquals( + "Expecting that timestamps are the same for row " + + rowIdx + + " and column " + + column, + jsonResultSet.getTimestamp(column), + 
arrowResultSet.getTimestamp(column)); + } + rowIdx++; + } + } + } + } + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java index 00978b0d5..04c9c9311 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java @@ -24,6 +24,7 @@ import net.snowflake.client.category.TestCategoryConnection; import net.snowflake.client.core.SFOCSPException; import net.snowflake.client.core.SFTrustManager; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import org.junit.Ignore; @@ -108,7 +109,7 @@ public void testValidityExpiredOCSPResponseFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -146,7 +147,7 @@ public void testNoOCSPResponderURLFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -183,7 +184,7 @@ public void testValidityExpiredOCSPResponseInsecure() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -198,7 +199,7 @@ public void testCertAttachedInvalidFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); 
assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -234,7 +235,7 @@ public void testUnknownOCSPCertFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -293,7 +294,7 @@ public void testOCSPCacheServerTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -332,7 +333,7 @@ public void testOCSPResponderTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -368,7 +369,7 @@ public void testOCSPResponder403FailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -429,4 +430,8 @@ public void testWrongHost() { instanceOf(SSLHandshakeException.class))); } } + + private static Matcher httpStatus403Or513() { + return anyOf(containsString("HTTP status=403"), containsString("HTTP status=513")); + } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java index 0a2482cca..c6fb29bf4 100644 --- a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java @@ -741,7 +741,7 @@ public PasswordAuthentication getPasswordAuthentication() { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -750,7 +750,7 @@ public PasswordAuthentication getPasswordAuthentication() { File original = new File(sourceFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } catch (Throwable t) { t.printStackTrace(); } finally { diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java index 0a52b3df1..2ea144f3c 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java @@ -59,6 +59,10 @@ public class DatabaseMetaDataIT extends BaseJDBCTest { + " $$\n" + " ;"; + public static final int EXPECTED_MAX_CHAR_LENGTH = 16777216; + + public static final int EXPECTED_MAX_BINARY_LENGTH = 8388608; + @Test public void testGetConnection() throws SQLException { try (Connection connection = getConnection()) { @@ -698,9 +702,9 @@ public void testDatabaseMetadata() throws SQLException { assertEquals("$", metaData.getExtraNameCharacters()); assertEquals("\"", metaData.getIdentifierQuoteString()); assertEquals(0, getSizeOfResultSet(metaData.getIndexInfo(null, null, null, true, true))); - assertEquals(8388608, 
metaData.getMaxBinaryLiteralLength()); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, metaData.getMaxBinaryLiteralLength()); assertEquals(255, metaData.getMaxCatalogNameLength()); - assertEquals(16777216, metaData.getMaxCharLiteralLength()); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, metaData.getMaxCharLiteralLength()); assertEquals(255, metaData.getMaxColumnNameLength()); assertEquals(0, metaData.getMaxColumnsInGroupBy()); assertEquals(0, metaData.getMaxColumnsInIndex()); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java index c5c0ec072..ec590b066 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -59,7 +61,7 @@ static void initMetaData(Connection con) throws SQLException { st.execute("create or replace database JDBC_DB2"); st.execute("create or replace schema JDBC_SCHEMA21"); st.execute("create or replace table JDBC_TBL211(colA string)"); - st.execute("create or replace table JDBC_BIN(bin1 binary, bin2 binary(100))"); + st.execute("create or replace table JDBC_BIN(bin1 binary(8388608), bin2 binary(100))"); // st.execute("create or replace table JDBC_TBL211(colA string(25) NOT NULL DEFAULT // 'defstring')"); @@ -111,7 +113,7 @@ public void testGetColumn() throws SQLException { resultSet = databaseMetaData.getColumns(null, "JDBC_SCHEMA21", "JDBC_BIN", "BIN1"); resultSet.next(); - assertEquals(8388608, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, 
resultSet.getInt("COLUMN_SIZE")); assertEquals(1, getSizeOfResultSet(resultSet) + 1); resultSet = databaseMetaData.getColumns(null, "JDBC_SCHEMA21", "JDBC_BIN", "BIN2"); @@ -187,8 +189,7 @@ public void testGetFunctions() throws SQLException { // test each column return the right value resultSet = databaseMetaData.getFunctions("JDBC_DB1", "JDBC_SCHEMA11", "JDBCFUNCTEST111"); - DatabaseMetaDataIT.verifyResultSetMetaDataColumns( - resultSet, DBMetadataResultSetMetadata.GET_FUNCTIONS); + verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_FUNCTIONS); resultSet.next(); assertEquals("JDBC_DB1", resultSet.getString("FUNCTION_CAT")); assertEquals("JDBC_SCHEMA11", resultSet.getString("FUNCTION_SCHEM")); @@ -476,15 +477,25 @@ public void testGetTables() throws SQLException { assertEquals(0, getSizeOfResultSet(resultSet)); } + // Get the count of tables in the SNOWFLAKE system database, so we can exclude them from + // subsequent assertions + int numSnowflakeTables = 0; + try (ResultSet snowflakeResultSet = + databaseMetaData.getTables("SNOWFLAKE", null, null, null)) { + numSnowflakeTables = getSizeOfResultSet(snowflakeResultSet); + } + try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, null)) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllTable), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllTable), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[] {"VIEW", "SYSTEM_TABLE"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllView), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllView), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = @@ -497,13 +508,15 @@ public void testGetTables() throws SQLException { databaseMetaData.getTables( null, null, null, new String[] {"TABLE", "VIEW", "SYSTEM_TABLE"})) { assertEquals( - 
getAllObjectCountInDBViaInforSchema(getAllTable), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllTable), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[] {"TABLE", "VIEW"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllTable), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllTable), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = @@ -515,7 +528,8 @@ public void testGetTables() throws SQLException { try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[] {"VIEW"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllView), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllView), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java index 97e67683a..15701ca17 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java @@ -91,7 +91,7 @@ public void testGetFunctionColumns() throws SQLException { + "sharedCol decimal)"); statement.execute( "create or replace function JDBC_DB1.JDBC_SCHEMA11.FUNC112 " - + "() RETURNS TABLE(colA string, colB decimal, bin2 binary, sharedCol decimal) COMMENT= 'returns " + + "() RETURNS TABLE(colA string(16777216), colB decimal, bin2 binary(8388608), sharedCol decimal) COMMENT= 'returns " + "table of 4 columns'" + " as 'select JDBC_DB1.JDBC_SCHEMA11.JDBC_TBL111.colA, JDBC_DB1.JDBC_SCHEMA11.JDBC_TBL111.colB, " + "JDBC_DB1.JDBC_SCHEMA11.BIN_TABLE.bin2, JDBC_DB1.JDBC_SCHEMA11.BIN_TABLE.sharedCol from JDBC_DB1" @@ -173,7 +173,8 @@ public void testGetFunctionColumns() throws 
SQLException { assertEquals(10, resultSet.getInt("RADIX")); assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals( + databaseMetaData.getMaxCharLiteralLength(), resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); assertEquals("", resultSet.getString("IS_NULLABLE")); assertEquals( @@ -213,7 +214,8 @@ public void testGetFunctionColumns() throws SQLException { assertEquals(10, resultSet.getInt("RADIX")); assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); - assertEquals(8388608, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals( + databaseMetaData.getMaxBinaryLiteralLength(), resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); assertEquals("", resultSet.getString("IS_NULLABLE")); assertEquals( diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index e3659df39..bebe3d8f4 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_CHAR_LENGTH; import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.NumericFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.StringFunctionsSupported; @@ -772,8 +774,8 @@ public void testGetFunctionColumns() throws 
Exception { "create or replace table JDBC_TBL111(colA string, colB decimal, colC " + "timestamp)"); /* Create a UDF that returns a table made up of 4 columns from 2 different tables, joined together */ statement.execute( - "create or replace function FUNC112 () RETURNS TABLE(colA string, colB decimal, bin2" - + " binary, sharedCol decimal) COMMENT= 'returns table of 4 columns' as 'select" + "create or replace function FUNC112 () RETURNS TABLE(colA string(16777216), colB decimal, bin2 " + + "binary(8388608) , sharedCol decimal) COMMENT= 'returns table of 4 columns' as 'select" + " JDBC_TBL111.colA, JDBC_TBL111.colB, BIN_TABLE.bin2, BIN_TABLE.sharedCol from" + " JDBC_TBL111 inner join BIN_TABLE on JDBC_TBL111.colB =BIN_TABLE.sharedCol'"); DatabaseMetaData metaData = connection.getMetaData(); @@ -877,7 +879,7 @@ public void testGetFunctionColumns() throws Exception { assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); // char octet length column is not supported and always returns 0 - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); // is_nullable column is not supported and always returns empty string assertEquals("", resultSet.getString("IS_NULLABLE")); @@ -927,7 +929,7 @@ public void testGetFunctionColumns() throws Exception { assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); // char octet length column is not supported and always returns 0 - assertEquals(8388608, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); // is_nullable column is not supported and always returns 
empty string assertEquals("", resultSet.getString("IS_NULLABLE")); @@ -1222,8 +1224,8 @@ public void testGetColumns() throws Throwable { statement.execute( "create or replace table " + targetTable - + "(C1 int, C2 varchar(100), C3 string default '', C4 number(18,4), C5 double," - + " C6 boolean, C7 date not null, C8 time, C9 timestamp_ntz(7), C10 binary,C11" + + "(C1 int, C2 varchar(100), C3 string(16777216) default '', C4 number(18,4), C5 double," + + " C6 boolean, C7 date not null, C8 time, C9 timestamp_ntz(7), C10 binary(8388608),C11" + " variant, C12 timestamp_ltz(8), C13 timestamp_tz(3))"); DatabaseMetaData metaData = connection.getMetaData(); @@ -1290,14 +1292,14 @@ public void testGetColumns() throws Throwable { assertEquals("C3", resultSet.getString("COLUMN_NAME")); assertEquals(Types.VARCHAR, resultSet.getInt("DATA_TYPE")); assertEquals("VARCHAR", resultSet.getString("TYPE_NAME")); - assertEquals(16777216, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); assertEquals(0, resultSet.getInt("NUM_PREC_RADIX")); assertEquals(ResultSetMetaData.columnNullable, resultSet.getInt("NULLABLE")); assertEquals("", resultSet.getString("REMARKS")); assertEquals("", resultSet.getString("COLUMN_DEF")); - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); assertEquals("YES", resultSet.getString("IS_NULLABLE")); assertNull(resultSet.getString("SCOPE_CATALOG")); @@ -1465,7 +1467,7 @@ public void testGetColumns() throws Throwable { assertEquals("C10", resultSet.getString("COLUMN_NAME")); assertEquals(Types.BINARY, resultSet.getInt("DATA_TYPE")); assertEquals("BINARY", resultSet.getString("TYPE_NAME")); - assertEquals(8388608, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, 
resultSet.getInt("COLUMN_SIZE")); assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); assertEquals(0, resultSet.getInt("NUM_PREC_RADIX")); assertEquals(ResultSetMetaData.columnNullable, resultSet.getInt("NULLABLE")); @@ -2333,4 +2335,49 @@ public void testGetJDBCVersion() throws SQLException { assertEquals(2, metaData.getJDBCMinorVersion()); } } + + /** Added in > 3.15.1 */ + @Test + public void testKeywordsCount() throws SQLException { + try (Connection connection = getConnection()) { + DatabaseMetaData metaData = connection.getMetaData(); + assertEquals(43, metaData.getSQLKeywords().split(",").length); + } + } + /** Added in > 3.16.1 */ + @Test + public void testVectorDimension() throws SQLException { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table JDBC_VECTOR(text_col varchar(32), float_vec VECTOR(FLOAT, 256), int_vec VECTOR(INT, 16))"); + DatabaseMetaData metaData = connection.getMetaData(); + try (ResultSet resultSet = + metaData.getColumns( + connection.getCatalog(), + connection.getSchema().replaceAll("_", "\\\\_"), + "JDBC\\_VECTOR", + null)) { + assertTrue(resultSet.next()); + assertEquals(32, resultSet.getObject("COLUMN_SIZE")); + assertTrue(resultSet.next()); + assertEquals(256, resultSet.getObject("COLUMN_SIZE")); + assertTrue(resultSet.next()); + assertEquals(16, resultSet.getObject("COLUMN_SIZE")); + assertFalse(resultSet.next()); + } + + try (ResultSet resultSet = + statement.executeQuery("Select text_col, float_vec, int_vec from JDBC_VECTOR")) { + SnowflakeResultSetMetaData unwrapResultSetMetadata = + resultSet.getMetaData().unwrap(SnowflakeResultSetMetaData.class); + assertEquals(0, unwrapResultSetMetadata.getDimension("TEXT_COL")); + assertEquals(0, unwrapResultSetMetadata.getDimension(1)); + assertEquals(256, unwrapResultSetMetadata.getDimension("FLOAT_VEC")); + assertEquals(256, unwrapResultSetMetadata.getDimension(2)); + assertEquals(16, 
unwrapResultSetMetadata.getDimension("INT_VEC")); + assertEquals(16, unwrapResultSetMetadata.getDimension(3)); + } + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java index 7bf5872c0..0549a087d 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java @@ -4,8 +4,11 @@ package net.snowflake.client.jdbc; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.sql.Connection; +import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; @@ -32,4 +35,27 @@ public void testGetObjectNotSupported() throws SQLException { } } } + + /** Added in > 3.17.0 */ + @Test + public void testObjectColumn() throws SQLException { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "CREATE OR REPLACE TABLE TABLEWITHOBJECTCOLUMN (" + + " col OBJECT(" + + " str VARCHAR," + + " num NUMBER(38,0)" + + " )" + + " )"); + DatabaseMetaData metaData = connection.getMetaData(); + try (ResultSet resultSet = + metaData.getColumns( + connection.getCatalog(), connection.getSchema(), "TABLEWITHOBJECTCOLUMN", null)) { + assertTrue(resultSet.next()); + assertEquals("OBJECT", resultSet.getObject("TYPE_NAME")); + assertFalse(resultSet.next()); + } + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java index f61ff65e7..794af78df 100644 --- a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java @@ -26,17 +26,15 @@ public void setup() { @Test public void 
testSelectLargeResultSet() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("select seq4() from table" + "(generator" + "(rowcount=>10000))"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select seq4() from table" + "(generator" + "(rowcount=>10000))")) { - while (resultSet.next()) { - resultSet.getString(1); + while (resultSet.next()) { + resultSet.getString(1); + } } - - resultSet.close(); - statement.close(); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java new file mode 100644 index 000000000..734446c92 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.After; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +/** This test could be run only on environment where file connection.toml is configured */ +@Ignore +public class FileConnectionConfigurationLatestIT { + + @After + public void cleanUp() { + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + } + + @Test + public void testThrowExceptionIfConfigurationDoesNotExist() { + SnowflakeUtil.systemSetEnv("SNOWFLAKE_DEFAULT_CONNECTION_NAME", "non-existent"); + Assert.assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); + } + + @Test + public void testSimpleConnectionUsingFileConfigurationToken() throws SQLException { + verifyConnetionToSnowflake("aws-oauth"); + } + + @Test + public void testSimpleConnectionUsingFileConfigurationTokenFromFile() throws SQLException { + verifyConnetionToSnowflake("aws-oauth-file"); + } + + private static void verifyConnetionToSnowflake(String connectionName) throws SQLException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, connectionName); + try (Connection con = + DriverManager.getConnection(SnowflakeDriver.AUTO_CONNECTION_STRING_PREFIX, null); + Statement statement = con.createStatement(); + ResultSet resultSet = statement.executeQuery("show parameters")) { + Assert.assertTrue(resultSet.next()); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java index 02ef84747..a4426d449 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java +++ 
b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java @@ -3,13 +3,25 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; import java.util.Properties; import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.stream.IntStream; import net.snowflake.client.core.OCSPMode; import org.junit.Assert; import org.junit.Rule; @@ -19,6 +31,8 @@ /** Tests for SnowflakeFileTransferAgent.expandFileNames */ public class FileUploaderExpandFileNamesTest { @Rule public TemporaryFolder folder = new TemporaryFolder(); + @Rule public TemporaryFolder secondFolder = new TemporaryFolder(); + private String localFSFileSep = systemGetProperty("file.separator"); @Test public void testProcessFileNames() throws Exception { @@ -30,20 +44,20 @@ public void testProcessFileNames() throws Exception { System.setProperty("user.home", folderName); String[] locations = { - folderName + "/Tes*Fil*A", - folderName + "/TestFil?B", - "~/TestFileC", + folderName + File.separator + "Tes*Fil*A", + folderName + File.separator + "TestFil?B", + "~" + File.separator + "TestFileC", "TestFileD", - folderName + "/TestFileE~" + folderName + File.separator + "TestFileE~" }; Set files = SnowflakeFileTransferAgent.expandFileNames(locations, null); - assertTrue(files.contains(folderName + "/TestFileA")); - assertTrue(files.contains(folderName + "/TestFileB")); - assertTrue(files.contains(folderName + "/TestFileC")); - assertTrue(files.contains(folderName + "/TestFileD")); - 
assertTrue(files.contains(folderName + "/TestFileE~")); + assertTrue(files.contains(folderName + File.separator + "TestFileA")); + assertTrue(files.contains(folderName + File.separator + "TestFileB")); + assertTrue(files.contains(folderName + File.separator + "TestFileC")); + assertTrue(files.contains(folderName + File.separator + "TestFileD")); + assertTrue(files.contains(folderName + File.separator + "TestFileE~")); } @Test @@ -114,15 +128,98 @@ public int read() throws IOException { SnowflakeFileTransferConfig config = builder.build(); // Assert setting fields are in config - assert (config.getSnowflakeFileTransferMetadata() == metadata); - assert (config.getUploadStream() == input); - assert (config.getOcspMode() == OCSPMode.FAIL_CLOSED); - assert (!config.getRequireCompress()); - assert (config.getNetworkTimeoutInMilli() == 12345); - assert (config.getProxyProperties() == props); - assert (config.getPrefix().equals("dummy_prefix")); - assert (config.getDestFileName().equals("dummy_dest_file_name")); - + assertEquals(metadata, config.getSnowflakeFileTransferMetadata()); + assertEquals(input, config.getUploadStream()); + assertEquals(OCSPMode.FAIL_CLOSED, config.getOcspMode()); + assertFalse(config.getRequireCompress()); + assertEquals(12345, config.getNetworkTimeoutInMilli()); + assertEquals(props, config.getProxyProperties()); + assertEquals("dummy_prefix", config.getPrefix()); + assertEquals("dummy_dest_file_name", config.getDestFileName()); assertEquals(expectedThrowCount, throwCount); } + + /** + * We have N jobs expanding files with exclusive pattern, processing them and deleting. Expanding + * the list should not cause the error when file of another pattern is deleted which may happen + * when FileUtils.listFiles is used. + * + *

Fix available after version 3.16.1. + * + * @throws Exception + */ + @Test + public void testFileListingDoesNotFailOnMissingFilesOfAnotherPattern() throws Exception { + folder.newFolder("TestFiles"); + String folderName = folder.getRoot().getCanonicalPath(); + + int filePatterns = 10; + int filesPerPattern = 100; + IntStream.range(0, filesPerPattern * filePatterns) + .forEach( + id -> { + try { + File file = + new File( + folderName + + localFSFileSep + + "foo" + + id % filePatterns + + "-" + + UUID.randomUUID()); + assertTrue(file.createNewFile()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + + ExecutorService executorService = Executors.newFixedThreadPool(filePatterns / 3); + List>> futures = new ArrayList<>(); + for (int i = 0; i < filePatterns; ++i) { + String[] locations = { + folderName + localFSFileSep + "foo" + i + "*", + }; + Future> future = + executorService.submit( + () -> { + try { + Set strings = SnowflakeFileTransferAgent.expandFileNames(locations, null); + strings.forEach( + fileName -> { + try { + File file = new File(fileName); + Files.delete(file.toPath()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + return strings; + } catch (SnowflakeSQLException e) { + throw new RuntimeException(e); + } + }); + futures.add(future); + } + executorService.shutdown(); + assertTrue(executorService.awaitTermination(60, TimeUnit.SECONDS)); + assertEquals(filePatterns, futures.size()); + for (Future> future : futures) { + assertTrue(future.isDone()); + assertEquals(filesPerPattern, future.get().size()); + } + } + + @Test + public void testFileListingDoesNotFailOnNotExistingDirectory() throws Exception { + folder.newFolder("TestFiles"); + String folderName = folder.getRoot().getCanonicalPath(); + String[] locations = { + folderName + localFSFileSep + "foo*", + }; + folder.delete(); + + Set files = SnowflakeFileTransferAgent.expandFileNames(locations, null); + + assertTrue(files.isEmpty()); + } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java index ac1ffe249..378234715 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java @@ -67,24 +67,24 @@ public class FileUploaderLatestIT extends FileUploaderPrepIT { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetS3StageDataWithS3Session() throws SQLException { - Connection con = getConnection("s3testaccount"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - // Set UseRegionalS3EndpointsForPresignedURL to true in session - sfSession.setUseRegionalS3EndpointsForPresignedURL(true); - - // Get sample stage info with session - StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - // Assert that true value from session is reflected in StageInfo - Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); - - // Set UseRegionalS3EndpointsForPresignedURL to false in session - sfSession.setUseRegionalS3EndpointsForPresignedURL(false); - stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - // Assert that false value from session is reflected in StageInfo - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); - con.close(); + try (Connection con = getConnection("s3testaccount")) { + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Set UseRegionalS3EndpointsForPresignedURL to true in session + sfSession.setUseRegionalS3EndpointsForPresignedURL(true); + + // Get sample stage info with session + StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); + 
Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + // Assert that true value from session is reflected in StageInfo + Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); + + // Set UseRegionalS3EndpointsForPresignedURL to false in session + sfSession.setUseRegionalS3EndpointsForPresignedURL(false); + stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); + Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + // Assert that false value from session is reflected in StageInfo + Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + } } /** @@ -96,56 +96,56 @@ public void testGetS3StageDataWithS3Session() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetS3StageDataWithAzureSession() throws SQLException { - Connection con = getConnection("azureaccount"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - // Set UseRegionalS3EndpointsForPresignedURL to true in session. This is redundant since session - // is Azure - sfSession.setUseRegionalS3EndpointsForPresignedURL(true); - - // Get sample stage info with session - StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleAzureJsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); - // Assert that UseRegionalS3EndpointsForPresignedURL is false in StageInfo even if it was set to - // true. - // The value should always be false for non-S3 accounts - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); - con.close(); + try (Connection con = getConnection("azureaccount")) { + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Set UseRegionalS3EndpointsForPresignedURL to true in session. 
This is redundant since + // session + // is Azure + sfSession.setUseRegionalS3EndpointsForPresignedURL(true); + + // Get sample stage info with session + StageInfo stageInfo = + SnowflakeFileTransferAgent.getStageInfo(exampleAzureJsonNode, sfSession); + Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + // Assert that UseRegionalS3EndpointsForPresignedURL is false in StageInfo even if it was set + // to + // true. + // The value should always be false for non-S3 accounts + Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetObjectMetadataWithGCS() throws Exception { - Connection connection = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; - statement.execute(putCommand); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeGCSClient client = - SnowflakeGCSClient.createSnowflakeGCSClient( - info, sfAgent.getEncryptionMaterial().get(0), sfSession); - - String location = info.getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + TEST_DATA_FILE + ".gz"; - StorageObjectMetadata metadata = client.getObjectMetadata(remoteStageLocation, path); - 
Assert.assertEquals("gzip", metadata.getContentEncoding()); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; + statement.execute(putCommand); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeGCSClient client = + SnowflakeGCSClient.createSnowflakeGCSClient( + info, sfAgent.getEncryptionMaterial().get(0), sfSession); + + String location = info.getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + TEST_DATA_FILE + ".gz"; + StorageObjectMetadata metadata = client.getObjectMetadata(remoteStageLocation, path); + Assert.assertEquals("gzip", metadata.getContentEncoding()); + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @@ -153,41 +153,38 @@ public void testGetObjectMetadataWithGCS() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { - Connection connection = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - 
Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; - statement.execute(putCommand); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeGCSClient client = - SnowflakeGCSClient.createSnowflakeGCSClient( - info, sfAgent.getEncryptionMaterial().get(0), sfSession); - - String location = info.getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + "wrong_file.csv.gz"; - client.getObjectMetadata(remoteStageLocation, path); - fail("should raise exception"); - } catch (Exception ex) { - assertTrue( - "Wrong type of exception. 
Message: " + ex.getMessage(), - ex instanceof StorageProviderException); - assertTrue(ex.getMessage().matches(".*Blob.*not found in bucket.*")); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; + statement.execute(putCommand); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeGCSClient client = + SnowflakeGCSClient.createSnowflakeGCSClient( + info, sfAgent.getEncryptionMaterial().get(0), sfSession); + + String location = info.getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + "wrong_file.csv.gz"; + client.getObjectMetadata(remoteStageLocation, path); + fail("should raise exception"); + } catch (Exception ex) { + assertTrue( + "Wrong type of exception. 
Message: " + ex.getMessage(), + ex instanceof StorageProviderException); + assertTrue(ex.getMessage().matches(".*Blob.*not found in bucket.*")); + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @@ -195,116 +192,114 @@ public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetObjectMetadataStorageExceptionWithGCS() throws Exception { - Connection connection = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; - statement.execute(putCommand); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeGCSClient client = - SnowflakeGCSClient.createSnowflakeGCSClient( - info, sfAgent.getEncryptionMaterial().get(0), sfSession); - - String location = info.getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - client.getObjectMetadata(remoteStageLocation, ""); - fail("should raise exception"); - } catch (Exception ex) { - assertTrue( - "Wrong type of exception. 
Message: " + ex.getMessage(), - ex instanceof StorageProviderException); - assertTrue(ex.getMessage().matches(".*Permission.*denied.*")); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; + statement.execute(putCommand); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeGCSClient client = + SnowflakeGCSClient.createSnowflakeGCSClient( + info, sfAgent.getEncryptionMaterial().get(0), sfSession); + + String location = info.getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + client.getObjectMetadata(remoteStageLocation, ""); + fail("should raise exception"); + } catch (Exception ex) { + assertTrue( + "Wrong type of exception. 
Message: " + ex.getMessage(), + ex instanceof StorageProviderException); + assertTrue(ex.getMessage().matches(".*Permission.*denied.*")); + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @Test public void testGetFileTransferCommandType() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, sfAgent.getCommandType()); - statement.execute("drop stage if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, sfAgent.getCommandType()); + } finally { + statement.execute("drop stage if exists testStage"); + } + } } @Test public void testNullCommand() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - try { - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(null, sfSession, new SFStatement(sfSession)); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage() - .contains("JDBC driver internal error: Missing sql for statement execution")); + try (Connection 
con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(null, sfSession, new SFStatement(sfSession)); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue( + err.getMessage() + .contains("JDBC driver internal error: Missing sql for statement execution")); + } finally { + statement.execute("drop stage if exists testStage"); + } } - statement.execute("drop stage if exists testStage"); - con.close(); } @Test public void testCompressStreamWithGzipException() throws Exception { - Connection con = null; // inject the NoSuchAlgorithmException SnowflakeFileTransferAgent.setInjectedFileTransferException(new NoSuchAlgorithmException()); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - List metadataList = sfAgent.getFileTransferMetadatas(); - SnowflakeFileTransferMetadataV1 metadata = - (SnowflakeFileTransferMetadataV1) metadataList.get(0); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - InputStream inputStream = new FileInputStream(srcPath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(metadata) - 
.setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(PUT_COMMAND) - .build()); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage() - .contains("JDBC driver internal error: error encountered for compression")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + List metadataList = sfAgent.getFileTransferMetadatas(); + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + InputStream inputStream = new FileInputStream(srcPath); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(metadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(PUT_COMMAND) + .build()); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue( + err.getMessage() + .contains("JDBC driver internal error: error encountered for compression")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -312,46 +307,43 @@ public void testCompressStreamWithGzipException() throws Exception { @Test public void testCompressStreamWithGzipNoDigestException() throws Exception { - Connection con = null; // inject the IOException SnowflakeFileTransferAgent.setInjectedFileTransferException(new 
IOException()); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - List metadataList = sfAgent.getFileTransferMetadatas(); - SnowflakeFileTransferMetadataV1 metadata = - (SnowflakeFileTransferMetadataV1) metadataList.get(0); - metadata.setEncryptionMaterial(null, null, null); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - - InputStream inputStream = new FileInputStream(srcPath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(metadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(PUT_COMMAND) - .build()); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage() - .contains("JDBC driver internal error: error encountered for compression")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + List metadataList = sfAgent.getFileTransferMetadatas(); + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + metadata.setEncryptionMaterial(null, null, null); + + String 
srcPath = getFullPathFileInResource(TEST_DATA_FILE); + + InputStream inputStream = new FileInputStream(srcPath); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(metadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(PUT_COMMAND) + .build()); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue( + err.getMessage() + .contains("JDBC driver internal error: error encountered for compression")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -359,46 +351,43 @@ public void testCompressStreamWithGzipNoDigestException() throws Exception { @Test public void testUploadWithoutConnectionException() throws Exception { - Connection con = null; // inject the IOException SnowflakeFileTransferAgent.setInjectedFileTransferException( new Exception("Exception encountered during file upload: failed to push to remote store")); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - List metadataList = sfAgent.getFileTransferMetadatas(); - SnowflakeFileTransferMetadataV1 metadata = - (SnowflakeFileTransferMetadataV1) metadataList.get(0); - - String srcPath = 
getFullPathFileInResource(TEST_DATA_FILE); - - InputStream inputStream = new FileInputStream(srcPath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(metadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(PUT_COMMAND) - .build()); - } catch (Exception err) { - Assert.assertTrue( - err.getMessage() - .contains( - "Exception encountered during file upload: failed to push to remote store")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + List metadataList = sfAgent.getFileTransferMetadatas(); + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + + InputStream inputStream = new FileInputStream(srcPath); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(metadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(PUT_COMMAND) + .build()); + } catch (Exception err) { + Assert.assertTrue( + err.getMessage() + .contains( + "Exception encountered during file upload: failed to push to remote store")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -406,73 +395,64 @@ public void testUploadWithoutConnectionException() throws Exception { @Test public void testInitFileMetadataFileNotFound() throws Exception { - Connection con = null; - try { - con = 
getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals(200008, err.getErrorCode()); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals(200008, err.getErrorCode()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testInitFileMetadataFileIsDirectory() throws Exception { - Connection con = null; - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String srcPath = - getFullPathFileInResource(""); // will pull the resources directory without a file - String command = "put file://" + srcPath + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals(200009, err.getErrorCode()); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement 
statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + String srcPath = + getFullPathFileInResource(""); // will pull the resources directory without a file + String command = "put file://" + srcPath + " @testStage"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals(200009, err.getErrorCode()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testCompareAndSkipFilesException() throws Exception { - Connection con = null; // inject the NoSuchAlgorithmException SnowflakeFileTransferAgent.setInjectedFileTransferException(new NoSuchAlgorithmException()); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Error reading:")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; + 
SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue(err.getMessage().contains("Error reading:")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -480,24 +460,22 @@ public void testCompareAndSkipFilesException() throws Exception { @Test public void testParseCommandException() throws SQLException { - Connection con = null; // inject the SnowflakeSQLException SnowflakeFileTransferAgent.setInjectedFileTransferException( new SnowflakeSQLException("invalid data")); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Failed to parse the locations")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + 
Assert.assertTrue(err.getMessage().contains("Failed to parse the locations")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -505,63 +483,61 @@ public void testParseCommandException() throws SQLException { @Test public void testPopulateStatusRowsWithSortOn() throws Exception { - Connection con = null; - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - statement.execute("set-sf-property sort on"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // upload files orders_101.csv and orders_100.csv - String command = "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent1.execute(); // upload files - - // check that source files were sorted - assertEquals(2, sfAgent1.statusRows.size()); - assertEquals("orders_100.csv", sfAgent1.getNextRow().get(0).toString()); - - String getCommand = "GET @testStage file:///tmp"; - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); - sfAgent2.execute(); - // check that files are sorted on download - assertEquals(2, sfAgent2.statusRows.size()); - assertEquals("orders_100.csv.gz", sfAgent2.getNextRow().get(0).toString()); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + statement.execute("set-sf-property sort on"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // upload files orders_101.csv and orders_100.csv + String command = + "PUT 
file://" + + getFullPathFileInResource("") + + File.separator + + "orders_10*.csv @testStage"; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent1.execute(); // upload files + + // check that source files were sorted + assertEquals(2, sfAgent1.statusRows.size()); + assertEquals("orders_100.csv", sfAgent1.getNextRow().get(0).toString()); + + String getCommand = "GET @testStage file:///tmp"; + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); + sfAgent2.execute(); + // check that files are sorted on download + assertEquals(2, sfAgent2.statusRows.size()); + assertEquals("orders_100.csv.gz", sfAgent2.getNextRow().get(0).toString()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testListObjectsStorageException() throws Exception { - Connection con = null; // inject the StorageProviderException SnowflakeFileTransferAgent.setInjectedFileTransferException( new StorageProviderException(new Exception("could not list objects"))); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals(200016, err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Encountered exception during listObjects")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + 
statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals(200016, err.getErrorCode()); + Assert.assertTrue(err.getMessage().contains("Encountered exception during listObjects")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -572,75 +548,76 @@ public void testUploadStreamInterruptedException() throws IOException, SQLExcept final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; // inject the InterruptedException SnowflakeFileTransferAgent.setInjectedFileTransferException(new InterruptedException()); - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); - - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); - - } catch (SnowflakeSQLLoggedException err) { - Assert.assertEquals(200003, err.getErrorCode()); - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); + + // upload the data to user stage under 
testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); + + } catch (SnowflakeSQLLoggedException err) { + Assert.assertEquals(200003, err.getErrorCode()); + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); } @Test public void testFileTransferStageInfo() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - StageInfo stageInfo = sfAgent.getStageInfo(); - assertEquals(sfAgent.getStageCredentials(), stageInfo.getCredentials()); - assertEquals(sfAgent.getStageLocation(), stageInfo.getLocation()); - - statement.execute("drop stage if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + StageInfo stageInfo = sfAgent.getStageInfo(); + assertEquals(sfAgent.getStageCredentials(), stageInfo.getCredentials()); + assertEquals(sfAgent.getStageLocation(), stageInfo.getLocation()); + } finally { + statement.execute("drop stage if exists testStage"); + } + } } @Test public void testFileTransferMappingFromSourceFile() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("CREATE OR REPLACE 
STAGE testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - String command = "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent1.execute(); - - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent( - "GET @testStage file:///tmp/", sfSession, new SFStatement(sfSession)); - - assertEquals(2, sfAgent2.getSrcToMaterialsMap().size()); - assertEquals(2, sfAgent2.getSrcToPresignedUrlMap().size()); - - statement.execute("drop stage if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + String command = + "PUT file://" + + getFullPathFileInResource("") + + File.separator + + "orders_10*.csv @testStage"; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent1.execute(); + + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent( + "GET @testStage file:///tmp/", sfSession, new SFStatement(sfSession)); + + assertEquals(2, sfAgent2.getSrcToMaterialsMap().size()); + assertEquals(2, sfAgent2.getSrcToPresignedUrlMap().size()); + } finally { + statement.execute("drop stage if exists testStage"); + } + } } @Test @@ -648,25 +625,20 @@ public void testUploadFileCallableFileNotFound() throws Exception { // inject the FileNotFoundException SnowflakeFileTransferAgent.setInjectedFileTransferException( new FileNotFoundException("file does not exist")); - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - 
SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent.execute(); - } catch (Exception err) { - assertEquals(err.getCause(), instanceOf(FileNotFoundException.class)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testStage"); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent.execute(); + } catch (Exception err) { + assertEquals(err.getCause(), instanceOf(FileNotFoundException.class)); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -674,64 +646,59 @@ public void testUploadFileCallableFileNotFound() throws Exception { @Test public void testUploadFileStreamWithNoOverwrite() throws Exception { - Connection connection = null; - - try { - connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - - uploadFileToStageUsingStream(connection, false); - ResultSet resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String expectedValue = resultSet.getString("last_modified"); - - Thread.sleep(1000); // add 1 sec delay between uploads. 
- - uploadFileToStageUsingStream(connection, false); - resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String actualValue = resultSet.getString("last_modified"); - - assertTrue(expectedValue.equals(actualValue)); - } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testStage"); - connection.close(); + String expectedValue = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + + uploadFileToStageUsingStream(connection, false); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + expectedValue = resultSet.getString("last_modified"); + } + Thread.sleep(1000); // add 1 sec delay between uploads. + + uploadFileToStageUsingStream(connection, false); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + String actualValue = resultSet.getString("last_modified"); + assertEquals(expectedValue, actualValue); + } + } catch (Exception e) { + Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testUploadFileStreamWithOverwrite() throws Exception { - Connection connection = null; - - try { - connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - - uploadFileToStageUsingStream(connection, true); - ResultSet resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String expectedValue = resultSet.getString("last_modified"); - - Thread.sleep(1000); // add 1 sec delay between uploads. 
- - uploadFileToStageUsingStream(connection, true); - resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String actualValue = resultSet.getString("last_modified"); - - assertFalse(expectedValue.equals(actualValue)); - } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testStage"); - connection.close(); + String expectedValue = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + + uploadFileToStageUsingStream(connection, true); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + expectedValue = resultSet.getString("last_modified"); + } + Thread.sleep(1000); // add 1 sec delay between uploads. + + uploadFileToStageUsingStream(connection, true); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + String actualValue = resultSet.getString("last_modified"); + + assertFalse(expectedValue.equals(actualValue)); + } + } catch (Exception e) { + Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @@ -739,71 +706,67 @@ public void testUploadFileStreamWithOverwrite() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetS3StorageObjectMetadata() throws Throwable { - Connection connection = null; - try { - connection = getConnection("s3testaccount"); - Statement statement = connection.createStatement(); - + try (Connection connection = getConnection("s3testaccount"); + Statement statement = connection.createStatement()) { // create a stage to put the file in - statement.execute("CREATE OR REPLACE 
STAGE " + OBJ_META_STAGE); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // Test put file with internal compression - String putCommand = "put file:///dummy/path/file1.gz @" + OBJ_META_STAGE; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - List metadata = sfAgent.getFileTransferMetadatas(); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadata) { - InputStream inputStream = new FileInputStream(srcPath); - - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand) - .build()); - - SnowflakeStorageClient client = - StorageClientFactory.getFactory() - .createClient( - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), - 1, - null, - /*session = */ null); - - String location = - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + "file1.gz"; - StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); - - ObjectMetadata s3Meta = new ObjectMetadata(); - s3Meta.setContentLength(meta.getContentLength()); - s3Meta.setContentEncoding(meta.getContentEncoding()); - s3Meta.setUserMetadata(meta.getUserMetadata()); - - S3StorageObjectMetadata s3Metadata = new S3StorageObjectMetadata(s3Meta); - RemoteStoreFileEncryptionMaterial encMat = sfAgent.getEncryptionMaterial().get(0); - Map matDesc = - mapper.readValue(s3Metadata.getUserMetadata().get("x-amz-matdesc"), Map.class); - - assertEquals(encMat.getQueryId(), 
matDesc.get("queryId")); - assertEquals(encMat.getSmkId().toString(), matDesc.get("smkId")); - assertEquals(1360, s3Metadata.getContentLength()); - assertEquals("gzip", s3Metadata.getContentEncoding()); - } - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + OBJ_META_STAGE; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath); + + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand) + .build()); + + SnowflakeStorageClient client = + StorageClientFactory.getFactory() + .createClient( + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), + 1, + null, + /*session = */ null); + + String location = + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + "file1.gz"; + StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); + + ObjectMetadata s3Meta = new ObjectMetadata(); + s3Meta.setContentLength(meta.getContentLength()); + 
s3Meta.setContentEncoding(meta.getContentEncoding()); + s3Meta.setUserMetadata(meta.getUserMetadata()); + + S3StorageObjectMetadata s3Metadata = new S3StorageObjectMetadata(s3Meta); + RemoteStoreFileEncryptionMaterial encMat = sfAgent.getEncryptionMaterial().get(0); + Map matDesc = + mapper.readValue(s3Metadata.getUserMetadata().get("x-amz-matdesc"), Map.class); + + assertEquals(encMat.getQueryId(), matDesc.get("queryId")); + assertEquals(encMat.getSmkId().toString(), matDesc.get("smkId")); + assertEquals(1360, s3Metadata.getContentLength()); + assertEquals("gzip", s3Metadata.getContentEncoding()); + } + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @@ -832,10 +795,6 @@ private void uploadFileToStageUsingStream(Connection connection, boolean overwri @Test public void testUploadFileWithTildeInFolderName() throws SQLException, IOException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - Writer writer = null; Path topDataDir = null; try { @@ -847,47 +806,40 @@ public void testUploadFileWithTildeInFolderName() throws SQLException, IOExcepti // create a test data File dataFile = new File(subDir.toFile(), "test.txt"); - writer = + try (Writer writer = new BufferedWriter( new OutputStreamWriter( Files.newOutputStream(Paths.get(dataFile.getCanonicalPath())), - StandardCharsets.UTF_8)); - writer.write("1,test1"); - writer.close(); - - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("create or replace stage testStage"); - String sql = String.format("PUT 'file://%s' @testStage", dataFile.getCanonicalPath()); - - // Escape backslashes. This must be done by the application. 
- sql = sql.replaceAll("\\\\", "\\\\\\\\"); - resultSet = statement.executeQuery(sql); - while (resultSet.next()) { - assertEquals("UPLOADED", resultSet.getString("status")); - } - } finally { - if (connection != null) { - connection.createStatement().execute("drop stage if exists testStage"); + StandardCharsets.UTF_8))) { + writer.write("1,test1"); } - closeSQLObjects(resultSet, statement, connection); - if (writer != null) { - writer.close(); + + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + String sql = String.format("PUT 'file://%s' @testStage", dataFile.getCanonicalPath()); + + // Escape backslashes. This must be done by the application. + sql = sql.replaceAll("\\\\", "\\\\\\\\"); + try (ResultSet resultSet = statement.executeQuery(sql)) { + while (resultSet.next()) { + assertEquals("UPLOADED", resultSet.getString("status")); + } + } + } finally { + statement.execute("drop stage if exists testStage"); + } } + } finally { FileUtils.deleteDirectory(topDataDir.toFile()); } } @Test public void testUploadWithTildeInPath() throws SQLException, IOException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - Writer writer = null; Path subDir = null; - try { - String homeDir = systemGetProperty("user.home"); // create sub directory where the name includes ~ @@ -895,30 +847,29 @@ public void testUploadWithTildeInPath() throws SQLException, IOException { // create a test data File dataFile = new File(subDir.toFile(), "test.txt"); - writer = + try (Writer writer = new BufferedWriter( new OutputStreamWriter( Files.newOutputStream(Paths.get(dataFile.getCanonicalPath())), - StandardCharsets.UTF_8)); - writer.write("1,test1"); - writer.close(); - - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("create or replace stage testStage"); - - resultSet = 
statement.executeQuery("PUT 'file://~/snowflake/test.txt' @testStage"); - while (resultSet.next()) { - assertEquals("UPLOADED", resultSet.getString("status")); - } - } finally { - if (connection != null) { - connection.createStatement().execute("drop stage if exists testStage"); + StandardCharsets.UTF_8))) { + writer.write("1,test1"); } - closeSQLObjects(resultSet, statement, connection); - if (writer != null) { - writer.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + + try (ResultSet resultSet = + statement.executeQuery("PUT 'file://~/snowflake/test.txt' @testStage")) { + while (resultSet.next()) { + assertEquals("UPLOADED", resultSet.getString("status")); + } + } + } finally { + statement.execute("drop stage if exists testStage"); + } } + } finally { FileUtils.deleteDirectory(subDir.toFile()); } } diff --git a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java index 7cb79b404..b2c316d50 100644 --- a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java +++ b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java @@ -37,17 +37,18 @@ Connection init() throws SQLException { @Test public void testLargeResultSetGCP() throws Throwable { - try (Connection con = init()) { - PreparedStatement stmt = - con.prepareStatement( - "select seq8(), randstr(1000, random()) from table(generator(rowcount=>1000))"); + try (Connection con = init(); + PreparedStatement stmt = + con.prepareStatement( + "select seq8(), randstr(1000, random()) from table(generator(rowcount=>1000))")) { stmt.setMaxRows(999); - ResultSet rset = stmt.executeQuery(); - int cnt = 0; - while (rset.next()) { - ++cnt; + try (ResultSet rset = stmt.executeQuery()) { + int cnt = 0; + while (rset.next()) { + ++cnt; + } + assertEquals(cnt, 999); } - assertEquals(cnt, 999); } } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java index c59ddc987..e7217f695 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java @@ -39,21 +39,17 @@ public class HeartbeatAsyncLatestIT extends HeartbeatIT { @Override protected void submitQuery(boolean useKeepAliveSession, int queryIdx) throws SQLException, InterruptedException { - Connection connection = null; - ResultSet resultSet = null; - try { - Properties sessionParams = new Properties(); - sessionParams.put( - "CLIENT_SESSION_KEEP_ALIVE", - useKeepAliveSession ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); + Properties sessionParams = new Properties(); + sessionParams.put( + "CLIENT_SESSION_KEEP_ALIVE", + useKeepAliveSession ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); - connection = getConnection(sessionParams); - - Statement stmt = connection.createStatement(); - // Query will take 5 seconds to run, but ResultSet will be returned immediately - resultSet = - stmt.unwrap(SnowflakeStatement.class) - .executeAsyncQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 5))"); + try (Connection connection = getConnection(sessionParams); + Statement stmt = connection.createStatement(); + // Query will take 5 seconds to run, but ResultSet will be returned immediately + ResultSet resultSet = + stmt.unwrap(SnowflakeStatement.class) + .executeAsyncQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 5))")) { Thread.sleep(61000); // sleep 61 seconds to await original session expiration time QueryStatus qs = resultSet.unwrap(SnowflakeResultSet.class).getStatus(); // Ensure query succeeded. 
Avoid flaky test failure by waiting until query is complete to @@ -69,10 +65,6 @@ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) assertTrue(resultSet.next()); assertFalse(resultSet.next()); logger.fine("Query " + queryIdx + " passed "); - - } finally { - resultSet.close(); - connection.close(); } } @@ -92,16 +84,12 @@ public void testAsynchronousQueryFailure() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testIsValidWithInvalidSession() throws Exception { - Connection connection = null; - try { - connection = getConnection(); + try (Connection connection = getConnection()) { // assert that connection starts out valid assertTrue(connection.isValid(5)); Thread.sleep(61000); // sleep 61 seconds to await session expiration time // assert that connection is no longer valid after session has expired assertFalse(connection.isValid(5)); - } finally { - connection.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java index 16e8364d6..eb41ce76f 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java @@ -46,15 +46,14 @@ public class HeartbeatIT extends AbstractDriverIT { @BeforeClass public static void setUpClass() throws Exception { if (!RunningOnGithubAction.isRunningOnGithubAction()) { - Connection connection = getSnowflakeAdminConnection(); - connection - .createStatement() - .execute( - "alter system set" - + " master_token_validity=60" - + ",session_token_validity=20" - + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=1"); - connection.close(); + try (Connection connection = getSnowflakeAdminConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter system set" + + " master_token_validity=60" + + ",session_token_validity=20" + + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=1"); + } } } 
@@ -65,15 +64,14 @@ public static void setUpClass() throws Exception { @AfterClass public static void tearDownClass() throws Exception { if (!RunningOnGithubAction.isRunningOnGithubAction()) { - Connection connection = getSnowflakeAdminConnection(); - connection - .createStatement() - .execute( - "alter system set" - + " master_token_validity=default" - + ",session_token_validity=default" - + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=default"); - connection.close(); + try (Connection connection = getSnowflakeAdminConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter system set" + + " master_token_validity=default" + + ",session_token_validity=default" + + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=default"); + } } } @@ -87,34 +85,28 @@ public static void tearDownClass() throws Exception { */ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) throws SQLException, InterruptedException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; ResultSetMetaData resultSetMetaData; - try { - Properties sessionParams = new Properties(); - sessionParams.put( - "CLIENT_SESSION_KEEP_ALIVE", - useKeepAliveSession ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); + Properties sessionParams = new Properties(); + sessionParams.put( + "CLIENT_SESSION_KEEP_ALIVE", + useKeepAliveSession ? 
Boolean.TRUE.toString() : Boolean.FALSE.toString()); - connection = getConnection(sessionParams); - statement = connection.createStatement(); + try (Connection connection = getConnection(sessionParams); + Statement statement = connection.createStatement()) { Thread.sleep(61000); // sleep 61 seconds - resultSet = statement.executeQuery("SELECT 1"); - resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = statement.executeQuery("SELECT 1")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 row - assertTrue(resultSet.next()); + // assert we get 1 row + assertTrue(resultSet.next()); - logger.fine("Query " + queryIdx + " passed "); - statement.close(); - } finally { - closeSQLObjects(resultSet, statement, connection); + logger.fine("Query " + queryIdx + " passed "); + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java new file mode 100644 index 000000000..56f02c6d5 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java @@ -0,0 +1,259 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; +import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.core.ObjectMapperFactory; +import net.snowflake.client.core.UUIDUtils; +import org.apache.commons.text.RandomStringGenerator; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +@Category(TestCategoryStatement.class) +public class LobSizeLatestIT extends BaseJDBCTest { + + private static final Logger logger = Logger.getLogger(SnowflakeDriverIT.class.getName()); + private static final Map LobSizeStringValues = new HashMap<>(); + + // Max LOB size is testable from version 3.15.0 and above. 
+ private static int maxLobSize = 16 * 1024 * 1024; // default value + private static int largeLobSize = maxLobSize / 2; + private static int mediumLobSize = largeLobSize / 2; + private static int smallLobSize = 16; + private static int originLobSize = 16 * 1024 * 1024; + + @BeforeClass + public static void setUp() throws SQLException { + System.setProperty( + // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); + try (Connection con = BaseJDBCTest.getConnection()) { + // get max LOB size from session + maxLobSize = con.getMetaData().getMaxCharLiteralLength(); + logger.log(Level.INFO, "Using max lob size: " + maxLobSize); + LobSizeStringValues.put(smallLobSize, generateRandomString(smallLobSize)); + LobSizeStringValues.put(originLobSize, generateRandomString(originLobSize)); + LobSizeStringValues.put(mediumLobSize, generateRandomString(mediumLobSize)); + LobSizeStringValues.put(largeLobSize, generateRandomString(largeLobSize)); + LobSizeStringValues.put(maxLobSize, generateRandomString(maxLobSize)); + } + } + + @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") + public static Collection data() { + int[] lobSizes = + new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; + String[] resultFormats = new String[] {"Arrow", "JSON"}; + List ret = new ArrayList<>(); + for (int i = 0; i < lobSizes.length; i++) { + for (int j = 0; j < resultFormats.length; j++) { + ret.add(new Object[] {lobSizes[i], resultFormats[j]}); + } + } + return ret; + } + + private final int lobSize; + + private final String resultFormat; + + public LobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { + this.lobSize = lobSize; + this.resultFormat = resultFormat; + + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); + } + } + + private 
static String tableName = "my_lob_test"; + private static String executeInsert = "insert into " + tableName + " (c1, c2, c3) values ("; + private static String executePreparedStatementInsert = executeInsert + "?, ?, ?)"; + private static String selectQuery = "select * from " + tableName + " where c3="; + + private static String generateRandomString(int stringSize) { + RandomStringGenerator randomStringGenerator = + new RandomStringGenerator.Builder().withinRange('a', 'z').build(); + return randomStringGenerator.generate(stringSize); + } + + private static void setResultFormat(Statement stmt, String format) throws SQLException { + stmt.execute("alter session set jdbc_query_result_format = '" + format + "'"); + } + + private void createTable(int lobSize, Statement stmt) throws SQLException { + String createTableQuery = + "create or replace table " + + tableName + + " (c1 varchar, c2 varchar(" + + lobSize + + "), c3 varchar)"; + stmt.execute(createTableQuery); + } + + private void insertQuery(String varCharValue, String uuidValue, Statement stmt) + throws SQLException { + stmt.executeUpdate(executeInsert + "'abc', '" + varCharValue + "', '" + uuidValue + "')"); + } + + private void preparedInsertQuery(String varCharValue, String uuidValue, Connection con) + throws SQLException { + try (PreparedStatement pstmt = con.prepareStatement(executePreparedStatementInsert)) { + pstmt.setString(1, "abc"); + pstmt.setString(2, varCharValue); + pstmt.setString(3, uuidValue); + + pstmt.execute(); + } + } + + @AfterClass + public static void tearDown() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + stmt.execute("Drop table if exists " + tableName); + } + } + + @Test + public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + String varCharValue 
= LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + insertQuery(varCharValue, uuidValue, stmt); + + try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rs.next()); + assertEquals("abc", rs.getString(1)); + assertEquals(varCharValue, rs.getString(2)); + assertEquals(uuidValue, rs.getString(3)); + } + } + } + + @Test + public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + String maxVarCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + preparedInsertQuery(maxVarCharValue, uuidValue, con); + + try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rs.next()); + assertEquals("abc", rs.getString(1)); + assertEquals(maxVarCharValue, rs.getString(2)); + assertEquals(uuidValue, rs.getString(3)); + } + } + } + + @Test + public void testPutAndGet() throws IOException, SQLException { + File tempFile = File.createTempFile("LobSizeTest", ".csv"); + // Delete file when JVM shuts down + tempFile.deleteOnExit(); + + String filePath = tempFile.getPath(); + String filePathEscaped = filePath.replace("\\", "\\\\"); + String fileName = tempFile.getName(); + + String varCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + String fileInput = "abc," + varCharValue + "," + uuidValue; + + // Print data to new temporary file + try (PrintWriter out = new PrintWriter(filePath)) { + out.println(fileInput); + } + + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + if (lobSize > originLobSize) { // for increased LOB size (16MB < lobSize < 128MB) + stmt.execute("alter session set ALLOW_LARGE_LOBS_IN_EXTERNAL_SCAN = true"); + } + // Test PUT + 
String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; + + stmt.execute(sqlPut); + + try (ResultSet rsPut = stmt.getResultSet()) { + assertTrue(rsPut.next()); + assertEquals(fileName, rsPut.getString(1)); + assertEquals(fileName + ".gz", rsPut.getString(2)); + assertEquals("GZIP", rsPut.getString(6)); + assertEquals("UPLOADED", rsPut.getString(7)); + } + + try (ResultSet rsFiles = stmt.executeQuery("ls @%" + tableName)) { + // ResultSet should return a row with the zipped file name + assertTrue(rsFiles.next()); + assertEquals(fileName + ".gz", rsFiles.getString(1)); + } + + String copyInto = + "copy into " + + tableName + + " from @%" + + tableName + + " file_format=(type=csv compression='gzip')"; + stmt.execute(copyInto); + + // Check that results are copied into table correctly + try (ResultSet rsCopy = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rsCopy.next()); + assertEquals("abc", rsCopy.getString(1)); + assertEquals(varCharValue, rsCopy.getString(2)); + assertEquals(uuidValue, rsCopy.getString(3)); + } + + // Test Get + Path tempDir = Files.createTempDirectory("MaxLobTest"); + // Delete tempDir when JVM shuts down + tempDir.toFile().deleteOnExit(); + String pathToTempDir = tempDir.toString().replace("\\", "\\\\"); + + String getSql = "get @%" + tableName + " 'file://" + pathToTempDir + "'"; + stmt.execute(getSql); + + try (ResultSet rsGet = stmt.getResultSet()) { + assertTrue(rsGet.next()); + assertEquals(fileName + ".gz", rsGet.getString(1)); + assertEquals("DOWNLOADED", rsGet.getString(3)); + } + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java index 8fd874a9c..8962b8141 100644 --- a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java @@ -1,252 +1,48 @@ -/* - * Copyright (c) 2024 Snowflake Computing Inc. All right reserved. 
- */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.text.IsEmptyString.emptyOrNullString; -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.file.Files; -import java.nio.file.Path; import java.sql.Connection; -import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import net.snowflake.client.category.TestCategoryStatement; -import net.snowflake.client.core.ObjectMapperFactory; -import net.snowflake.client.core.UUIDUtils; -import org.apache.commons.text.RandomStringGenerator; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGithubAction; +import org.hamcrest.CoreMatchers; +import org.junit.Assert; import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) public class MaxLobSizeLatestIT extends BaseJDBCTest { - // Max LOB size is testable from version 3.15.0 and above. 
- private static int maxLobSize = 16 * 1024 * 1024; - private static int largeLobSize = maxLobSize / 2; - private static int mediumLobSize = largeLobSize / 2; - private static int originLobSize = mediumLobSize / 2; - private static int smallLobSize = 16; - - private static Map LobSizeStringValues = - new HashMap() { - { - put(smallLobSize, generateRandomString(smallLobSize)); - put(originLobSize, generateRandomString(originLobSize)); - put(mediumLobSize, generateRandomString(mediumLobSize)); - put(largeLobSize, generateRandomString(largeLobSize)); - put(maxLobSize, generateRandomString(maxLobSize)); - } - }; - - @BeforeClass - public static void setUp() { - System.setProperty( - // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure - ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); - } - - @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") - public static Collection data() { - int[] lobSizes = - new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; - String[] resultFormats = new String[] {"Arrow", "JSON"}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizes.length; i++) { - for (int j = 0; j < resultFormats.length; j++) { - ret.add(new Object[] {lobSizes[i], resultFormats[j]}); - } - } - return ret; - } - - private final int lobSize; - - private final String resultFormat; - - public MaxLobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { - this.lobSize = lobSize; - this.resultFormat = resultFormat; - - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - createTable(lobSize, stmt); - } - } - - private static String tableName = "my_lob_test"; - private static String executeInsert = "insert into " + tableName + " (c1, c2, c3) values ("; - private static String executePreparedStatementInsert = executeInsert + "?, ?, ?)"; - private static String selectQuery = "select * from " + 
tableName + " where c3="; - - private static String generateRandomString(int stringSize) { - RandomStringGenerator randomStringGenerator = - new RandomStringGenerator.Builder().withinRange('a', 'z').build(); - return randomStringGenerator.generate(stringSize); - } - - private static void setResultFormat(Statement stmt, String format) throws SQLException { - stmt.execute("alter session set jdbc_query_result_format = '" + format + "'"); - } - - private void createTable(int lobSize, Statement stmt) throws SQLException { - String createTableQuery = - "create or replace table " - + tableName - + " (c1 varchar, c2 varchar(" - + lobSize - + "), c3 varchar)"; - stmt.execute(createTableQuery); - } - - private void insertQuery(String varCharValue, String uuidValue, Statement stmt) - throws SQLException { - stmt.executeUpdate(executeInsert + "'abc', '" + varCharValue + "', '" + uuidValue + "')"); - } - - private void preparedInsertQuery(String varCharValue, String uuidValue, Connection con) - throws SQLException { - try (PreparedStatement pstmt = con.prepareStatement(executePreparedStatementInsert)) { - pstmt.setString(1, "abc"); - pstmt.setString(2, varCharValue); - pstmt.setString(3, uuidValue); - - pstmt.execute(); - } - } - - @AfterClass - public static void tearDown() throws SQLException { - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - stmt.execute("Drop table if exists " + tableName); - } - } - + /** + * Available since 3.17.0 + * + * @throws SQLException + */ @Test - public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testIncreasedMaxLobSize() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - String varCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = 
UUIDUtils.getUUID().toString(); - insertQuery(varCharValue, uuidValue, stmt); - - try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rs.next()); - assertEquals("abc", rs.getString(1)); - assertEquals(varCharValue, rs.getString(2)); - assertEquals(uuidValue, rs.getString(3)); + stmt.execute("alter session set FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY='ENABLED'"); + stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=false"); + try { + stmt.execute("select randstr(20000000, random()) as large_str"); + } catch (SnowflakeSQLException e) { + assertThat(e.getMessage(), CoreMatchers.containsString("exceeds supported length")); } - } - } - - @Test - public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - String maxVarCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = UUIDUtils.getUUID().toString(); - preparedInsertQuery(maxVarCharValue, uuidValue, con); - - try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rs.next()); - assertEquals("abc", rs.getString(1)); - assertEquals(maxVarCharValue, rs.getString(2)); - assertEquals(uuidValue, rs.getString(3)); - } - } - } - - @Test - public void testPutAndGet() throws IOException, SQLException { - File tempFile = File.createTempFile("LobSizeTest", ".csv"); - // Delete file when JVM shuts down - tempFile.deleteOnExit(); - - String filePath = tempFile.getPath(); - String filePathEscaped = filePath.replace("\\", "\\\\"); - String fileName = tempFile.getName(); - - String varCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = UUIDUtils.getUUID().toString(); - String fileInput = "abc," + varCharValue + "," + uuidValue; - - // Print data to new temporary file - try (PrintWriter out = new PrintWriter(filePath)) { - out.println(fileInput); - } - - try 
(Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - // Test PUT - String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; - - stmt.execute(sqlPut); - - try (ResultSet rsPut = stmt.getResultSet()) { - assertTrue(rsPut.next()); - assertEquals(fileName, rsPut.getString(1)); - assertEquals(fileName + ".gz", rsPut.getString(2)); - assertEquals("GZIP", rsPut.getString(6)); - assertEquals("UPLOADED", rsPut.getString(7)); - } - - try (ResultSet rsFiles = stmt.executeQuery("ls @%" + tableName)) { - // ResultSet should return a row with the zipped file name - assertTrue(rsFiles.next()); - assertEquals(fileName + ".gz", rsFiles.getString(1)); - } - - String copyInto = - "copy into " - + tableName - + " from @%" - + tableName - + " file_format=(type=csv compression='gzip')"; - stmt.execute(copyInto); - - // Check that results are copied into table correctly - try (ResultSet rsCopy = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rsCopy.next()); - assertEquals("abc", rsCopy.getString(1)); - assertEquals(varCharValue, rsCopy.getString(2)); - assertEquals(uuidValue, rsCopy.getString(3)); - } - - // Test Get - Path tempDir = Files.createTempDirectory("MaxLobTest"); - // Delete tempDir when JVM shuts down - tempDir.toFile().deleteOnExit(); - String pathToTempDir = tempDir.toString().replace("\\", "\\\\"); - - String getSql = "get @%" + tableName + " 'file://" + pathToTempDir + "'"; - stmt.execute(getSql); - try (ResultSet rsGet = stmt.getResultSet()) { - assertTrue(rsGet.next()); - assertEquals(fileName + ".gz", rsGet.getString(1)); - assertEquals("DOWNLOADED", rsGet.getString(3)); + stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=true"); + try (ResultSet resultSet = + stmt.executeQuery("select randstr(20000000, random()) as large_str")) { + Assert.assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), 
is(not(emptyOrNullString()))); + } finally { + stmt.execute("alter session unset ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT"); + stmt.execute("alter session unset FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java index e12c3c5fb..c090bab03 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java @@ -31,356 +31,339 @@ public class MultiStatementIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testMultiStmtExecuteUpdateFail() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "select 1;\n" - + "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - try { - statement.executeUpdate(multiStmtQuery); - fail("executeUpdate should have failed because the first statement yields a result set"); - } catch (SQLException ex) { - assertThat( - ex.getErrorCode(), is(ErrorCode.UPDATE_FIRST_RESULT_NOT_UPDATE_COUNT.getMessageCode())); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "select 1;\n" + + "create or replace temporary table test_multi (cola int);\n" + + "insert into 
test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + try { + statement.executeUpdate(multiStmtQuery); + fail("executeUpdate should have failed because the first statement yields a result set"); + } catch (SQLException ex) { + assertThat( + ex.getErrorCode(), is(ErrorCode.UPDATE_FIRST_RESULT_NOT_UPDATE_COUNT.getMessageCode())); + } } - - statement.close(); - connection.close(); } @Test public void testMultiStmtExecuteQueryFail() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - try { - statement.executeQuery(multiStmtQuery); - fail("executeQuery should have failed because the first statement yields an update count"); - } catch (SQLException ex) { - assertThat( - ex.getErrorCode(), is(ErrorCode.QUERY_FIRST_RESULT_NOT_RESULT_SET.getMessageCode())); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; - statement.close(); - connection.close(); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + try { + statement.executeQuery(multiStmtQuery); + fail("executeQuery should have failed because the first statement yields an update count"); + } catch (SQLException ex) { + assertThat( + ex.getErrorCode(), is(ErrorCode.QUERY_FIRST_RESULT_NOT_RESULT_SET.getMessageCode())); + } + } } @Test public void testMultiStmtSetUnset() throws 
SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - // setting session variable should propagate outside of query - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("set testvar = 1; select 1"); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select $testvar"); - rs.next(); - assertEquals(1, rs.getInt(1)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - // selecting unset variable should cause error - try { + // setting session variable should propagate outside of query statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("unset testvar; select $testvar"); - fail("Expected a failure"); - } catch (SQLException ex) { - assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); - } + statement.execute("set testvar = 1; select 1"); - // unsetting session variable should propagate outside of query - try { statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("select $testvar"); - fail("Expected a failure"); - } catch (SQLException ex) { - assertEquals(SqlState.NO_DATA, ex.getSQLState()); + try (ResultSet rs = statement.executeQuery("select $testvar")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + + // selecting unset variable should cause error + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("unset testvar; select $testvar"); + fail("Expected a failure"); + } catch (SQLException ex) { + assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); + } + + // unsetting session variable should propagate outside of query + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("select 
$testvar"); + fail("Expected a failure"); + } catch (SQLException ex) { + assertEquals(SqlState.NO_DATA, ex.getSQLState()); + } + } } - - statement.close(); - connection.close(); } @Test public void testMultiStmtParseError() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute("set testvar = 1"); - try { - // fails in the antlr parser - statement.execute("garbage text; set testvar = 2"); - fail("Expected a compiler error to be thrown"); - } catch (SQLException ex) { - assertEquals(SqlState.SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION, ex.getSQLState()); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - ResultSet rs = statement.executeQuery("select $testvar"); - rs.next(); - assertEquals(1, rs.getInt(1)); + statement.execute("set testvar = 1"); + try { + // fails in the antlr parser + statement.execute("garbage text; set testvar = 2"); + fail("Expected a compiler error to be thrown"); + } catch (SQLException ex) { + assertEquals(SqlState.SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION, ex.getSQLState()); + } - statement.close(); - connection.close(); + try (ResultSet rs = statement.executeQuery("select $testvar")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + } } @Test public void testMultiStmtExecError() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + // fails during execution (javascript invokes statement where it gets typechecked) + statement.execute( + "set testvar = 1; select nonexistent_column from nonexistent_table; set testvar = 2"); + fail("Expected an execution error to be thrown"); + } catch (SQLException ex) { + 
assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); + } - try { - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - // fails during execution (javascript invokes statement where it gets typechecked) - statement.execute( - "set testvar = 1; select nonexistent_column from nonexistent_table; set testvar = 2"); - fail("Expected an execution error to be thrown"); - } catch (SQLException ex) { - assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select $testvar")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } } - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select $testvar"); - rs.next(); - assertEquals(1, rs.getInt(1)); - - statement.close(); - connection.close(); } @Test public void testMultiStmtTempTable() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - String entry = "success"; - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "create or replace temporary table test_multi (cola string); insert into test_multi values ('" - + entry - + "')"); - // temporary table should persist outside of the above statement - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select * from test_multi"); - rs.next(); - assertEquals(entry, rs.getString(1)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + String entry = "success"; + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "create or replace temporary table test_multi (cola string); insert 
into test_multi values ('" + + entry + + "')"); + // temporary table should persist outside of the above statement + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select * from test_multi")) { + assertTrue(rs.next()); + assertEquals(entry, rs.getString(1)); + } + } } @Test public void testMultiStmtUseStmt() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - SFSession session = - statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - String originalSchema = session.getSchema(); + SFSession session = + statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("use schema public; select 1"); - // current schema change should persist outside of the above statement + String originalSchema = session.getSchema(); - assertEquals("PUBLIC", session.getSchema()); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select current_schema()"); - rs.next(); - assertEquals("PUBLIC", rs.getString(1)); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute(String.format("use schema %s; select 1", originalSchema)); - // current schema change should persist outside of the above statement + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("use schema public; select 1"); + // current schema change should persist outside of the above statement - session = statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); - assertEquals(originalSchema, session.getSchema()); - 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - rs = statement.executeQuery("select current_schema()"); - rs.next(); - assertEquals(originalSchema, rs.getString(1)); + assertEquals("PUBLIC", session.getSchema()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select current_schema()")) { + assertTrue(rs.next()); + assertEquals("PUBLIC", rs.getString(1)); + } + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute(String.format("use schema %s; select 1", originalSchema)); + // current schema change should persist outside of the above statement - statement.close(); - connection.close(); + session = statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); + assertEquals(originalSchema, session.getSchema()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select current_schema()")) { + assertTrue(rs.next()); + assertEquals(originalSchema, rs.getString(1)); + } + } } @Test public void testMultiStmtAlterSessionParams() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - SFSession session = - statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // we need an arbitrary parameter which is updated by the client after each query for this test - String param = "AUTOCOMMIT"; - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("alter session set " + param + "=false; select 1"); - assertFalse(session.getAutoCommit()); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("alter session set " + param + "=true; select 1"); - assertTrue(session.getAutoCommit()); - - statement.close(); - 
connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + SFSession session = + statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // we need an arbitrary parameter which is updated by the client after each query for this + // test + String param = "AUTOCOMMIT"; + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("alter session set " + param + "=false; select 1"); + assertFalse(session.getAutoCommit()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("alter session set " + param + "=true; select 1"); + assertTrue(session.getAutoCommit()); + } } @Test public void testMultiStmtMultiLine() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - // these statements should not fail - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("select 1;\nselect 2"); - statement.execute("select \n 1; select 2"); - statement.execute("select \r\n 1; select 2"); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + // these statements should not fail + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("select 1;\nselect 2"); + statement.execute("select \n 1; select 2"); + statement.execute("select \r\n 1; select 2"); + } } @Test public void testMultiStmtQuotes() throws SQLException { // test various quotation usage and ensure they succeed - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "create or replace temporary table \"test_multi\" (cola 
string); select * from \"test_multi\""); - statement.execute( - "create or replace temporary table `test_multi` (cola string); select * from `test_multi`"); - statement.execute("select 'str'; select 'str2'"); - statement.execute("select '\\` backticks'; select '\\\\` more `backticks`'"); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "create or replace temporary table \"test_multi\" (cola string); select * from \"test_multi\""); + statement.execute( + "create or replace temporary table `test_multi` (cola string); select * from `test_multi`"); + statement.execute("select 'str'; select 'str2'"); + statement.execute("select '\\` backticks'; select '\\\\` more `backticks`'"); + } } @Test public void testMultiStmtCommitRollback() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("begin"); - statement.execute("insert into test_multi values ('abc')"); - // "commit" inside multistatement commits previous DML calls - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); commit"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("rollback"); - ResultSet rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("begin"); - statement.execute("insert into test_multi values ('abc')"); - // "rollback" inside multistatement rolls back previous DML calls - 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); rollback"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("begin; insert into test_multi values ('abc')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("insert into test_multi values ('def')"); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("begin; insert into test_multi values ('abc')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("insert into test_multi values ('def')"); - statement.execute("rollback"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("begin"); + statement.execute("insert into test_multi values ('abc')"); + // "commit" inside multistatement commits previous DML calls + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); commit"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("begin"); + statement.execute("insert into test_multi values ('abc')"); + // "rollback" inside multistatement rolls back previous DML calls + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); rollback"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("begin; insert into test_multi values ('abc')"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("insert into test_multi values ('def')"); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("begin; insert into test_multi values ('abc')"); + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("insert into test_multi values ('def')"); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + } } @Test public void testMultiStmtCommitRollbackNoAutocommit() throws SQLException { - Connection connection = getConnection(); - connection.setAutoCommit(false); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("insert into test_multi values ('abc')"); - // "commit" inside multistatement commits previous DML calls - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); commit"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("rollback"); - ResultSet rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("insert into test_multi values ('abc')"); - // "rollback" inside multistatement rolls back previous DML calls - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); rollback"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - 
statement.execute( - "insert into test_multi values ('abc'); insert into test_multi values ('def')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "insert into test_multi values ('abc'); insert into test_multi values ('def')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("rollback"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + connection.setAutoCommit(false); + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("insert into test_multi values ('abc')"); + // "commit" inside multistatement commits previous DML calls + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); commit"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("insert into test_multi values ('abc')"); + // "rollback" inside multistatement rolls back previous DML calls + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); rollback"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "insert into test_multi values ('abc'); insert into test_multi values ('def')"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "insert into test_multi values ('abc'); insert into test_multi values ('def')"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + } } @Test @@ -388,97 +371,102 @@ public void testMultiStmtLarge() throws SQLException { // this test verifies that multiple-statement support does not break // with many statements // it also ensures that results are returned in the correct order - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - StringBuilder multiStmtBuilder = new StringBuilder(); 
- String query = "SELECT %d;"; - for (int i = 0; i < 100; i++) { - multiStmtBuilder.append(String.format(query, i)); - } - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 100); - - assertTrue(statement.execute(multiStmtBuilder.toString())); - for (int i = 0; i < 100; i++) { - ResultSet rs = statement.getResultSet(); - assertNotNull(rs); - assertEquals(-1, statement.getUpdateCount()); - assertTrue(rs.next()); - assertEquals(i, rs.getInt(1)); - assertFalse(rs.next()); - - if (i != 99) { - assertTrue(statement.getMoreResults()); - } else { - assertFalse(statement.getMoreResults()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + StringBuilder multiStmtBuilder = new StringBuilder(); + String query = "SELECT %d;"; + for (int i = 0; i < 100; i++) { + multiStmtBuilder.append(String.format(query, i)); + } + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 100); + + assertTrue(statement.execute(multiStmtBuilder.toString())); + for (int i = 0; i < 100; i++) { + try (ResultSet rs = statement.getResultSet()) { + assertNotNull(rs); + assertEquals(-1, statement.getUpdateCount()); + assertTrue(rs.next()); + assertEquals(i, rs.getInt(1)); + assertFalse(rs.next()); + + if (i != 99) { + assertTrue(statement.getMoreResults()); + } else { + assertFalse(statement.getMoreResults()); + } + } } } - - statement.close(); - connection.close(); } @Test public void testMultiStmtCountNotMatch() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - try { - statement.execute("select 1; select 2; select 3"); - fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(8)); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("select 1; select 2; select 3"); + fail(); + } catch (SQLException e) { + 
assertThat(e.getErrorCode(), is(8)); + } - try { - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - statement.execute("select 1"); - fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(8)); - } + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + statement.execute("select 1"); + fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(8)); + } - // 0 means any number of statement can be executed - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 0); - statement.execute("select 1; select 2; select 3"); + // 0 means any number of statement can be executed + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 0); + statement.execute("select 1; select 2; select 3"); + } } @Test @ConditionalIgnore(condition = RunningOnGithubAction.class) public void testInvalidParameterCount() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - ResultSet rs = statement.executeQuery("select current_account_locator()"); - rs.next(); - String accountName = rs.getString(1); - - rs = statement.executeQuery("select current_user()"); - rs.next(); - String userName = rs.getString(1); - - String[] testSuites = new String[5]; - testSuites[0] = - String.format("alter account %s set " + "multi_statement_count = 20", accountName); - testSuites[1] = - String.format("alter account %s set " + "multi_statement_count = -1", accountName); - testSuites[2] = String.format("alter user %s set " + "multi_statement_count = 20", userName); - testSuites[3] = String.format("alter user %s set " + "multi_statement_count = -1", userName); - testSuites[4] = "alter session set " + "multi_statement_count = -1"; - - int[] expectedErrorCodes = new int[5]; - expectedErrorCodes[0] = 1008; - expectedErrorCodes[1] = 1008; - expectedErrorCodes[2] = 1006; - expectedErrorCodes[3] = 
1006; - expectedErrorCodes[4] = 1008; - - statement.execute("use role accountadmin"); - - for (int i = 0; i < testSuites.length; i++) { - try { - statement.execute(testSuites[i]); - Assert.fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(expectedErrorCodes[i])); + String userName = null; + String accountName = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + try (ResultSet rs = statement.executeQuery("select current_account_locator()")) { + assertTrue(rs.next()); + accountName = rs.getString(1); + } + + try (ResultSet rs = statement.executeQuery("select current_user()")) { + assertTrue(rs.next()); + userName = rs.getString(1); + } + + String[] testSuites = new String[5]; + testSuites[0] = + String.format("alter account %s set " + "multi_statement_count = 20", accountName); + testSuites[1] = + String.format("alter account %s set " + "multi_statement_count = -1", accountName); + testSuites[2] = String.format("alter user %s set " + "multi_statement_count = 20", userName); + testSuites[3] = String.format("alter user %s set " + "multi_statement_count = -1", userName); + testSuites[4] = "alter session set " + "multi_statement_count = -1"; + + int[] expectedErrorCodes = new int[5]; + expectedErrorCodes[0] = 1008; + expectedErrorCodes[1] = 1008; + expectedErrorCodes[2] = 1006; + expectedErrorCodes[3] = 1006; + expectedErrorCodes[4] = 1008; + + statement.execute("use role accountadmin"); + + for (int i = 0; i < testSuites.length; i++) { + try { + statement.execute(testSuites[i]); + Assert.fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(expectedErrorCodes[i])); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java index c4e91e872..59f5ba795 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java @@ -29,302 +29,300 @@ public class MultiStatementLatestIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testMultiStmtExecute() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - boolean hasResultSet = statement.execute(multiStmtQuery); - // first statement - assertFalse(hasResultSet); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + 
String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + boolean hasResultSet = statement.execute(multiStmtQuery); + // first statement + assertFalse(hasResultSet); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } } @Test public void testMultiStmtTransaction() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table test_multi_txn(c1 number, c2 string)" + " as select 10, 'z'"); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - String multiStmtQuery = - "begin;\n" - + "delete from test_multi_txn;\n" - + "insert into test_multi_txn values (1, 'a'), (2, 'b');\n" - + "commit"; - - boolean hasResultSet = statement.execute(multiStmtQuery); - // first statement - assertFalse(hasResultSet); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(1, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // fourth statement - 
assertFalse(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("drop table if exists test_multi_txn"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_multi_txn(c1 number, c2 string)" + " as select 10, 'z'"); + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + String multiStmtQuery = + "begin;\n" + + "delete from test_multi_txn;\n" + + "insert into test_multi_txn values (1, 'a'), (2, 'b');\n" + + "commit"; + + boolean hasResultSet = statement.execute(multiStmtQuery); + // first statement + assertFalse(hasResultSet); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(1, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // fourth statement + assertFalse(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + + } finally { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("drop table if exists test_multi_txn"); + } + } } @Test public void testMultiStmtExecuteUpdate() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - 
"create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - int rowCount = statement.executeUpdate(multiStmtQuery); - // first statement - assertEquals(0, rowCount); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + int rowCount = statement.executeUpdate(multiStmtQuery); + // first statement + assertEquals(0, rowCount); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + 
assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } } @Test public void testMultiStmtTransactionRollback() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table test_multi_txn_rb(c1 number, c2 string)" + " as select 10, 'z'"); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - String multiStmtQuery = - "begin;\n" - + "delete from test_multi_txn_rb;\n" - + "rollback;\n" - + "select count(*) from test_multi_txn_rb"; - - boolean hasResultSet = statement.execute(multiStmtQuery); - // first statement - assertFalse(hasResultSet); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(1, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // fourth statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("drop table if exists test_multi_txn_rb"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_multi_txn_rb(c1 number, c2 string)" + + " as select 10, 'z'"); + + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + String multiStmtQuery = + "begin;\n" + + "delete from test_multi_txn_rb;\n" + + "rollback;\n" + + "select count(*) from test_multi_txn_rb"; + + boolean hasResultSet = statement.execute(multiStmtQuery); + // first statement + assertFalse(hasResultSet); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(1, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // fourth statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } finally { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("drop table if exists test_multi_txn_rb"); + } + } } @Test public void testMultiStmtExecuteQuery() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "select 1;\n" - + "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - ResultSet rs = statement.executeQuery(multiStmtQuery); - // first statement - assertNotNull(rs); - assertNotNull(statement.getResultSet()); - assertEquals(-1, statement.getUpdateCount()); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertFalse(rs.next()); - - // second 
statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // fourth statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "select 1;\n" + + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + try (ResultSet rs = statement.executeQuery(multiStmtQuery)) { + // first statement + assertNotNull(rs); + assertNotNull(statement.getResultSet()); + assertEquals(-1, statement.getUpdateCount()); + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertFalse(rs.next()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // fourth statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + 
assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } } @Test public void testMultiStmtUpdateCount() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - boolean isResultSet = - statement.execute( - "CREATE OR REPLACE TEMPORARY TABLE TABLIST AS " - + "SELECT TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " - + "WHERE TABLE_NAME LIKE 'K%' " - + "ORDER BY TABLE_SCHEMA, TABLE_NAME; " - + "SELECT * FROM TABLIST " - + "JOIN INFORMATION_SCHEMA.COLUMNS " - + "ON COLUMNS.TABLE_SCHEMA = TABLIST.TABLE_SCHEMA " - + "AND COLUMNS.TABLE_NAME = TABLIST.TABLE_NAME;"); - assertEquals(isResultSet, false); - int statementUpdateCount = statement.getUpdateCount(); - assertEquals(statementUpdateCount, 0); - isResultSet = statement.getMoreResults(); - assertEquals(isResultSet, true); - statementUpdateCount = statement.getUpdateCount(); - assertEquals(statementUpdateCount, -1); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + boolean isResultSet = + statement.execute( + "CREATE OR REPLACE TEMPORARY TABLE TABLIST AS " + + "SELECT TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " + + "WHERE TABLE_NAME LIKE 'K%' " + + "ORDER BY TABLE_SCHEMA, TABLE_NAME; " + + "SELECT * FROM TABLIST " + + "JOIN INFORMATION_SCHEMA.COLUMNS " + + "ON COLUMNS.TABLE_SCHEMA = TABLIST.TABLE_SCHEMA " + + "AND COLUMNS.TABLE_NAME = TABLIST.TABLE_NAME;"); + assertEquals(isResultSet, false); + int statementUpdateCount = statement.getUpdateCount(); + assertEquals(statementUpdateCount, 0); + isResultSet = statement.getMoreResults(); + assertEquals(isResultSet, true); + statementUpdateCount = 
statement.getUpdateCount(); + assertEquals(statementUpdateCount, -1); + } } /** Test use of anonymous blocks (SNOW-758262) */ @Test public void testAnonymousBlocksUse() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table tab758262(c1 number)"); - // Test anonymous block with multistatement - int multistatementcount = 2; - statement - .unwrap(SnowflakeStatement.class) - .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); - String multiStmtQuery = - "begin\n" - + "insert into tab758262 values (1);\n" - + "return 'done';\n" - + "end;\n" - + "select * from tab758262;"; - - statement.execute(multiStmtQuery); - for (int i = 0; i < multistatementcount - 1; i++) { - assertTrue(statement.getMoreResults()); - } - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - - // Test anonymous block in the middle of other queries in multistatement - multiStmtQuery = - "insert into tab758262 values (25), (26);\n" - + "begin\n" - + "insert into tab758262 values (27);\n" - + "return 'done';\n" - + "end;\n" - + "select * from tab758262;"; - multistatementcount = 3; - statement - .unwrap(SnowflakeStatement.class) - .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); - statement.execute(multiStmtQuery); - for (int i = 0; i < multistatementcount - 1; i++) { - assertTrue(statement.getMoreResults()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table tab758262(c1 number)"); + // Test anonymous block with multistatement + int multistatementcount = 2; + statement + .unwrap(SnowflakeStatement.class) + .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); + String multiStmtQuery = + "begin\n" + + "insert into tab758262 values (1);\n" + + "return 'done';\n" + + "end;\n" + + "select * from tab758262;"; + + 
statement.execute(multiStmtQuery); + for (int i = 0; i < multistatementcount - 1; i++) { + assertTrue(statement.getMoreResults()); + } + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + + // Test anonymous block in the middle of other queries in multistatement + multiStmtQuery = + "insert into tab758262 values (25), (26);\n" + + "begin\n" + + "insert into tab758262 values (27);\n" + + "return 'done';\n" + + "end;\n" + + "select * from tab758262;"; + multistatementcount = 3; + statement + .unwrap(SnowflakeStatement.class) + .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); + statement.execute(multiStmtQuery); + for (int i = 0; i < multistatementcount - 1; i++) { + assertTrue(statement.getMoreResults()); + } + try (ResultSet rs = statement.getResultSet()) { + assertEquals(4, getSizeOfResultSet(rs)); + } } - rs = statement.getResultSet(); - assertEquals(4, getSizeOfResultSet(rs)); - rs.close(); - statement.close(); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java index d34cc4bc9..d767456a2 100644 --- a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java @@ -142,8 +142,8 @@ public void testSystemFunctions() throws SQLException { } static void testFunction(Connection connection, String sql, String expected) throws SQLException { - try (Statement statement = connection.createStatement()) { - ResultSet resultSet = statement.executeQuery(sql); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(sql)) { assertTrue(resultSet.next()); assertEquals(expected, resultSet.getString(1)); } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java index 112df2724..3d1997193 
100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java @@ -3,6 +3,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.PreparedStatement; @@ -21,204 +22,215 @@ public class PreparedMultiStmtIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testExecuteUpdateCount() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - statement.execute("create or replace table test_multi_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement( - "insert into test_multi_bind(c1) values(?); insert into " - + "test_multi_bind values (?), (?)"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); - - preparedStatement.setInt(1, 20); - preparedStatement.setInt(2, 30); - preparedStatement.setInt(3, 40); - - // first statement - int rowCount = preparedStatement.executeUpdate(); - assertThat(rowCount, is(1)); - assertThat(preparedStatement.getResultSet(), is(nullValue())); - assertThat(preparedStatement.getUpdateCount(), is(1)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(false)); - assertThat(preparedStatement.getUpdateCount(), is(2)); - - 
ResultSet resultSet = statement.executeQuery("select c1 from test_multi_bind order by c1 asc"); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(30)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - - statement.execute("drop table if exists test_multi_bind"); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + statement.execute("create or replace table test_multi_bind(c1 number)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into test_multi_bind(c1) values(?); insert into " + + "test_multi_bind values (?), (?)")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); + + preparedStatement.setInt(1, 20); + preparedStatement.setInt(2, 30); + preparedStatement.setInt(3, 40); + + // first statement + int rowCount = preparedStatement.executeUpdate(); + assertThat(rowCount, is(1)); + assertThat(preparedStatement.getResultSet(), is(nullValue())); + assertThat(preparedStatement.getUpdateCount(), is(1)); + + // second statement + assertThat(preparedStatement.getMoreResults(), is(false)); + assertThat(preparedStatement.getUpdateCount(), is(2)); + + try (ResultSet resultSet = + statement.executeQuery("select c1 from test_multi_bind order by c1 asc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(30)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + } + } + } finally { + statement.execute("drop table if exists test_multi_bind"); + } + } } /** Less bindings than expected in statement */ @Test public void testExecuteLessBindings() throws Exception { - SnowflakeConnectionV1 
connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - statement.execute("create or replace table test_multi_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement( - "insert into test_multi_bind(c1) values(?); insert into " - + "test_multi_bind values (?), (?)"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); - - preparedStatement.setInt(1, 20); - preparedStatement.setInt(2, 30); - - // first statement - try { - preparedStatement.executeUpdate(); - Assert.fail(); - } catch (SQLException e) { - // error code comes from xp, which is js execution failed. - assertThat(e.getErrorCode(), is(100132)); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + statement.execute("create or replace table test_multi_bind(c1 number)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into test_multi_bind(c1) values(?); insert into " + + "test_multi_bind values (?), (?)")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); + + preparedStatement.setInt(1, 20); + preparedStatement.setInt(2, 30); + + // first statement + try { + preparedStatement.executeUpdate(); + Assert.fail(); + } catch (SQLException e) { + // error code comes from xp, which is js execution failed. 
+ assertThat(e.getErrorCode(), is(100132)); + } + } + } finally { + statement.execute("drop table if exists test_multi_bind"); + } } - - statement.execute("drop table if exists test_multi_bind"); - preparedStatement.close(); - connection.close(); } @Test public void testExecuteMoreBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - statement.execute("create or replace table test_multi_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement( - "insert into test_multi_bind(c1) values(?); insert into " - + "test_multi_bind values (?), (?)"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); - - preparedStatement.setInt(1, 20); - preparedStatement.setInt(2, 30); - preparedStatement.setInt(3, 40); - // 4th binding should be ignored - preparedStatement.setInt(4, 50); - - // first statement - int rowCount = preparedStatement.executeUpdate(); - assertThat(rowCount, is(1)); - assertThat(preparedStatement.getResultSet(), is(nullValue())); - assertThat(preparedStatement.getUpdateCount(), is(1)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(false)); - assertThat(preparedStatement.getUpdateCount(), is(2)); - - ResultSet resultSet = statement.executeQuery("select c1 from test_multi_bind order by c1 asc"); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(30)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - - statement.execute("drop table if exists test_multi_bind"); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set 
MULTI_STATEMENT_COUNT=0"); + statement.execute("create or replace table test_multi_bind(c1 number)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into test_multi_bind(c1) values(?); insert into " + + "test_multi_bind values (?), (?)")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); + + preparedStatement.setInt(1, 20); + preparedStatement.setInt(2, 30); + preparedStatement.setInt(3, 40); + // 4th binding should be ignored + preparedStatement.setInt(4, 50); + + // first statement + int rowCount = preparedStatement.executeUpdate(); + assertThat(rowCount, is(1)); + assertThat(preparedStatement.getResultSet(), is(nullValue())); + assertThat(preparedStatement.getUpdateCount(), is(1)); + + // second statement + assertThat(preparedStatement.getMoreResults(), is(false)); + assertThat(preparedStatement.getUpdateCount(), is(2)); + + try (ResultSet resultSet = + statement.executeQuery("select c1 from test_multi_bind order by c1 asc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(30)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + } + } + } finally { + statement.execute("drop table if exists test_multi_bind"); + } + } } @Test public void testExecuteQueryBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - - PreparedStatement preparedStatement = - connection.prepareStatement("select ?; select ?, ?; select ?, ?, ?"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(6)); - - preparedStatement.setInt(1, 10); - preparedStatement.setInt(2, 20); - preparedStatement.setInt(3, 30); - preparedStatement.setInt(4, 40); - preparedStatement.setInt(5, 50); - 
preparedStatement.setInt(6, 60); - - // first statement - ResultSet resultSet = preparedStatement.executeQuery(); - assertThat(resultSet.next(), is(true)); - assertThat(resultSet.getInt(1), is(10)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - assertThat(resultSet.getInt(2), is(30)); - - // third statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - assertThat(resultSet.getInt(2), is(50)); - assertThat(resultSet.getInt(3), is(60)); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("select ?; select ?, ?; select ?, ?, ?")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(6)); + + preparedStatement.setInt(1, 10); + preparedStatement.setInt(2, 20); + preparedStatement.setInt(3, 30); + preparedStatement.setInt(4, 40); + preparedStatement.setInt(5, 50); + preparedStatement.setInt(6, 60); + + // first statement + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertThat(resultSet.next(), is(true)); + assertThat(resultSet.getInt(1), is(10)); + } + // second statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertThat(resultSet.getInt(2), is(30)); + } + + // third statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + 
assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + assertThat(resultSet.getInt(2), is(50)); + assertThat(resultSet.getInt(3), is(60)); + } + } + } } @Test public void testExecuteQueryNoBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - - PreparedStatement preparedStatement = - connection.prepareStatement("select 10; select 20, 30; select 40, 50, 60"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(0)); - - // first statement - ResultSet resultSet = preparedStatement.executeQuery(); - assertThat(resultSet.next(), is(true)); - assertThat(resultSet.getInt(1), is(10)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - assertThat(resultSet.getInt(2), is(30)); - - // third statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - assertThat(resultSet.getInt(2), is(50)); - assertThat(resultSet.getInt(3), is(60)); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("select 10; select 20, 30; select 40, 50, 60")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(0)); + + // first statement + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertThat(resultSet.next(), is(true)); + assertThat(resultSet.getInt(1), is(10)); + } + + // second statement + 
assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertThat(resultSet.getInt(2), is(30)); + } + + // third statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + assertThat(resultSet.getInt(2), is(50)); + assertThat(resultSet.getInt(3), is(60)); + } + } + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java index f3a2c942a..7c05163dc 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java @@ -15,9 +15,9 @@ abstract class PreparedStatement0IT extends BaseJDBCTest { Connection init() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java index bd23d803d..56bef419f 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java @@ -85,7 +85,8 @@ public void testGetParameterMetaData() throws SQLException { /** Trigger default stage array binding threshold so that it can be run on travis */ @Test public void testInsertStageArrayBind() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = 
init(); + Statement statement = connection.createStatement()) { connection .createStatement() .execute("create or replace table testStageArrayBind(c1 integer)"); @@ -98,14 +99,12 @@ public void testInsertStageArrayBind() throws SQLException { } prepStatement.executeBatch(); - try (Statement statement = connection.createStatement()) { - try (ResultSet resultSet = - statement.executeQuery("select * from testStageArrayBind order by c1 asc")) { - int count = 0; - while (resultSet.next()) { - assertThat(resultSet.getInt(1), is(count)); - count++; - } + try (ResultSet resultSet = + statement.executeQuery("select * from testStageArrayBind order by c1 asc")) { + int count = 0; + while (resultSet.next()) { + assertThat(resultSet.getInt(1), is(count)); + count++; } } } @@ -227,16 +226,15 @@ public void testInsertBatchStageMultipleTimes() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchNull() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled for (int threshold : thresholds) { - connection.createStatement().execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table - connection - .createStatement() - .execute( - String.format( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); + statement.execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table + statement.execute( + String.format( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { prepStatement.setNull(1, Types.INTEGER); prepStatement.setNull(2, Types.DOUBLE); @@ -250,9 +248,8 @@ public void testStageBatchNull() throws SQLException { assertEquals(1, countResult[0]); } - try (ResultSet resultSet = - connection.createStatement().executeQuery("SELECT 
* FROM TEST_PREPST")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("SELECT * FROM TEST_PREPST")) { + assertTrue(resultSet.next()); String errorMessage = "Column should be null (" + (threshold > 0 ? "stage" : "non-stage") + ")"; resultSet.getInt(1); @@ -275,19 +272,18 @@ public void testStageBatchNull() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageString() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled String[] rows = { null, "", "\"", ",", "\n", "\r\n", "\"\"", "null", "\\\n", "\",", "\\\",\\\"" }; for (int threshold : thresholds) { - connection.createStatement().execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table - connection - .createStatement() - .execute( - String.format( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); + statement.execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table + statement.execute( + String.format( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { for (int i = 0; i < rows.length; i++) { bindOneParamSet(prepStatement, i, 0.0, 0.0f, rows[i], 0, (short) 0); @@ -296,13 +292,11 @@ public void testStageString() throws SQLException { prepStatement.executeBatch(); try (ResultSet resultSet = - connection - .createStatement() - .executeQuery("SELECT colC FROM TEST_PREPST ORDER BY id ASC")) { + statement.executeQuery("SELECT colC FROM TEST_PREPST ORDER BY id ASC")) { String errorMessage = "Strings should match (" + (threshold > 0 ? 
"stage" : "non-stage") + ")"; for (String row : rows) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(errorMessage, row, resultSet.getString(1)); } } @@ -314,16 +308,15 @@ public void testStageString() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testIncorrectTypes() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled for (int threshold : thresholds) { - connection.createStatement().execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table - connection - .createStatement() - .execute( - String.format( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); + statement.execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table + statement.execute( + String.format( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { prepStatement.setString(1, "notAnInt"); // should cause error @@ -348,7 +341,8 @@ public void testIncorrectTypes() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchTimestamps() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { Timestamp tsEpoch = new Timestamp(0L); Timestamp tsEpochMinusOneSec = new Timestamp(-1000L); // negative epoch no fraction of seconds Timestamp tsPast = new Timestamp(-2208988800100L); // very large negative epoch @@ -363,22 +357,14 @@ public void testStageBatchTimestamps() throws SQLException { try { // Test that stage and non-stage bindings are consistent for each timestamp type for (String tsType : tsTypes) { - connection - .createStatement() - .execute("ALTER 
SESSION SET TIMESTAMP_TYPE_MAPPING = " + tsType); - connection - .createStatement() - .execute("ALTER SESSION SET CLIENT_TIMESTAMP_TYPE_MAPPING = " + tsType); - - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_ts (id INTEGER, tz TIMESTAMP)"); + statement.execute("ALTER SESSION SET TIMESTAMP_TYPE_MAPPING = " + tsType); + statement.execute("ALTER SESSION SET CLIENT_TIMESTAMP_TYPE_MAPPING = " + tsType); + + statement.execute("CREATE OR REPLACE TABLE test_prepst_ts (id INTEGER, tz TIMESTAMP)"); try (PreparedStatement prepStatement = connection.prepareStatement("INSERT INTO test_prepst_ts(id, tz) VALUES(?,?)")) { // First, run with non-stage binding - connection - .createStatement() - .executeQuery("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); + statement.executeQuery("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); for (int i = 0; i < timestamps.length; i++) { prepStatement.setInt(1, i); prepStatement.setTimestamp(2, timestamps[i]); @@ -390,22 +376,18 @@ public void testStageBatchTimestamps() throws SQLException { } Timestamp[] nonStageResult = new Timestamp[timestamps.length]; - ResultSet rsNonStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC"); - for (int i = 0; i < nonStageResult.length; i++) { - rsNonStage.next(); - nonStageResult[i] = rsNonStage.getTimestamp(2); + try (ResultSet rsNonStage = + statement.executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC")) { + for (int i = 0; i < nonStageResult.length; i++) { + assertTrue(rsNonStage.next()); + nonStageResult[i] = rsNonStage.getTimestamp(2); + } } - - connection.createStatement().execute("DELETE FROM test_prepst_ts WHERE 1=1"); + statement.execute("DELETE FROM test_prepst_ts WHERE 1=1"); // Now, run with stage binding - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage + statement.execute( + "ALTER SESSION SET 
CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage // bind for (int i = 0; i < timestamps.length; i++) { prepStatement.setInt(1, i); @@ -418,27 +400,26 @@ public void testStageBatchTimestamps() throws SQLException { } Timestamp[] stageResult = new Timestamp[timestamps.length]; - ResultSet rsStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC"); - for (int i = 0; i < stageResult.length; i++) { - rsStage.next(); - stageResult[i] = rsStage.getTimestamp(2); - } - - for (int i = 0; i < timestamps.length; i++) { - assertEquals( - "Stage binding timestamp should match non-stage binding timestamp (" - + tsType - + ")", - nonStageResult[i], - stageResult[i]); + try (ResultSet rsStage = + statement.executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC")) { + for (int i = 0; i < stageResult.length; i++) { + assertTrue(rsStage.next()); + stageResult[i] = rsStage.getTimestamp(2); + } + + for (int i = 0; i < timestamps.length; i++) { + assertEquals( + "Stage binding timestamp should match non-stage binding timestamp (" + + tsType + + ")", + nonStageResult[i], + stageResult[i]); + } } } } } finally { - connection.createStatement().execute("DROP TABLE IF EXISTS test_prepst_ts"); + statement.execute("DROP TABLE IF EXISTS test_prepst_ts"); } } } @@ -446,7 +427,8 @@ public void testStageBatchTimestamps() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchTimes() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { Time tMidnight = new Time(0); Time tNeg = new Time(-1); Time tPos = new Time(1); @@ -455,16 +437,12 @@ public void testStageBatchTimes() throws SQLException { Time[] times = new Time[] {tMidnight, tNeg, tPos, tNow, tNoon, null}; int[] countResult; try { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE 
test_prepst_time (id INTEGER, tod TIME)"); + statement.execute("CREATE OR REPLACE TABLE test_prepst_time (id INTEGER, tod TIME)"); try (PreparedStatement prepStatement = connection.prepareStatement("INSERT INTO test_prepst_time(id, tod) VALUES(?,?)")) { // First, run with non-stage binding - connection - .createStatement() - .execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); for (int i = 0; i < times.length; i++) { prepStatement.setInt(1, i); prepStatement.setTime(2, times[i]); @@ -477,21 +455,17 @@ public void testStageBatchTimes() throws SQLException { Time[] nonStageResult = new Time[times.length]; ResultSet rsNonStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_time ORDER BY id ASC"); + statement.executeQuery("SELECT * FROM test_prepst_time ORDER BY id ASC"); for (int i = 0; i < nonStageResult.length; i++) { - rsNonStage.next(); + assertTrue(rsNonStage.next()); nonStageResult[i] = rsNonStage.getTime(2); } - connection.createStatement().execute("DELETE FROM test_prepst_time WHERE 1=1"); + statement.execute("DELETE FROM test_prepst_time WHERE 1=1"); // Now, run with stage binding - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage // bind for (int i = 0; i < times.length; i++) { prepStatement.setInt(1, i); @@ -504,24 +478,23 @@ public void testStageBatchTimes() throws SQLException { } Time[] stageResult = new Time[times.length]; - ResultSet rsStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_time ORDER BY id ASC"); - for (int i = 0; i < stageResult.length; i++) { - rsStage.next(); - stageResult[i] = rsStage.getTime(2); - } + try (ResultSet rsStage = + statement.executeQuery("SELECT * FROM test_prepst_time ORDER BY id 
ASC")) { + for (int i = 0; i < stageResult.length; i++) { + assertTrue(rsStage.next()); + stageResult[i] = rsStage.getTime(2); + } - for (int i = 0; i < times.length; i++) { - assertEquals( - "Stage binding time should match non-stage binding time", - nonStageResult[i], - stageResult[i]); + for (int i = 0; i < times.length; i++) { + assertEquals( + "Stage binding time should match non-stage binding time", + nonStageResult[i], + stageResult[i]); + } } } } finally { - connection.createStatement().execute("DROP TABLE IF EXISTS test_prepst_time"); + statement.execute("DROP TABLE IF EXISTS test_prepst_time"); } } } @@ -541,7 +514,7 @@ public void testClearParameters() throws SQLException { prepStatement.executeUpdate(); try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(3, resultSet.getInt(1)); assertFalse(resultSet.next()); } @@ -574,7 +547,7 @@ public void testClearBatch() throws SQLException { assertThat(batchSize, is(0)); try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(3, resultSet.getInt(1)); assertFalse(resultSet.next()); } @@ -584,15 +557,14 @@ public void testClearBatch() throws SQLException { @Test public void testInsertOneRow() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); assertEquals(1, prepStatement.executeUpdate()); } - try (ResultSet resultSet = 
connection.createStatement().executeQuery(selectAllSQL)) { + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(1, getSizeOfResultSet(resultSet)); } try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -606,10 +578,9 @@ public void testInsertOneRow() throws SQLException { @Test public void testUpdateOneRow() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -621,8 +592,8 @@ public void testUpdateOneRow() throws SQLException { prepStatement.setInt(1, 1); int count = prepStatement.executeUpdate(); assertEquals(1, count); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertTrue(resultSet.next()); assertEquals("newString", resultSet.getString(4)); } } @@ -631,9 +602,9 @@ public void testUpdateOneRow() throws SQLException { assertFalse(prepStatement.execute()); assertEquals(1, prepStatement.getUpdateCount()); assertEquals(1L, prepStatement.getLargeUpdateCount()); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertTrue(resultSet.next()); + assertTrue(resultSet.next()); assertEquals("newString", resultSet.getString(4)); } } @@ -642,10 +613,9 @@ public void testUpdateOneRow() throws SQLException { @Test public void testDeleteOneRow() 
throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -658,7 +628,7 @@ public void testDeleteOneRow() throws SQLException { prepStatement.setInt(1, 1); int count = prepStatement.executeUpdate(); assertEquals(1, count); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(1, getSizeOfResultSet(resultSet)); } // evaluate query ids @@ -672,7 +642,7 @@ public void testDeleteOneRow() throws SQLException { assertFalse(prepStatement.execute()); assertEquals(1, prepStatement.getUpdateCount()); assertEquals(1L, prepStatement.getLargeUpdateCount()); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(0, getSizeOfResultSet(resultSet)); // evaluate query ids assertTrue(prepStatement.isWrapperFor(SnowflakePreparedStatement.class)); @@ -736,9 +706,9 @@ public void testUpdateBatch() throws SQLException { assertEquals(0, prepStatement.getUpdateCount()); assertEquals(0L, prepStatement.getLargeUpdateCount()); try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getString(4), is("newString")); - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getString(4), is("newString")); } } @@ -748,9 +718,10 @@ public void testUpdateBatch() throws 
SQLException { @Test public void testBatchInsertWithCacheEnabled() throws SQLException { int[] countResult; - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // ensure enable the cache result use - connection.createStatement().execute(enableCacheReuse); + statement.execute(enableCacheReuse); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 1); @@ -770,14 +741,14 @@ public void testBatchInsertWithCacheEnabled() throws SQLException { assertEquals(1, countResult[0]); assertEquals(1, countResult[1]); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertTrue(resultSet.next()); assertEquals(1, resultSet.getInt(1)); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(2, resultSet.getInt(1)); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(3, resultSet.getInt(1)); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(4, resultSet.getInt(1)); assertFalse(resultSet.next()); } @@ -805,16 +776,18 @@ public void manualTestForPreparedStatementLogging() throws SQLException { props.put("user", params.get("user")); props.put("password", params.get("password")); props.put("tracing", "info"); - Connection con = DriverManager.getConnection(uri, props); - con.createStatement() - .executeUpdate("alter session set CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=true"); - con.createStatement().execute(createTableSQL); - PreparedStatement prepStatement = con.prepareStatement(insertSQL, Statement.NO_GENERATED_KEYS); - bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); - prepStatement.addBatch(); - prepStatement.executeBatch(); - con.createStatement() - .executeUpdate("alter session set 
CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=false"); - con.close(); + try (Connection con = DriverManager.getConnection(uri, props); + Statement statement = con.createStatement()) { + statement.executeUpdate("alter session set CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=true"); + statement.execute(createTableSQL); + try (PreparedStatement prepStatement = + con.prepareStatement(insertSQL, Statement.NO_GENERATED_KEYS)) { + bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); + prepStatement.addBatch(); + prepStatement.executeBatch(); + statement.executeUpdate( + "alter session set CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=false"); + } + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java index 52b4f4518..872c8aab6 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java @@ -42,9 +42,10 @@ public PreparedStatement1LatestIT() { @Test public void testPrepStWithCacheEnabled() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // ensure enable the cache result use - connection.createStatement().execute(enableCacheReuse); + statement.execute(enableCacheReuse); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); @@ -54,13 +55,12 @@ public void testPrepStWithCacheEnabled() throws SQLException { prepStatement.execute(); } - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from test_prepst")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from test_prepst")) { + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(1), 1); - 
resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(1), 1); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(1), 100); } @@ -69,13 +69,13 @@ public void testPrepStWithCacheEnabled() throws SQLException { prepStatement.setInt(1, 1); prepStatement.setInt(2, 1); try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(2), 2); prepStatement.setInt(1, 1); prepStatement.setInt(2, 100); } try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(2), 101); } } @@ -110,35 +110,37 @@ public void testPrepStWithCacheEnabled() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testInsertStageArrayBindWithTime() throws SQLException { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - statement.execute("alter session set CLIENT_STAGE_ARRAY_BINDING_THRESHOLD=2"); - statement.execute("create or replace table testStageBindTime (c1 time, c2 time)"); - PreparedStatement prepSt = - connection.prepareStatement("insert into testStageBindTime values (?, ?)"); - Time[][] timeValues = { - {new Time(0), new Time(1)}, - {new Time(1000), new Time(Integer.MAX_VALUE)}, - {new Time(123456), new Time(55555)}, - {Time.valueOf("01:02:00"), new Time(-100)}, - }; - for (Time[] value : timeValues) { - prepSt.setTime(1, value[0]); - prepSt.setTime(2, value[1]); - prepSt.addBatch(); - } - prepSt.executeBatch(); - // check results - ResultSet rs = statement.executeQuery("select * from testStageBindTime"); - for (Time[] timeValue : timeValues) { - rs.next(); - assertEquals(timeValue[0].toString(), rs.getTime(1).toString()); - assertEquals(timeValue[1].toString(), rs.getTime(2).toString()); + try (Connection connection = init(); + Statement statement = 
connection.createStatement()) { + try { + statement.execute("alter session set CLIENT_STAGE_ARRAY_BINDING_THRESHOLD=2"); + statement.execute("create or replace table testStageBindTime (c1 time, c2 time)"); + PreparedStatement prepSt = + connection.prepareStatement("insert into testStageBindTime values (?, ?)"); + Time[][] timeValues = { + {new Time(0), new Time(1)}, + {new Time(1000), new Time(Integer.MAX_VALUE)}, + {new Time(123456), new Time(55555)}, + {Time.valueOf("01:02:00"), new Time(-100)}, + }; + for (Time[] value : timeValues) { + prepSt.setTime(1, value[0]); + prepSt.setTime(2, value[1]); + prepSt.addBatch(); + } + prepSt.executeBatch(); + // check results + try (ResultSet rs = statement.executeQuery("select * from testStageBindTime")) { + for (Time[] timeValue : timeValues) { + assertTrue(rs.next()); + assertEquals(timeValue[0].toString(), rs.getTime(1).toString()); + assertEquals(timeValue[1].toString(), rs.getTime(2).toString()); + } + } + } finally { + statement.execute("drop table if exists testStageBindTime"); + statement.execute("alter session unset CLIENT_STAGE_ARRAY_BINDING_THRESHOLD"); } - rs.close(); - statement.execute("drop table if exists testStageBindTime"); - statement.execute("alter session unset CLIENT_STAGE_ARRAY_BINDING_THRESHOLD"); - statement.close(); } } @@ -155,48 +157,48 @@ public void testInsertStageArrayBindWithTime() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSetObjectForTimestampTypes() throws SQLException { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // set timestamp mapping to default value - statement.execute("ALTER SESSION UNSET CLIENT_TIMESTAMP_TYPE_MAPPING"); - statement.execute("create or replace table TS (ntz TIMESTAMP_NTZ, ltz TIMESTAMP_LTZ)"); - PreparedStatement prepst = 
connection.prepareStatement("insert into TS values (?, ?)"); - String date1 = "2014-01-01 16:00:00"; - String date2 = "1945-11-12 5:25:00"; - Timestamp[] testTzs = {Timestamp.valueOf(date1), Timestamp.valueOf(date2)}; - for (int i = 0; i < testTzs.length; i++) { - // Disable stage array binding and insert the timestamp values - statement.execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); // disable stage bind - prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); - prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); - prepst.addBatch(); - prepst.executeBatch(); - // Enable stage array binding and insert the same timestamp values as above - statement.execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind - prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); - prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); - prepst.addBatch(); - prepst.executeBatch(); - } - ResultSet rs = statement.executeQuery("select * from TS"); - // Get results for each timestamp value tested - for (int i = 0; i < testTzs.length; i++) { - // Assert that the first row of inserts with payload binding matches the second row of - // inserts that used stage array binding - rs.next(); - Timestamp expectedNTZTs = rs.getTimestamp(1); - Timestamp expectedLTZTs = rs.getTimestamp(2); - rs.next(); - assertEquals(expectedNTZTs, rs.getTimestamp(1)); - assertEquals(expectedLTZTs, rs.getTimestamp(2)); + try { + statement.execute("ALTER SESSION UNSET CLIENT_TIMESTAMP_TYPE_MAPPING"); + statement.execute("create or replace table TS (ntz TIMESTAMP_NTZ, ltz TIMESTAMP_LTZ)"); + PreparedStatement prepst = connection.prepareStatement("insert into TS values (?, ?)"); + String date1 = "2014-01-01 16:00:00"; + String date2 = "1945-11-12 5:25:00"; + Timestamp[] testTzs = {Timestamp.valueOf(date1), Timestamp.valueOf(date2)}; + for (int i = 0; i < testTzs.length; i++) { + 
// Disable stage array binding and insert the timestamp values + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); // disable stage bind + prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); + prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); + prepst.addBatch(); + prepst.executeBatch(); + // Enable stage array binding and insert the same timestamp values as above + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind + prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); + prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); + prepst.addBatch(); + prepst.executeBatch(); + } + try (ResultSet rs = statement.executeQuery("select * from TS")) { + // Get results for each timestamp value tested + for (int i = 0; i < testTzs.length; i++) { + // Assert that the first row of inserts with payload binding matches the second row of + // inserts that used stage array binding + assertTrue(rs.next()); + Timestamp expectedNTZTs = rs.getTimestamp(1); + Timestamp expectedLTZTs = rs.getTimestamp(2); + assertTrue(rs.next()); + assertEquals(expectedNTZTs, rs.getTimestamp(1)); + assertEquals(expectedLTZTs, rs.getTimestamp(2)); + } + } + } finally { + statement.execute("ALTER SESSION UNSET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD;"); } - - // clean up - statement.execute("ALTER SESSION UNSET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD;"); - rs.close(); - statement.close(); } } @@ -290,26 +292,29 @@ public void testSetObjectMethodWithLargeBigIntegerColumn() { @Test public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - statement.execute("alter session set TIMESTAMP_INPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FFTZH'"); - statement.execute( - "create or replace table testStageBindTypes (c1 date, c2 datetime, c3 
timestamp)"); - java.util.Date today = new java.util.Date(); - java.sql.Date sqldate = new java.sql.Date(today.getDate()); - java.sql.Timestamp todaySQL = new java.sql.Timestamp(today.getTime()); - PreparedStatement prepSt = - connection.prepareStatement("insert into testStageBindTypes values (?, ?, ?)"); - for (int i = 1; i < 30000; i++) { - prepSt.setDate(1, sqldate); - prepSt.setDate(2, sqldate); - prepSt.setTimestamp(3, todaySQL); - prepSt.addBatch(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set TIMESTAMP_INPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FFTZH'"); + statement.execute( + "create or replace table testStageBindTypes (c1 date, c2 datetime, c3 timestamp)"); + java.util.Date today = new java.util.Date(); + java.sql.Date sqldate = new java.sql.Date(today.getDate()); + java.sql.Timestamp todaySQL = new java.sql.Timestamp(today.getTime()); + try (PreparedStatement prepSt = + connection.prepareStatement("insert into testStageBindTypes values (?, ?, ?)")) { + for (int i = 1; i < 30000; i++) { + prepSt.setDate(1, sqldate); + prepSt.setDate(2, sqldate); + prepSt.setTimestamp(3, todaySQL); + prepSt.addBatch(); + } + prepSt.executeBatch(); // should not throw a parsing error. + } + } finally { + statement.execute("drop table if exists testStageBindTypes"); + statement.execute("alter session unset TIMESTAMP_INPUT_FORMAT"); } - prepSt.executeBatch(); // should not throw a parsing error. 
- statement.execute("drop table if exists testStageBindTypes"); - statement.execute("alter session unset TIMESTAMP_INPUT_FORMAT"); - statement.close(); } } @@ -322,34 +327,36 @@ public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { @Test @Ignore public void testCallStatement() throws SQLException { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); - statement.executeQuery( - "ALTER SESSION SET USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS=true"); - statement.executeQuery( - "create or replace procedure\n" - + "TEST_SP_CALL_STMT_ENABLED(in1 float, in2 variant)\n" - + "returns string language javascript as $$\n" - + "let res = snowflake.execute({sqlText: 'select ? c1, ? c2', binds:[IN1, JSON.stringify(IN2)]});\n" - + "res.next();\n" - + "return res.getColumnValueAsString(1) + ' ' + res.getColumnValueAsString(2) + ' ' + IN2;\n" - + "$$;"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.executeQuery( + "ALTER SESSION SET USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS=true"); + statement.executeQuery( + "create or replace procedure\n" + + "TEST_SP_CALL_STMT_ENABLED(in1 float, in2 variant)\n" + + "returns string language javascript as $$\n" + + "let res = snowflake.execute({sqlText: 'select ? c1, ? 
c2', binds:[IN1, JSON.stringify(IN2)]});\n" + + "res.next();\n" + + "return res.getColumnValueAsString(1) + ' ' + res.getColumnValueAsString(2) + ' ' + IN2;\n" + + "$$;"); - PreparedStatement prepStatement = - connection.prepareStatement("call TEST_SP_CALL_STMT_ENABLED(?, to_variant(?))"); - prepStatement.setDouble(1, 1); - prepStatement.setString(2, "[2,3]"); + try (PreparedStatement prepStatement = + connection.prepareStatement("call TEST_SP_CALL_STMT_ENABLED(?, to_variant(?))")) { + prepStatement.setDouble(1, 1); + prepStatement.setString(2, "[2,3]"); - ResultSet rs = prepStatement.executeQuery(); - String result = "1 \"[2,3]\" [2,3]"; - while (rs.next()) { - assertEquals(result, rs.getString(1)); + try (ResultSet rs = prepStatement.executeQuery()) { + String result = "1 \"[2,3]\" [2,3]"; + while (rs.next()) { + assertEquals(result, rs.getString(1)); + } + } + } + } finally { + statement.executeQuery( + "drop procedure if exists TEST_SP_CALL_STMT_ENABLED(float, variant)"); } - - statement.executeQuery("drop procedure if exists TEST_SP_CALL_STMT_ENABLED(float, variant)"); - rs.close(); - prepStatement.close(); - statement.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java index 71f2fe3d5..efb8ef944 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java @@ -48,7 +48,8 @@ public PreparedStatement2IT() { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchDates() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { Date dEpoch = new Date(0); Date dAfterEpoch = new Date(24 * 60 * 60 * 1000); Date dBeforeEpoch = new Date(-1 * 24 * 60 * 60 * 1000); @@ -59,16 +60,12 @@ public void testStageBatchDates() 
throws SQLException { int[] countResult; try { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement("INSERT INTO test_prepst_date(id, d) VALUES(?,?)")) { // First, run with non-stage binding - connection - .createStatement() - .execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); for (int i = 0; i < dates.length; i++) { prepStatement.setInt(1, i); prepStatement.setDate(2, dates[i]); @@ -80,22 +77,19 @@ public void testStageBatchDates() throws SQLException { } Date[] nonStageResult = new Date[dates.length]; - ResultSet rsNonStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC"); - for (int i = 0; i < nonStageResult.length; i++) { - rsNonStage.next(); - nonStageResult[i] = rsNonStage.getDate(2); - } + try (ResultSet rsNonStage = + statement.executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC")) { - connection.createStatement().execute("DELETE FROM test_prepst_date WHERE 1=1"); + for (int i = 0; i < nonStageResult.length; i++) { + assertTrue(rsNonStage.next()); + nonStageResult[i] = rsNonStage.getDate(2); + } + } + statement.execute("DELETE FROM test_prepst_date WHERE 1=1"); // Now, run with stage binding - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage // bind for (int i = 0; i < dates.length; i++) { prepStatement.setInt(1, i); @@ -108,35 +102,33 @@ public void testStageBatchDates() throws SQLException { } Date[] stageResult = new Date[dates.length]; - ResultSet rsStage = - connection - .createStatement() - 
.executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC"); - for (int i = 0; i < stageResult.length; i++) { - rsStage.next(); - stageResult[i] = rsStage.getDate(2); - } + try (ResultSet rsStage = + statement.executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC")) { + for (int i = 0; i < stageResult.length; i++) { + assertTrue(rsStage.next()); + stageResult[i] = rsStage.getDate(2); + } - for (int i = 0; i < dates.length; i++) { - assertEquals( - "Stage binding date should match non-stage binding date", - nonStageResult[i], - stageResult[i]); + for (int i = 0; i < dates.length; i++) { + assertEquals( + "Stage binding date should match non-stage binding date", + nonStageResult[i], + stageResult[i]); + } } } } finally { - connection.createStatement().execute("DROP TABLE IF EXISTS test_prepst_date"); + statement.execute("DROP TABLE IF EXISTS test_prepst_date"); } } } @Test public void testBindWithNullValue() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute( - "create or replace table testBindNull(cola date, colb time, colc timestamp, cold number)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table testBindNull(cola date, colb time, colc timestamp, cold number)"); try (PreparedStatement prepStatement = connection.prepareStatement("insert into testBindNull values (?, ?, ?, ?)")) { @@ -146,9 +138,8 @@ public void testBindWithNullValue() throws SQLException { prepStatement.setBigDecimal(4, null); prepStatement.addBatch(); prepStatement.executeBatch(); - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from testBindNull")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from testBindNull")) { + assertTrue(resultSet.next()); Date date = resultSet.getDate(1); assertNull(date); assertTrue(resultSet.wasNull()); @@ -165,7 +156,7 @@ public void 
testBindWithNullValue() throws SQLException { assertNull(bg); assertTrue(resultSet.wasNull()); } - connection.createStatement().execute("TRUNCATE table testbindnull"); + statement.execute("TRUNCATE table testbindnull"); prepStatement.setDate(1, null, Calendar.getInstance()); prepStatement.setTime(2, null, Calendar.getInstance()); prepStatement.setTimestamp(3, null, Calendar.getInstance()); @@ -174,9 +165,8 @@ public void testBindWithNullValue() throws SQLException { prepStatement.addBatch(); prepStatement.executeBatch(); - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from testBindNull")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from testBindNull")) { + assertTrue(resultSet.next()); Date date = resultSet.getDate(1); assertNull(date); assertTrue(resultSet.wasNull()); @@ -195,20 +185,20 @@ public void testBindWithNullValue() throws SQLException { @Test public void testPrepareDDL() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { try { try (PreparedStatement prepStatement = connection.prepareStatement("create or replace table testprepareddl(cola number)")) { prepStatement.execute(); } - try (ResultSet resultSet = - connection.createStatement().executeQuery("show tables like 'testprepareddl'")) { + try (ResultSet resultSet = statement.executeQuery("show tables like 'testprepareddl'")) { // result should only have one row since table is created assertThat(resultSet.next(), is(true)); assertThat(resultSet.next(), is(false)); } } finally { - connection.createStatement().execute("drop table if exists testprepareddl"); + statement.execute("drop table if exists testprepareddl"); } } } @@ -236,7 +226,7 @@ public void testPrepareTCL() throws SQLException { for (String testCase : testCases) { try (PreparedStatement prepStatement = connection.prepareStatement(testCase)) { try (ResultSet 
resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getString(1), is("Statement executed successfully.")); } } @@ -266,26 +256,26 @@ public void testPrepareShowCommand() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPrepareTimeout() throws SQLException, InterruptedException { - try (Connection adminCon = getSnowflakeAdminConnection()) { - adminCon.createStatement().execute("alter system set enable_combined_describe=true"); + try (Connection adminCon = getSnowflakeAdminConnection(); + Statement adminStatement = adminCon.createStatement()) { + adminStatement.execute("alter system set enable_combined_describe=true"); try { - try (Connection connection = init()) { - connection.createStatement().execute("create or replace table t(c1 string) as select 1"); - connection - .createStatement() - .execute("alter session set jdbc_enable_combined_describe=true"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table t(c1 string) as select 1"); + statement.execute("alter session set jdbc_enable_combined_describe=true"); try (PreparedStatement prepStatement = connection.prepareStatement("select c1 from t order by c1 limit 1")) { Thread.sleep(5000); try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getInt(1), is(1)); } } - connection.createStatement().execute("drop table if exists t"); + statement.execute("drop table if exists t"); } } finally { - adminCon.createStatement().execute("alter system set enable_combined_describe=default"); + adminStatement.execute("alter system set enable_combined_describe=default"); } } } @@ -293,40 +283,41 @@ public void testPrepareTimeout() throws SQLException, InterruptedException { /** Test case to make sure 2 non null bind refs was not 
constant folded into one */ @Test public void testSnow36284() throws Exception { - Connection connection = init(); - String query = "select * from (values ('a'), ('b')) x where x.COLUMN1 in (?,?);"; - PreparedStatement preparedStatement = connection.prepareStatement(query); - preparedStatement.setString(1, "a"); - preparedStatement.setString(2, "b"); - ResultSet rs = preparedStatement.executeQuery(); - int rowcount = 0; - Set valuesReturned = Sets.newHashSetWithExpectedSize(2); - while (rs.next()) { - rowcount++; - valuesReturned.add(rs.getString(1)); + + try (Connection connection = init(); + PreparedStatement preparedStatement = connection.prepareStatement(query)) { + preparedStatement.setString(1, "a"); + preparedStatement.setString(2, "b"); + try (ResultSet rs = preparedStatement.executeQuery()) { + int rowcount = 0; + Set valuesReturned = Sets.newHashSetWithExpectedSize(2); + while (rs.next()) { + rowcount++; + valuesReturned.add(rs.getString(1)); + } + assertEquals("Should get back 2 rows", 2, rowcount); + assertEquals("", valuesReturned, Sets.newHashSet("a", "b")); + } } - assertEquals("Should get back 2 rows", 2, rowcount); - assertEquals("", valuesReturned, Sets.newHashSet("a", "b")); } /** Test for coalesce with bind and null arguments in a prepared statement */ @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSnow35923() throws Exception { - try (Connection connection = init()) { - connection - .createStatement() - .execute("alter session set " + "optimizer_eliminate_scans_for_constant_select=false"); - connection.createStatement().execute("create or replace table inc(a int, b int)"); - connection - .createStatement() - .execute("insert into inc(a, b) values (1, 2), " + "(NULL, 4), (5,NULL), (7,8)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + "optimizer_eliminate_scans_for_constant_select=false"); + 
statement.execute("create or replace table inc(a int, b int)"); + statement.execute("insert into inc(a, b) values (1, 2), " + "(NULL, 4), (5,NULL), (7,8)"); // Query used to cause an incident. - PreparedStatement preparedStatement = - connection.prepareStatement("SELECT coalesce(?, NULL) from inc;"); - preparedStatement.setInt(1, 0); - ResultSet rs = preparedStatement.executeQuery(); + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT coalesce(?, NULL) from inc;")) { + preparedStatement.setInt(1, 0); + try (ResultSet rs = preparedStatement.executeQuery()) {} + } } } @@ -337,162 +328,180 @@ public void testSnow35923() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testBindObjectLiteral() throws Exception { - try (Connection conn = init()) { - Statement stmt = conn.createStatement(); + long t1Id = 0; + long t2Id = 0; + String t1 = null; - String sqlText = "create or replace table identifier(?) (c1 number)"; - SnowflakePreparedStatementV1 pStmt = - (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - String t1 = "bindObjectTable1"; - // Bind the table name - pStmt.setString(1, t1); - ResultSet result = pStmt.executeQuery(); + try (Connection conn = init(); + Statement stmt = conn.createStatement()) { + String sqlText = "create or replace table identifier(?) 
(c1 number)"; + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + t1 = "bindObjectTable1"; + // Bind the table name + pStmt.setString(1, t1); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify the table has been created and get the table ID stmt.execute("select parse_json(system$dict_id('table', '" + t1 + "')):entityId;"); - result = stmt.getResultSet(); - - long t1Id = 0; - if (result.next()) { - t1Id = Long.valueOf(result.getString(1)); + try (ResultSet result = stmt.getResultSet()) { + if (result.next()) { + t1Id = Long.valueOf(result.getString(1)); + } + assertTrue(t1Id != 0); } - assertTrue(t1Id != 0); - // Mix of object literal binds and value binds sqlText = "insert into identifier(?) values (1), (2), (3)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - // Bind by object IDs - pStmt.setLong(1, t1Id); - - result = pStmt.executeQuery(); - + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + // Bind by object IDs + pStmt.setLong(1, t1Id); + try (ResultSet result = pStmt.executeQuery()) {} + } // Perform some selection sqlText = "select * from identifier(?) order by 1"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t1); - result = pStmt.executeQuery(); - // Verify 3 rows have been inserted - for (int i = 0; i < 3; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t1); + try (ResultSet result = pStmt.executeQuery()) { + // Verify 3 rows have been inserted + for (int i = 0; i < 3; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); - // Alter Table sqlText = "alter table identifier(?) 
add column c2 number"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - pStmt.setLong(1, t1Id); - result = pStmt.executeQuery(); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setLong(1, t1Id); + try (ResultSet result = pStmt.executeQuery()) {} + } // Describe sqlText = "desc table identifier(?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t1); - result = pStmt.executeQuery(); - // Verify two columns have been created - for (int i = 0; i < 2; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t1); + try (ResultSet result = pStmt.executeQuery()) { + // Verify two columns have been created + for (int i = 0; i < 2; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); // Create another table String t2 = "bindObjectTable2"; sqlText = "create or replace table identifier(?) 
(c1 number)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t2); - result = pStmt.executeQuery(); - + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t2); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify the table has been created and get the table ID stmt.execute("select parse_json(system$dict_id('table', '" + t2 + "')):entityId;"); - result = stmt.getResultSet(); - - long t2Id = 0; - if (result.next()) { - t2Id = Long.valueOf(result.getString(1)); + try (ResultSet result = stmt.getResultSet()) { + if (result.next()) { + t2Id = Long.valueOf(result.getString(1)); + } + assertTrue(t2Id != 0); } - assertTrue(t2Id != 0); - // Mix object binds with value binds sqlText = "insert into identifier(?) values (?), (?), (?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t2); - pStmt.setInt(2, 1); - pStmt.setInt(3, 2); - pStmt.setInt(4, 3); - result = pStmt.executeQuery(); - + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t2); + pStmt.setInt(2, 1); + pStmt.setInt(3, 2); + pStmt.setInt(4, 3); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify that 3 rows have been inserted sqlText = "select * from identifier(?) 
order by 1"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - pStmt.setLong(1, t2Id); - result = pStmt.executeQuery(); - for (int i = 0; i < 3; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setLong(1, t2Id); + try (ResultSet result = pStmt.executeQuery()) { + for (int i = 0; i < 3; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); // Multiple Object Binds sqlText = "select t2.c1 from identifier(?) as t1, identifier(?) as t2 " + "where t1.c1 = t2.c1 and t1.c1 > (?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - pStmt.setString(1, t1); - pStmt.setLong(2, t2Id); - pStmt.setInt(3, 1); - result = pStmt.executeQuery(); - for (int i = 0; i < 2; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setString(1, t1); + pStmt.setLong(2, t2Id); + pStmt.setInt(3, 1); + try (ResultSet result = pStmt.executeQuery()) { + for (int i = 0; i < 2; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); // Drop Tables sqlText = "drop table identifier(?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, "bindObjectTable1"); - result = pStmt.executeQuery(); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, "bindObjectTable1"); + try (ResultSet result = pStmt.executeQuery()) {} + } sqlText = "drop table identifier(?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - 
pStmt.setParameter("resolve_object_ids", true); - pStmt.setLong(1, t2Id); - result = pStmt.executeQuery(); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setLong(1, t2Id); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify that the tables have been dropped stmt.execute("show tables like 'bindobjecttable%'"); - result = stmt.getResultSet(); - assertFalse(result.next()); + try (ResultSet result = stmt.getResultSet()) { + assertFalse(result.next()); + } } } @Test public void testBindTimestampTZViaString() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute( - "alter session set timestamp_tz_output_format='YYYY-MM" - + "-DD HH24:MI:SS.FF9 TZHTZM'"); - connection - .createStatement() - .execute("create or replace table testbindtstz(cola timestamp_tz)"); - - try (PreparedStatement preparedStatement = - connection.prepareStatement("insert into testbindtstz values(?)")) { - preparedStatement.setString(1, "2017-11-30T18:17:05.123456789+08:00"); - int count = preparedStatement.executeUpdate(); - assertThat(count, is(1)); - } - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from testbindtstz")) { - assertTrue(resultSet.next()); - assertThat(resultSet.getString(1), is("2017-11-30 18:17:05.123456789 +0800")); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "alter session set timestamp_tz_output_format='YYYY-MM" + "-DD HH24:MI:SS.FF9 TZHTZM'"); + statement.execute("create or replace table testbindtstz(cola timestamp_tz)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testbindtstz values(?)")) { + preparedStatement.setString(1, "2017-11-30T18:17:05.123456789+08:00"); + int count = preparedStatement.executeUpdate(); + assertThat(count, 
is(1)); + } + try (ResultSet resultSet = statement.executeQuery("select * from testbindtstz")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), is("2017-11-30 18:17:05.123456789 +0800")); + } + } finally { + statement.execute("drop table if exists testbindtstz"); } - connection.createStatement().execute("drop table if exists testbindtstz"); } } @@ -503,41 +512,40 @@ public void testBindTimestampTZViaString() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testBindTimestampTZViaStringBatch() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind - connection - .createStatement() - .execute("create or replace table testbindtstz(cola timestamp_tz, colb timestamp_ntz)"); - - try (PreparedStatement preparedStatement = - connection.prepareStatement("insert into testbindtstz values(?,?)")) { - - preparedStatement.setString(1, "2017-11-30 18:17:05.123456789 +08:00"); - preparedStatement.setString(2, "2017-11-30 18:17:05.123456789"); - preparedStatement.addBatch(); - preparedStatement.setString(1, "2017-05-03 16:44:42.0"); - preparedStatement.setString(2, "2017-05-03 16:44:42.0"); - preparedStatement.addBatch(); - int[] count = preparedStatement.executeBatch(); - assertThat(count[0], is(1)); - - try (ResultSet resultSet = - connection - .createStatement() - .executeQuery("select * from testbindtstz order by 1 desc")) { - assertTrue(resultSet.next()); - assertThat(resultSet.getString(1), is("Thu, 30 Nov 2017 18:17:05 +0800")); - assertThat(resultSet.getString(2), is("Thu, 30 Nov 2017 18:17:05 Z")); - - assertTrue(resultSet.next()); - assertThat(resultSet.getString(1), is("Wed, 03 May 2017 16:44:42 -0700")); - assertThat(resultSet.getString(2), is("Wed, 03 May 2017 16:44:42 Z")); + try (Connection connection = init(); + Statement statement = 
connection.createStatement()) { + try { + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind + statement.execute( + "create or replace table testbindtstz(cola timestamp_tz, colb timestamp_ntz)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testbindtstz values(?,?)")) { + + preparedStatement.setString(1, "2017-11-30 18:17:05.123456789 +08:00"); + preparedStatement.setString(2, "2017-11-30 18:17:05.123456789"); + preparedStatement.addBatch(); + preparedStatement.setString(1, "2017-05-03 16:44:42.0"); + preparedStatement.setString(2, "2017-05-03 16:44:42.0"); + preparedStatement.addBatch(); + int[] count = preparedStatement.executeBatch(); + assertThat(count[0], is(1)); + + try (ResultSet resultSet = + statement.executeQuery("select * from testbindtstz order by 1 desc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), is("Thu, 30 Nov 2017 18:17:05 +0800")); + assertThat(resultSet.getString(2), is("Thu, 30 Nov 2017 18:17:05 Z")); + + assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), is("Wed, 03 May 2017 16:44:42 -0700")); + assertThat(resultSet.getString(2), is("Wed, 03 May 2017 16:44:42 Z")); + } } + } finally { + statement.execute("drop table if exists testbindtstz"); } - connection.createStatement().execute("drop table if exists testbindtstz"); } } @@ -549,14 +557,11 @@ public void testBindTimestampTZViaStringBatch() throws SQLException { */ @Test public void testSnow41620() throws Exception { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // Create a table and insert 3 records - connection - .createStatement() - .execute("CREATE or REPLACE TABLE SNOW41620 (c1 varchar(20)," + "c2 int" + " )"); - connection - .createStatement() - .execute("insert into SNOW41620 values('value1', 1), ('value2', 2), ('value3', 3)"); + statement.execute("CREATE 
or REPLACE TABLE SNOW41620 (c1 varchar(20)," + "c2 int" + " )"); + statement.execute("insert into SNOW41620 values('value1', 1), ('value2', 2), ('value3', 3)"); String PARAMETERIZED_QUERY = "SELECT t0.C1, " @@ -564,28 +569,24 @@ public void testSnow41620() throws Exception { + "CASE WHEN t0.C1 IN (?, ?) THEN t0.C2 ELSE null END " + "FROM SNOW41620 t0"; - ResultSet bindStmtResultSet; try (PreparedStatement pst = connection.prepareStatement(PARAMETERIZED_QUERY)) { // bind values pst.setObject(1, "value1"); pst.setObject(2, "value3"); pst.setObject(3, "value2"); pst.setObject(4, "value3"); - bindStmtResultSet = pst.executeQuery(); - - // Execute the same query with bind values replaced in the sql - String DIRECT_QUERY = - "SELECT t0.C1, " - + "CASE WHEN t0.C1 IN ('value1', 'value3') THEN t0.C2 ELSE null END," - + "CASE WHEN t0.C1 IN ('value2', 'value3') THEN t0.C2 ELSE null END " - + "FROM SNOW41620 t0"; - try (PreparedStatement pst1 = connection.prepareStatement(DIRECT_QUERY)) { - ResultSet directStmtResultSet = pst1.executeQuery(); - - checkResultSetEqual(bindStmtResultSet, directStmtResultSet); - - bindStmtResultSet.close(); - directStmtResultSet.close(); + try (ResultSet bindStmtResultSet = pst.executeQuery()) { + + // Execute the same query with bind values replaced in the sql + String DIRECT_QUERY = + "SELECT t0.C1, " + + "CASE WHEN t0.C1 IN ('value1', 'value3') THEN t0.C2 ELSE null END," + + "CASE WHEN t0.C1 IN ('value2', 'value3') THEN t0.C2 ELSE null END " + + "FROM SNOW41620 t0"; + try (PreparedStatement pst1 = connection.prepareStatement(DIRECT_QUERY); + ResultSet directStmtResultSet = pst1.executeQuery()) { + checkResultSetEqual(bindStmtResultSet, directStmtResultSet); + } } } } @@ -650,8 +651,9 @@ public void testPreparedStatementWithSkipParsing() throws Exception { @Test public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { - try (Connection con = init()) { - con.createStatement().execute("create or replace table t(c1 int)"); + 
try (Connection con = init(); + Statement statement = con.createStatement()) { + statement.execute("create or replace table t(c1 int)"); try { try (PreparedStatement stmt = con.unwrap(SnowflakeConnectionV1.class) @@ -661,13 +663,13 @@ public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { assertThat(ret, is(1)); } try (PreparedStatement stmt = - con.unwrap(SnowflakeConnectionV1.class).prepareStatement("select * from t", true)) { - ResultSet rs = stmt.executeQuery(); + con.unwrap(SnowflakeConnectionV1.class).prepareStatement("select * from t", true); + ResultSet rs = stmt.executeQuery()) { assertThat(rs.next(), is(true)); assertThat(rs.getInt(1), is(123)); } } finally { - con.createStatement().execute("drop table if exists t"); + statement.execute("drop table if exists t"); } } } @@ -685,11 +687,12 @@ public void testSnow44393() throws Exception { .execute("alter session set timestamp_ntz_output_format='YYYY-MM-DD HH24:MI:SS'")); try (PreparedStatement stmt = con.prepareStatement("select to_timestamp_ntz(?, 3)")) { stmt.setBigDecimal(1, new BigDecimal("1261440000000")); - ResultSet resultSet = stmt.executeQuery(); - resultSet.next(); + try (ResultSet resultSet = stmt.executeQuery()) { + assertTrue(resultSet.next()); - String res = resultSet.getString(1); - assertThat(res, is("2009-12-22 00:00:00")); + String res = resultSet.getString(1); + assertThat(res, is("2009-12-22 00:00:00")); + } } } } @@ -775,36 +778,38 @@ public void testAddBatchNumericNullFloatMixed() throws Exception { @Test public void testInvalidUsageOfApi() throws Exception { - Connection connection = init(); - final PreparedStatement preparedStatement = connection.prepareStatement("select 1"); - final int expectedCode = ErrorCode.UNSUPPORTED_STATEMENT_TYPE_IN_EXECUTION_API.getMessageCode(); - - assertException( - new RunnableWithSQLException() { - @Override - public void run() throws SQLException { - preparedStatement.executeUpdate("select 1"); - } - }, - expectedCode); - - 
assertException( - new RunnableWithSQLException() { - @Override - public void run() throws SQLException { - preparedStatement.execute("select 1"); - } - }, - expectedCode); - - assertException( - new RunnableWithSQLException() { - @Override - public void run() throws SQLException { - preparedStatement.addBatch("select 1"); - } - }, - expectedCode); + try (Connection connection = init(); + PreparedStatement preparedStatement = connection.prepareStatement("select 1")) { + final int expectedCode = + ErrorCode.UNSUPPORTED_STATEMENT_TYPE_IN_EXECUTION_API.getMessageCode(); + + assertException( + new RunnableWithSQLException() { + @Override + public void run() throws SQLException { + preparedStatement.executeUpdate("select 1"); + } + }, + expectedCode); + + assertException( + new RunnableWithSQLException() { + @Override + public void run() throws SQLException { + preparedStatement.execute("select 1"); + } + }, + expectedCode); + + assertException( + new RunnableWithSQLException() { + @Override + public void run() throws SQLException { + preparedStatement.addBatch("select 1"); + } + }, + expectedCode); + } } private void assertException(RunnableWithSQLException runnable, int expectedCode) { diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java index 84e5f20ea..f7ca395de 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java @@ -41,23 +41,20 @@ public PreparedStatement2LatestIT() { @Test public void testPrepareUDTF() throws Exception { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { try { - connection - .createStatement() - .execute("create or replace table employee(id number, address text)"); - connection - .createStatement() - .execute( - "create or replace function 
employee_detail(sid number, addr text)\n" - + " returns table(id number, address text)\n" - + "LANGUAGE SQL\n" - + "as\n" - + "$$\n" - + "select *\n" - + "from employee\n" - + "where id=sid\n" - + "$$;"); + statement.execute("create or replace table employee(id number, address text)"); + statement.execute( + "create or replace function employee_detail(sid number, addr text)\n" + + " returns table(id number, address text)\n" + + "LANGUAGE SQL\n" + + "as\n" + + "$$\n" + + "select *\n" + + "from employee\n" + + "where id=sid\n" + + "$$;"); // should resolve successfully try (PreparedStatement prepStatement = @@ -87,17 +84,15 @@ public void testPrepareUDTF() throws Exception { } // create a udf with same name but different arguments and return type - connection - .createStatement() - .execute( - "create or replace function employee_detail(name text , addr text)\n" - + " returns table(id number)\n" - + "LANGUAGE SQL\n" - + "as\n" - + "$$\n" - + "select id\n" - + "from employee\n" - + "$$;"); + statement.execute( + "create or replace function employee_detail(name text , addr text)\n" + + " returns table(id number)\n" + + "LANGUAGE SQL\n" + + "as\n" + + "$$\n" + + "select id\n" + + "from employee\n" + + "$$;"); try (PreparedStatement prepStatement = connection.prepareStatement("select * from table(employee_detail(?, 'abc'))")) { @@ -105,10 +100,8 @@ public void testPrepareUDTF() throws Exception { prepStatement.execute(); } } finally { - connection - .createStatement() - .execute("drop function if exists employee_detail(number, text)"); - connection.createStatement().execute("drop function if exists employee_detail(text, text)"); + statement.execute("drop function if exists employee_detail(number, text)"); + statement.execute("drop function if exists employee_detail(text, text)"); } } } @@ -119,38 +112,34 @@ public void testPrepareUDTF() throws Exception { */ @Test public void testSelectWithBinding() throws Throwable { - try (Connection connection = init()) { - connection 
- .createStatement() - .execute("create or replace table TESTNULL(created_time timestamp_ntz, mid int)"); - PreparedStatement ps; - ResultSet rs; + try (Connection connection = init(); + Statement statement = connection.createStatement()) { try { + statement.execute("create or replace table TESTNULL(created_time timestamp_ntz, mid int)"); // skip bind parameter index check if prepare fails and defer the error checks to execute - ps = + try (PreparedStatement ps = connection.prepareStatement( - "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?, 3) and MID = ?"); - ps.setObject(1, 0); - ps.setObject(2, null); - rs = ps.executeQuery(); - assertFalse(rs.next()); - rs.close(); - ps.close(); + "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?, 3) and MID = ?")) { + ps.setObject(1, 0); + ps.setObject(2, null); + try (ResultSet rs = ps.executeQuery()) { + assertFalse(rs.next()); + } + } // describe is success and do the index range check - ps = + try (PreparedStatement ps = connection.prepareStatement( - "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?::NUMBER, 3) and MID = ?"); - ps.setObject(1, 0); - ps.setObject(2, null); - - rs = ps.executeQuery(); - assertFalse(rs.next()); - rs.close(); - ps.close(); + "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?::NUMBER, 3) and MID = ?")) { + ps.setObject(1, 0); + ps.setObject(2, null); + try (ResultSet rs = ps.executeQuery()) { + assertFalse(rs.next()); + } + } } finally { - connection.createStatement().execute("drop table if exists TESTNULL"); + statement.execute("drop table if exists TESTNULL"); } } } @@ -175,7 +164,7 @@ public void testConstOptLimitBind() throws SQLException { prepStatement.setInt(1, 10); prepStatement.setInt(2, 0); try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getInt(1), is(1)); assertThat(resultSet.next(), is(false)); } @@ -198,8 +187,9 @@ public void testTableFuncBindInput() 
throws SQLException { @Test public void testExecuteLargeBatch() throws SQLException { - try (Connection con = init()) { - try (Statement statement = con.createStatement()) { + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { statement.execute("create or replace table mytab(id int)"); try (PreparedStatement pstatement = con.prepareStatement("insert into mytab(id) values (?)")) { @@ -212,117 +202,138 @@ public void testExecuteLargeBatch() throws SQLException { pstatement.executeLargeBatch(); con.commit(); try (ResultSet resultSet = statement.executeQuery("select * from mytab")) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(4, resultSet.getInt(1)); } - statement.execute("drop table if exists mytab"); } + } finally { + statement.execute("drop table if exists mytab"); } } } @Test public void testRemoveExtraDescribeCalls() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_uuid_with_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_uuid_with_bind values (?)"); - preparedStatement.setInt(1, 5); - assertEquals(1, preparedStatement.executeUpdate()); - String queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - // Calling getMetadata() should no longer require an additional server call because we have the - // metadata form the executeUpdate - String queryId2 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - // Assert the query IDs are the same. This will be the case if there is no additional describe - // call for getMetadata(). 
- assertEquals(queryId1, queryId2); - - preparedStatement.addBatch(); - - preparedStatement = - connection.prepareStatement("select * from test_uuid_with_bind where c1 = ?"); - assertFalse(preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - preparedStatement.setInt(1, 5); - - ResultSet resultSet = preparedStatement.executeQuery(); - assertThat(resultSet.next(), is(true)); - queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - queryId2 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - String queryId3 = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); - // Assert all 3 query IDs are the same because only 1 server call was executed - assertEquals(queryId1, queryId2); - assertEquals(queryId1, queryId3); - - resultSet.close(); - preparedStatement.close(); - - statement.execute("drop table if exists test_uuid_with_bind"); - connection.close(); + String queryId1 = null; + String queryId2 = null; + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_uuid_with_bind(c1 number)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_uuid_with_bind values (?)")) { + preparedStatement.setInt(1, 5); + assertEquals(1, preparedStatement.executeUpdate()); + queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + // Calling getMetadata() should no longer require an additional server call because we + // have + // the + // metadata form the executeUpdate + queryId2 = + preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); + // Assert the query IDs are the same. This will be the case if there is no additional + // describe + // call for getMetadata(). 
+ assertEquals(queryId1, queryId2); + + preparedStatement.addBatch(); + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("select * from test_uuid_with_bind where c1 = ?")) { + assertFalse( + preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + preparedStatement.setInt(1, 5); + + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertThat(resultSet.next(), is(true)); + queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + queryId2 = + preparedStatement + .getMetaData() + .unwrap(SnowflakeResultSetMetaData.class) + .getQueryID(); + String queryId3 = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); + // Assert all 3 query IDs are the same because only 1 server call was executed + assertEquals(queryId1, queryId2); + assertEquals(queryId1, queryId3); + } + } + } finally { + statement.execute("drop table if exists test_uuid_with_bind"); + } + } } @Test public void testRemoveExtraDescribeCallsSanityCheck() throws SQLException { - Connection connection = init(); - PreparedStatement preparedStatement = - connection.prepareStatement( - "create or replace table test_uuid_with_bind(c1 number, c2 string)"); - preparedStatement.execute(); - String queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - preparedStatement = - connection.prepareStatement("insert into test_uuid_with_bind values (?, ?)"); - assertFalse(preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - preparedStatement.setInt(1, 5); - preparedStatement.setString(2, "hello"); - preparedStatement.addBatch(); - preparedStatement.setInt(1, 7); - preparedStatement.setString(2, "hello1"); - preparedStatement.addBatch(); - String queryId2 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - // These query IDs should not match because they are from 2 different prepared statements - 
assertNotEquals(queryId1, queryId2); - preparedStatement.executeBatch(); - String queryId3 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - // Another execute call was created, so prepared statement has new query ID - assertNotEquals(queryId2, queryId3); - // Calling getMetadata() should no longer require an additional server call because we have the - // metadata form the executeUpdate - String queryId4 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - // Assert the query IDs for the 2 identical getMetadata() calls are the same. They should match - // since metadata no longer gets overwritten after successive query calls. - assertEquals(queryId2, queryId4); - - connection.createStatement().execute("drop table if exists test_uuid_with_bind"); - preparedStatement.close(); - connection.close(); + String queryId1; + try (Connection connection = init()) { + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "create or replace table test_uuid_with_bind(c1 number, c2 string)")) { + preparedStatement.execute(); + queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_uuid_with_bind values (?, ?)")) { + assertFalse( + preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + preparedStatement.setInt(1, 5); + preparedStatement.setString(2, "hello"); + preparedStatement.addBatch(); + preparedStatement.setInt(1, 7); + preparedStatement.setString(2, "hello1"); + preparedStatement.addBatch(); + String queryId2 = + preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); + // These query IDs should not match because they are from 2 different prepared statements + assertNotEquals(queryId1, queryId2); + preparedStatement.executeBatch(); + String queryId3 = 
preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + // Another execute call was created, so prepared statement has new query ID + assertNotEquals(queryId2, queryId3); + // Calling getMetadata() should no longer require an additional server call because we + // have + // the + // metadata form the executeUpdate + String queryId4 = + preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); + // Assert the query IDs for the 2 identical getMetadata() calls are the same. They should + // match + // since metadata no longer gets overwritten after successive query calls. + assertEquals(queryId2, queryId4); + connection.createStatement().execute("drop table if exists test_uuid_with_bind"); + } + } } @Test public void testAlreadyDescribedMultipleResults() throws SQLException { - Connection connection = init(); - PreparedStatement prepStatement = connection.prepareStatement(insertSQL); - bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); - prepStatement.execute(); - // The statement above has already been described since it has been executed - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - prepStatement = connection.prepareStatement(selectSQL); - // Assert the statement, once it has been re-created, has already described set to false - assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - prepStatement.setInt(1, 1); - ResultSet rs = prepStatement.executeQuery(); - assertTrue(rs.next()); - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - prepStatement = connection.prepareStatement(selectAllSQL); - // Assert the statement, once it has been re-created, has already described set to false - assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - rs = prepStatement.executeQuery(); - assertTrue(rs.next()); - 
assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + try (Connection connection = init()) { + try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { + bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); + prepStatement.execute(); + // The statement above has already been described since it has been executed + assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + } + try (PreparedStatement prepStatement = connection.prepareStatement(selectSQL)) { + // Assert the statement, once it has been re-created, has already described set to false + assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + prepStatement.setInt(1, 1); + try (ResultSet rs = prepStatement.executeQuery()) { + assertTrue(rs.next()); + assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + } + } + try (PreparedStatement prepStatement = connection.prepareStatement(selectAllSQL)) { + // Assert the statement, once it has been re-created, has already described set to false + assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + try (ResultSet rs = prepStatement.executeQuery()) { + assertTrue(rs.next()); + assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + } + } + } } /** @@ -333,40 +344,48 @@ public void testAlreadyDescribedMultipleResults() throws SQLException { */ @Test public void testConsecutiveBatchInsertError() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("create or replace table testStageArrayBind(c1 integer, c2 string)"); - PreparedStatement prepStatement = - connection.prepareStatement("insert into testStageArrayBind values (?, ?)"); - // Assert to begin with that before the describe call, array binding is not supported - 
assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); - // Insert enough rows to hit the default binding array threshold - for (int i = 0; i < 35000; i++) { - prepStatement.setInt(1, i); - prepStatement.setString(2, "test" + i); - prepStatement.addBatch(); - } - prepStatement.executeBatch(); - // After executing the first batch, verify that array bind support is still true - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); - for (int i = 0; i < 35000; i++) { - prepStatement.setInt(1, i); - prepStatement.setString(2, "test" + i); - prepStatement.addBatch(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table testStageArrayBind(c1 integer, c2 string)"); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into testStageArrayBind values (?, ?)")) { + // Assert to begin with that before the describe call, array binding is not supported + assertFalse( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + assertFalse( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); + // Insert enough rows to hit the default binding array threshold + for (int i = 0; i < 35000; i++) { + prepStatement.setInt(1, i); + prepStatement.setString(2, "test" + i); + prepStatement.addBatch(); + } + prepStatement.executeBatch(); + // After executing the first batch, verify that array bind support is still true + assertTrue( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); + for (int i = 0; i < 35000; i++) { + prepStatement.setInt(1, i); + prepStatement.setString(2, "test" + i); + prepStatement.addBatch(); + } + prepStatement.executeBatch(); + // After executing the second batch, verify that array bind 
support is still true + assertTrue( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); + } + } finally { + statement.execute("drop table if exists testStageArrayBind"); } - prepStatement.executeBatch(); - // After executing the second batch, verify that array bind support is still true - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); } } @Test public void testToString() throws SQLException { - try (Connection connection = init()) { - PreparedStatement prepStatement = - connection.prepareStatement("select current_version() --testing toString()"); + try (Connection connection = init(); + PreparedStatement prepStatement = + connection.prepareStatement("select current_version() --testing toString()")) { // Query ID is going to be null since we didn't execute the statement yet assertEquals( diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java index 9311d1f96..f80a00528 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java @@ -14,10 +14,8 @@ public class PreparedStatementFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { - try (Connection connection = getConnection()) { - PreparedStatement preparedStatement = connection.prepareStatement("select ?"); - expectFeatureNotSupportedException( - () -> preparedStatement.setArray(1, new BaseJDBCTest.FakeArray())); + try (Connection connection = getConnection(); + PreparedStatement preparedStatement = connection.prepareStatement("select ?")) { expectFeatureNotSupportedException( () -> preparedStatement.setAsciiStream(1, new BaseJDBCTest.FakeInputStream())); expectFeatureNotSupportedException( diff --git 
a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java index a1dfbb81c..883fe0c4d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java @@ -9,6 +9,7 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.sql.Statement; import java.util.Map; import net.snowflake.client.ConditionalIgnoreRule; import net.snowflake.client.RunningOnGithubAction; @@ -29,26 +30,31 @@ public class PreparedStatementLargeUpdateLatestIT extends BaseJDBCTest { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testLargeUpdate() throws Throwable { - try (Connection con = getConnection()) { - long expectedUpdateRows = (long) Integer.MAX_VALUE + 10L; - con.createStatement().execute("create or replace table test_large_update(c1 boolean)"); - PreparedStatement st = - con.prepareStatement( - "insert into test_large_update select true from table(generator(rowcount=>" - + expectedUpdateRows - + "))"); - PreparedStatement spyp = spy(st); - // Mock internal method which returns rowcount - Mockito.doReturn(expectedUpdateRows) - .when((SnowflakePreparedStatementV1) spyp) - .executeUpdateInternal( - Mockito.any(String.class), - Mockito.any(Map.class), - Mockito.any(boolean.class), - Mockito.any(ExecTimeTelemetryData.class)); - long updatedRows = spyp.executeLargeUpdate(); - assertEquals(expectedUpdateRows, updatedRows); - con.createStatement().execute("drop table if exists test_large_update"); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + long expectedUpdateRows = (long) Integer.MAX_VALUE + 10L; + statement.execute("create or replace table test_large_update(c1 boolean)"); + try (PreparedStatement st = + 
con.prepareStatement( + "insert into test_large_update select true from table(generator(rowcount=>" + + expectedUpdateRows + + "))"); + PreparedStatement spyp = spy(st)) { + // Mock internal method which returns rowcount + Mockito.doReturn(expectedUpdateRows) + .when((SnowflakePreparedStatementV1) spyp) + .executeUpdateInternal( + Mockito.any(String.class), + Mockito.any(Map.class), + Mockito.any(boolean.class), + Mockito.any(ExecTimeTelemetryData.class)); + long updatedRows = spyp.executeLargeUpdate(); + assertEquals(expectedUpdateRows, updatedRows); + } + } finally { + statement.execute("drop table if exists test_large_update"); + } } } @@ -60,26 +66,30 @@ public void testLargeUpdate() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testExecuteLargeBatchOverIntMax() throws SQLException { - try (Connection connection = getConnection()) { - connection - .createStatement() - .execute("create or replace table over_int_table (val string, id int)"); - PreparedStatement pstmt = connection.prepareStatement("UPDATE over_int_table SET ID=200"); - PreparedStatement spyp = spy(pstmt); - long numRows = Integer.MAX_VALUE + 10L; - // Mock internal method which returns rowcount - Mockito.doReturn(numRows) - .when((SnowflakePreparedStatementV1) spyp) - .executeUpdateInternal( - Mockito.any(String.class), - Mockito.any(Map.class), - Mockito.any(boolean.class), - Mockito.any(ExecTimeTelemetryData.class)); - pstmt.addBatch(); - long[] queryResult = spyp.executeLargeBatch(); - assertEquals(1, queryResult.length); - assertEquals(numRows, queryResult[0]); - connection.createStatement().execute("drop table if exists over_int_table"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table over_int_table (val string, id int)"); + try (PreparedStatement pstmt = + connection.prepareStatement("UPDATE over_int_table SET 
ID=200"); + PreparedStatement spyp = spy(pstmt)) { + long numRows = Integer.MAX_VALUE + 10L; + // Mock internal method which returns rowcount + Mockito.doReturn(numRows) + .when((SnowflakePreparedStatementV1) spyp) + .executeUpdateInternal( + Mockito.any(String.class), + Mockito.any(Map.class), + Mockito.any(boolean.class), + Mockito.any(ExecTimeTelemetryData.class)); + pstmt.addBatch(); + long[] queryResult = spyp.executeLargeBatch(); + assertEquals(1, queryResult.length); + assertEquals(numRows, queryResult[0]); + } + } finally { + statement.execute("drop table if exists over_int_table"); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java index e421aebff..5cd03355c 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java @@ -11,6 +11,7 @@ import java.io.FileOutputStream; import java.sql.Connection; import java.sql.ResultSet; +import java.sql.Statement; import net.snowflake.client.TestUtil; import net.snowflake.client.category.TestCategoryOthers; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -49,52 +50,52 @@ public void putFileWithSpaceIncluded() throws Exception { TarArchiveEntry tarEntry; while ((tarEntry = tis.getNextTarEntry()) != null) { File outputFile = new File(dataFolder, tarEntry.getName()); - FileOutputStream fos = new FileOutputStream(outputFile); - IOUtils.copy(tis, fos); - fos.close(); + try (FileOutputStream fos = new FileOutputStream(outputFile)) { + IOUtils.copy(tis, fos); + } } - try (Connection con = getConnection()) { - con.createStatement() - .execute( - "create or replace stage snow13400 url='s3://" - + SF_AWS_USER_BUCKET - + "/snow13400'" - + "credentials=(AWS_KEY_ID='" - + AWS_KEY_ID - + "' AWS_SECRET_KEY='" - + AWS_SECRET_KEY - + "')"); + try (Connection con = getConnection(); + Statement 
statement = con.createStatement()) { + try { + statement.execute( + "create or replace stage snow13400 url='s3://" + + SF_AWS_USER_BUCKET + + "/snow13400'" + + "credentials=(AWS_KEY_ID='" + + AWS_KEY_ID + + "' AWS_SECRET_KEY='" + + AWS_SECRET_KEY + + "')"); - { - ResultSet resultSet = - con.createStatement() - .executeQuery( - "put file://" - + dataFolder.getCanonicalPath() - + "/* @snow13400 auto_compress=false"); - int cnt = 0; - while (resultSet.next()) { - cnt++; + try (ResultSet resultSet = + statement.executeQuery( + "put file://" + + dataFolder.getCanonicalPath() + + "/* @snow13400 auto_compress=false")) { + int cnt = 0; + while (resultSet.next()) { + cnt++; + } + assertEquals(cnt, 1); } - assertEquals(cnt, 1); - } - con.createStatement().execute("create or replace table snow13400(a string)"); - con.createStatement().execute("copy into snow13400 from @snow13400"); - { - ResultSet resultSet = con.createStatement().executeQuery("select * from snow13400"); - int cnt = 0; - String output = null; - while (resultSet.next()) { - output = resultSet.getString(1); - cnt++; + statement.execute("create or replace table snow13400(a string)"); + statement.execute("copy into snow13400 from @snow13400"); + try (ResultSet resultSet = con.createStatement().executeQuery("select * from snow13400")) { + int cnt = 0; + String output = null; + while (resultSet.next()) { + output = resultSet.getString(1); + cnt++; + } + assertEquals(cnt, 1); + assertEquals(output, "hello"); } - assertEquals(cnt, 1); - assertEquals(output, "hello"); + } finally { + statement.execute("rm @snow13400"); + statement.execute("drop stage if exists snow13400"); + statement.execute("drop table if exists snow13400"); } - con.createStatement().execute("rm @snow13400"); - con.createStatement().execute("drop stage if exists snow13400"); - con.createStatement().execute("drop table if exists snow13400"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java 
b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java index e27bf02a2..f9579636d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java @@ -38,10 +38,6 @@ public void testPutFileUnescapeBackslashes() throws Exception { String remoteSubDir = "testPut"; String testDataFileName = "testdata.txt"; - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - Writer writer = null; Path topDataDir = null; try { topDataDir = Files.createTempDirectory("testPutFileUnescapeBackslashes"); @@ -53,39 +49,36 @@ public void testPutFileUnescapeBackslashes() throws Exception { // create a test data File dataFile = new File(subDir.toFile(), testDataFileName); - writer = + try (Writer writer = new BufferedWriter( - new OutputStreamWriter(new FileOutputStream(dataFile.getCanonicalPath()), "UTF-8")); - writer.write("1,test1"); - writer.close(); - + new OutputStreamWriter(new FileOutputStream(dataFile.getCanonicalPath()), "UTF-8"))) { + writer.write("1,test1"); + } // run PUT command - connection = getConnection(); - statement = connection.createStatement(); - String sql = - String.format("PUT 'file://%s' @~/%s/", dataFile.getCanonicalPath(), remoteSubDir); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + String sql = + String.format("PUT 'file://%s' @~/%s/", dataFile.getCanonicalPath(), remoteSubDir); - // Escape backslashes. This must be done by the application. - sql = sql.replaceAll("\\\\", "\\\\\\\\"); - statement.execute(sql); + // Escape backslashes. This must be done by the application. 
+ sql = sql.replaceAll("\\\\", "\\\\\\\\"); + statement.execute(sql); - resultSet = - connection.createStatement().executeQuery(String.format("LS @~/%s/", remoteSubDir)); - while (resultSet.next()) { - assertThat( - "File name doesn't match", - resultSet.getString(1), - startsWith(String.format("%s/%s", remoteSubDir, testDataFileName))); + try (ResultSet resultSet = + connection.createStatement().executeQuery(String.format("LS @~/%s/", remoteSubDir))) { + while (resultSet.next()) { + assertThat( + "File name doesn't match", + resultSet.getString(1), + startsWith(String.format("%s/%s", remoteSubDir, testDataFileName))); + } + } + } finally { + statement.execute(String.format("RM @~/%s", remoteSubDir)); + } } - } finally { - if (connection != null) { - connection.createStatement().execute(String.format("RM @~/%s", remoteSubDir)); - } - closeSQLObjects(resultSet, statement, connection); - if (writer != null) { - writer.close(); - } FileUtils.deleteDirectory(topDataDir.toFile()); } } diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java index ed6e165d1..ae56a49f7 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java @@ -5,9 +5,12 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.junit.Assume.assumeFalse; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -17,9 +20,11 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import net.snowflake.client.RunningNotOnLinuxMac; import 
net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.util.DecorrelatedJitterBackoff; import org.apache.http.StatusLine; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; @@ -272,6 +277,7 @@ class TestCase { testCases.add(new TestCase(509, false, false)); testCases.add(new TestCase(510, false, false)); testCases.add(new TestCase(511, false, false)); + testCases.add(new TestCase(513, false, false)); // do retry on HTTP 403 option testCases.add(new TestCase(100, true, true)); testCases.add(new TestCase(101, true, true)); @@ -325,6 +331,7 @@ class TestCase { testCases.add(new TestCase(509, true, false)); testCases.add(new TestCase(510, true, false)); testCases.add(new TestCase(511, true, false)); + testCases.add(new TestCase(513, true, false)); for (TestCase t : testCases) { if (t.result) { @@ -519,8 +526,9 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl } } - @Test(expected = SnowflakeSQLException.class) - public void testLoginTimeout() throws IOException, SnowflakeSQLException { + @Test + public void testLoginTimeout() throws IOException { + assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -542,8 +550,11 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl try { TelemetryService.disable(); - execute(client, "/session/v1/login-request", 1, 0, 0, true, false, 10); - fail("testMaxRetries"); + assertThrows( + SnowflakeSQLException.class, + () -> { + execute(client, "/session/v1/login-request", 1, 0, 0, true, false, 10); + }); } finally { if (telemetryEnabled) { TelemetryService.enable(); @@ -587,4 +598,41 @@ public CloseableHttpResponse answer(InvocationOnMock 
invocation) throws Throwabl } } } + + @Test + public void shouldGenerateBackoffInRangeExceptTheLastBackoff() { + int minBackoffInMilli = 1000; + int maxBackoffInMilli = 16000; + long backoffInMilli = minBackoffInMilli; + long elapsedMilliForTransientIssues = 0; + DecorrelatedJitterBackoff decorrelatedJitterBackoff = + new DecorrelatedJitterBackoff(minBackoffInMilli, maxBackoffInMilli); + int retryTimeoutInMilli = 5 * 60 * 1000; + while (true) { + backoffInMilli = + RestRequest.getNewBackoffInMilli( + backoffInMilli, + true, + decorrelatedJitterBackoff, + 10, + retryTimeoutInMilli, + elapsedMilliForTransientIssues); + + assertTrue( + "Backoff should be lower or equal to max backoff limit", + backoffInMilli <= maxBackoffInMilli); + if (elapsedMilliForTransientIssues + backoffInMilli >= retryTimeoutInMilli) { + assertEquals( + "Backoff should fill time till retry timeout", + retryTimeoutInMilli - elapsedMilliForTransientIssues, + backoffInMilli); + break; + } else { + assertTrue( + "Backoff should be higher or equal to min backoff limit", + backoffInMilli >= minBackoffInMilli); + } + elapsedMilliForTransientIssues += backoffInMilli; + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java index 2ce20192a..b6832632b 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java @@ -12,7 +12,6 @@ import java.sql.Statement; import java.util.Properties; import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.After; import org.junit.Before; import org.junit.experimental.categories.Category; @@ -23,79 +22,69 @@ public class ResultSet0IT extends BaseJDBCTest { public Connection init(int injectSocketTimeout) throws SQLException { Connection connection = BaseJDBCTest.getConnection(injectSocketTimeout); - - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - 
+ "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + } return connection; } public Connection init() throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } public Connection init(Properties paramProperties) throws SQLException { Connection conn = BaseJDBCTest.getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Before public void setUp() throws SQLException { - Connection con = init(); - - // TEST_RS - con.createStatement().execute("create or replace table test_rs (colA string)"); - 
con.createStatement().execute("insert into test_rs values('rowOne')"); - con.createStatement().execute("insert into test_rs values('rowTwo')"); - con.createStatement().execute("insert into test_rs values('rowThree')"); - - // ORDERS_JDBC - Statement statement = con.createStatement(); - statement.execute( - "create or replace table orders_jdbc" - + "(C1 STRING NOT NULL COMMENT 'JDBC', " - + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " - + "C7 STRING, C8 STRING, C9 STRING) " - + "stage_file_format = (field_delimiter='|' " - + "error_on_column_count_mismatch=false)"); - // put files - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); - - int numRows = statement.executeUpdate("copy into orders_jdbc"); - - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); - - con.close(); - } - - @After - public void tearDown() throws SQLException { - Connection con = init(); - con.createStatement().execute("drop table if exists orders_jdbc"); - con.createStatement().execute("drop table if exists test_rs"); - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement()) { + + // TEST_RS + statement.execute("create or replace table test_rs (colA string)"); + statement.execute("insert into test_rs values('rowOne')"); + statement.execute("insert into test_rs values('rowTwo')"); + statement.execute("insert into test_rs values('rowThree')"); + + // ORDERS_JDBC + statement.execute( + "create or replace table orders_jdbc" + + "(C1 STRING NOT NULL COMMENT 'JDBC', " + + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " + + "C7 STRING, C8 STRING, C9 STRING) " + + "stage_file_format = (field_delimiter='|' " + + "error_on_column_count_mismatch=false)"); + // put files + assertTrue( + "Failed 
to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + + int numRows = statement.executeUpdate("copy into orders_jdbc"); + + assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + } } ResultSet numberCrossTesting() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java index 40e536ebe..d2939cc8a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java @@ -21,9 +21,10 @@ public class ResultSetAlreadyClosedIT extends BaseJDBCTest { @Test public void testQueryResultSetAlreadyClosed() throws Throwable { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { ResultSet resultSet = statement.executeQuery("select 1"); + resultSet.close(); checkAlreadyClosed(resultSet); } } @@ -43,11 +44,21 @@ public void testMetadataResultSetAlreadyClosed() throws Throwable { } } + @Test + public void testResultSetAlreadyClosed() throws Throwable { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT 1")) { + checkAlreadyClosed(resultSet); + } + } + @Test public void testEmptyResultSetAlreadyClosed() throws Throwable { - ResultSet resultSet = new SnowflakeResultSetV1.EmptyResultSet(); - checkAlreadyClosed(resultSet); - checkAlreadyClosedEmpty(resultSet); + try (SnowflakeResultSetV1.EmptyResultSet resultSet = + new SnowflakeResultSetV1.EmptyResultSet()) { + 
checkAlreadyClosedEmpty(resultSet); + } } private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { @@ -67,7 +78,6 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException(() -> resultSet.getDouble(1)); expectResultSetAlreadyClosedException(() -> resultSet.getBigDecimal(1)); expectResultSetAlreadyClosedException(() -> resultSet.getBytes(1)); - expectResultSetAlreadyClosedException(() -> resultSet.getString(1)); expectResultSetAlreadyClosedException(() -> resultSet.getDate(1)); expectResultSetAlreadyClosedException(() -> resultSet.getTime(1)); expectResultSetAlreadyClosedException(() -> resultSet.getTimestamp(1)); @@ -104,7 +114,13 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException(() -> resultSet.getBigDecimal("col1", 38)); expectResultSetAlreadyClosedException(resultSet::getWarnings); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).getWarnings()); + expectResultSetAlreadyClosedException(resultSet::clearWarnings); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).clearWarnings()); + expectResultSetAlreadyClosedException(resultSet::getMetaData); expectResultSetAlreadyClosedException(() -> resultSet.findColumn("col1")); @@ -118,11 +134,20 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException( () -> resultSet.setFetchDirection(ResultSet.FETCH_FORWARD)); expectResultSetAlreadyClosedException(() -> resultSet.setFetchSize(10)); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).setFetchSize(10)); + expectResultSetAlreadyClosedException(resultSet::getFetchDirection); expectResultSetAlreadyClosedException(resultSet::getFetchSize); expectResultSetAlreadyClosedException(resultSet::getType); 
expectResultSetAlreadyClosedException(resultSet::getConcurrency); + expectResultSetAlreadyClosedException( + resultSet.unwrap(SnowflakeBaseResultSet.class)::getConcurrency); + expectResultSetAlreadyClosedException(resultSet::getHoldability); + expectResultSetAlreadyClosedException( + resultSet.unwrap(SnowflakeBaseResultSet.class)::getHoldability); + expectResultSetAlreadyClosedException(resultSet::getStatement); } @@ -132,7 +157,8 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { * @param resultSet * @throws SQLException */ - private void checkAlreadyClosedEmpty(ResultSet resultSet) throws SQLException { + private void checkAlreadyClosedEmpty(SnowflakeResultSetV1.EmptyResultSet resultSet) + throws SQLException { resultSet.close(); resultSet.close(); // second close won't raise exception assertTrue(resultSet.isClosed()); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java index 67a78d3ce..c6edc67fb 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java @@ -57,12 +57,12 @@ Connection init(String table, String column, String values) throws SQLException + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - } - con.createStatement() - .execute("alter session set jdbc_query_result_format" + " = '" + queryResultFormat + "'"); - con.createStatement().execute("create or replace table " + table + " " + column); - con.createStatement().execute("insert into " + table + " values " + values); + statement.execute( + "alter session set jdbc_query_result_format" + " = '" + queryResultFormat + "'"); + statement.execute("create or replace table " + table + " " + 
column); + statement.execute("insert into " + table + " values " + values); + } return con; } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java index 56e389fc5..f998fb5d4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java @@ -5,6 +5,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -75,11 +76,11 @@ private void testTimestampLTZWithScale(int scale) throws SQLException { ResultSet rs = con.createStatement().executeQuery("select * from " + table); int i = 0; while (i < cases.length) { - rs.next(); + assertTrue(rs.next()); assertEquals(times[i++], rs.getTimestamp(1).getTime()); assertEquals(0, rs.getTimestamp(1).getNanos()); } - rs.next(); + assertTrue(rs.next()); assertNull(rs.getString(1)); finish(table, con); } @@ -98,52 +99,56 @@ public void testTimestampLTZOutputFormat() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'),('") + "')"; - Connection con = init(table, column, values); - - Statement statement = con.createStatement(); - - // use initialized ltz output format - ResultSet rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String weekday = rs.getString(1).split(",")[0]; - assertEquals(3, weekday.length()); - } - - // change ltz output format - statement.execute( - "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS TZH:TZM'"); - rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], 
rs.getTimestamp(1).getTime()); - String year = rs.getString(1).split("-")[0]; - assertEquals(4, year.length()); - } - - // unset ltz output format, then it should use timestamp_output_format - statement.execute("alter session unset TIMESTAMP_LTZ_OUTPUT_FORMAT"); - rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String weekday = rs.getString(1).split(",")[0]; - assertEquals(3, weekday.length()); - } - - // set ltz output format back to init value - statement.execute( - "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String weekday = rs.getString(1).split(",")[0]; - assertEquals(3, weekday.length()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try { + // use initialized ltz output format + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String weekday = rs.getString(1).split(",")[0]; + assertEquals(3, weekday.length()); + } + } + // change ltz output format + statement.execute( + "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS TZH:TZM'"); + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String year = rs.getString(1).split("-")[0]; + assertEquals(4, year.length()); + } + } + + // unset ltz output format, then it should use timestamp_output_format + statement.execute("alter session unset TIMESTAMP_LTZ_OUTPUT_FORMAT"); + try (ResultSet rs = statement.executeQuery("select 
* from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String weekday = rs.getString(1).split(",")[0]; + assertEquals(3, weekday.length()); + } + } + // set ltz output format back to init value + statement.execute( + "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String weekday = rs.getString(1).split(",")[0]; + assertEquals(3, weekday.length()); + } + } + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); + } } - - finish(table, con); } @Test @@ -178,20 +183,26 @@ public void testTimestampLTZWithNulls() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'), (null),('") + "')"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < 2 * cases.length - 1) { - rs.next(); - if (i % 2 != 0) { - assertNull(rs.getTimestamp(1)); - } else { - assertEquals(times[i / 2], rs.getTimestamp(1).getTime()); - assertEquals(0, rs.getTimestamp(1).getNanos()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < 2 * cases.length - 1) { + assertTrue(rs.next()); + if (i % 2 != 0) { + assertNull(rs.getTimestamp(1)); + } else { + assertEquals(times[i / 2], rs.getTimestamp(1).getTime()); + assertEquals(0, rs.getTimestamp(1).getNanos()); + } + i++; + } + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - i++; } - finish(table, con); } @Test @@ -218,16 +229,22 @@ public void 
testTimestampLTZWithNanos() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); + } } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java index 156bf10bd..e073bfccf 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java @@ -5,10 +5,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Statement; import java.util.Collection; import net.snowflake.client.category.TestCategoryArrow; import org.apache.commons.lang3.StringUtils; @@ -65,17 +67,23 @@ private void testTimestampTZWithScale(int scale) throws SQLException { String column = "(a timestamp_tz(" + scale + "))"; String values = "('" 
+ StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(times[i++], rs.getTimestamp(1).getTime()); - assertEquals(0, rs.getTimestamp(1).getNanos()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(times[i++], rs.getTimestamp(1).getTime()); + assertEquals(0, rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); + } } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } @Test @@ -111,22 +119,28 @@ public void testTimestampTZWithNanos() throws SQLException { String column = "(a timestamp_tz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { - // TODO: Is this a JDBC bug which happens in both arrow and json cases? - assertEquals("0001-01-01 00:00:01.790870987", rs.getTimestamp(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { + // TODO: Is this a JDBC bug which happens in both arrow and json cases? 
+ assertEquals("0001-01-01 00:00:01.790870987", rs.getTimestamp(1).toString()); + } + + assertEquals(times[i], rs.getTimestamp(1).getTime()); + assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - - assertEquals(times[i], rs.getTimestamp(1).getTime()); - assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } @Test @@ -164,21 +178,27 @@ public void testTimestampTZWithMicros() throws SQLException { String column = "(a timestamp_tz(6))"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { - // TODO: Is this a JDBC bug which happens in both arrow and json cases? - assertEquals("0001-01-01 00:00:01.79087", rs.getTimestamp(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { + // TODO: Is this a JDBC bug which happens in both arrow and json cases? 
+ assertEquals("0001-01-01 00:00:01.79087", rs.getTimestamp(1).toString()); + } + + assertEquals(times[i], rs.getTimestamp(1).getTime()); + assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - - assertEquals(times[i], rs.getTimestamp(1).getTime()); - assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java index 6e881615c..1351ea4f1 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java @@ -38,177 +38,193 @@ public class ResultSetAsyncIT extends BaseJDBCTest { @Test public void testAsyncResultSetFunctionsWithNewSession() throws SQLException { - Connection connection = getConnection(); final Map params = getConnectionParameters(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - String createTableSql = "select * from test_rsmd"; - ResultSet rs = statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql); - String queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - statement.execute("drop table if exists test_rsmd"); - rs.close(); - // close statement and connection - statement.close(); - connection.close(); - connection = getConnection(); - // open a new connection and create a result set - ResultSet resultSet = connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // getCatalogName(), getSchemaName(), and getTableName() 
are empty - // when session is re-opened - assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getTableName(1)); - assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); - assertEquals("COLA", resultSetMetaData.getColumnName(1)); - assertEquals(3, resultSetMetaData.getColumnType(1)); - assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); - assertEquals(20, resultSetMetaData.getPrecision(1)); - assertEquals(5, resultSetMetaData.getScale(1)); - assertFalse(resultSetMetaData.isAutoIncrement(1)); - assertFalse(resultSetMetaData.isCaseSensitive(1)); - assertFalse(resultSetMetaData.isCurrency(1)); - assertFalse(resultSetMetaData.isDefinitelyWritable(1)); - assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); - assertTrue(resultSetMetaData.isReadOnly(1)); - assertTrue(resultSetMetaData.isSearchable(1)); - assertTrue(resultSetMetaData.isSigned(1)); + String queryID = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + String createTableSql = "select * from test_rsmd"; + try (ResultSet rs = + statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql)) { + queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + } + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } + try (Connection connection = getConnection(); + // open a new connection and create a result set + ResultSet resultSet = + 
connection.unwrap(SnowflakeConnection.class).createResultSet(queryID)) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // getCatalogName(), getSchemaName(), and getTableName() are empty + // when session is re-opened + assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getTableName(1)); + assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); + assertEquals("COLA", resultSetMetaData.getColumnName(1)); + assertEquals(3, resultSetMetaData.getColumnType(1)); + assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); + assertEquals(20, resultSetMetaData.getPrecision(1)); + assertEquals(5, resultSetMetaData.getScale(1)); + assertFalse(resultSetMetaData.isAutoIncrement(1)); + assertFalse(resultSetMetaData.isCaseSensitive(1)); + assertFalse(resultSetMetaData.isCurrency(1)); + assertFalse(resultSetMetaData.isDefinitelyWritable(1)); + assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); + assertTrue(resultSetMetaData.isReadOnly(1)); + assertTrue(resultSetMetaData.isSearchable(1)); + assertTrue(resultSetMetaData.isSigned(1)); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - List colNames = secretMetaData.getColumnNames(); - assertEquals("COLA", colNames.get(0)); - assertEquals("COLB", colNames.get(1)); - assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); - assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); - TestUtil.assertValidQueryId(secretMetaData.getQueryID()); - assertEquals( - secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); - resultSet.close(); - 
statement.close(); - connection.close(); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + List colNames = secretMetaData.getColumnNames(); + assertEquals("COLA", colNames.get(0)); + assertEquals("COLB", colNames.get(1)); + assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); + assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); + TestUtil.assertValidQueryId(secretMetaData.getQueryID()); + assertEquals( + secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); + } } @Test public void testResultSetMetadata() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getTableName(1)); - assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); - assertEquals("COLA", resultSetMetaData.getColumnName(1)); - assertEquals(3, resultSetMetaData.getColumnType(1)); - assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); - assertEquals(20, resultSetMetaData.getPrecision(1)); - assertEquals(5, resultSetMetaData.getScale(1)); - assertFalse(resultSetMetaData.isAutoIncrement(1)); - assertFalse(resultSetMetaData.isCaseSensitive(1)); - 
assertFalse(resultSetMetaData.isCurrency(1)); - assertFalse(resultSetMetaData.isDefinitelyWritable(1)); - assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); - assertTrue(resultSetMetaData.isReadOnly(1)); - assertTrue(resultSetMetaData.isSearchable(1)); - assertTrue(resultSetMetaData.isSigned(1)); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - List colNames = secretMetaData.getColumnNames(); - assertEquals("COLA", colNames.get(0)); - assertEquals("COLB", colNames.get(1)); - assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); - assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); - TestUtil.assertValidQueryId(secretMetaData.getQueryID()); - assertEquals( - secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); - - statement.execute("drop table if exists test_rsmd"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + try (ResultSet resultSet = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from test_rsmd")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getTableName(1)); + assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); + assertEquals("COLA", resultSetMetaData.getColumnName(1)); + assertEquals(3, 
resultSetMetaData.getColumnType(1)); + assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); + assertEquals(20, resultSetMetaData.getPrecision(1)); + assertEquals(5, resultSetMetaData.getScale(1)); + assertFalse(resultSetMetaData.isAutoIncrement(1)); + assertFalse(resultSetMetaData.isCaseSensitive(1)); + assertFalse(resultSetMetaData.isCurrency(1)); + assertFalse(resultSetMetaData.isDefinitelyWritable(1)); + assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); + assertTrue(resultSetMetaData.isReadOnly(1)); + assertTrue(resultSetMetaData.isSearchable(1)); + assertTrue(resultSetMetaData.isSigned(1)); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + List colNames = secretMetaData.getColumnNames(); + assertEquals("COLA", colNames.get(0)); + assertEquals("COLB", colNames.get(1)); + assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); + assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); + TestUtil.assertValidQueryId(secretMetaData.getQueryID()); + assertEquals( + secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); + } + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } } @Test public void testOrderAndClosureFunctions() throws SQLException { // Set up environment - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); + String queryID = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + 
statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + try { + ResultSet resultSet = + statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); - // test isFirst, isBeforeFirst - assertTrue("should be before the first", resultSet.isBeforeFirst()); - assertFalse("should not be the first", resultSet.isFirst()); - resultSet.next(); - assertFalse("should not be before the first", resultSet.isBeforeFirst()); - assertTrue("should be the first", resultSet.isFirst()); + // test isFirst, isBeforeFirst + assertTrue("should be before the first", resultSet.isBeforeFirst()); + assertFalse("should not be the first", resultSet.isFirst()); + resultSet.next(); + assertFalse("should not be before the first", resultSet.isBeforeFirst()); + assertTrue("should be the first", resultSet.isFirst()); - // test isClosed functions - String queryID = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); - assertFalse(resultSet.isClosed()); - // close resultSet and test again - resultSet.close(); - assertTrue(resultSet.isClosed()); - // close connection and open a new one - statement.execute("drop table if exists test_rsmd"); - statement.close(); - connection.close(); - connection = getConnection(); - resultSet = connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); - // test out isClosed, isLast, and isAfterLast - assertFalse(resultSet.isClosed()); - resultSet.next(); - resultSet.next(); - // cursor should be on last row - assertTrue(resultSet.isLast()); - resultSet.next(); - // cursor is after last row - assertTrue(resultSet.isAfterLast()); - resultSet.close(); - // resultSet should be closed - assertTrue(resultSet.isClosed()); - statement.close(); - connection.close(); + // test isClosed functions + queryID = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); + assertFalse(resultSet.isClosed()); + // close resultSet and test again + resultSet.close(); + assertTrue(resultSet.isClosed()); + } finally { + 
statement.execute("drop table if exists test_rsmd"); + } + } + try (Connection connection = getConnection()) { + ResultSet resultSet = connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); + // test out isClosed, isLast, and isAfterLast + assertFalse(resultSet.isClosed()); + resultSet.next(); + resultSet.next(); + // cursor should be on last row + assertTrue(resultSet.isLast()); + resultSet.next(); + // cursor is after last row + assertTrue(resultSet.isAfterLast()); + resultSet.close(); + // resultSet should be closed + assertTrue(resultSet.isClosed()); + } } @Test public void testWasNull() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - Clob emptyClob = connection.createClob(); - emptyClob.setString(1, ""); - statement.execute( - "create or replace table test_null(colA number, colB string, colNull string, emptyClob string)"); - PreparedStatement prepst = - connection.prepareStatement("insert into test_null values (?, ?, ?, ?)"); - prepst.setNull(1, Types.INTEGER); - prepst.setString(2, "hello"); - prepst.setString(3, null); - prepst.setClob(4, emptyClob); - prepst.execute(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + Clob emptyClob = connection.createClob(); + emptyClob.setString(1, ""); + statement.execute( + "create or replace table test_null(colA number, colB string, colNull string, emptyClob string)"); + try (PreparedStatement prepst = + connection.prepareStatement("insert into test_null values (?, ?, ?, ?)")) { + prepst.setNull(1, Types.INTEGER); + prepst.setString(2, "hello"); + prepst.setString(3, null); + prepst.setClob(4, emptyClob); + prepst.execute(); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_null"); - resultSet.next(); - resultSet.getInt(1); - assertTrue(resultSet.wasNull()); // integer value is null - resultSet.getString(2); - 
assertFalse(resultSet.wasNull()); // string value is not null - assertNull(resultSet.getClob(3)); - assertNull(resultSet.getClob("COLNULL")); - assertEquals("", resultSet.getClob("EMPTYCLOB").toString()); + try (ResultSet resultSet = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from test_null")) { + resultSet.next(); + resultSet.getInt(1); + assertTrue(resultSet.wasNull()); // integer value is null + resultSet.getString(2); + assertFalse(resultSet.wasNull()); // string value is not null + assertNull(resultSet.getClob(3)); + assertNull(resultSet.getClob("COLNULL")); + assertEquals("", resultSet.getClob("EMPTYCLOB").toString()); + } + } + } finally { + statement.execute("drop table if exists test_null"); + } + } } @Test @@ -228,88 +244,92 @@ public void testGetMethods() throws Throwable { Time time = new Time(500); Timestamp ts = new Timestamp(333); - Connection connection = getConnection(); - Clob clob = connection.createClob(); - clob.setString(1, "hello world"); - Statement statement = connection.createStatement(); - // TODO structuredType - add to test when WRITE is ready - SNOW-1157904 - statement.execute( - "create or replace table test_get(colA integer, colB number, colC number, colD string, colE double, colF float, colG boolean, colH text, colI binary(3), colJ number(38,9), colK int, colL date, colM time, colN timestamp_ltz)"); + try (Connection connection = getConnection()) { + Clob clob = connection.createClob(); + clob.setString(1, "hello world"); + try (Statement statement = connection.createStatement()) { + try { + // TODO structuredType - add to test when WRITE is ready - SNOW-1157904 + statement.execute( + "create or replace table test_get(colA integer, colB number, colC number, colD string, colE double, colF float, colG boolean, colH text, colI binary(3), colJ number(38,9), colK int, colL date, colM time, colN timestamp_ltz)"); - PreparedStatement prepStatement = connection.prepareStatement(prepInsertString); - 
prepStatement.setInt(1, bigInt); - prepStatement.setLong(2, bigLong); - prepStatement.setLong(3, bigShort); - prepStatement.setString(4, str); - prepStatement.setDouble(5, bigDouble); - prepStatement.setFloat(6, bigFloat); - prepStatement.setBoolean(7, true); - prepStatement.setClob(8, clob); - prepStatement.setBytes(9, bytes); - prepStatement.setBigDecimal(10, bigDecimal); - prepStatement.setByte(11, oneByte); - prepStatement.setDate(12, date); - prepStatement.setTime(13, time); - prepStatement.setTimestamp(14, ts); - prepStatement.execute(); + try (PreparedStatement prepStatement = connection.prepareStatement(prepInsertString)) { + prepStatement.setInt(1, bigInt); + prepStatement.setLong(2, bigLong); + prepStatement.setLong(3, bigShort); + prepStatement.setString(4, str); + prepStatement.setDouble(5, bigDouble); + prepStatement.setFloat(6, bigFloat); + prepStatement.setBoolean(7, true); + prepStatement.setClob(8, clob); + prepStatement.setBytes(9, bytes); + prepStatement.setBigDecimal(10, bigDecimal); + prepStatement.setByte(11, oneByte); + prepStatement.setDate(12, date); + prepStatement.setTime(13, time); + prepStatement.setTimestamp(14, ts); + prepStatement.execute(); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_get"); - resultSet.next(); - assertEquals(bigInt, resultSet.getInt(1)); - assertEquals(bigInt, resultSet.getInt("COLA")); - assertEquals(bigLong, resultSet.getLong(2)); - assertEquals(bigLong, resultSet.getLong("COLB")); - assertEquals(bigShort, resultSet.getShort(3)); - assertEquals(bigShort, resultSet.getShort("COLC")); - assertEquals(str, resultSet.getString(4)); - assertEquals(str, resultSet.getString("COLD")); - Reader reader = resultSet.getCharacterStream("COLD"); - char[] sample = new char[str.length()]; + try (ResultSet resultSet = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from test_get")) { + resultSet.next(); + assertEquals(bigInt, 
resultSet.getInt(1)); + assertEquals(bigInt, resultSet.getInt("COLA")); + assertEquals(bigLong, resultSet.getLong(2)); + assertEquals(bigLong, resultSet.getLong("COLB")); + assertEquals(bigShort, resultSet.getShort(3)); + assertEquals(bigShort, resultSet.getShort("COLC")); + assertEquals(str, resultSet.getString(4)); + assertEquals(str, resultSet.getString("COLD")); + Reader reader = resultSet.getCharacterStream("COLD"); + char[] sample = new char[str.length()]; - assertEquals(str.length(), reader.read(sample)); - assertEquals(str.charAt(0), sample[0]); - assertEquals(str, new String(sample)); + assertEquals(str.length(), reader.read(sample)); + assertEquals(str.charAt(0), sample[0]); + assertEquals(str, new String(sample)); - assertEquals(bigDouble, resultSet.getDouble(5), 0); - assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); - assertEquals(bigFloat, resultSet.getFloat(6), 0); - assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); - assertTrue(resultSet.getBoolean(7)); - assertTrue(resultSet.getBoolean("COLG")); - assertEquals("hello world", resultSet.getClob("COLH").toString()); + assertEquals(bigDouble, resultSet.getDouble(5), 0); + assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); + assertEquals(bigFloat, resultSet.getFloat(6), 0); + assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); + assertTrue(resultSet.getBoolean(7)); + assertTrue(resultSet.getBoolean("COLG")); + assertEquals("hello world", resultSet.getClob("COLH").toString()); - // TODO: figure out why getBytes returns an offset. - // assertEquals(bytes, resultSet.getBytes(9)); - // assertEquals(bytes, resultSet.getBytes("COLI")); + // TODO: figure out why getBytes returns an offset. 
+ // assertEquals(bytes, resultSet.getBytes(9)); + // assertEquals(bytes, resultSet.getBytes("COLI")); - DecimalFormat df = new DecimalFormat("#.00"); - assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal(10))); - assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal("COLJ"))); + DecimalFormat df = new DecimalFormat("#.00"); + assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal(10))); + assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal("COLJ"))); - assertEquals(oneByte, resultSet.getByte(11)); - assertEquals(oneByte, resultSet.getByte("COLK")); + assertEquals(oneByte, resultSet.getByte(11)); + assertEquals(oneByte, resultSet.getByte("COLK")); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - assertEquals(sdf.format(date), sdf.format(resultSet.getDate(12))); - assertEquals(sdf.format(date), sdf.format(resultSet.getDate("COLL"))); - assertEquals(time, resultSet.getTime(13)); - assertEquals(time, resultSet.getTime("COLM")); - assertEquals(ts, resultSet.getTimestamp(14)); - assertEquals(ts, resultSet.getTimestamp("COLN")); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); + assertEquals(sdf.format(date), sdf.format(resultSet.getDate(12))); + assertEquals(sdf.format(date), sdf.format(resultSet.getDate("COLL"))); + assertEquals(time, resultSet.getTime(13)); + assertEquals(time, resultSet.getTime("COLM")); + assertEquals(ts, resultSet.getTimestamp(14)); + assertEquals(ts, resultSet.getTimestamp("COLN")); - // test getObject - assertEquals(str, resultSet.getObject(4).toString()); - assertEquals(str, resultSet.getObject("COLD").toString()); + // test getObject + assertEquals(str, resultSet.getObject(4).toString()); + assertEquals(str, resultSet.getObject("COLD").toString()); - // test getStatement method - assertEquals(statement, resultSet.getStatement()); - - prepStatement.close(); - statement.execute("drop table if exists table_get"); - statement.close(); - resultSet.close(); - 
connection.close(); + // test getStatement method + assertEquals(statement, resultSet.getStatement()); + } + } + } finally { + statement.execute("drop table if exists table_get"); + } + } + } } /** @@ -323,23 +343,23 @@ public void testGetMethods() throws Throwable { */ @Test public void testEmptyResultSet() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet rs = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from empty_table"); - // if user never calls getMetadata() or next(), empty result set is used to get results. - // empty ResultSet returns all nulls, 0s, and empty values. - assertFalse(rs.isClosed()); - assertEquals(0, rs.getInt(1)); - try { - rs.getInt("col1"); - fail("Fetching from a column name that does not exist should return a SQLException"); - } catch (SQLException e) { - // findColumn fails with empty metadata with exception "Column not found". - assertEquals(SqlState.UNDEFINED_COLUMN, e.getSQLState()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet rs = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from empty_table")) { + // if user never calls getMetadata() or next(), empty result set is used to get results. + // empty ResultSet returns all nulls, 0s, and empty values. + assertFalse(rs.isClosed()); + assertEquals(0, rs.getInt(1)); + try { + rs.getInt("col1"); + fail("Fetching from a column name that does not exist should return a SQLException"); + } catch (SQLException e) { + // findColumn fails with empty metadata with exception "Column not found". 
+ assertEquals(SqlState.UNDEFINED_COLUMN, e.getSQLState()); + } } - rs.close(); // close empty result set - assertTrue(rs.isClosed()); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java index e5dc110ce..dd534d469 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java @@ -21,30 +21,32 @@ public class ResultSetAsyncLatestIT extends BaseJDBCTest { @Test public void testAsyncResultSet() throws SQLException { String queryID; - Connection connection = getConnection(); - try (Statement statement = connection.createStatement()) { - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - String createTableSql = "select * from test_rsmd"; - ResultSet rs = statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql); - queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - statement.execute("drop table if exists test_rsmd"); - rs.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + String createTableSql = "select * from test_rsmd"; + try (ResultSet rs = + statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql)) { + queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + } + } finally { + statement.execute("drop table if exists test_rsmd"); + } } // Close and reopen connection - connection.close(); - connection = getConnection(); - // open a new connection and create a result set - ResultSet resultSet = 
connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); - // Process result set - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - assertEquals( - secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); - // Close statement and resultset - resultSet.getStatement().close(); - resultSet.close(); - connection.close(); + + try (Connection connection = getConnection(); + // open a new connection and create a result set + ResultSet resultSet = + connection.unwrap(SnowflakeConnection.class).createResultSet(queryID)) { + // Process result set + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + assertEquals( + secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java index 2535a6579..e71f69d1a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java @@ -18,11 +18,10 @@ public class ResultSetFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testQueryResultSetNotSupportedException() throws Throwable { - try (Connection connection = getConnection()) { - try (Statement statement = connection.createStatement()) { - ResultSet resultSet = statement.executeQuery("select 1"); - checkFeatureNotSupportedException(resultSet); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select 1")) { + checkFeatureNotSupportedException(resultSet); } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java new file mode 100644 index 000000000..a080ce9af --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java @@ -0,0 +1,12 @@ +package net.snowflake.client.jdbc; + +public enum ResultSetFormatType { + JSON("JSON"), + ARROW_WITH_JSON_STRUCTURED_TYPES("ARROW"), + NATIVE_ARROW("ARROW"); + public final String sessionParameterTypeValue; + + ResultSetFormatType(String sessionParameterTypeValue) { + this.sessionParameterTypeValue = sessionParameterTypeValue; + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java index e521078c6..3e5343117 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java @@ -57,35 +57,37 @@ public ResultSetIT() { @Test public void testFindColumn() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(selectAllSQL); - assertEquals(1, resultSet.findColumn("COLA")); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertEquals(1, resultSet.findColumn("COLA")); + } } @Test public void testGetColumnClassNameForBinary() throws Throwable { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table bintable (b binary)"); - statement.execute("insert into bintable values ('00f1f2')"); - ResultSet resultSet = statement.executeQuery("select * from bintable"); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertEquals(SnowflakeType.BINARY_CLASS_NAME, metaData.getColumnClassName(1)); - assertTrue(resultSet.next()); - 
Class klass = Class.forName(SnowflakeType.BINARY_CLASS_NAME); - Object ret0 = resultSet.getObject(1); - assertEquals(ret0.getClass(), klass); - byte[] ret = (byte[]) ret0; - assertEquals(3, ret.length); - assertEquals(ret[0], (byte) 0); - assertEquals(ret[1], (byte) -15); - assertEquals(ret[2], (byte) -14); - statement.execute("drop table if exists bintable"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table bintable (b binary)"); + statement.execute("insert into bintable values ('00f1f2')"); + try (ResultSet resultSet = statement.executeQuery("select * from bintable")) { + ResultSetMetaData metaData = resultSet.getMetaData(); + assertEquals(SnowflakeType.BINARY_CLASS_NAME, metaData.getColumnClassName(1)); + assertTrue(resultSet.next()); + Class klass = Class.forName(SnowflakeType.BINARY_CLASS_NAME); + Object ret0 = resultSet.getObject(1); + assertEquals(ret0.getClass(), klass); + byte[] ret = (byte[]) ret0; + assertEquals(3, ret.length); + assertEquals(ret[0], (byte) 0); + assertEquals(ret[1], (byte) -15); + assertEquals(ret[2], (byte) -14); + } + } finally { + statement.execute("drop table if exists bintable"); + } + } } @Test @@ -98,379 +100,388 @@ public void testGetMethod() throws Throwable { double bigDouble = Double.MAX_VALUE; float bigFloat = Float.MAX_VALUE; - Connection connection = init(); - Clob clob = connection.createClob(); - clob.setString(1, "hello world"); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table test_get(colA integer, colB number, colC number, " - + "colD string, colE double, colF float, colG boolean, colH text)"); - - PreparedStatement prepStatement = connection.prepareStatement(prepInsertString); - prepStatement.setInt(1, bigInt); - prepStatement.setLong(2, bigLong); - prepStatement.setLong(3, bigShort); - prepStatement.setString(4, str); - 
prepStatement.setDouble(5, bigDouble); - prepStatement.setFloat(6, bigFloat); - prepStatement.setBoolean(7, true); - prepStatement.setClob(8, clob); - prepStatement.execute(); - - statement.execute("select * from test_get"); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - assertEquals(bigInt, resultSet.getInt(1)); - assertEquals(bigInt, resultSet.getInt("COLA")); - assertEquals(bigLong, resultSet.getLong(2)); - assertEquals(bigLong, resultSet.getLong("COLB")); - assertEquals(bigShort, resultSet.getShort(3)); - assertEquals(bigShort, resultSet.getShort("COLC")); - assertEquals(str, resultSet.getString(4)); - assertEquals(str, resultSet.getString("COLD")); - Reader reader = resultSet.getCharacterStream("COLD"); - char[] sample = new char[str.length()]; - - assertEquals(str.length(), reader.read(sample)); - assertEquals(str.charAt(0), sample[0]); - assertEquals(str, new String(sample)); - - // assertEquals(bigDouble, resultSet.getDouble(5), 0); - // assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); - assertEquals(bigFloat, resultSet.getFloat(6), 0); - assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); - assertTrue(resultSet.getBoolean(7)); - assertTrue(resultSet.getBoolean("COLG")); - assertEquals("hello world", resultSet.getClob("COLH").toString()); - - // test getStatement method - assertEquals(statement, resultSet.getStatement()); - - prepStatement.close(); - statement.execute("drop table if exists table_get"); - statement.close(); - resultSet.close(); - connection.close(); + try (Connection connection = init()) { + Clob clob = connection.createClob(); + clob.setString(1, "hello world"); + try (Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_get(colA integer, colB number, colC number, " + + "colD string, colE double, colF float, colG boolean, colH text)"); + + try (PreparedStatement prepStatement = connection.prepareStatement(prepInsertString)) { + 
prepStatement.setInt(1, bigInt); + prepStatement.setLong(2, bigLong); + prepStatement.setLong(3, bigShort); + prepStatement.setString(4, str); + prepStatement.setDouble(5, bigDouble); + prepStatement.setFloat(6, bigFloat); + prepStatement.setBoolean(7, true); + prepStatement.setClob(8, clob); + prepStatement.execute(); + + statement.execute("select * from test_get"); + try (ResultSet resultSet = statement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(bigInt, resultSet.getInt(1)); + assertEquals(bigInt, resultSet.getInt("COLA")); + assertEquals(bigLong, resultSet.getLong(2)); + assertEquals(bigLong, resultSet.getLong("COLB")); + assertEquals(bigShort, resultSet.getShort(3)); + assertEquals(bigShort, resultSet.getShort("COLC")); + assertEquals(str, resultSet.getString(4)); + assertEquals(str, resultSet.getString("COLD")); + Reader reader = resultSet.getCharacterStream("COLD"); + char[] sample = new char[str.length()]; + + assertEquals(str.length(), reader.read(sample)); + assertEquals(str.charAt(0), sample[0]); + assertEquals(str, new String(sample)); + + // assertEquals(bigDouble, resultSet.getDouble(5), 0); + // assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); + assertEquals(bigFloat, resultSet.getFloat(6), 0); + assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); + assertTrue(resultSet.getBoolean(7)); + assertTrue(resultSet.getBoolean("COLG")); + assertEquals("hello world", resultSet.getClob("COLH").toString()); + + // test getStatement method + assertEquals(statement, resultSet.getStatement()); + } + } + } finally { + statement.execute("drop table if exists table_get"); + } + } + } } @Test public void testGetObjectOnDatabaseMetadataResultSet() throws SQLException { - Connection connection = init(); - DatabaseMetaData databaseMetaData = connection.getMetaData(); - ResultSet resultSet = databaseMetaData.getTypeInfo(); - resultSet.next(); - // SNOW-21375 "NULLABLE" Column is a SMALLINT TYPE - assertEquals(DatabaseMetaData.typeNullable, 
resultSet.getObject("NULLABLE")); - resultSet.close(); - connection.close(); + try (Connection connection = init()) { + DatabaseMetaData databaseMetaData = connection.getMetaData(); + try (ResultSet resultSet = databaseMetaData.getTypeInfo()) { + assertTrue(resultSet.next()); + // SNOW-21375 "NULLABLE" Column is a SMALLINT TYPE + assertEquals(DatabaseMetaData.typeNullable, resultSet.getObject("NULLABLE")); + } + } } @Test public void testGetShort() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getShort(i)); - } - - resultSet.next(); - assertEquals(2, resultSet.getShort(1)); - assertEquals(5, resultSet.getShort(2)); - assertEquals(3, resultSet.getShort(3)); - assertEquals(1, resultSet.getShort(4)); - assertEquals(1, resultSet.getShort(5)); - assertEquals(1, resultSet.getShort(6)); - assertEquals(9126, resultSet.getShort(7)); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getShort(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getShort(i)); } - } - resultSet.next(); - // certain column types can only have certain values when called by getShort() or else a - // SQLexception is thrown. - // These column types are varchar, char, and float. 
- for (int i = 5; i < 7; i++) { - try { - resultSet.getShort(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getShort(1)); + assertEquals(5, resultSet.getShort(2)); + assertEquals(3, resultSet.getShort(3)); + assertEquals(1, resultSet.getShort(4)); + assertEquals(1, resultSet.getShort(5)); + assertEquals(1, resultSet.getShort(6)); + assertEquals(9126, resultSet.getShort(7)); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getShort(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getShort() or else a + // SQLexception is thrown. + // These column types are varchar, char, and float. + + for (int i = 5; i < 7; i++) { + try { + resultSet.getShort(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetInt() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getInt(i)); - } - - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - assertEquals(5, resultSet.getInt(2)); - assertEquals(3, resultSet.getInt(3)); - assertEquals(1, resultSet.getInt(4)); - assertEquals(1, resultSet.getInt(5)); - assertEquals(1, resultSet.getInt(6)); - assertEquals(9126, resultSet.getInt(7)); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getInt(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, 
resultSet.getInt(i)); } - } - resultSet.next(); - // certain column types can only have certain values when called by getInt() or else a - // SQLException is thrown. - // These column types are varchar, char, and float. - for (int i = 5; i < 7; i++) { - try { - resultSet.getInt(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertEquals(5, resultSet.getInt(2)); + assertEquals(3, resultSet.getInt(3)); + assertEquals(1, resultSet.getInt(4)); + assertEquals(1, resultSet.getInt(5)); + assertEquals(1, resultSet.getInt(6)); + assertEquals(9126, resultSet.getInt(7)); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getInt(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getInt() or else a + // SQLException is thrown. + // These column types are varchar, char, and float. 
+ for (int i = 5; i < 7; i++) { + try { + resultSet.getInt(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetLong() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getLong(i)); - } - - resultSet.next(); - assertEquals(2, resultSet.getLong(1)); - assertEquals(5, resultSet.getLong(2)); - assertEquals(3, resultSet.getLong(3)); - assertEquals(1, resultSet.getLong(4)); - assertEquals(1, resultSet.getLong(5)); - assertEquals(1, resultSet.getLong(6)); - assertEquals(9126, resultSet.getLong(7)); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getLong(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getLong(i)); } - } - resultSet.next(); - // certain column types can only have certain values when called by getLong() or else a - // SQLexception is thrown. - // These column types are varchar, char, and float. 
- for (int i = 5; i < 7; i++) { - try { - resultSet.getLong(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getLong(1)); + assertEquals(5, resultSet.getLong(2)); + assertEquals(3, resultSet.getLong(3)); + assertEquals(1, resultSet.getLong(4)); + assertEquals(1, resultSet.getLong(5)); + assertEquals(1, resultSet.getLong(6)); + assertEquals(9126, resultSet.getLong(7)); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getLong(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getLong() or else a + // SQLexception is thrown. + // These column types are varchar, char, and float. + for (int i = 5; i < 7; i++) { + try { + resultSet.getLong(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetFloat() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getFloat(i), .1); - } - - resultSet.next(); - assertEquals(2, resultSet.getFloat(1), .1); - assertEquals(5, resultSet.getFloat(2), .1); - assertEquals(3.5, resultSet.getFloat(3), .1); - assertEquals(1, resultSet.getFloat(4), .1); - assertEquals(1, resultSet.getFloat(5), .1); - assertEquals(1, resultSet.getFloat(6), .1); - assertEquals(9126, resultSet.getFloat(7), .1); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getFloat(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for 
(int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getFloat(i), .1); } - } - resultSet.next(); - // certain column types can only have certain values when called by getFloat() or else a - // SQLexception is thrown. - // These column types are varchar and char. - for (int i = 5; i < 7; i++) { - try { - resultSet.getFloat(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getFloat(1), .1); + assertEquals(5, resultSet.getFloat(2), .1); + assertEquals(3.5, resultSet.getFloat(3), .1); + assertEquals(1, resultSet.getFloat(4), .1); + assertEquals(1, resultSet.getFloat(5), .1); + assertEquals(1, resultSet.getFloat(6), .1); + assertEquals(9126, resultSet.getFloat(7), .1); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getFloat(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getFloat() or else a + // SQLexception is thrown. + // These column types are varchar and char. 
+ for (int i = 5; i < 7; i++) { + try { + resultSet.getFloat(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetDouble() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getDouble(i), .1); - } - - resultSet.next(); - assertEquals(2, resultSet.getDouble(1), .1); - assertEquals(5, resultSet.getDouble(2), .1); - assertEquals(3.5, resultSet.getDouble(3), .1); - assertEquals(1, resultSet.getDouble(4), .1); - assertEquals(1, resultSet.getDouble(5), .1); - assertEquals(1, resultSet.getDouble(6), .1); - assertEquals(9126, resultSet.getDouble(7), .1); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getDouble(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getDouble(i), .1); } - } - resultSet.next(); - // certain column types can only have certain values when called by getDouble() or else a - // SQLexception is thrown. - // These column types are varchar and char. 
- for (int i = 5; i < 7; i++) { - try { - resultSet.getDouble(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getDouble(1), .1); + assertEquals(5, resultSet.getDouble(2), .1); + assertEquals(3.5, resultSet.getDouble(3), .1); + assertEquals(1, resultSet.getDouble(4), .1); + assertEquals(1, resultSet.getDouble(5), .1); + assertEquals(1, resultSet.getDouble(6), .1); + assertEquals(9126, resultSet.getDouble(7), .1); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getDouble(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getDouble() or else a + // SQLexception is thrown. + // These column types are varchar and char. + for (int i = 5; i < 7; i++) { + try { + resultSet.getDouble(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetBigDecimal() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_get(colA number(38,9))"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_get values(?)"); - BigDecimal bigDecimal1 = new BigDecimal("10000000000"); - preparedStatement.setBigDecimal(1, bigDecimal1); - preparedStatement.executeUpdate(); - - BigDecimal bigDecimal2 = new BigDecimal("100000000.123456789"); - preparedStatement.setBigDecimal(1, bigDecimal2); - preparedStatement.execute(); - - statement.execute("select * from test_get order by 1"); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - assertEquals(bigDecimal2, resultSet.getBigDecimal(1)); - assertEquals(bigDecimal2, resultSet.getBigDecimal("COLA")); - - preparedStatement.close(); - 
statement.execute("drop table if exists test_get"); - statement.close(); - resultSet.close(); - connection.close(); - - resultSet = numberCrossTesting(); - resultSet.next(); - for (int i = 1; i < 13; i++) { - assertNull(resultSet.getBigDecimal(i)); - } - resultSet.next(); - assertEquals(new BigDecimal(2), resultSet.getBigDecimal(1)); - assertEquals(new BigDecimal(5), resultSet.getBigDecimal(2)); - assertEquals(new BigDecimal(3.5), resultSet.getBigDecimal(3)); - assertEquals(new BigDecimal(1), resultSet.getBigDecimal(4)); - assertEquals(new BigDecimal(1), resultSet.getBigDecimal(5)); - assertEquals(new BigDecimal(1), resultSet.getBigDecimal(6)); - assertEquals(new BigDecimal(9126), resultSet.getBigDecimal(7)); - for (int i = 8; i < 13; i++) { - try { - resultSet.getBigDecimal(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_get(colA number(38,9))"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_get values(?)")) { + BigDecimal bigDecimal1 = new BigDecimal("10000000000"); + preparedStatement.setBigDecimal(1, bigDecimal1); + preparedStatement.executeUpdate(); + + BigDecimal bigDecimal2 = new BigDecimal("100000000.123456789"); + preparedStatement.setBigDecimal(1, bigDecimal2); + preparedStatement.execute(); + + statement.execute("select * from test_get order by 1"); + try (ResultSet resultSet = statement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(bigDecimal2, resultSet.getBigDecimal(1)); + assertEquals(bigDecimal2, resultSet.getBigDecimal("COLA")); + } } + statement.execute("drop table if exists test_get"); } - resultSet.next(); - for (int i = 5; i < 7; i++) { - try { - resultSet.getBigDecimal(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + try 
(ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + for (int i = 1; i < 13; i++) { + assertNull(resultSet.getBigDecimal(i)); + } + assertTrue(resultSet.next()); + assertEquals(new BigDecimal(2), resultSet.getBigDecimal(1)); + assertEquals(new BigDecimal(5), resultSet.getBigDecimal(2)); + assertEquals(new BigDecimal(3.5), resultSet.getBigDecimal(3)); + assertEquals(new BigDecimal(1), resultSet.getBigDecimal(4)); + assertEquals(new BigDecimal(1), resultSet.getBigDecimal(5)); + assertEquals(new BigDecimal(1), resultSet.getBigDecimal(6)); + assertEquals(new BigDecimal(9126), resultSet.getBigDecimal(7)); + for (int i = 8; i < 13; i++) { + try { + resultSet.getBigDecimal(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + for (int i = 5; i < 7; i++) { + try { + resultSet.getBigDecimal(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetBigDecimalNegative() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_dec(colA time)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_dec values(?)"); - java.sql.Time time = new java.sql.Time(System.currentTimeMillis()); - preparedStatement.setTime(1, time); - preparedStatement.executeUpdate(); - - statement.execute("select * from test_dec order by 1"); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - try { - resultSet.getBigDecimal(2, 38); - fail(); - } catch (SQLException ex) { - assertEquals(200032, ex.getErrorCode()); - } - statement.execute("drop table if exists test_dec"); - statement.close(); - resultSet.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + 
statement.execute("create or replace table test_dec(colA time)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_dec values(?)")) { + java.sql.Time time = new java.sql.Time(System.currentTimeMillis()); + preparedStatement.setTime(1, time); + preparedStatement.executeUpdate(); + + statement.execute("select * from test_dec order by 1"); + try (ResultSet resultSet = statement.getResultSet(); ) { + assertTrue(resultSet.next()); + try { + resultSet.getBigDecimal(2, 38); + fail(); + } catch (SQLException ex) { + assertEquals(200032, ex.getErrorCode()); + } + } + } + } finally { + statement.execute("drop table if exists test_dec"); + } + } } @Test public void testCursorPosition() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute(selectAllSQL); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - assertTrue(resultSet.isFirst()); - assertEquals(1, resultSet.getRow()); - resultSet.next(); - assertFalse(resultSet.isFirst()); - assertEquals(2, resultSet.getRow()); - assertFalse(resultSet.isLast()); - resultSet.next(); - assertEquals(3, resultSet.getRow()); - assertTrue(resultSet.isLast()); - resultSet.next(); - assertTrue(resultSet.isAfterLast()); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute(selectAllSQL); + try (ResultSet resultSet = statement.getResultSet()) { + assertTrue(resultSet.next()); + assertTrue(resultSet.isFirst()); + assertEquals(1, resultSet.getRow()); + assertTrue(resultSet.next()); + assertFalse(resultSet.isFirst()); + assertEquals(2, resultSet.getRow()); + assertFalse(resultSet.isLast()); + assertTrue(resultSet.next()); + assertEquals(3, resultSet.getRow()); + assertTrue(resultSet.isLast()); + assertFalse(resultSet.next()); + assertTrue(resultSet.isAfterLast()); + } + } } /** @@ -481,22 +492,27 @@ public 
void testCursorPosition() throws SQLException { @Test public void testGetBytes() throws SQLException { Properties props = new Properties(); - Connection connection = init(props); - ingestBinaryTestData(connection); - - // Get results in hex format (default). - ResultSet resultSet = connection.createStatement().executeQuery("select * from bin"); - resultSet.next(); - assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); - assertEquals("", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); - assertEquals("ABCD12", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); - assertEquals("00FF4201", resultSet.getString(1)); - connection.createStatement().execute("drop table if exists bin"); - connection.close(); + try (Connection connection = init(props); + Statement statement = connection.createStatement()) { + try { + ingestBinaryTestData(connection); + + // Get results in hex format (default). 
+ try (ResultSet resultSet = statement.executeQuery("select * from bin")) { + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); + assertEquals("", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); + assertEquals("ABCD12", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); + assertEquals("00FF4201", resultSet.getString(1)); + } + } finally { + statement.execute("drop table if exists bin"); + } + } } /** @@ -506,13 +522,16 @@ public void testGetBytes() throws SQLException { * @throws SQLException arises if any exception occurs */ private void ingestBinaryTestData(Connection connection) throws SQLException { - connection.createStatement().execute("create or replace table bin (b Binary)"); - PreparedStatement prepStatement = - connection.prepareStatement("insert into bin values (?), (?), (?)"); - prepStatement.setBytes(1, byteArrayTestCase1); - prepStatement.setBytes(2, byteArrayTestCase2); - prepStatement.setBytes(3, byteArrayTestCase3); - prepStatement.execute(); + try (Statement statement = connection.createStatement()) { + statement.execute("create or replace table bin (b Binary)"); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into bin values (?), (?), (?)")) { + prepStatement.setBytes(1, byteArrayTestCase1); + prepStatement.setBytes(2, byteArrayTestCase2); + prepStatement.setBytes(3, byteArrayTestCase3); + prepStatement.execute(); + } + } } /** @@ -524,266 +543,280 @@ private void ingestBinaryTestData(Connection connection) throws SQLException { public void testGetBytesInBase64() throws Exception { Properties props = new Properties(); props.setProperty("binary_output_format", "BAse64"); - Connection connection = init(props); - ingestBinaryTestData(connection); - - ResultSet resultSet = connection.createStatement().executeQuery("select * from bin"); 
- resultSet.next(); - assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); - assertEquals("", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); - assertEquals("q80S", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); - assertEquals("AP9CAQ==", resultSet.getString(1)); - - connection.createStatement().execute("drop table if exists bin"); - connection.close(); + try (Connection connection = init(props); + Statement statement = connection.createStatement()) { + try { + ingestBinaryTestData(connection); + + try (ResultSet resultSet = statement.executeQuery("select * from bin")) { + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); + assertEquals("", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); + assertEquals("q80S", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); + assertEquals("AP9CAQ==", resultSet.getString(1)); + } + } finally { + statement.execute("drop table if exists bin"); + } + } } // SNOW-31647 @Test public void testColumnMetaWithZeroPrecision() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table testColDecimal(cola number(38, 0), " + "colb number(17, 5))"); - - ResultSet resultSet = statement.executeQuery("select * from testColDecimal"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - - assertThat(resultSetMetaData.getColumnType(1), is(Types.BIGINT)); - assertThat(resultSetMetaData.getColumnType(2), is(Types.DECIMAL)); - assertThat(resultSetMetaData.isSigned(1), is(true)); - assertThat(resultSetMetaData.isSigned(2), is(true)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { 
+ try { + statement.execute( + "create or replace table testColDecimal(cola number(38, 0), " + "colb number(17, 5))"); - statement.execute("drop table if exists testColDecimal"); + try (ResultSet resultSet = statement.executeQuery("select * from testColDecimal")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - connection.close(); + assertThat(resultSetMetaData.getColumnType(1), is(Types.BIGINT)); + assertThat(resultSetMetaData.getColumnType(2), is(Types.DECIMAL)); + assertThat(resultSetMetaData.isSigned(1), is(true)); + assertThat(resultSetMetaData.isSigned(2), is(true)); + } + } finally { + statement.execute("drop table if exists testColDecimal"); + } + } } @Test public void testGetObjectOnFixedView() throws Exception { - Connection connection = init(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table testFixedView" - + "(C1 STRING NOT NULL COMMENT 'JDBC', " - + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " - + "C7 STRING, C8 STRING, C9 STRING) " - + "stage_file_format = (field_delimiter='|' " - + "error_on_column_count_mismatch=false)"); - - // put files - assertTrue( - "Failed to put a file", + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView")); - - ResultSet resultSet = - statement.executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%testFixedView"); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - while (resultSet.next()) { - for (int i = 0; i < resultSetMetaData.getColumnCount(); i++) { - assertNotNull(resultSet.getObject(i + 1)); + "create or replace table testFixedView" + + "(C1 STRING NOT NULL COMMENT 'JDBC', " + + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " + + "C7 STRING, C8 STRING, C9 STRING) " + + "stage_file_format = (field_delimiter='|' " + + 
"error_on_column_count_mismatch=false)"); + + // put files + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView")); + + try (ResultSet resultSet = + statement.executeQuery( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%testFixedView")) { + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + while (resultSet.next()) { + for (int i = 0; i < resultSetMetaData.getColumnCount(); i++) { + assertNotNull(resultSet.getObject(i + 1)); + } + } + } + } finally { + statement.execute("drop table if exists testFixedView"); } } - - resultSet.close(); - statement.execute("drop table if exists testFixedView"); - statement.close(); - connection.close(); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetColumnDisplaySizeAndPrecision() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - ResultSet resultSet = statement.executeQuery("select cast(1 as char)"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(1, resultSetMetaData.getPrecision(1)); - - resultSet = statement.executeQuery("select cast(1 as number(38, 0))"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(39, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(38, resultSetMetaData.getPrecision(1)); + ResultSetMetaData resultSetMetaData = null; + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + + try (ResultSet resultSet = statement.executeQuery("select cast(1 as char)")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(1, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select cast(1 as decimal(25, 15))"); - 
resultSetMetaData = resultSet.getMetaData(); - assertEquals(27, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(25, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as number(38, 0))")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(39, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(38, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select cast(1 as string)"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(1, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as decimal(25, 15))")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(27, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(25, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select cast(1 as string(30))"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(1, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as string)")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(1, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select to_date('2016-12-13', 'YYYY-MM-DD')"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(10, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(10, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as string(30))")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(1, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select to_time('12:34:56', 'HH24:MI:SS')"); - resultSetMetaData 
= resultSet.getMetaData(); - assertEquals(8, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(8, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = + statement.executeQuery("select to_date('2016-12-13', 'YYYY-MM-DD')")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(10, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(10, resultSetMetaData.getPrecision(1)); + } - statement.close(); - connection.close(); + try (ResultSet resultSet = + statement.executeQuery("select to_time('12:34:56', 'HH24:MI:SS')")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(8, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(8, resultSetMetaData.getPrecision(1)); + } + } } @Test public void testGetBoolean() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table testBoolean(cola boolean)"); - statement.execute("insert into testBoolean values(false)"); - ResultSet resultSet = statement.executeQuery("select * from testBoolean"); - resultSet.next(); - assertFalse(resultSet.getBoolean(1)); - - statement.execute("insert into testBoolean values(true)"); - resultSet = statement.executeQuery("select * from testBoolean"); - resultSet.next(); - assertFalse(resultSet.getBoolean(1)); - resultSet.next(); - assertTrue(resultSet.getBoolean(1)); - statement.execute("drop table if exists testBoolean"); - - statement.execute( - "create or replace table test_types(c1 number, c2 integer, c3 varchar, c4 char, " - + "c5 boolean, c6 float, c7 binary, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, " - + "c12 timestamp_tz)"); - statement.execute( - "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " - + "null, null)"); - statement.execute( - "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(1, 1, '1'," - + "'1', true, 1.0, '48454C4C4F', '1994-12-27', " - + 
"'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); - statement.execute("insert into test_types (c1, c2, c3, c4) values(2, 3, '4', '5')"); - resultSet = statement.executeQuery("select * from test_types"); - - resultSet.next(); - // assert that getBoolean returns false for null values - for (int i = 1; i < 13; i++) { - assertFalse(resultSet.getBoolean(i)); - } - // do the other columns that are out of order - // go to next row of result set column - resultSet.next(); - // assert that getBoolean returns true for values that equal 1 - assertTrue(resultSet.getBoolean(1)); - assertTrue(resultSet.getBoolean(2)); - assertTrue(resultSet.getBoolean(3)); - assertTrue(resultSet.getBoolean(4)); - assertTrue(resultSet.getBoolean(5)); - for (int i = 6; i < 13; i++) { - try { - resultSet.getBoolean(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table testBoolean(cola boolean)"); + statement.execute("insert into testBoolean values(false)"); + try (ResultSet resultSet = statement.executeQuery("select * from testBoolean")) { + assertTrue(resultSet.next()); + assertFalse(resultSet.getBoolean(1)); } - } - - resultSet.next(); - for (int i = 1; i < 5; i++) { - try { - resultSet.getBoolean(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + statement.execute("insert into testBoolean values(true)"); + try (ResultSet resultSet = statement.executeQuery("select * from testBoolean")) { + assertTrue(resultSet.next()); + assertFalse(resultSet.getBoolean(1)); + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(1)); + } + statement.execute("drop table if exists testBoolean"); + + statement.execute( + "create or replace table test_types(c1 number, c2 integer, c3 varchar, c4 char, " + + "c5 boolean, c6 
float, c7 binary, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, " + + "c12 timestamp_tz)"); + statement.execute( + "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " + + "null, null)"); + statement.execute( + "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(1, 1, '1'," + + "'1', true, 1.0, '48454C4C4F', '1994-12-27', " + + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); + statement.execute("insert into test_types (c1, c2, c3, c4) values(2, 3, '4', '5')"); + try (ResultSet resultSet = statement.executeQuery("select * from test_types")) { + + assertTrue(resultSet.next()); + // assert that getBoolean returns false for null values + for (int i = 1; i < 13; i++) { + assertFalse(resultSet.getBoolean(i)); + } + // do the other columns that are out of order + // go to next row of result set column + assertTrue(resultSet.next()); + // assert that getBoolean returns true for values that equal 1 + assertTrue(resultSet.getBoolean(1)); + assertTrue(resultSet.getBoolean(2)); + assertTrue(resultSet.getBoolean(3)); + assertTrue(resultSet.getBoolean(4)); + assertTrue(resultSet.getBoolean(5)); + for (int i = 6; i < 13; i++) { + try { + resultSet.getBoolean(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + + assertTrue(resultSet.next()); + for (int i = 1; i < 5; i++) { + try { + resultSet.getBoolean(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } } } - - statement.close(); - connection.close(); } @Test public void testGetClob() throws Throwable { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table testClob(cola text)"); - statement.execute("insert into testClob values('hello world')"); - statement.execute("insert into testClob values('hello world1')"); - 
statement.execute("insert into testClob values('hello world2')"); - statement.execute("insert into testClob values('hello world3')"); - ResultSet resultSet = statement.executeQuery("select * from testClob"); - resultSet.next(); - // test reading Clob - char[] chars = new char[100]; - Reader reader = resultSet.getClob(1).getCharacterStream(); - int charRead; - charRead = reader.read(chars, 0, chars.length); - assertEquals(charRead, 11); - assertEquals("hello world", resultSet.getClob(1).toString()); - - // test reading truncated clob - resultSet.next(); - Clob clob = resultSet.getClob(1); - assertEquals(clob.length(), 12); - clob.truncate(5); - reader = clob.getCharacterStream(); - - charRead = reader.read(chars, 0, chars.length); - assertEquals(charRead, 5); - - // read from input stream - resultSet.next(); - final InputStream input = resultSet.getClob(1).getAsciiStream(); - - Reader in = new InputStreamReader(input, StandardCharsets.UTF_8); - charRead = in.read(chars, 0, chars.length); - assertEquals(charRead, 12); - - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table testClob(cola text)"); + statement.execute("insert into testClob values('hello world')"); + statement.execute("insert into testClob values('hello world1')"); + statement.execute("insert into testClob values('hello world2')"); + statement.execute("insert into testClob values('hello world3')"); + try (ResultSet resultSet = statement.executeQuery("select * from testClob")) { + assertTrue(resultSet.next()); + // test reading Clob + char[] chars = new char[100]; + Reader reader = resultSet.getClob(1).getCharacterStream(); + int charRead; + charRead = reader.read(chars, 0, chars.length); + assertEquals(charRead, 11); + assertEquals("hello world", resultSet.getClob(1).toString()); + + // test reading truncated clob + assertTrue(resultSet.next()); + Clob clob = 
resultSet.getClob(1); + assertEquals(clob.length(), 12); + clob.truncate(5); + reader = clob.getCharacterStream(); + + charRead = reader.read(chars, 0, chars.length); + assertEquals(charRead, 5); + + // read from input stream + assertTrue(resultSet.next()); + final InputStream input = resultSet.getClob(1).getAsciiStream(); + + Reader in = new InputStreamReader(input, StandardCharsets.UTF_8); + charRead = in.read(chars, 0, chars.length); + assertEquals(charRead, 12); + } + } } @Test public void testFetchOnClosedResultSet() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(selectAllSQL); - assertFalse(resultSet.isClosed()); - resultSet.close(); - assertTrue(resultSet.isClosed()); - assertFalse(resultSet.next()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + ResultSet resultSet = statement.executeQuery(selectAllSQL); + assertFalse(resultSet.isClosed()); + resultSet.close(); + assertTrue(resultSet.isClosed()); + assertFalse(resultSet.next()); + } } @Test public void testReleaseDownloaderCurrentMemoryUsage() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); + try (Connection connection = init()) { + final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); + + try (Statement statement = connection.createStatement()) { - statement.executeQuery( - "select current_date(), true,2345234, 2343.0, 'testrgint\\n\\t' from table(generator(rowcount=>1000000))"); + statement.executeQuery( + "select current_date(), true,2345234, 2343.0, 'testrgint\\n\\t' from table(generator(rowcount=>1000000))"); - assertThat( - "hold memory usage for the resultSet before close", - SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); - statement.close(); 
- assertThat( - "closing statement didn't release memory allocated for result", - SnowflakeChunkDownloader.getCurrentMemoryUsage(), - equalTo(initialMemoryUsage)); - connection.close(); + assertThat( + "hold memory usage for the resultSet before close", + SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); + } + assertThat( + "closing statement didn't release memory allocated for result", + SnowflakeChunkDownloader.getCurrentMemoryUsage(), + equalTo(initialMemoryUsage)); + } } @Test @@ -800,200 +833,209 @@ public void testResultColumnSearchCaseSensitive() throws Exception { private void subTestResultColumnSearchCaseSensitive(String parameterName) throws Exception { Properties prop = new Properties(); prop.put("tracing", "FINEST"); - Connection connection = init(prop); - Statement statement = connection.createStatement(); - - ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL"); - - resultSet.next(); - assertEquals("1", resultSet.getString("TESTCOL")); - assertEquals("1", resultSet.getString("TESTCOL")); - try { - resultSet.getString("testcol"); - fail(); - } catch (SQLException e) { - assertEquals("Column not found: testcol", e.getMessage()); + try (Connection connection = init(prop); + Statement statement = connection.createStatement()) { + + try (ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL")) { + + assertTrue(resultSet.next()); + assertEquals("1", resultSet.getString("TESTCOL")); + assertEquals("1", resultSet.getString("TESTCOL")); + try { + resultSet.getString("testcol"); + fail(); + } catch (SQLException e) { + assertEquals("Column not found: testcol", e.getMessage()); + } + } + // try to do case-insensitive search + statement.executeQuery(String.format("alter session set %s=true", parameterName)); + + try (ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL")) { + assertTrue(resultSet.next()); + + // get twice so that the code path can hit the place where + // we use cached key pair 
(columnName, index) + assertEquals("1", resultSet.getString("TESTCOL")); + assertEquals("1", resultSet.getString("TESTCOL")); + assertEquals("1", resultSet.getString("testcol")); + assertEquals("1", resultSet.getString("testcol")); + } } - - // try to do case-insensitive search - statement.executeQuery(String.format("alter session set %s=true", parameterName)); - - resultSet = statement.executeQuery("select 1 AS TESTCOL"); - resultSet.next(); - - // get twice so that the code path can hit the place where - // we use cached key pair (columnName, index) - assertEquals("1", resultSet.getString("TESTCOL")); - assertEquals("1", resultSet.getString("TESTCOL")); - assertEquals("1", resultSet.getString("testcol")); - assertEquals("1", resultSet.getString("testcol")); } @Test public void testInvalidColumnIndex() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(selectAllSQL); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectAllSQL)) { - resultSet.next(); - try { - resultSet.getString(0); - fail(); - } catch (SQLException e) { - assertEquals(200032, e.getErrorCode()); - } - try { - resultSet.getString(2); - fail(); - } catch (SQLException e) { - assertEquals(200032, e.getErrorCode()); + assertTrue(resultSet.next()); + try { + resultSet.getString(0); + fail(); + } catch (SQLException e) { + assertEquals(200032, e.getErrorCode()); + } + try { + resultSet.getString(2); + fail(); + } catch (SQLException e) { + assertEquals(200032, e.getErrorCode()); + } } - resultSet.close(); - statement.close(); - connection.close(); } /** SNOW-28882: wasNull was not set properly */ @Test public void testWasNull() throws Exception { - Connection con = init(); - ResultSet ret = - con.createStatement() - .executeQuery( - "select cast(1/nullif(0,0) as double)," - + "cast(1/nullif(0,0) as int), 
100, " - + "cast(1/nullif(0,0) as number(8,2))"); - ret.next(); - assertThat("Double value cannot be null", ret.getDouble(1), equalTo(0.0)); - assertThat("wasNull should be true", ret.wasNull()); - assertThat("Integer value cannot be null", ret.getInt(2), equalTo(0)); - assertThat("wasNull should be true", ret.wasNull()); - assertThat("Non null column", ret.getInt(3), equalTo(100)); - assertThat("wasNull should be false", !ret.wasNull()); - assertThat("BigDecimal value must be null", ret.getBigDecimal(4), nullValue()); - assertThat("wasNull should be true", ret.wasNull()); + try (Connection con = init(); + ResultSet ret = + con.createStatement() + .executeQuery( + "select cast(1/nullif(0,0) as double)," + + "cast(1/nullif(0,0) as int), 100, " + + "cast(1/nullif(0,0) as number(8,2))")) { + assertTrue(ret.next()); + assertThat("Double value cannot be null", ret.getDouble(1), equalTo(0.0)); + assertThat("wasNull should be true", ret.wasNull()); + assertThat("Integer value cannot be null", ret.getInt(2), equalTo(0)); + assertThat("wasNull should be true", ret.wasNull()); + assertThat("Non null column", ret.getInt(3), equalTo(100)); + assertThat("wasNull should be false", !ret.wasNull()); + assertThat("BigDecimal value must be null", ret.getBigDecimal(4), nullValue()); + assertThat("wasNull should be true", ret.wasNull()); + } } /** SNOW-28390 */ @Test public void testParseInfAndNaNNumber() throws Exception { - Connection con = init(); - ResultSet ret = - con.createStatement().executeQuery("select to_double('inf'), to_double('-inf')"); - ret.next(); - assertThat("Positive Infinite Number", ret.getDouble(1), equalTo(Double.POSITIVE_INFINITY)); - assertThat("Negative Infinite Number", ret.getDouble(2), equalTo(Double.NEGATIVE_INFINITY)); - assertThat("Positive Infinite Number", ret.getFloat(1), equalTo(Float.POSITIVE_INFINITY)); - assertThat("Negative Infinite Number", ret.getFloat(2), equalTo(Float.NEGATIVE_INFINITY)); - - ret = con.createStatement().executeQuery("select 
to_double('nan')"); - ret.next(); - assertThat("Parse NaN", ret.getDouble(1), equalTo(Double.NaN)); - assertThat("Parse NaN", ret.getFloat(1), equalTo(Float.NaN)); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try (ResultSet ret = statement.executeQuery("select to_double('inf'), to_double('-inf')")) { + assertTrue(ret.next()); + assertThat("Positive Infinite Number", ret.getDouble(1), equalTo(Double.POSITIVE_INFINITY)); + assertThat("Negative Infinite Number", ret.getDouble(2), equalTo(Double.NEGATIVE_INFINITY)); + assertThat("Positive Infinite Number", ret.getFloat(1), equalTo(Float.POSITIVE_INFINITY)); + assertThat("Negative Infinite Number", ret.getFloat(2), equalTo(Float.NEGATIVE_INFINITY)); + } + try (ResultSet ret = statement.executeQuery("select to_double('nan')")) { + assertTrue(ret.next()); + assertThat("Parse NaN", ret.getDouble(1), equalTo(Double.NaN)); + assertThat("Parse NaN", ret.getFloat(1), equalTo(Float.NaN)); + } + } } /** SNOW-33227 */ @Test public void testTreatDecimalAsInt() throws Exception { - Connection con = init(); - ResultSet ret = con.createStatement().executeQuery("select 1"); - - ResultSetMetaData metaData = ret.getMetaData(); - assertThat(metaData.getColumnType(1), equalTo(Types.BIGINT)); + ResultSetMetaData metaData; + try (Connection con = init(); + Statement statement = con.createStatement()) { + try (ResultSet ret = statement.executeQuery("select 1")) { - con.createStatement().execute("alter session set jdbc_treat_decimal_as_int = false"); - - ret = con.createStatement().executeQuery("select 1"); - metaData = ret.getMetaData(); - assertThat(metaData.getColumnType(1), equalTo(Types.DECIMAL)); + metaData = ret.getMetaData(); + assertThat(metaData.getColumnType(1), equalTo(Types.BIGINT)); + } + statement.execute("alter session set jdbc_treat_decimal_as_int = false"); - con.close(); + try (ResultSet ret = statement.executeQuery("select 1")) { + metaData = ret.getMetaData(); + 
assertThat(metaData.getColumnType(1), equalTo(Types.DECIMAL)); + } + } } @Test public void testIsLast() throws Exception { - Connection con = init(); - ResultSet ret = con.createStatement().executeQuery("select * from orders_jdbc"); - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); - - ret.next(); - - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); - - int cnt = 0; - while (ret.next()) { - cnt++; - if (cnt == 72) { - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try (ResultSet ret = statement.executeQuery("select * from orders_jdbc")) { + assertTrue("should be before the first", ret.isBeforeFirst()); + assertFalse("should not be the first", ret.isFirst()); + + assertTrue(ret.next()); + + assertFalse("should not be before the first", ret.isBeforeFirst()); + assertTrue("should be the first", ret.isFirst()); + + int cnt = 0; + while (ret.next()) { + cnt++; + if (cnt == 72) { + assertTrue("should be the last", ret.isLast()); + assertFalse("should not be after the last", ret.isAfterLast()); + } + } + assertEquals(72, cnt); + + assertFalse(ret.next()); + + assertFalse("should not be the last", ret.isLast()); + assertTrue("should be afterthe last", ret.isAfterLast()); } - } - assertEquals(72, cnt); - - ret.next(); - - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be afterthe last", ret.isAfterLast()); - - // PUT one file - ret = - con.createStatement() - .executeQuery("PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~"); + // PUT one file + try (ResultSet ret = + statement.executeQuery( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~")) { - assertTrue("should be before the first", ret.isBeforeFirst()); - 
assertFalse("should not be the first", ret.isFirst()); + assertTrue("should be before the first", ret.isBeforeFirst()); + assertFalse("should not be the first", ret.isFirst()); - ret.next(); + assertTrue(ret.next()); - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); + assertFalse("should not be before the first", ret.isBeforeFirst()); + assertTrue("should be the first", ret.isFirst()); - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + assertTrue("should be the last", ret.isLast()); + assertFalse("should not be after the last", ret.isAfterLast()); - ret.next(); + assertFalse(ret.next()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be after the last", ret.isAfterLast()); + assertFalse("should not be the last", ret.isLast()); + assertTrue("should be after the last", ret.isAfterLast()); + } + } } @Test public void testUpdateCountOnCopyCmd() throws Exception { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute("create or replace table testcopy(cola string)"); - - // stage table has no file. Should return 0. - int rowCount = statement.executeUpdate("copy into testcopy"); - assertThat(rowCount, is(0)); - - // copy one file into table stage - statement.execute("copy into @%testcopy from (select 'test_string')"); - rowCount = statement.executeUpdate("copy into testcopy"); - assertThat(rowCount, is(1)); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table testcopy(cola string)"); - // cleanup - statement.execute("drop table if exists testcopy"); + // stage table has no file. Should return 0. 
+ int rowCount = statement.executeUpdate("copy into testcopy"); + assertThat(rowCount, is(0)); - con.close(); + // copy one file into table stage + statement.execute("copy into @%testcopy from (select 'test_string')"); + rowCount = statement.executeUpdate("copy into testcopy"); + assertThat(rowCount, is(1)); + } finally { + // cleanup + statement.execute("drop table if exists testcopy"); + } + } } @Test public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { - try (Connection con = init()) { - con.createStatement().execute("create or replace table testnullts(c1 timestamp, c2 time)"); + try (Connection con = init(); + Statement statement = con.createStatement()) { try { - con.createStatement().execute("insert into testnullts(c1, c2) values(null, null)"); - ResultSet rs = con.createStatement().executeQuery("select * from testnullts"); - assertTrue("should return result", rs.next()); - assertNull("return value must be null", rs.getTime(1)); - assertNull("return value must be null", rs.getTimestamp(2)); - rs.close(); + statement.execute("create or replace table testnullts(c1 timestamp, c2 time)"); + statement.execute("insert into testnullts(c1, c2) values(null, null)"); + try (ResultSet rs = statement.executeQuery("select * from testnullts")) { + assertTrue("should return result", rs.next()); + assertNull("return value must be null", rs.getTime(1)); + assertNull("return value must be null", rs.getTimestamp(2)); + } } finally { - con.createStatement().execute("drop table if exists testnullts"); + statement.execute("drop table if exists testnullts"); } } } @@ -1001,17 +1043,35 @@ public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { @Test public void testNextNegative() throws SQLException { try (Connection con = init()) { - ResultSet rs = con.createStatement().executeQuery("select 1"); - rs.next(); - System.setProperty("snowflake.enable_incident_test2", "true"); - try { - rs.next(); - fail(); - } catch (SQLException ex) { - 
assertEquals(200014, ex.getErrorCode()); + try (ResultSet rs = con.createStatement().executeQuery("select 1")) { + assertTrue(rs.next()); + System.setProperty("snowflake.enable_incident_test2", "true"); + try { + assertTrue(rs.next()); + fail(); + } catch (SQLException ex) { + assertEquals(200014, ex.getErrorCode()); + } + System.setProperty("snowflake.enable_incident_test2", "false"); } - System.setProperty("snowflake.enable_incident_test2", "false"); - rs.close(); + } + } + + /** SNOW-1416051; Added in > 3.16.0 */ + @Test + public void shouldSerializeArrayAndObjectAsStringOnGetObject() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select ARRAY_CONSTRUCT(1,2,3), OBJECT_CONSTRUCT('a', 4, 'b', 'test')")) { + assertTrue(resultSet.next()); + String expectedArrayAsString = "[\n 1,\n 2,\n 3\n]"; + assertEquals(expectedArrayAsString, resultSet.getObject(1)); + assertEquals(expectedArrayAsString, resultSet.getString(1)); + String expectedObjectAsString = "{\n \"a\": 4,\n \"b\": \"test\"\n}"; + assertEquals(expectedObjectAsString, resultSet.getObject(2)); + assertEquals(expectedObjectAsString, resultSet.getString(2)); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java index 5a13d368e..65cc27242 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java @@ -55,124 +55,147 @@ public ResultSetJsonVsArrowIT(String queryResultFormat) { public Connection init() throws SQLException { Connection conn = getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + 
stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testGSResult() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - ResultSet rs = - statement.executeQuery( - "select 1, 128, 65500, 10000000000000, " - + "1000000000000000000000000000000000000, NULL, " - + "current_timestamp, current_timestamp(0), current_timestamp(5)," - + "current_date, current_time, current_time(0), current_time(5);"); - rs.next(); - assertEquals((byte) 1, rs.getByte(1)); - assertEquals((short) 128, rs.getShort(2)); - assertEquals(65500, rs.getInt(3)); - assertEquals(10000000000000l, rs.getLong(4)); - assertEquals(new BigDecimal("1000000000000000000000000000000000000"), rs.getBigDecimal(5)); - assertNull(rs.getString(6)); - assertNotNull(rs.getTimestamp(7)); - assertNotNull(rs.getTimestamp(8)); - assertNotNull(rs.getTimestamp(9)); - - assertNotNull(rs.getDate(10)); - assertNotNull(rs.getTime(11)); - assertNotNull(rs.getTime(12)); - assertNotNull(rs.getTime(13)); + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = + statement.executeQuery( + "select 1, 128, 65500, 10000000000000, " + + "1000000000000000000000000000000000000, NULL, " + + "current_timestamp, current_timestamp(0), current_timestamp(5)," + + "current_date, current_time, current_time(0), current_time(5);")) { + assertTrue(rs.next()); + assertEquals((byte) 1, rs.getByte(1)); + assertEquals((short) 128, rs.getShort(2)); + assertEquals(65500, rs.getInt(3)); + assertEquals(10000000000000l, rs.getLong(4)); + assertEquals(new BigDecimal("1000000000000000000000000000000000000"), rs.getBigDecimal(5)); + assertNull(rs.getString(6)); + assertNotNull(rs.getTimestamp(7)); + assertNotNull(rs.getTimestamp(8)); + assertNotNull(rs.getTimestamp(9)); + + assertNotNull(rs.getDate(10)); + assertNotNull(rs.getTime(11)); + assertNotNull(rs.getTime(12)); + assertNotNull(rs.getTime(13)); + } } 
@Test public void testGSResultReal() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - statement.execute("create or replace table t (a real)"); - statement.execute("insert into t values (123.456)"); - ResultSet rs = statement.executeQuery("select * from t;"); - rs.next(); - assertEquals(123.456, rs.getFloat(1), 0.001); - finish("t", con); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table t (a real)"); + statement.execute("insert into t values (123.456)"); + try (ResultSet rs = statement.executeQuery("select * from t;")) { + assertTrue(rs.next()); + assertEquals(123.456, rs.getFloat(1), 0.001); + } + } finally { + statement.execute("drop table if exists t"); + } + } } @Test public void testGSResultScan() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - statement.execute("create or replace table t (a text)"); - statement.execute("insert into t values ('test')"); - ResultSet rs = statement.executeQuery("select count(*) from t;"); - rs.next(); - assertEquals(1, rs.getInt(1)); - String queryId = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - rs = con.createStatement().executeQuery("select * from table(result_scan('" + queryId + "'))"); - rs.next(); - assertEquals(1, rs.getInt(1)); - finish("t", con); + String queryId = null; + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table t (a text)"); + statement.execute("insert into t values ('test')"); + try (ResultSet rs = statement.executeQuery("select count(*) from t;")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + queryId = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + } + try (ResultSet rs = + statement.executeQuery("select * from table(result_scan('" + queryId + "'))")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + } 
finally { + statement.execute("drop table if exists t"); + } + } } @Test public void testGSResultForEmptyAndSmallTable() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - statement.execute("create or replace table t (a int)"); - ResultSet rs = statement.executeQuery("select * from t;"); - assertFalse(rs.next()); - statement.execute("insert into t values (1)"); - rs = statement.executeQuery("select * from t;"); - rs.next(); - assertEquals(1, rs.getInt(1)); - finish("t", con); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table t (a int)"); + try (ResultSet rs = statement.executeQuery("select * from t;")) { + assertFalse(rs.next()); + } + statement.execute("insert into t values (1)"); + try (ResultSet rs = statement.executeQuery("select * from t;")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + } finally { + statement.execute("drop table if exists t"); + } + } } @Test public void testSNOW89737() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute( - "create or replace table test_types(c1 number, c2 integer, c3 float, c4 varchar, c5 char, c6 " - + "binary, c7 boolean, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, c12 timestamp_tz, c13 " - + "variant, c14 object, c15 array)"); - statement.execute( - "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " - + "null, null, null, null, null)"); - statement.execute( - "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(5, 5, 5.0," - + "'hello', 'h', '48454C4C4F', true, '1994-12-27', " - + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); - statement.execute( - "insert into test_types(c13) select parse_json(' { \"key1\\x00\":\"value1\" } ')"); - statement.execute( - "insert into test_types(c14) 
select parse_json(' { \"key1\\x00\":\"value1\" } ')"); - statement.execute( - "insert into test_types(c15) select parse_json('{\"fruits\" : [\"apples\", \"pears\", " - + "\"oranges\"]}')"); - ResultSet resultSet = statement.executeQuery("select * from test_types"); - // test first row of result set against all "get" methods - assertTrue(resultSet.next()); - // test getString method against all other data types - assertEquals(null, resultSet.getString(1)); - assertEquals(null, resultSet.getString(2)); - assertEquals(null, resultSet.getString(3)); - assertEquals(null, resultSet.getString(4)); - assertEquals(null, resultSet.getString(5)); - assertEquals(null, resultSet.getString(6)); - assertEquals(null, resultSet.getString(7)); - assertEquals(null, resultSet.getString(8)); - assertEquals(null, resultSet.getString(9)); - assertEquals(null, resultSet.getString(10)); - assertEquals(null, resultSet.getString(11)); - assertEquals(null, resultSet.getString(12)); - assertEquals(null, resultSet.getString(13)); - assertEquals(null, resultSet.getString(14)); - assertEquals(null, resultSet.getString(15)); - finish("test_types", con); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute( + "create or replace table test_types(c1 number, c2 integer, c3 float, c4 varchar, c5 char, c6 " + + "binary, c7 boolean, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, c12 timestamp_tz, c13 " + + "variant, c14 object, c15 array)"); + statement.execute( + "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " + + "null, null, null, null, null)"); + statement.execute( + "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(5, 5, 5.0," + + "'hello', 'h', '48454C4C4F', true, '1994-12-27', " + + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); + statement.execute( + "insert into test_types(c13) select parse_json(' { 
\"key1\\x00\":\"value1\" } ')"); + statement.execute( + "insert into test_types(c14) select parse_json(' { \"key1\\x00\":\"value1\" } ')"); + statement.execute( + "insert into test_types(c15) select parse_json('{\"fruits\" : [\"apples\", \"pears\", " + + "\"oranges\"]}')"); + ResultSet resultSet = statement.executeQuery("select * from test_types"); + // test first row of result set against all "get" methods + assertTrue(resultSet.next()); + // test getString method against all other data types + assertEquals(null, resultSet.getString(1)); + assertEquals(null, resultSet.getString(2)); + assertEquals(null, resultSet.getString(3)); + assertEquals(null, resultSet.getString(4)); + assertEquals(null, resultSet.getString(5)); + assertEquals(null, resultSet.getString(6)); + assertEquals(null, resultSet.getString(7)); + assertEquals(null, resultSet.getString(8)); + assertEquals(null, resultSet.getString(9)); + assertEquals(null, resultSet.getString(10)); + assertEquals(null, resultSet.getString(11)); + assertEquals(null, resultSet.getString(12)); + assertEquals(null, resultSet.getString(13)); + assertEquals(null, resultSet.getString(14)); + assertEquals(null, resultSet.getString(15)); + } finally { + statement.execute("drop table if exists t"); + } + } } /** @@ -182,10 +205,10 @@ public void testSNOW89737() throws SQLException { */ @Test public void testSemiStructuredData() throws SQLException { - Connection con = init(); - ResultSet rs = - con.createStatement() - .executeQuery( + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = + statement.executeQuery( "select array_construct(10, 20, 30), " + "array_construct(null, 'hello', 3::double, 4, 5), " + "array_construct(), " @@ -193,59 +216,59 @@ public void testSemiStructuredData() throws SQLException { + "object_construct('Key_One', parse_json('NULL'), 'Key_Two', null, 'Key_Three', 'null')," + "to_variant(3.2)," + "parse_json('{ \"a\": null}')," - + " 100::variant;"); - while 
(rs.next()) { - assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); - assertEquals( - "[\n" - + " undefined,\n" - + " \"hello\",\n" - + " 3.000000000000000e+00,\n" - + " 4,\n" - + " 5\n" - + "]", - rs.getString(2)); - assertEquals("{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\"\n" + "}", rs.getString(4)); - assertEquals( - "{\n" + " \"Key_One\": null,\n" + " \"Key_Three\": \"null\"\n" + "}", rs.getString(5)); - assertEquals("{\n" + " \"a\": null\n" + "}", rs.getString(7)); - assertEquals("[]", rs.getString(3)); - assertEquals("3.2", rs.getString(6)); - assertEquals("100", rs.getString(8)); + + " 100::variant;")) { + while (rs.next()) { + assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); + assertEquals( + "[\n" + + " undefined,\n" + + " \"hello\",\n" + + " 3.000000000000000e+00,\n" + + " 4,\n" + + " 5\n" + + "]", + rs.getString(2)); + assertEquals("{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\"\n" + "}", rs.getString(4)); + assertEquals( + "{\n" + " \"Key_One\": null,\n" + " \"Key_Three\": \"null\"\n" + "}", + rs.getString(5)); + assertEquals("{\n" + " \"a\": null\n" + "}", rs.getString(7)); + assertEquals("[]", rs.getString(3)); + assertEquals("3.2", rs.getString(6)); + assertEquals("100", rs.getString(8)); + } } - con.close(); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStructuredTypes() throws SQLException { - Connection con = init(); - - Statement stmt = con.createStatement(); - stmt.execute("alter session set feature_structured_types = 'ENABLED';"); - - stmt.close(); - - ResultSet rs = - con.createStatement() - .executeQuery( - "select array_construct(10, 20, 30)::array(int), " - + "object_construct_keep_null('a', 1, 'b', 'BBBB', 'c', null)::object(a int, b varchar, c int), " - + "object_construct_keep_null('k1', 'v1', 'k2', null)::map(varchar, varchar);"); - while (rs.next()) { - assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); - 
assertEquals( - "{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\",\n" + " \"c\": null\n" + "}", - rs.getString(2)); - assertEquals("{\n" + " \"k1\": \"v1\",\n" + " \"k2\": null\n" + "}", rs.getString(3)); + try (Connection con = init(); + Statement stmt = con.createStatement()) { + stmt.execute("alter session set feature_structured_types = 'ENABLED';"); + + try (ResultSet rs = + stmt.executeQuery( + "select array_construct(10, 20, 30)::array(int), " + + "object_construct_keep_null('a', 1, 'b', 'BBBB', 'c', null)::object(a int, b varchar, c int), " + + "object_construct_keep_null('k1', 'v1', 'k2', null)::map(varchar, varchar);")) { + while (rs.next()) { + assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); + assertEquals( + "{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\",\n" + " \"c\": null\n" + "}", + rs.getString(2)); + assertEquals("{\n" + " \"k1\": \"v1\",\n" + " \"k2\": null\n" + "}", rs.getString(3)); + } + } } - con.close(); } private Connection init(String table, String column, String values) throws SQLException { Connection con = init(); - con.createStatement().execute("create or replace table " + table + " " + column); - con.createStatement().execute("insert into " + table + " values " + values); + try (Statement statement = con.createStatement()) { + statement.execute("create or replace table " + table + " " + column); + statement.execute("insert into " + table + " values " + values); + } return con; } @@ -253,11 +276,6 @@ private boolean isJSON() { return queryResultFormat.equalsIgnoreCase("json"); } - private void finish(String table, Connection con) throws SQLException { - con.createStatement().execute("drop table " + table); - con.close(); - } - /** * compare behaviors (json vs arrow) * @@ -275,43 +293,47 @@ public void testTinyInt() throws SQLException { String table = "test_arrow_tiny_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, 
column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(cases[i], rs.getInt(1)); - assertEquals((short) cases[i], rs.getShort(1)); - assertEquals((long) cases[i], rs.getLong(1)); - assertEquals((Integer.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - assertEquals(cases[i], rs.getByte(1)); - - byte[] bytes = new byte[1]; - bytes[0] = (byte) cases[i]; - assertArrayEquals(bytes, rs.getBytes(1)); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(cases[i], rs.getInt(1)); + assertEquals((short) cases[i], rs.getShort(1)); + assertEquals((long) cases[i], rs.getLong(1)); + assertEquals((Integer.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + assertEquals(cases[i], rs.getByte(1)); + + byte[] bytes = new byte[1]; + bytes[0] = (byte) cases[i]; + assertArrayEquals(bytes, rs.getBytes(1)); + } + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + 
assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); + } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -333,80 +355,84 @@ public void testScaledTinyInt() throws SQLException { String table = "test_arrow_tiny_int"; String column = "(a number(3,2))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_tiny_int"); - double delta = 0.001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_tiny_int")) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) 
ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals((String.format("%.2f", cases[i])), rs.getString(1)); - assertEquals(cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - if (isJSON()) { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - // Note: not caught by SQLException! 
- assertTrue(e.toString().contains("NumberFormatException")); + assertEquals((String.format("%.2f", cases[i])), rs.getString(1)); + assertEquals(cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + if (isJSON()) { + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + // Note: not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } + } else { + assertEquals(((byte) (cases[i] * 100)), rs.getByte(1)); + } + + if (!isJSON()) { + byte[] bytes = new byte[1]; + bytes[0] = rs.getByte(1); + assertArrayEquals(bytes, rs.getBytes(1)); + } } - } else { - assertEquals(((byte) (cases[i] * 100)), rs.getByte(1)); - } - if (!isJSON()) { - byte[] bytes = new byte[1]; - bytes[0] = rs.getByte(1); - assertArrayEquals(bytes, rs.getBytes(1)); + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -426,65 +452,70 @@ public void 
testSmallInt() throws SQLException { String table = "test_arrow_small_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(cases[i], rs.getInt(1)); - assertEquals(cases[i], rs.getShort(1)); - assertEquals((long) cases[i], rs.getLong(1)); - assertEquals((Integer.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - if (cases[i] <= 127 && cases[i] >= -128) { - assertEquals(cases[i], rs.getByte(1)); - } else { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(cases[i], rs.getInt(1)); + assertEquals(cases[i], rs.getShort(1)); + assertEquals((long) cases[i], rs.getLong(1)); + assertEquals((Integer.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + if (cases[i] <= 127 && cases[i] >= -128) { + assertEquals(cases[i], rs.getByte(1)); 
+ } else { + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + } + ByteBuffer bb = ByteBuffer.allocate(2); + bb.putShort(cases[i]); if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); + byte[] res = rs.getBytes(1); + for (int j = res.length - 1; j >= 0; j--) { + assertEquals(bb.array()[2 - res.length + j], res[j]); + } } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertArrayEquals(bb.array(), rs.getBytes(1)); } } - } - ByteBuffer bb = ByteBuffer.allocate(2); - bb.putShort(cases[i]); - if (isJSON()) { - byte[] res = rs.getBytes(1); - for (int j = res.length - 1; j >= 0; j--) { - assertEquals(bb.array()[2 - res.length + j], res[j]); - } - } else { - assertArrayEquals(bb.array(), rs.getBytes(1)); + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - 
assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -507,87 +538,93 @@ public void testScaledSmallInt() throws SQLException { String table = "test_arrow_small_int"; String column = "(a number(5,3))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_small_int"); - double delta = 0.0001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_small_int")) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.0001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + 
for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals((String.format("%.3f", cases[i])), rs.getString(1)); - assertEquals(cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! 
- assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - ByteBuffer byteBuffer = ByteBuffer.allocate(2); - byteBuffer.putShort(shortCompact[i]); - assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals((String.format("%.3f", cases[i])), rs.getString(1)); + assertEquals(cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + ByteBuffer byteBuffer = ByteBuffer.allocate(2); + byteBuffer.putShort(shortCompact[i]); + assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -610,78 +647,84 @@ public void testInt() throws SQLException { String table = "test_arrow_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, column, 
values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(cases[i], rs.getInt(1)); - if (cases[i] >= Short.MIN_VALUE && cases[i] <= Short.MAX_VALUE) { - assertEquals((short) cases[i], rs.getShort(1)); - } else { - try { - assertEquals((short) cases[i], rs.getShort(1)); - fail(); - } catch (Exception e) { - { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(cases[i], rs.getInt(1)); + if (cases[i] >= Short.MIN_VALUE && cases[i] <= Short.MAX_VALUE) { + assertEquals((short) cases[i], rs.getShort(1)); + } else { + try { + assertEquals((short) cases[i], rs.getShort(1)); + fail(); + } catch (Exception e) { + { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } - } - } - assertEquals((long) cases[i], rs.getLong(1)); - assertEquals((Integer.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - if (cases[i] <= 
127 && cases[i] >= -128) { - assertEquals(cases[i], rs.getByte(1)); - } else { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { + assertEquals((long) cases[i], rs.getLong(1)); + assertEquals((Integer.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + if (cases[i] <= 127 && cases[i] >= -128) { + assertEquals(cases[i], rs.getByte(1)); + } else { + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + } + ByteBuffer bb = ByteBuffer.allocate(4); + bb.putInt(cases[i]); if (isJSON()) { - // Note: not caught by SQLException! 
- assertTrue(e.toString().contains("NumberFormatException")); + byte[] res = rs.getBytes(1); + for (int j = res.length - 1; j >= 0; j--) { + assertEquals(bb.array()[4 - res.length + j], res[j]); + } } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertArrayEquals(bb.array(), rs.getBytes(1)); } } - } - ByteBuffer bb = ByteBuffer.allocate(4); - bb.putInt(cases[i]); - if (isJSON()) { - byte[] res = rs.getBytes(1); - for (int j = res.length - 1; j >= 0; j--) { - assertEquals(bb.array()[4 - res.length + j], res[j]); - } - } else { - assertArrayEquals(bb.array(), rs.getBytes(1)); + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -712,87 +755,93 @@ public void testScaledInt() throws SQLException { String column = String.format("(a number(10,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = 
con.createStatement().executeQuery("select * from test_arrow_int"); - double delta = 0.0000000001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_int")) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.0000000001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), 
se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - ByteBuffer byteBuffer = ByteBuffer.allocate(4); - byteBuffer.putInt(intCompacts[i]); - assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + ByteBuffer byteBuffer = ByteBuffer.allocate(4); + byteBuffer.putInt(intCompacts[i]); + assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -831,88 +880,95 @@ public void testBigInt() throws SQLException { String table = "test_arrow_big_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, 
column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - - if (cases[i] >= Integer.MIN_VALUE && cases[i] <= Integer.MAX_VALUE) { - assertEquals(cases[i], rs.getInt(1)); - } else { - try { - assertEquals(cases[i], rs.getInt(1)); - fail(); - } catch (Exception e) { - { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + + if (cases[i] >= Integer.MIN_VALUE && cases[i] <= Integer.MAX_VALUE) { + assertEquals(cases[i], rs.getInt(1)); + } else { + try { + assertEquals(cases[i], rs.getInt(1)); + fail(); + } catch (Exception e) { + { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } - } - } - if (cases[i] >= Short.MIN_VALUE && cases[i] <= Short.MAX_VALUE) { - assertEquals((short) cases[i], rs.getShort(1)); - } else { - try { - assertEquals((short) cases[i], rs.getShort(1)); - fail(); - } catch (Exception e) { - { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + if (cases[i] >= Short.MIN_VALUE && cases[i] <= 
Short.MAX_VALUE) { + assertEquals((short) cases[i], rs.getShort(1)); + } else { + try { + assertEquals((short) cases[i], rs.getShort(1)); + fail(); + } catch (Exception e) { + { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } - } - } - assertEquals(cases[i], rs.getLong(1)); - assertEquals((Long.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Long.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - if (cases[i] <= 127 && cases[i] >= -128) { - assertEquals(cases[i], rs.getByte(1)); - } else { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); + assertEquals(cases[i], rs.getLong(1)); + assertEquals((Long.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Long.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + if (cases[i] <= 127 && cases[i] >= -128) { + assertEquals(cases[i], rs.getByte(1)); } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + } + ByteBuffer bb = ByteBuffer.allocate(8); + bb.putLong(cases[i]); + byte[] res = rs.getBytes(1); + for (int j = res.length - 1; j >= 0; j--) { + assertEquals(bb.array()[8 - res.length + j], res[j]); } } - } - ByteBuffer bb = ByteBuffer.allocate(8); - bb.putLong(cases[i]); - byte[] res = rs.getBytes(1); - for (int j = res.length - 1; j >= 0; j--) { - assertEquals(bb.array()[8 - res.length + j], res[j]); + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -945,88 +1001,93 @@ public void testScaledBigInt() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = 
con.createStatement().executeQuery("select * from " + table); - - double delta = 0.0000000000000000001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); - try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.0000000000000000001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), 
se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - ByteBuffer byteBuffer = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); - byteBuffer.putLong(longCompacts[i]); - assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + ByteBuffer byteBuffer = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + byteBuffer.putLong(longCompacts[i]); + assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1059,79 +1120,83 @@ public void testDecimalNoScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = 
init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); + assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + 
assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } - assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1164,86 +1229,91 @@ public void testDecimalWithLargeScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - - double delta = 0.00000000000000000000000000000000000001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < 
cases.length; i++) { - rs.next(); - try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.00000000000000000000000000000000000001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + 
assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1277,87 +1347,92 @@ public void testDecimal() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + 
table); - - double delta = 0.00000000000000000000000000000000000001; - ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.00000000000000000000000000000000000001; + ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) 
ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - assertArrayEquals(byteBuf.putLong(0, longCompacts[i]).array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: 
not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + assertArrayEquals(byteBuf.putLong(0, longCompacts[i]).array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1395,20 +1470,25 @@ public void testDoublePrecision() throws SQLException { String column = "(a double)"; String values = "(" + StringUtils.join(cases, "),(") + ")"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + 
table); - int i = 0; - if (isJSON()) { - while (rs.next()) { - assertEquals(json_results[i++], Double.toString(rs.getDouble(1))); - } - } else { - // Arrow results has no precision loss - while (rs.next()) { - assertEquals(cases[i++], Double.toString(rs.getDouble(1))); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + if (isJSON()) { + while (rs.next()) { + assertEquals(json_results[i++], Double.toString(rs.getDouble(1))); + } + } else { + // Arrow results has no precision loss + while (rs.next()) { + assertEquals(cases[i++], Double.toString(rs.getDouble(1))); + } + } + } finally { + statement.execute("drop table if exists " + table); } } - finish(table, con); } @Test @@ -1416,19 +1496,20 @@ public void testBoolean() throws SQLException { String table = "test_arrow_boolean"; String column = "(a boolean)"; String values = "(true),(null),(false)"; - Connection conn = init(table, column, values); - Statement statement = conn.createStatement(); - ResultSet rs = statement.executeQuery("select * from " + table); - assertTrue(rs.next()); - assertTrue(rs.getBoolean(1)); - assertEquals("TRUE", rs.getString(1)); - assertTrue(rs.next()); - assertFalse(rs.getBoolean(1)); - assertTrue(rs.next()); - assertFalse(rs.getBoolean(1)); - assertEquals("FALSE", rs.getString(1)); - assertFalse(rs.next()); - finish(table, conn); + try (Connection conn = init(table, column, values); + Statement statement = conn.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + assertTrue(rs.next()); + assertTrue(rs.getBoolean(1)); + assertEquals("TRUE", rs.getString(1)); + assertTrue(rs.next()); + assertFalse(rs.getBoolean(1)); + assertTrue(rs.next()); + assertFalse(rs.getBoolean(1)); + assertEquals("FALSE", rs.getString(1)); + assertFalse(rs.next()); + statement.execute("drop table if exists " + table); + } } @Test @@ -1436,19 
+1517,24 @@ public void testClientSideSorting() throws SQLException { String table = "test_arrow_sort_on"; String column = "( a int, b double, c string)"; String values = "(1,2.0,'test'),(0,2.0, 'test'),(1,2.0,'abc')"; - Connection conn = init(table, column, values); - Statement statement = conn.createStatement(); - // turn on sorting mode - statement.execute("set-sf-property sort on"); - - ResultSet rs = statement.executeQuery("select * from " + table); - rs.next(); - assertEquals("0", rs.getString(1)); - rs.next(); - assertEquals("1", rs.getString(1)); - rs.next(); - assertEquals("test", rs.getString(3)); - finish(table, conn); + try (Connection conn = init(table, column, values); + Statement statement = conn.createStatement()) { + try { + // turn on sorting mode + statement.execute("set-sf-property sort on"); + + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + assertTrue(rs.next()); + assertEquals("0", rs.getString(1)); + assertTrue(rs.next()); + assertEquals("1", rs.getString(1)); + assertTrue(rs.next()); + assertEquals("test", rs.getString(3)); + } + } finally { + statement.execute("drop table if exists " + table); + } + } } @Test @@ -1471,58 +1557,69 @@ public void testClientSideSortingOnBatchedChunk() throws SQLException { "insert into T values (3);", }; - try (Connection conn = init()) { - Statement stat = conn.createStatement(); - for (String q : queries) { - stat.execute(q); - } + try (Connection conn = init(); + Statement stat = conn.createStatement()) { + try { + for (String q : queries) { + stat.execute(q); + } - ResultSet rs = stat.executeQuery("select * from S"); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(3, rs.getInt(1)); - assertFalse(rs.next()); - stat.execute("drop stream S"); - stat.execute("drop table T"); + try (ResultSet rs = stat.executeQuery("select * from S")) { + assertTrue(rs.next()); + 
assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(3, rs.getInt(1)); + assertFalse(rs.next()); + } + } finally { + stat.execute("drop stream S"); + stat.execute("drop table T"); + } } } @Test public void testTimestampNTZAreAllNulls() throws SQLException { - try (Connection con = init()) { - Statement statement = con.createStatement(); - statement.executeQuery( - "create or replace table test_null_ts_ntz (a timestampntz(9)) as select null from table(generator" - + "(rowcount => 1000000)) v " - + "order by 1;"); - ResultSet rs = statement.executeQuery("select * from test_null_ts_ntz"); - while (rs.next()) { - rs.getObject(1); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.executeQuery( + "create or replace table test_null_ts_ntz (a timestampntz(9)) as select null from table(generator" + + "(rowcount => 1000000)) v " + + "order by 1;"); + try (ResultSet rs = statement.executeQuery("select * from test_null_ts_ntz")) { + while (rs.next()) { + rs.getObject(1); + } + } + } finally { + statement.executeQuery("drop table if exists test_null_ts_ntz"); } - statement.executeQuery("drop table if exists test_null_ts_ntz"); - statement.close(); } } @Test public void TestArrowStringRoundTrip() throws SQLException { String big_number = "11111111112222222222333333333344444444"; - try (Connection con = init()) { - Statement st = con.createStatement(); - for (int i = 0; i < 38; i++) { - StringBuilder to_insert = new StringBuilder(big_number); - if (i != 0) { - int insert_to = 38 - i; - to_insert.insert(insert_to, "."); + try (Connection con = init(); + Statement st = con.createStatement()) { + try { + for (int i = 0; i < 38; i++) { + StringBuilder to_insert = new StringBuilder(big_number); + if (i != 0) { + int insert_to = 38 - i; + to_insert.insert(insert_to, "."); + } + st.execute("create or replace table test_arrow_string (a NUMBER(38, " + i + ") )"); 
+ st.execute("insert into test_arrow_string values (" + to_insert + ")"); + try (ResultSet rs = st.executeQuery("select * from test_arrow_string")) { + assertTrue(rs.next()); + assertEquals(to_insert.toString(), rs.getString(1)); + } } - st.execute("create or replace table test_arrow_string (a NUMBER(38, " + i + ") )"); - st.execute("insert into test_arrow_string values (" + to_insert + ")"); - ResultSet rs = st.executeQuery("select * from test_arrow_string"); - assertTrue(rs.next()); - assertEquals(to_insert.toString(), rs.getString(1)); + } finally { st.execute("drop table if exists test_arrow_string"); } } @@ -1531,14 +1628,18 @@ public void TestArrowStringRoundTrip() throws SQLException { @Test public void TestArrowFloatRoundTrip() throws SQLException { float[] cases = {Float.MAX_VALUE, Float.MIN_VALUE}; - try (Connection con = init()) { - Statement st = con.createStatement(); - for (float f : cases) { - st.executeQuery("create or replace table test_arrow_float (a FLOAT)"); - st.executeQuery("insert into test_arrow_float values (" + f + ")"); - ResultSet rs = st.executeQuery("select * from test_arrow_float"); - assertTrue(rs.next()); - assertEquals(f, rs.getFloat(1), Float.MIN_VALUE); + try (Connection con = init(); + Statement st = con.createStatement()) { + try { + for (float f : cases) { + st.executeQuery("create or replace table test_arrow_float (a FLOAT)"); + st.executeQuery("insert into test_arrow_float values (" + f + ")"); + try (ResultSet rs = st.executeQuery("select * from test_arrow_float")) { + assertTrue(rs.next()); + assertEquals(f, rs.getFloat(1), Float.MIN_VALUE); + } + } + } finally { st.executeQuery("drop table if exists test_arrow_float"); } } @@ -1549,8 +1650,8 @@ public void TestArrowFloatRoundTrip() throws SQLException { public void TestTimestampNTZWithDLS() throws SQLException { TimeZone origTz = TimeZone.getDefault(); String[] timeZones = new String[] {"America/New_York", "America/Los_Angeles"}; - try (Connection con = init()) { - 
Statement st = con.createStatement(); + try (Connection con = init(); + Statement st = con.createStatement()) { for (String timeZone : timeZones) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); st.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); @@ -1627,21 +1728,22 @@ public void TestTimestampNTZWithDLS() throws SQLException { + "')"); } - ResultSet resultSet = st.executeQuery("select col1, col2, col3 from src_ts"); - int j = 0; - while (resultSet.next()) { - Object data1 = resultSet.getObject(1); - assertEquals(testTimestampNTZValues.get(j), data1.toString()); - - Object data2 = resultSet.getObject(2); - assertEquals(testTimestampLTZValues.get(j)[1], data2.toString()); - - Object data3 = resultSet.getObject(3); - assertThat(data3, instanceOf(Timestamp.class)); - assertEquals( - parseTimestampTZ(testTimestampTZValues.get(j)).toEpochSecond(), - ((Timestamp) data3).getTime() / 1000); - j++; + try (ResultSet resultSet = st.executeQuery("select col1, col2, col3 from src_ts")) { + int j = 0; + while (resultSet.next()) { + Object data1 = resultSet.getObject(1); + assertEquals(testTimestampNTZValues.get(j), data1.toString()); + + Object data2 = resultSet.getObject(2); + assertEquals(testTimestampLTZValues.get(j)[1], data2.toString()); + + Object data3 = resultSet.getObject(3); + assertThat(data3, instanceOf(Timestamp.class)); + assertEquals( + parseTimestampTZ(testTimestampTZValues.get(j)).toEpochSecond(), + ((Timestamp) data3).getTime() / 1000); + j++; + } } } } finally { @@ -1654,24 +1756,25 @@ public void TestTimestampNTZBinding() throws SQLException { TimeZone origTz = TimeZone.getDefault(); try (Connection con = init()) { TimeZone.setDefault(TimeZone.getTimeZone("PST")); - Statement st = con.createStatement(); - st.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); - st.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); - st.execute("create or replace table src_ts(col1 TIMESTAMP_NTZ)"); - PreparedStatement 
prepst = con.prepareStatement("insert into src_ts values(?)"); - Timestamp tz = Timestamp.valueOf("2018-03-11 01:10:34.0"); - prepst.setTimestamp(1, tz); - prepst.execute(); - - ResultSet resultSet = st.executeQuery("SELECT COL1 FROM SRC_TS"); - Object data; - int i = 1; - while (resultSet.next()) { - data = resultSet.getObject(i); - System.out.println(data.toString()); + try (Statement st = con.createStatement()) { + st.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); + st.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); + st.execute("create or replace table src_ts(col1 TIMESTAMP_NTZ)"); + try (PreparedStatement prepst = con.prepareStatement("insert into src_ts values(?)")) { + Timestamp tz = Timestamp.valueOf("2018-03-11 01:10:34.0"); + prepst.setTimestamp(1, tz); + prepst.execute(); + } + try (ResultSet resultSet = st.executeQuery("SELECT COL1 FROM SRC_TS")) { + Object data; + int i = 1; + while (resultSet.next()) { + data = resultSet.getObject(i); + System.out.println(data.toString()); + } + } } - } finally { - TimeZone.setDefault(origTz); } + TimeZone.setDefault(origTz); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java index c45e7d9c2..6add203f5 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java @@ -5,6 +5,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -44,16 +45,16 @@ public static Collection data() { public static Connection getConnection(int injectSocketTimeout) throws SQLException { Connection connection = BaseJDBCTest.getConnection(injectSocketTimeout); - Statement statement = connection.createStatement(); - statement.execute( - 
"alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + } return connection; } @@ -65,19 +66,14 @@ public ResultSetJsonVsArrowMultiTZIT(String queryResultFormat, String timeZone) private Connection init(String table, String column, String values) throws SQLException { Connection con = getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); - con.createStatement() - .execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - con.createStatement().execute("create or replace table " + table + " " + column); - con.createStatement().execute("insert into " + table + " values " + values); + try (Statement statement = con.createStatement()) { + statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + statement.execute("create or replace table " + table + " " + column); + statement.execute("insert into " + table + " values " + values); + } return con; } - private void finish(String table, Connection con) throws SQLException { - con.createStatement().execute("drop table " + table); - con.close(); - System.clearProperty("user.timezone"); - } - @Test public void testTime() throws SQLException { String[] 
times = { @@ -116,21 +112,25 @@ public void testDate() throws Exception { String column = "(a date)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - if (i == cases.length - 2) { - assertEquals("0001-01-01", rs.getDate(1).toString()); - } else { - assertEquals(cases[i], rs.getDate(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + if (i == cases.length - 2) { + assertEquals("0001-01-01", rs.getDate(1).toString()); + } else { + assertEquals(cases[i], rs.getDate(1).toString()); + } + i++; + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); } - i++; + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } public void testTimeWithScale(String[] times, int scale) throws SQLException { @@ -138,15 +138,17 @@ public void testTimeWithScale(String[] times, int scale) throws SQLException { String column = "(a time(" + scale + "))"; String values = "('" + StringUtils.join(times, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - for (int i = 0; i < times.length; i++) { - rs.next(); - // Java Time class does not have nanoseconds - assertEquals("00:01:23", rs.getString(1)); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < times.length; i++) { + assertTrue(rs.next()); + // Java Time class does not have nanoseconds + 
assertEquals("00:01:23", rs.getString(1)); + } + assertTrue(rs.next()); + assertNull(rs.getTime(1)); } - rs.next(); - assertNull(rs.getTime(1)); } @Test @@ -184,16 +186,20 @@ public void testTimestampNTZWithScale(int scale) throws SQLException { String column = "(a timestamp_ntz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(results[i++], rs.getString(1)); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(results[i++], rs.getString(1)); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } @Test @@ -214,15 +220,20 @@ public void testTimestampNTZWithNanos() throws SQLException { String column = "(a timestamp_ntz)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(cases[i++], rs.getTimestamp(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(cases[i++], rs.getTimestamp(1).toString()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + 
System.clearProperty("user.timezone"); + } } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index 51fd295e9..efd185926 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -3,6 +3,7 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertArrayEquals; @@ -27,7 +28,6 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; @@ -93,34 +93,34 @@ public ResultSetLatestIT() { */ @Test public void testMemoryClearingAfterInterrupt() throws Throwable { - ResultSet resultSet = null; - final Connection connection = getConnection(); - final Statement statement = connection.createStatement(); - final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); - try { - // Inject an InterruptedException into the SnowflakeChunkDownloader.terminate() function - SnowflakeChunkDownloader.setInjectedDownloaderException(new InterruptedException()); - // 10000 rows should be enough to force result into multiple chunks - resultSet = - statement.executeQuery( - "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))"); - assertThat( - "hold memory usage for the resultSet before close", - SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); - // Result closure should catch InterruptedException and throw a SQLException after its caught - resultSet.close(); - fail("Exception should have been 
thrown"); - } catch (SQLException ex) { - assertEquals((int) ErrorCode.INTERRUPTED.getMessageCode(), ex.getErrorCode()); - // Assert all memory was released - assertThat( - "closing statement didn't release memory allocated for result", - SnowflakeChunkDownloader.getCurrentMemoryUsage(), - equalTo(initialMemoryUsage)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); + try { + // Inject an InterruptedException into the SnowflakeChunkDownloader.terminate() function + SnowflakeChunkDownloader.setInjectedDownloaderException(new InterruptedException()); + // 10000 rows should be enough to force result into multiple chunks + try (ResultSet resultSet = + statement.executeQuery( + "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))")) { + assertThat( + "hold memory usage for the resultSet before close", + SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); + // Result closure should catch InterruptedException and throw a SQLException after its + // caught + } + fail("Exception should have been thrown"); + } catch (SQLException ex) { + assertEquals((int) ErrorCode.INTERRUPTED.getMessageCode(), ex.getErrorCode()); + // Assert all memory was released + assertThat( + "closing statement didn't release memory allocated for result", + SnowflakeChunkDownloader.getCurrentMemoryUsage(), + equalTo(initialMemoryUsage)); + } + // Unset the exception injection so statement and connection can close without exceptions + SnowflakeChunkDownloader.setInjectedDownloaderException(null); } - // Unset the exception injection so statement and connection can close without exceptions - SnowflakeChunkDownloader.setInjectedDownloaderException(null); - closeSQLObjects(resultSet, statement, connection); } /** @@ -132,34 +132,36 @@ public void testMemoryClearingAfterInterrupt() throws Throwable { public void 
testChunkDownloaderNoHang() throws SQLException { int stmtCount = 30; int rowCount = 170000; - Connection connection = getConnection(); - List rsList = new ArrayList<>(); - // Set memory limit to low number - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(2000000); - // open multiple statements concurrently to overwhelm current memory allocation - for (int i = 0; i < stmtCount; ++i) { - Statement stmt = connection.createStatement(); - ResultSet resultSet = - stmt.executeQuery( - "select randstr(100, random()) from table(generator(rowcount => " + rowCount + "))"); - rsList.add(resultSet); - } - // Assert that all resultSets exist and can successfully download the needed chunks without - // hanging - for (int i = 0; i < stmtCount; i++) { - rsList.get(i).next(); - assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", rsList.get(i).getString(1))); - rsList.get(i).close(); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement()) { + List rsList = new ArrayList<>(); + // Set memory limit to low number + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(2000000); + // open multiple statements concurrently to overwhelm current memory allocation + for (int i = 0; i < stmtCount; ++i) { + ResultSet resultSet = + stmt.executeQuery( + "select randstr(100, random()) from table(generator(rowcount => " + + rowCount + + "))"); + rsList.add(resultSet); + } + // Assert that all resultSets exist and can successfully download the needed chunks without + // hanging + for (int i = 0; i < stmtCount; i++) { + rsList.get(i).next(); + assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", rsList.get(i).getString(1))); + rsList.get(i).close(); + } + // set memory limit back to default invalid value so it does not get used + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); } - // set 
memory limit back to default invalid value so it does not get used - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); - connection.close(); } /** This tests that the SnowflakeChunkDownloader doesn't hang when memory limits are low. */ @@ -167,39 +169,42 @@ public void testChunkDownloaderNoHang() throws SQLException { public void testChunkDownloaderSetRetry() throws SQLException { int stmtCount = 3; int rowCount = 170000; - Connection connection = getConnection(); - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(1 * 1024 * 1024); - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 1); - // Set memory limit to low number - // open multiple statements concurrently to overwhelm current memory allocation - for (int i = 0; i < stmtCount; ++i) { - Statement stmt = connection.createStatement(); - ResultSet resultSet = - stmt.executeQuery( - "select randstr(100, random()) from table(generator(rowcount => " + rowCount + "))"); - // consume half of the results and go to the next statement - for (int j = 0; j < rowCount / 2; j++) { - resultSet.next(); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement()) { + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(1 * 1024 * 1024); + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 1); + // Set memory limit to low number + // open multiple statements concurrently to overwhelm current memory allocation + for (int i = 0; i < stmtCount; ++i) { + try (ResultSet resultSet = + stmt.executeQuery( + "select randstr(100, random()) from table(generator(rowcount => " + + rowCount + + "))")) { + // consume half of the results and go to the next 
statement + for (int j = 0; j < rowCount / 2; j++) { + resultSet.next(); + } + assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", resultSet.getString(1))); + } } - assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", resultSet.getString(1))); + // reset retry to MAX_NUM_OF_RETRY, which is 10 + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 10); + // set memory limit back to default invalid value so it does not get used + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); } - // reset retry to MAX_NUM_OF_RETRY, which is 10 - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 10); - // set memory limit back to default invalid value so it does not get used - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); - connection.close(); } /** @@ -212,57 +217,59 @@ public void testChunkDownloaderSetRetry() throws SQLException { @Test public void testMetadataAPIMetricCollection() throws SQLException, ExecutionException, InterruptedException { - Connection con = init(); - Telemetry telemetry = - con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); - DatabaseMetaData metadata = con.getMetaData(); - // Call one of the DatabaseMetadata API functions but for simplicity, ensure returned ResultSet - // is empty - metadata.getColumns("fakecatalog", "fakeschema", null, null); - LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); - // No result set has been downloaded from server so no chunk downloader metrics have been - // collected - // Logs should contain 1 item: the data about the getColumns() parameters - assertEquals(logs.size(), 1); - // Assert the log is of type client_metadata_api_metrics - assertEquals( - 
logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), - TelemetryField.METADATA_METRICS.toString()); - // Assert function name and params match and that query id exists - assertEquals(logs.get(0).getMessage().get("function_name").textValue(), "getColumns"); - TestUtil.assertValidQueryId(logs.get(0).getMessage().get("query_id").textValue()); - JsonNode parameterValues = logs.get(0).getMessage().get("function_parameters"); - assertEquals(parameterValues.get("catalog").textValue(), "fakecatalog"); - assertEquals(parameterValues.get("schema").textValue(), "fakeschema"); - assertNull(parameterValues.get("general_name_pattern").textValue()); - assertNull(parameterValues.get("specific_name_pattern").textValue()); - - // send data to clear log for next test - telemetry.sendBatchAsync().get(); - assertEquals(0, ((TelemetryClient) telemetry).logBuffer().size()); - - String catalog = con.getCatalog(); - String schema = con.getSchema(); - metadata.getColumns(catalog, schema, null, null); - logs = ((TelemetryClient) telemetry).logBuffer(); - assertEquals(logs.size(), 2); - // first item in log buffer is metrics on time to consume first result set chunk - assertEquals( - logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), - TelemetryField.TIME_CONSUME_FIRST_RESULT.toString()); - // second item in log buffer is metrics on getProcedureColumns() parameters - // Assert the log is of type client_metadata_api_metrics - assertEquals( - logs.get(1).getMessage().get(TelemetryUtil.TYPE).textValue(), - TelemetryField.METADATA_METRICS.toString()); - // Assert function name and params match and that query id exists - assertEquals(logs.get(1).getMessage().get("function_name").textValue(), "getColumns"); - TestUtil.assertValidQueryId(logs.get(1).getMessage().get("query_id").textValue()); - parameterValues = logs.get(1).getMessage().get("function_parameters"); - assertEquals(parameterValues.get("catalog").textValue(), catalog); - 
assertEquals(parameterValues.get("schema").textValue(), schema); - assertNull(parameterValues.get("general_name_pattern").textValue()); - assertNull(parameterValues.get("specific_name_pattern").textValue()); + try (Connection con = init()) { + Telemetry telemetry = + con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); + DatabaseMetaData metadata = con.getMetaData(); + // Call one of the DatabaseMetadata API functions but for simplicity, ensure returned + // ResultSet + // is empty + metadata.getColumns("fakecatalog", "fakeschema", null, null); + LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); + // No result set has been downloaded from server so no chunk downloader metrics have been + // collected + // Logs should contain 1 item: the data about the getColumns() parameters + assertEquals(logs.size(), 1); + // Assert the log is of type client_metadata_api_metrics + assertEquals( + logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), + TelemetryField.METADATA_METRICS.toString()); + // Assert function name and params match and that query id exists + assertEquals(logs.get(0).getMessage().get("function_name").textValue(), "getColumns"); + TestUtil.assertValidQueryId(logs.get(0).getMessage().get("query_id").textValue()); + JsonNode parameterValues = logs.get(0).getMessage().get("function_parameters"); + assertEquals(parameterValues.get("catalog").textValue(), "fakecatalog"); + assertEquals(parameterValues.get("schema").textValue(), "fakeschema"); + assertNull(parameterValues.get("general_name_pattern").textValue()); + assertNull(parameterValues.get("specific_name_pattern").textValue()); + + // send data to clear log for next test + telemetry.sendBatchAsync().get(); + assertEquals(0, ((TelemetryClient) telemetry).logBuffer().size()); + + String catalog = con.getCatalog(); + String schema = con.getSchema(); + metadata.getColumns(catalog, schema, null, null); + logs = ((TelemetryClient) telemetry).logBuffer(); + 
assertEquals(logs.size(), 2); + // first item in log buffer is metrics on time to consume first result set chunk + assertEquals( + logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), + TelemetryField.TIME_CONSUME_FIRST_RESULT.toString()); + // second item in log buffer is metrics on getProcedureColumns() parameters + // Assert the log is of type client_metadata_api_metrics + assertEquals( + logs.get(1).getMessage().get(TelemetryUtil.TYPE).textValue(), + TelemetryField.METADATA_METRICS.toString()); + // Assert function name and params match and that query id exists + assertEquals(logs.get(1).getMessage().get("function_name").textValue(), "getColumns"); + TestUtil.assertValidQueryId(logs.get(1).getMessage().get("query_id").textValue()); + parameterValues = logs.get(1).getMessage().get("function_parameters"); + assertEquals(parameterValues.get("catalog").textValue(), catalog); + assertEquals(parameterValues.get("schema").textValue(), schema); + assertNull(parameterValues.get("general_name_pattern").textValue()); + assertNull(parameterValues.get("specific_name_pattern").textValue()); + } } /** @@ -273,15 +280,15 @@ public void testMetadataAPIMetricCollection() */ @Test public void testGetCharacterStreamNull() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table JDBC_NULL_CHARSTREAM (col1 varchar(16))"); - statement.execute("insert into JDBC_NULL_CHARSTREAM values(NULL)"); - ResultSet rs = statement.executeQuery("select * from JDBC_NULL_CHARSTREAM"); - rs.next(); - assertNull(rs.getCharacterStream(1)); - rs.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table JDBC_NULL_CHARSTREAM (col1 varchar(16))"); + statement.execute("insert into JDBC_NULL_CHARSTREAM values(NULL)"); + try (ResultSet rs = statement.executeQuery("select * from 
JDBC_NULL_CHARSTREAM")) { + rs.next(); + assertNull(rs.getCharacterStream(1)); + } + } } /** @@ -291,46 +298,50 @@ public void testGetCharacterStreamNull() throws SQLException { */ @Test public void testMultipleChunks() throws Exception { - Connection con = init(); - Statement statement = con.createStatement(); - - // 10000 rows should be enough to force result into multiple chunks - ResultSet resultSet = - statement.executeQuery( - "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))"); - int cnt = 0; - while (resultSet.next()) { - ++cnt; - } - assertTrue(cnt >= 0); - Telemetry telemetry = - con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); - LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); - - // there should be a log for each of the following fields - TelemetryField[] expectedFields = { - TelemetryField.TIME_CONSUME_FIRST_RESULT, TelemetryField.TIME_CONSUME_LAST_RESULT, - TelemetryField.TIME_WAITING_FOR_CHUNKS, TelemetryField.TIME_DOWNLOADING_CHUNKS, - TelemetryField.TIME_PARSING_CHUNKS - }; - boolean[] succeeded = new boolean[expectedFields.length]; - - for (int i = 0; i < expectedFields.length; i++) { - succeeded[i] = false; - for (TelemetryData log : logs) { - if (log.getMessage().get(TelemetryUtil.TYPE).textValue().equals(expectedFields[i].field)) { - succeeded[i] = true; - break; + try (Connection con = init(); + Statement statement = con.createStatement(); + + // 10000 rows should be enough to force result into multiple chunks + ResultSet resultSet = + statement.executeQuery( + "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))")) { + int cnt = 0; + while (resultSet.next()) { + ++cnt; + } + assertTrue(cnt >= 0); + Telemetry telemetry = + con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); + LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); + + // there should be a log for each of the following fields + TelemetryField[] 
expectedFields = { + TelemetryField.TIME_CONSUME_FIRST_RESULT, TelemetryField.TIME_CONSUME_LAST_RESULT, + TelemetryField.TIME_WAITING_FOR_CHUNKS, TelemetryField.TIME_DOWNLOADING_CHUNKS, + TelemetryField.TIME_PARSING_CHUNKS + }; + boolean[] succeeded = new boolean[expectedFields.length]; + + for (int i = 0; i < expectedFields.length; i++) { + succeeded[i] = false; + for (TelemetryData log : logs) { + if (log.getMessage() + .get(TelemetryUtil.TYPE) + .textValue() + .equals(expectedFields[i].field)) { + succeeded[i] = true; + break; + } } } - } - for (int i = 0; i < expectedFields.length; i++) { - assertThat( - String.format("%s field not found in telemetry logs\n", expectedFields[i].field), - succeeded[i]); + for (int i = 0; i < expectedFields.length; i++) { + assertThat( + String.format("%s field not found in telemetry logs\n", expectedFields[i].field), + succeeded[i]); + } + telemetry.sendBatchAsync(); } - telemetry.sendBatchAsync(); } /** @@ -340,48 +351,49 @@ public void testMultipleChunks() throws Exception { */ @Test public void testResultSetMetadata() throws SQLException { - Connection connection = init(); final Map params = getConnectionParameters(); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - ResultSet resultSet = statement.executeQuery("select * from test_rsmd"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals( - params.get("database").toUpperCase(), resultSetMetaData.getCatalogName(1).toUpperCase()); - assertEquals( - params.get("schema").toUpperCase(), resultSetMetaData.getSchemaName(1).toUpperCase()); - assertEquals("TEST_RSMD", resultSetMetaData.getTableName(1)); - assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(22, 
resultSetMetaData.getColumnDisplaySize(1)); - assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); - assertEquals("COLA", resultSetMetaData.getColumnName(1)); - assertEquals(3, resultSetMetaData.getColumnType(1)); - assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); - assertEquals(20, resultSetMetaData.getPrecision(1)); - assertEquals(5, resultSetMetaData.getScale(1)); - assertFalse(resultSetMetaData.isAutoIncrement(1)); - assertFalse(resultSetMetaData.isCaseSensitive(1)); - assertFalse(resultSetMetaData.isCurrency(1)); - assertFalse(resultSetMetaData.isDefinitelyWritable(1)); - assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); - assertTrue(resultSetMetaData.isReadOnly(1)); - assertTrue(resultSetMetaData.isSearchable(1)); - assertTrue(resultSetMetaData.isSigned(1)); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - List colNames = secretMetaData.getColumnNames(); - assertEquals("COLA", colNames.get(0)); - assertEquals("COLB", colNames.get(1)); - assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); - assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); - TestUtil.assertValidQueryId(secretMetaData.getQueryID()); - - statement.execute("drop table if exists test_rsmd"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + ResultSet resultSet = statement.executeQuery("select * from test_rsmd"); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + assertEquals( + params.get("database").toUpperCase(), + resultSetMetaData.getCatalogName(1).toUpperCase()); + assertEquals( + params.get("schema").toUpperCase(), 
resultSetMetaData.getSchemaName(1).toUpperCase()); + assertEquals("TEST_RSMD", resultSetMetaData.getTableName(1)); + assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); + assertEquals("COLA", resultSetMetaData.getColumnName(1)); + assertEquals(3, resultSetMetaData.getColumnType(1)); + assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); + assertEquals(20, resultSetMetaData.getPrecision(1)); + assertEquals(5, resultSetMetaData.getScale(1)); + assertFalse(resultSetMetaData.isAutoIncrement(1)); + assertFalse(resultSetMetaData.isCaseSensitive(1)); + assertFalse(resultSetMetaData.isCurrency(1)); + assertFalse(resultSetMetaData.isDefinitelyWritable(1)); + assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); + assertTrue(resultSetMetaData.isReadOnly(1)); + assertTrue(resultSetMetaData.isSearchable(1)); + assertTrue(resultSetMetaData.isSigned(1)); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + List colNames = secretMetaData.getColumnNames(); + assertEquals("COLA", colNames.get(0)); + assertEquals("COLB", colNames.get(1)); + assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); + assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); + TestUtil.assertValidQueryId(secretMetaData.getQueryID()); + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } } /** @@ -391,110 +403,107 @@ public void testResultSetMetadata() throws SQLException { */ @Test public void testEmptyResultSet() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - // the only function that returns ResultSetV1.emptyResultSet() - ResultSet rs = statement.getGeneratedKeys(); - assertFalse(rs.next()); - 
assertFalse(rs.isClosed()); - assertEquals(0, rs.getInt(1)); - assertEquals(0, rs.getInt("col1")); - assertEquals(0L, rs.getLong(2)); - assertEquals(0L, rs.getLong("col2")); - assertEquals(0, rs.getShort(3)); - assertEquals(0, rs.getShort("col3")); - assertEquals("", rs.getString(4)); - assertEquals("", rs.getString("col4")); - assertEquals(0, rs.getDouble(5), 0); - assertEquals(0, rs.getDouble("col5"), 0); - assertEquals(0, rs.getFloat(6), 0); - assertEquals(0, rs.getFloat("col6"), 0); - assertEquals(false, rs.getBoolean(7)); - assertEquals(false, rs.getBoolean("col7")); - assertEquals((byte) 0, rs.getByte(8)); - assertEquals((byte) 0, rs.getByte("col8")); - assertEquals(null, rs.getBinaryStream(9)); - assertEquals(null, rs.getBinaryStream("col9")); - assertEquals(null, rs.getDate(10)); - assertEquals(null, rs.getDate(10, new FakeCalendar())); - assertEquals(null, rs.getDate("col10")); - assertEquals(null, rs.getDate("col10", new FakeCalendar())); - assertEquals(null, rs.getTime(11)); - assertEquals(null, rs.getTime(11, new FakeCalendar())); - assertEquals(null, rs.getTime("col11")); - assertEquals(null, rs.getTime("col11", new FakeCalendar())); - assertEquals(null, rs.getTimestamp(12)); - assertEquals(null, rs.getTimestamp(12, new FakeCalendar())); - assertEquals(null, rs.getTimestamp("col12")); - assertEquals(null, rs.getTimestamp("col12", new FakeCalendar())); - assertEquals(null, rs.getDate(13)); - assertEquals(null, rs.getDate("col13")); - assertEquals(null, rs.getAsciiStream(14)); - assertEquals(null, rs.getAsciiStream("col14")); - assertArrayEquals(new byte[0], rs.getBytes(15)); - assertArrayEquals(new byte[0], rs.getBytes("col15")); - assertNull(rs.getBigDecimal(16)); - assertNull(rs.getBigDecimal(16, 38)); - assertNull(rs.getBigDecimal("col16")); - assertNull(rs.getBigDecimal("col16", 38)); - assertNull(rs.getRef(17)); - assertNull(rs.getRef("col17")); - assertNull(rs.getArray(18)); - assertNull(rs.getArray("col18")); - assertNull(rs.getBlob(19)); - 
assertNull(rs.getBlob("col19")); - assertNull(rs.getClob(20)); - assertNull(rs.getClob("col20")); - assertEquals(0, rs.findColumn("col1")); - assertNull(rs.getUnicodeStream(21)); - assertNull(rs.getUnicodeStream("col21")); - assertNull(rs.getURL(22)); - assertNull(rs.getURL("col22")); - assertNull(rs.getObject(23)); - assertNull(rs.getObject("col24")); - assertNull(rs.getObject(23, SnowflakeResultSetV1.class)); - assertNull(rs.getObject("col23", SnowflakeResultSetV1.class)); - assertNull(rs.getNString(25)); - assertNull(rs.getNString("col25")); - assertNull(rs.getNClob(26)); - assertNull(rs.getNClob("col26")); - assertNull(rs.getNCharacterStream(27)); - assertNull(rs.getNCharacterStream("col27")); - assertNull(rs.getCharacterStream(28)); - assertNull(rs.getCharacterStream("col28")); - assertNull(rs.getSQLXML(29)); - assertNull(rs.getSQLXML("col29")); - assertNull(rs.getStatement()); - assertNull(rs.getWarnings()); - assertNull(rs.getCursorName()); - assertNull(rs.getMetaData()); - assertNull(rs.getRowId(1)); - assertNull(rs.getRowId("col1")); - assertEquals(0, rs.getRow()); - assertEquals(0, rs.getFetchDirection()); - assertEquals(0, rs.getFetchSize()); - assertEquals(0, rs.getType()); - assertEquals(0, rs.getConcurrency()); - assertEquals(0, rs.getHoldability()); - assertNull(rs.unwrap(SnowflakeResultSetV1.class)); - assertFalse(rs.isWrapperFor(SnowflakeResultSetV1.class)); - assertFalse(rs.wasNull()); - assertFalse(rs.isFirst()); - assertFalse(rs.isBeforeFirst()); - assertFalse(rs.isLast()); - assertFalse(rs.isAfterLast()); - assertFalse(rs.first()); - assertFalse(rs.last()); - assertFalse(rs.previous()); - assertFalse(rs.rowUpdated()); - assertFalse(rs.rowInserted()); - assertFalse(rs.rowDeleted()); - assertFalse(rs.absolute(1)); - assertFalse(rs.relative(1)); - rs.close(); - assertTrue(rs.isClosed()); - statement.close(); - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement(); + // the only function that returns 
ResultSetV1.emptyResultSet() + ResultSet rs = statement.getGeneratedKeys()) { + assertFalse(rs.next()); + assertFalse(rs.isClosed()); + assertEquals(0, rs.getInt(1)); + assertEquals(0, rs.getInt("col1")); + assertEquals(0L, rs.getLong(2)); + assertEquals(0L, rs.getLong("col2")); + assertEquals(0, rs.getShort(3)); + assertEquals(0, rs.getShort("col3")); + assertEquals("", rs.getString(4)); + assertEquals("", rs.getString("col4")); + assertEquals(0, rs.getDouble(5), 0); + assertEquals(0, rs.getDouble("col5"), 0); + assertEquals(0, rs.getFloat(6), 0); + assertEquals(0, rs.getFloat("col6"), 0); + assertEquals(false, rs.getBoolean(7)); + assertEquals(false, rs.getBoolean("col7")); + assertEquals((byte) 0, rs.getByte(8)); + assertEquals((byte) 0, rs.getByte("col8")); + assertEquals(null, rs.getBinaryStream(9)); + assertEquals(null, rs.getBinaryStream("col9")); + assertEquals(null, rs.getDate(10)); + assertEquals(null, rs.getDate(10, new FakeCalendar())); + assertEquals(null, rs.getDate("col10")); + assertEquals(null, rs.getDate("col10", new FakeCalendar())); + assertEquals(null, rs.getTime(11)); + assertEquals(null, rs.getTime(11, new FakeCalendar())); + assertEquals(null, rs.getTime("col11")); + assertEquals(null, rs.getTime("col11", new FakeCalendar())); + assertEquals(null, rs.getTimestamp(12)); + assertEquals(null, rs.getTimestamp(12, new FakeCalendar())); + assertEquals(null, rs.getTimestamp("col12")); + assertEquals(null, rs.getTimestamp("col12", new FakeCalendar())); + assertEquals(null, rs.getDate(13)); + assertEquals(null, rs.getDate("col13")); + assertEquals(null, rs.getAsciiStream(14)); + assertEquals(null, rs.getAsciiStream("col14")); + assertArrayEquals(new byte[0], rs.getBytes(15)); + assertArrayEquals(new byte[0], rs.getBytes("col15")); + assertNull(rs.getBigDecimal(16)); + assertNull(rs.getBigDecimal(16, 38)); + assertNull(rs.getBigDecimal("col16")); + assertNull(rs.getBigDecimal("col16", 38)); + assertNull(rs.getRef(17)); + 
assertNull(rs.getRef("col17")); + assertNull(rs.getArray(18)); + assertNull(rs.getArray("col18")); + assertNull(rs.getBlob(19)); + assertNull(rs.getBlob("col19")); + assertNull(rs.getClob(20)); + assertNull(rs.getClob("col20")); + assertEquals(0, rs.findColumn("col1")); + assertNull(rs.getUnicodeStream(21)); + assertNull(rs.getUnicodeStream("col21")); + assertNull(rs.getURL(22)); + assertNull(rs.getURL("col22")); + assertNull(rs.getObject(23)); + assertNull(rs.getObject("col24")); + assertNull(rs.getObject(23, SnowflakeResultSetV1.class)); + assertNull(rs.getObject("col23", SnowflakeResultSetV1.class)); + assertNull(rs.getNString(25)); + assertNull(rs.getNString("col25")); + assertNull(rs.getNClob(26)); + assertNull(rs.getNClob("col26")); + assertNull(rs.getNCharacterStream(27)); + assertNull(rs.getNCharacterStream("col27")); + assertNull(rs.getCharacterStream(28)); + assertNull(rs.getCharacterStream("col28")); + assertNull(rs.getSQLXML(29)); + assertNull(rs.getSQLXML("col29")); + assertNull(rs.getStatement()); + assertNull(rs.getWarnings()); + assertNull(rs.getCursorName()); + assertNull(rs.getMetaData()); + assertNull(rs.getRowId(1)); + assertNull(rs.getRowId("col1")); + assertEquals(0, rs.getRow()); + assertEquals(0, rs.getFetchDirection()); + assertEquals(0, rs.getFetchSize()); + assertEquals(0, rs.getType()); + assertEquals(0, rs.getConcurrency()); + assertEquals(0, rs.getHoldability()); + assertNull(rs.unwrap(SnowflakeResultSetV1.class)); + assertFalse(rs.isWrapperFor(SnowflakeResultSetV1.class)); + assertFalse(rs.wasNull()); + assertFalse(rs.isFirst()); + assertFalse(rs.isBeforeFirst()); + assertFalse(rs.isLast()); + assertFalse(rs.isAfterLast()); + assertFalse(rs.first()); + assertFalse(rs.last()); + assertFalse(rs.previous()); + assertFalse(rs.rowUpdated()); + assertFalse(rs.rowInserted()); + assertFalse(rs.rowDeleted()); + assertFalse(rs.absolute(1)); + assertFalse(rs.relative(1)); + } } /** @@ -504,67 +513,69 @@ public void testEmptyResultSet() throws 
SQLException { */ @Test public void testBytesCrossTypeTests() throws Exception { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertArrayEquals(null, resultSet.getBytes(i)); - } - resultSet.next(); - assertArrayEquals(intToByteArray(2), resultSet.getBytes(1)); - assertArrayEquals(intToByteArray(5), resultSet.getBytes(2)); - assertArrayEquals(floatToByteArray(3.5f), resultSet.getBytes(3)); - assertArrayEquals(new byte[] {1}, resultSet.getBytes(4)); - assertArrayEquals(new byte[] {(byte) '1'}, resultSet.getBytes(5)); - assertArrayEquals("1".getBytes(), resultSet.getBytes(6)); - - for (int i = 7; i < 12; i++) { - try { - resultSet.getBytes(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertArrayEquals(null, resultSet.getBytes(i)); + } + assertTrue(resultSet.next()); + assertArrayEquals(intToByteArray(2), resultSet.getBytes(1)); + assertArrayEquals(intToByteArray(5), resultSet.getBytes(2)); + assertArrayEquals(floatToByteArray(3.5f), resultSet.getBytes(3)); + assertArrayEquals(new byte[] {1}, resultSet.getBytes(4)); + assertArrayEquals(new byte[] {(byte) '1'}, resultSet.getBytes(5)); + assertArrayEquals("1".getBytes(), resultSet.getBytes(6)); + + for (int i = 7; i < 12; i++) { + try { + resultSet.getBytes(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } - } - byte[] decoded = SFBinary.fromHex("48454C4C4F").getBytes(); + byte[] decoded = SFBinary.fromHex("48454C4C4F").getBytes(); - assertArrayEquals(decoded, resultSet.getBytes(12)); + assertArrayEquals(decoded, resultSet.getBytes(12)); + } } // SNOW-204185 // 30s for timeout. 
This test usually finishes in around 10s. @Test(timeout = 30000) public void testResultChunkDownloaderException() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { - // The generated resultSet must be big enough for triggering result chunk downloader - String query = - "select current_date(), true,2345234, 2343.0, 'testrgint\\n" - + "\\t' from table(generator(rowcount=>10000))"; + // The generated resultSet must be big enough for triggering result chunk downloader + String query = + "select current_date(), true,2345234, 2343.0, 'testrgint\\n" + + "\\t' from table(generator(rowcount=>10000))"; - ResultSet resultSet = statement.executeQuery(query); - resultSet.next(); // should finish successfully + try (ResultSet resultSet = statement.executeQuery(query)) { + assertTrue(resultSet.next()); // should finish successfully + } - try { - SnowflakeChunkDownloader.setInjectedDownloaderException( - new OutOfMemoryError("Fake OOM error for testing")); - resultSet = statement.executeQuery(query); try { - // Normally this step won't cause too long. Because we will get exception once trying to get - // result from the first chunk downloader - while (resultSet.next()) {} - fail("Should not reach here. Last next() command is supposed to throw an exception"); - } catch (SnowflakeSQLException ex) { - // pass, do nothing + SnowflakeChunkDownloader.setInjectedDownloaderException( + new OutOfMemoryError("Fake OOM error for testing")); + try (ResultSet resultSet = statement.executeQuery(query)) { + try { + // Normally this step won't cause too long. Because we will get exception once trying to + // get + // result from the first chunk downloader + while (resultSet.next()) {} + fail("Should not reach here. 
Last next() command is supposed to throw an exception"); + } catch (SnowflakeSQLException ex) { + // pass, do nothing + } + } + } finally { + SnowflakeChunkDownloader.setInjectedDownloaderException(null); } - } finally { - SnowflakeChunkDownloader.setInjectedDownloaderException(null); } - - statement.close(); - connection.close(); } /** @@ -574,21 +585,21 @@ public void testResultChunkDownloaderException() throws SQLException { */ @Test public void testGetObjectWithBigInt() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='json'"); - // test with greatest possible number and greatest negative possible number - String[] extremeNumbers = { - "99999999999999999999999999999999999999", "-99999999999999999999999999999999999999" - }; - for (int i = 0; i < extremeNumbers.length; i++) { - ResultSet resultSet = statement.executeQuery("select " + extremeNumbers[i]); - resultSet.next(); - assertEquals(Types.BIGINT, resultSet.getMetaData().getColumnType(1)); - assertEquals(new BigDecimal(extremeNumbers[i]), resultSet.getObject(1)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set jdbc_query_result_format ='json'"); + // test with greatest possible number and greatest negative possible number + String[] extremeNumbers = { + "99999999999999999999999999999999999999", "-99999999999999999999999999999999999999" + }; + for (int i = 0; i < extremeNumbers.length; i++) { + try (ResultSet resultSet = statement.executeQuery("select " + extremeNumbers[i])) { + assertTrue(resultSet.next()); + assertEquals(Types.BIGINT, resultSet.getMetaData().getColumnType(1)); + assertEquals(new BigDecimal(extremeNumbers[i]), resultSet.getObject(1)); + } + } } - statement.close(); - connection.close(); } private byte[] intToByteArray(int i) { @@ -607,50 +618,57 @@ private byte[] 
floatToByteArray(float i) { */ @Test public void testGetBigDecimalWithScale() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_get(colA number(38,9))"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_get values(?)"); - preparedStatement.setBigDecimal(1, null); - preparedStatement.addBatch(); - BigDecimal bigDecimal = new BigDecimal("100000000.123456789"); - preparedStatement.setBigDecimal(1, bigDecimal); - preparedStatement.addBatch(); - preparedStatement.executeBatch(); - - ResultSet resultSet = statement.executeQuery("select * from test_get"); - resultSet.next(); - assertEquals(null, resultSet.getBigDecimal(1, 5)); - assertEquals(null, resultSet.getBigDecimal("COLA", 5)); - resultSet.next(); - assertEquals(bigDecimal.setScale(5, RoundingMode.HALF_UP), resultSet.getBigDecimal(1, 5)); - assertEquals(bigDecimal.setScale(5, RoundingMode.HALF_UP), resultSet.getBigDecimal("COLA", 5)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_get(colA number(38,9))"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_get values(?)")) { + preparedStatement.setBigDecimal(1, null); + preparedStatement.addBatch(); + BigDecimal bigDecimal = new BigDecimal("100000000.123456789"); + preparedStatement.setBigDecimal(1, bigDecimal); + preparedStatement.addBatch(); + preparedStatement.executeBatch(); + + try (ResultSet resultSet = statement.executeQuery("select * from test_get")) { + assertTrue(resultSet.next()); + assertEquals(null, resultSet.getBigDecimal(1, 5)); + assertEquals(null, resultSet.getBigDecimal("COLA", 5)); + assertTrue(resultSet.next()); + assertEquals(bigDecimal.setScale(5, RoundingMode.HALF_UP), resultSet.getBigDecimal(1, 5)); + assertEquals( + bigDecimal.setScale(5, 
RoundingMode.HALF_UP), resultSet.getBigDecimal("COLA", 5)); + } + } + } } @Test public void testGetDataTypeWithTimestampTz() throws Exception { try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); - statement.executeQuery("create or replace table ts_test(ts timestamp_tz)"); - ResultSet resultSet = statement.executeQuery("select * from ts_test"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP_WITH_TIMEZONE - assertEquals(resultSetMetaData.getColumnType(1), 2014); - // Assert that TIMESTAMP_TZ column returns Timestamp class name - assertEquals(resultSetMetaData.getColumnClassName(1), Timestamp.class.getName()); - + ResultSetMetaData resultSetMetaData = null; + try (Statement statement = connection.createStatement()) { + statement.executeQuery("create or replace table ts_test(ts timestamp_tz)"); + try (ResultSet resultSet = statement.executeQuery("select * from ts_test")) { + resultSetMetaData = resultSet.getMetaData(); + // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP_WITH_TIMEZONE + assertEquals(resultSetMetaData.getColumnType(1), 2014); + // Assert that TIMESTAMP_TZ column returns Timestamp class name + assertEquals(resultSetMetaData.getColumnClassName(1), Timestamp.class.getName()); + } + } SFBaseSession baseSession = connection.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); Field field = SFBaseSession.class.getDeclaredField("enableReturnTimestampWithTimeZone"); field.setAccessible(true); field.set(baseSession, false); - statement = connection.createStatement(); - resultSet = statement.executeQuery("select * from ts_test"); - resultSetMetaData = resultSet.getMetaData(); - // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP when - // enableReturnTimestampWithTimeZone is false. 
- assertEquals(resultSetMetaData.getColumnType(1), Types.TIMESTAMP); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select * from ts_test")) { + resultSetMetaData = resultSet.getMetaData(); + // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP when + // enableReturnTimestampWithTimeZone is false. + assertEquals(resultSetMetaData.getColumnType(1), Types.TIMESTAMP); + } } } @@ -662,29 +680,32 @@ public void testGetDataTypeWithTimestampTz() throws Exception { */ @Test public void testGetEmptyOrNullClob() throws SQLException { - Connection connection = init(); - Clob clob = connection.createClob(); - clob.setString(1, "hello world"); - Clob emptyClob = connection.createClob(); - emptyClob.setString(1, ""); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table test_get_clob(colA varchar, colNull varchar, colEmpty text)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_get_clob values(?, ?, ?)"); - preparedStatement.setClob(1, clob); - preparedStatement.setString(2, null); - preparedStatement.setClob(3, emptyClob); - preparedStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select * from test_get_clob"); - resultSet.next(); - assertEquals("hello world", resultSet.getClob(1).toString()); - assertEquals("hello world", resultSet.getClob("COLA").toString()); - assertNull(resultSet.getClob(2)); - assertNull(resultSet.getClob("COLNULL")); - assertEquals("", resultSet.getClob(3).toString()); - assertEquals("", resultSet.getClob("COLEMPTY").toString()); + try (Connection connection = init()) { + Clob clob = connection.createClob(); + clob.setString(1, "hello world"); + Clob emptyClob = connection.createClob(); + emptyClob.setString(1, ""); + try (Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table test_get_clob(colA varchar, colNull varchar, colEmpty 
text)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_get_clob values(?, ?, ?)")) { + preparedStatement.setClob(1, clob); + preparedStatement.setString(2, null); + preparedStatement.setClob(3, emptyClob); + preparedStatement.execute(); + } + try (ResultSet resultSet = statement.executeQuery("select * from test_get_clob")) { + assertTrue(resultSet.next()); + assertEquals("hello world", resultSet.getClob(1).toString()); + assertEquals("hello world", resultSet.getClob("COLA").toString()); + assertNull(resultSet.getClob(2)); + assertNull(resultSet.getClob("COLNULL")); + assertEquals("", resultSet.getClob(3).toString()); + assertEquals("", resultSet.getClob("COLEMPTY").toString()); + } + } + } } /** @@ -695,30 +716,35 @@ public void testGetEmptyOrNullClob() throws SQLException { */ @Test public void testSetNullClob() throws SQLException { - Connection connection = init(); - Clob clob = null; - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_set_clob(colNull varchar)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_set_clob values(?)"); - preparedStatement.setClob(1, clob); - preparedStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select * from test_set_clob"); - resultSet.next(); - assertNull(resultSet.getClob(1)); - assertNull(resultSet.getClob("COLNULL")); + try (Connection connection = init()) { + Clob clob = null; + try (Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_set_clob(colNull varchar)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_set_clob values(?)")) { + preparedStatement.setClob(1, clob); + preparedStatement.execute(); + } + + try (ResultSet resultSet = statement.executeQuery("select * from test_set_clob")) { + assertTrue(resultSet.next()); + assertNull(resultSet.getClob(1)); + 
assertNull(resultSet.getClob("COLNULL")); + } + } + } } @Test public void testCallStatementType() throws SQLException { Properties props = new Properties(); props.put("USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS", "true"); - try (Connection connection = getConnection(props)) { - try (Statement statement = connection.createStatement()) { + try (Connection connection = getConnection(props); + Statement statement = connection.createStatement()) { + try { String sp = "CREATE OR REPLACE PROCEDURE \"SP_ZSDLEADTIME_ARCHIVE_DAILY\"()\n" - + "RETURNS VARCHAR(16777216)\n" + + "RETURNS VARCHAR\n" + "LANGUAGE SQL\n" + "EXECUTE AS CALLER\n" + "AS \n" @@ -761,15 +787,14 @@ public void testCallStatementType() throws SQLException { statement.execute("create or replace table MYTABLE1 (ID int, NAME string)"); statement.execute(sp); - CallableStatement cs = connection.prepareCall("CALL SP_ZSDLEADTIME_ARCHIVE_DAILY()"); - cs.execute(); - ResultSetMetaData resultSetMetaData = cs.getMetaData(); - assertEquals("SP_ZSDLEADTIME_ARCHIVE_DAILY", resultSetMetaData.getColumnName(1)); - assertEquals("VARCHAR", resultSetMetaData.getColumnTypeName(1)); - assertEquals(0, resultSetMetaData.getScale(1)); - assertEquals(16777216, resultSetMetaData.getPrecision(1)); - - cs.close(); + try (CallableStatement cs = connection.prepareCall("CALL SP_ZSDLEADTIME_ARCHIVE_DAILY()")) { + cs.execute(); + ResultSetMetaData resultSetMetaData = cs.getMetaData(); + assertEquals("SP_ZSDLEADTIME_ARCHIVE_DAILY", resultSetMetaData.getColumnName(1)); + assertEquals("VARCHAR", resultSetMetaData.getColumnTypeName(1)); + assertEquals(0, resultSetMetaData.getScale(1)); + } + } finally { statement.execute("drop procedure if exists SP_ZSDLEADTIME_ARCHIVE_DAILY()"); statement.execute("drop table if exists MYTABLE1"); statement.execute("drop table if exists MYCSVTABLE"); @@ -782,90 +807,143 @@ public void testCallStatementType() throws SQLException { * implemented for synchronous queries * */ @Test - public void 
testNewFeaturesNotSupported() throws SQLException { - Connection con = init(); - ResultSet rs = con.createStatement().executeQuery("select 1"); - try { - rs.unwrap(SnowflakeResultSet.class).getQueryErrorMessage(); - } catch (SQLFeatureNotSupportedException ex) { - // catch SQLFeatureNotSupportedException - assertEquals("This function is only supported for asynchronous queries.", ex.getMessage()); + public void testNewFeaturesNotSupportedExeceptions() throws SQLException { + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select 1")) { + expectSnowflakeLoggedFeatureNotSupportedException( + rs.unwrap(SnowflakeResultSet.class)::getQueryErrorMessage); + expectSnowflakeLoggedFeatureNotSupportedException( + rs.unwrap(SnowflakeResultSet.class)::getStatus); + expectSnowflakeLoggedFeatureNotSupportedException(() -> rs.getArray(1)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getList(1, String.class)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getArray(1, String.class)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getMap(1, String.class)); + + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getUnicodeStream(1)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getUnicodeStream("column1")); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateAsciiStream("column1", new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateBinaryStream("column1", new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + 
.updateCharacterStream("column1", new FakeReader(), 5L)); + + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateAsciiStream(1, new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateBinaryStream(1, new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateCharacterStream(1, new FakeReader(), 5L)); + } + } + + @Test + public void testInvalidUnWrap() throws SQLException { + try (Connection con = init(); + ResultSet rs = con.createStatement().executeQuery("select 1")) { + try { + rs.unwrap(SnowflakeUtil.class); + } catch (SQLException ex) { + assertEquals( + ex.getMessage(), + "net.snowflake.client.jdbc.SnowflakeResultSetV1 not unwrappable from net.snowflake.client.jdbc.SnowflakeUtil"); + } } - rs.close(); - con.close(); } @Test public void testGetObjectJsonResult() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='json'"); - statement.execute("create or replace table testObj (colA double, colB boolean)"); - - PreparedStatement preparedStatement = - connection.prepareStatement("insert into testObj values(?, ?)"); - preparedStatement.setDouble(1, 22.2); - preparedStatement.setBoolean(2, true); - preparedStatement.executeQuery(); - - ResultSet resultSet = statement.executeQuery("select * from testObj"); - resultSet.next(); - assertEquals(22.2, resultSet.getObject(1)); - assertEquals(true, resultSet.getObject(2)); - - statement.execute("drop table if exists testObj"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set jdbc_query_result_format ='json'"); + statement.execute("create or replace 
table testObj (colA double, colB boolean)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testObj values(?, ?)")) { + preparedStatement.setDouble(1, 22.2); + preparedStatement.setBoolean(2, true); + preparedStatement.executeQuery(); + } + try (ResultSet resultSet = statement.executeQuery("select * from testObj")) { + assertTrue(resultSet.next()); + assertEquals(22.2, resultSet.getObject(1)); + assertEquals(true, resultSet.getObject(2)); + } + } finally { + statement.execute("drop table if exists testObj"); + } + } } @Test public void testMetadataIsCaseSensitive() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - String sampleCreateTableWithAllColTypes = - "CREATE or replace TABLE case_sensitive (" - + " boolean_col BOOLEAN," - + " date_col DATE," - + " time_col TIME," - + " timestamp_col TIMESTAMP," - + " timestamp_ltz_col TIMESTAMP_LTZ," - + " timestamp_ntz_col TIMESTAMP_NTZ," - + " number_col NUMBER," - + " float_col FLOAT," - + " double_col DOUBLE," - + " binary_col BINARY," - + " geography_col GEOGRAPHY," - + " variant_col VARIANT," - + " object_col1 OBJECT," - + " array_col1 ARRAY," - + " text_col1 TEXT," - + " varchar_col VARCHAR(16777216)," - + " char_col CHAR(16777216)" - + ");"; - - statement.execute(sampleCreateTableWithAllColTypes); - ResultSet rs = statement.executeQuery("select * from case_sensitive"); - ResultSetMetaData metaData = rs.getMetaData(); - - assertFalse(metaData.isCaseSensitive(1)); // BOOLEAN - assertFalse(metaData.isCaseSensitive(2)); // DATE - assertFalse(metaData.isCaseSensitive(3)); // TIME - assertFalse(metaData.isCaseSensitive(4)); // TIMESTAMP - assertFalse(metaData.isCaseSensitive(5)); // TIMESTAMP_LTZ - assertFalse(metaData.isCaseSensitive(6)); // TIMESTAMP_NTZ - assertFalse(metaData.isCaseSensitive(7)); // NUMBER - assertFalse(metaData.isCaseSensitive(8)); // FLOAT - assertFalse(metaData.isCaseSensitive(9)); // DOUBLE - 
assertFalse(metaData.isCaseSensitive(10)); // BINARY - - assertTrue(metaData.isCaseSensitive(11)); // GEOGRAPHY - assertTrue(metaData.isCaseSensitive(12)); // VARIANT - assertTrue(metaData.isCaseSensitive(13)); // OBJECT - assertTrue(metaData.isCaseSensitive(14)); // ARRAY - assertTrue(metaData.isCaseSensitive(15)); // TEXT - assertTrue(metaData.isCaseSensitive(16)); // VARCHAR - assertTrue(metaData.isCaseSensitive(17)); // CHAR + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + + String sampleCreateTableWithAllColTypes = + "CREATE or replace TABLE case_sensitive (" + + " boolean_col BOOLEAN," + + " date_col DATE," + + " time_col TIME," + + " timestamp_col TIMESTAMP," + + " timestamp_ltz_col TIMESTAMP_LTZ," + + " timestamp_ntz_col TIMESTAMP_NTZ," + + " number_col NUMBER," + + " float_col FLOAT," + + " double_col DOUBLE," + + " binary_col BINARY," + + " geography_col GEOGRAPHY," + + " variant_col VARIANT," + + " object_col1 OBJECT," + + " array_col1 ARRAY," + + " text_col1 TEXT," + + " varchar_col VARCHAR(16777216)," + + " char_col CHAR(16777216)" + + ");"; + + statement.execute(sampleCreateTableWithAllColTypes); + try (ResultSet rs = statement.executeQuery("select * from case_sensitive")) { + ResultSetMetaData metaData = rs.getMetaData(); + + assertFalse(metaData.isCaseSensitive(1)); // BOOLEAN + assertFalse(metaData.isCaseSensitive(2)); // DATE + assertFalse(metaData.isCaseSensitive(3)); // TIME + assertFalse(metaData.isCaseSensitive(4)); // TIMESTAMP + assertFalse(metaData.isCaseSensitive(5)); // TIMESTAMP_LTZ + assertFalse(metaData.isCaseSensitive(6)); // TIMESTAMP_NTZ + assertFalse(metaData.isCaseSensitive(7)); // NUMBER + assertFalse(metaData.isCaseSensitive(8)); // FLOAT + assertFalse(metaData.isCaseSensitive(9)); // DOUBLE + assertFalse(metaData.isCaseSensitive(10)); // BINARY + + assertTrue(metaData.isCaseSensitive(11)); // GEOGRAPHY + assertTrue(metaData.isCaseSensitive(12)); // VARIANT + 
assertTrue(metaData.isCaseSensitive(13)); // OBJECT + assertTrue(metaData.isCaseSensitive(14)); // ARRAY + assertTrue(metaData.isCaseSensitive(15)); // TEXT + assertTrue(metaData.isCaseSensitive(16)); // VARCHAR + assertTrue(metaData.isCaseSensitive(17)); // CHAR + } + } } @Test @@ -873,21 +951,23 @@ public void testMetadataIsCaseSensitive() throws SQLException { public void testAutoIncrementJsonResult() throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("ENABLE_FIX_759900", true); - Connection connection = init(paramProperties); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='json'"); + try (Connection connection = init(paramProperties); + Statement statement = connection.createStatement()) { + statement.execute("alter session set jdbc_query_result_format ='json'"); - statement.execute( - "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); - statement.execute("insert into auto_inc(name) values('test1')"); + statement.execute( + "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); + statement.execute("insert into auto_inc(name) values('test1')"); - ResultSet resultSet = statement.executeQuery("select * from auto_inc"); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from auto_inc")) { + assertTrue(resultSet.next()); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertTrue(metaData.isAutoIncrement(1)); - assertFalse(metaData.isAutoIncrement(2)); - assertTrue(metaData.isAutoIncrement(3)); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertTrue(metaData.isAutoIncrement(1)); + assertFalse(metaData.isAutoIncrement(2)); + assertTrue(metaData.isAutoIncrement(3)); + } + } } @Test @@ -895,71 +975,68 @@ public void testAutoIncrementJsonResult() throws SQLException { public void 
testAutoIncrementArrowResult() throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("ENABLE_FIX_759900", true); - Connection connection = init(paramProperties); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='arrow'"); + try (Connection connection = init(paramProperties); + Statement statement = connection.createStatement()) { + statement.execute("alter session set jdbc_query_result_format ='arrow'"); - statement.execute( - "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); - statement.execute("insert into auto_inc(name) values('test1')"); + statement.execute( + "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); + statement.execute("insert into auto_inc(name) values('test1')"); - ResultSet resultSet = statement.executeQuery("select * from auto_inc"); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from auto_inc")) { + assertTrue(resultSet.next()); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertTrue(metaData.isAutoIncrement(1)); - assertFalse(metaData.isAutoIncrement(2)); - assertTrue(metaData.isAutoIncrement(3)); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertTrue(metaData.isAutoIncrement(1)); + assertFalse(metaData.isAutoIncrement(2)); + assertTrue(metaData.isAutoIncrement(3)); + } + } } @Test public void testGranularTimeFunctionsInSessionTimezone() throws SQLException { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("create or replace table testGranularTime(t time)"); - statement.execute("insert into testGranularTime values ('10:10:10')"); - ResultSet resultSet = statement.executeQuery("select * from testGranularTime"); - resultSet.next(); - 
assertEquals(Time.valueOf("10:10:10"), resultSet.getTime(1)); - assertEquals(10, resultSet.getTime(1).getHours()); - assertEquals(10, resultSet.getTime(1).getMinutes()); - assertEquals(10, resultSet.getTime(1).getSeconds()); - resultSet.close(); - } finally { - statement.execute("drop table if exists testGranularTime"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table testGranularTime(t time)"); + statement.execute("insert into testGranularTime values ('10:10:10')"); + try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { + assertTrue(resultSet.next()); + assertEquals(Time.valueOf("10:10:10"), resultSet.getTime(1)); + assertEquals(10, resultSet.getTime(1).getHours()); + assertEquals(10, resultSet.getTime(1).getMinutes()); + assertEquals(10, resultSet.getTime(1).getSeconds()); + } + } finally { + statement.execute("drop table if exists testGranularTime"); + } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGranularTimeFunctionsInUTC() throws SQLException { - Connection connection = null; - Statement statement = null; - TimeZone origTz = TimeZone.getDefault(); - TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); - try { - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); - statement.execute("create or replace table testGranularTime(t time)"); - statement.execute("insert into testGranularTime values ('10:10:10')"); - ResultSet resultSet = statement.executeQuery("select * from testGranularTime"); - resultSet.next(); - assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); - assertEquals(02, resultSet.getTime(1).getHours()); - assertEquals(10, resultSet.getTime(1).getMinutes()); - assertEquals(10, 
resultSet.getTime(1).getSeconds()); - resultSet.close(); - } finally { - TimeZone.setDefault(origTz); - statement.execute("drop table if exists testGranularTime"); - statement.close(); - connection.close(); + try (Connection connection = getConnection()) { + TimeZone origTz = TimeZone.getDefault(); + try (Statement statement = connection.createStatement()) { + try { + TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); + statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); + statement.execute("create or replace table testGranularTime(t time)"); + statement.execute("insert into testGranularTime values ('10:10:10')"); + try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { + assertTrue(resultSet.next()); + assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); + assertEquals(02, resultSet.getTime(1).getHours()); + assertEquals(10, resultSet.getTime(1).getMinutes()); + assertEquals(10, resultSet.getTime(1).getSeconds()); + } + } finally { + TimeZone.setDefault(origTz); + statement.execute("drop table if exists testGranularTime"); + } + } } } @@ -970,6 +1047,12 @@ public void testLargeStringRetrieval() throws SQLException { int colLength = 16777216; try (Connection con = getConnection(); Statement statement = con.createStatement()) { + SFBaseSession session = con.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); + Integer maxVarcharSize = + (Integer) session.getOtherParameter("VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"); + if (maxVarcharSize != null) { + colLength = maxVarcharSize; + } statement.execute("create or replace table " + tableName + " (c1 string(" + colLength + "))"); statement.execute( "insert into " + tableName + " select randstr(" + colLength + ", random())"); @@ -1143,7 +1226,7 @@ private void assertResultValueAndType( Statement statement, Object expected, String columnName, Class type) throws SQLException { try (ResultSet resultSetString = 
statement.executeQuery(String.format("select %s from test_all_types", columnName))) { - resultSetString.next(); + assertTrue(resultSetString.next()); assertEquals(expected, resultSetString.getObject(1, type)); } } @@ -1152,7 +1235,7 @@ private void assertResultValueAsString( Statement statement, Object expected, String columnName, Class type) throws SQLException { try (ResultSet resultSetString = statement.executeQuery(String.format("select %s from test_all_types", columnName))) { - resultSetString.next(); + assertTrue(resultSetString.next()); assertEquals(expected.toString(), resultSetString.getObject(1, type).toString()); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java index 1ed7d09ea..c0a494613 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java @@ -64,125 +64,128 @@ public ResultSetMultiTimeZoneIT(String queryResultFormat, String timeZone) { public Connection init() throws SQLException { Connection connection = BaseJDBCTest.getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); - connection - .createStatement() - .execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + 
"TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return connection; } public Connection init(Properties paramProperties) throws SQLException { Connection conn = getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Before public void setUp() throws SQLException { - Connection con = init(); - - // TEST_RS - con.createStatement().execute("create or replace table test_rs (colA string)"); - con.createStatement().execute("insert into test_rs values('rowOne')"); - con.createStatement().execute("insert into test_rs values('rowTwo')"); - con.createStatement().execute("insert into test_rs values('rowThree')"); - - // ORDERS_JDBC - Statement statement = con.createStatement(); - statement.execute( - "create or replace table orders_jdbc" - + "(C1 STRING NOT NULL COMMENT 'JDBC', " - + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " - + "C7 STRING, C8 STRING, C9 STRING) " - + "stage_file_format = (field_delimiter='|' " - + "error_on_column_count_mismatch=false)"); - // put files - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); - - int numRows = statement.executeUpdate("copy into 
orders_jdbc"); - - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); - - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement()) { + + // TEST_RS + statement.execute("create or replace table test_rs (colA string)"); + statement.execute("insert into test_rs values('rowOne')"); + statement.execute("insert into test_rs values('rowTwo')"); + statement.execute("insert into test_rs values('rowThree')"); + + // ORDERS_JDBC + statement.execute( + "create or replace table orders_jdbc" + + "(C1 STRING NOT NULL COMMENT 'JDBC', " + + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " + + "C7 STRING, C8 STRING, C9 STRING) " + + "stage_file_format = (field_delimiter='|' " + + "error_on_column_count_mismatch=false)"); + // put files + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + + int numRows = statement.executeUpdate("copy into orders_jdbc"); + + assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + } } @After public void tearDown() throws SQLException { System.clearProperty("user.timezone"); - Connection con = init(); - con.createStatement().execute("drop table if exists orders_jdbc"); - con.createStatement().execute("drop table if exists test_rs"); - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement()) { + statement.execute("drop table if exists orders_jdbc"); + statement.execute("drop table if exists test_rs"); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetDateAndTime() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table dateTime(colA Date, 
colB Timestamp, colC Time)"); - - java.util.Date today = new java.util.Date(); - Date date = buildDate(2016, 3, 20); - Timestamp ts = new Timestamp(today.getTime()); - Time tm = new Time(12345678); // 03:25:45.678 - final String insertTime = "insert into datetime values(?, ?, ?)"; - PreparedStatement prepStatement = connection.prepareStatement(insertTime); - prepStatement.setDate(1, date); - prepStatement.setTimestamp(2, ts); - prepStatement.setTime(3, tm); - - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select * from datetime"); - resultSet.next(); - assertEquals(date, resultSet.getDate(1)); - assertEquals(date, resultSet.getDate("COLA")); - assertEquals(ts, resultSet.getTimestamp(2)); - assertEquals(ts, resultSet.getTimestamp("COLB")); - assertEquals(tm, resultSet.getTime(3)); - assertEquals(tm, resultSet.getTime("COLC")); - - statement.execute( - "create or replace table datetime(colA timestamp_ltz, colB timestamp_ntz, colC timestamp_tz)"); - statement.execute( - "insert into dateTime values ('2019-01-01 17:17:17', '2019-01-01 17:17:17', '2019-01-01 " - + "17:17:17')"); - prepStatement = - connection.prepareStatement( - "insert into datetime values(?, '2019-01-01 17:17:17', '2019-01-01 17:17:17')"); - Timestamp dateTime = new Timestamp(date.getTime()); - prepStatement.setTimestamp(1, dateTime); - prepStatement.execute(); - resultSet = statement.executeQuery("select * from datetime"); - resultSet.next(); - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - formatter.setTimeZone(TimeZone.getDefault()); - String d = formatter.format(resultSet.getDate("COLA")); - assertEquals("2019-01-02 01:17:17", d); - resultSet.next(); - assertEquals(date, resultSet.getDate(1)); - assertEquals(date, resultSet.getDate("COLA")); - statement.execute("drop table if exists datetime"); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + 
statement.execute("create or replace table dateTime(colA Date, colB Timestamp, colC Time)"); + + java.util.Date today = new java.util.Date(); + Date date = buildDate(2016, 3, 20); + Timestamp ts = new Timestamp(today.getTime()); + Time tm = new Time(12345678); // 03:25:45.678 + final String insertTime = "insert into datetime values(?, ?, ?)"; + try (PreparedStatement prepStatement = connection.prepareStatement(insertTime)) { + prepStatement.setDate(1, date); + prepStatement.setTimestamp(2, ts); + prepStatement.setTime(3, tm); + + prepStatement.execute(); + + ResultSet resultSet = statement.executeQuery("select * from datetime"); + assertTrue(resultSet.next()); + assertEquals(date, resultSet.getDate(1)); + assertEquals(date, resultSet.getDate("COLA")); + assertEquals(ts, resultSet.getTimestamp(2)); + assertEquals(ts, resultSet.getTimestamp("COLB")); + assertEquals(tm, resultSet.getTime(3)); + assertEquals(tm, resultSet.getTime("COLC")); + } + statement.execute( + "create or replace table datetime(colA timestamp_ltz, colB timestamp_ntz, colC timestamp_tz)"); + statement.execute( + "insert into dateTime values ('2019-01-01 17:17:17', '2019-01-01 17:17:17', '2019-01-01 " + + "17:17:17')"); + try (PreparedStatement prepStatement = + connection.prepareStatement( + "insert into datetime values(?, '2019-01-01 17:17:17', '2019-01-01 17:17:17')")) { + Timestamp dateTime = new Timestamp(date.getTime()); + prepStatement.setTimestamp(1, dateTime); + prepStatement.execute(); + try (ResultSet resultSet = statement.executeQuery("select * from datetime")) { + assertTrue(resultSet.next()); + SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + formatter.setTimeZone(TimeZone.getDefault()); + String d = formatter.format(resultSet.getDate("COLA")); + assertEquals("2019-01-02 01:17:17", d); + assertTrue(resultSet.next()); + assertEquals(date, resultSet.getDate(1)); + assertEquals(date, resultSet.getDate("COLA")); + } + } + } finally { + statement.execute("drop 
table if exists datetime"); + } + } } // SNOW-25029: The driver should reduce Time milliseconds mod 24h. @@ -190,246 +193,275 @@ public void testGetDateAndTime() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testTimeRange() throws SQLException { final String insertTime = "insert into timeTest values (?), (?), (?), (?)"; - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table timeTest (c1 time)"); - - long ms1 = -2202968667333L; // 1900-03-11 09:15:33.667 - long ms2 = -1; // 1969-12-31 23:59:99.999 - long ms3 = 86400 * 1000; // 1970-01-02 00:00:00 - long ms4 = 1451680250123L; // 2016-01-01 12:30:50.123 - - Time tm1 = new Time(ms1); - Time tm2 = new Time(ms2); - Time tm3 = new Time(ms3); - Time tm4 = new Time(ms4); - - PreparedStatement prepStatement = connection.prepareStatement(insertTime); - prepStatement.setTime(1, tm1); - prepStatement.setTime(2, tm2); - prepStatement.setTime(3, tm3); - prepStatement.setTime(4, tm4); - - prepStatement.execute(); - - // Note that the resulting Time objects are NOT equal because they have - // their milliseconds in the range 0 to 86,399,999, i.e. inside Jan 1, 1970. - // PreparedStatement accepts Time objects outside this range, but it reduces - // modulo 24 hours to discard the date information before sending to GS. 
- - final long M = 86400 * 1000; - ResultSet resultSet = statement.executeQuery("select * from timeTest"); - resultSet.next(); - assertNotEquals(tm1, resultSet.getTime(1)); - assertEquals(new Time((ms1 % M + M) % M), resultSet.getTime(1)); - resultSet.next(); - assertNotEquals(tm2, resultSet.getTime(1)); - assertEquals(new Time((ms2 % M + M) % M), resultSet.getTime(1)); - resultSet.next(); - assertNotEquals(tm3, resultSet.getTime(1)); - assertEquals(new Time((ms3 % M + M) % M), resultSet.getTime(1)); - resultSet.next(); - assertNotEquals(tm4, resultSet.getTime(1)); - assertEquals(new Time((ms4 % M + M) % M), resultSet.getTime(1)); - statement.execute("drop table if exists timeTest"); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table timeTest (c1 time)"); + + long ms1 = -2202968667333L; // 1900-03-11 09:15:33.667 + long ms2 = -1; // 1969-12-31 23:59:59.999 + long ms3 = 86400 * 1000; // 1970-01-02 00:00:00 + long ms4 = 1451680250123L; // 2016-01-01 12:30:50.123 + + Time tm1 = new Time(ms1); + Time tm2 = new Time(ms2); + Time tm3 = new Time(ms3); + Time tm4 = new Time(ms4); + + try (PreparedStatement prepStatement = connection.prepareStatement(insertTime)) { + prepStatement.setTime(1, tm1); + prepStatement.setTime(2, tm2); + prepStatement.setTime(3, tm3); + prepStatement.setTime(4, tm4); + + prepStatement.execute(); + } + + // Note that the resulting Time objects are NOT equal because they have + // their milliseconds in the range 0 to 86,399,999, i.e. inside Jan 1, 1970. + // PreparedStatement accepts Time objects outside this range, but it reduces + // modulo 24 hours to discard the date information before sending to GS. 
+ + final long M = 86400 * 1000; + try (ResultSet resultSet = statement.executeQuery("select * from timeTest")) { + assertTrue(resultSet.next()); + assertNotEquals(tm1, resultSet.getTime(1)); + assertEquals(new Time((ms1 % M + M) % M), resultSet.getTime(1)); + assertTrue(resultSet.next()); + assertNotEquals(tm2, resultSet.getTime(1)); + assertEquals(new Time((ms2 % M + M) % M), resultSet.getTime(1)); + assertTrue(resultSet.next()); + assertNotEquals(tm3, resultSet.getTime(1)); + assertEquals(new Time((ms3 % M + M) % M), resultSet.getTime(1)); + assertTrue(resultSet.next()); + assertNotEquals(tm4, resultSet.getTime(1)); + assertEquals(new Time((ms4 % M + M) % M), resultSet.getTime(1)); + } + } finally { + statement.execute("drop table if exists timeTest"); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCurrentTime() throws SQLException { final String insertTime = "insert into datetime values (?, ?, ?)"; - Connection connection = init(); - - assertFalse(connection.createStatement().execute("alter session set TIMEZONE='UTC'")); - - Statement statement = connection.createStatement(); - statement.execute("create or replace table datetime (d date, ts timestamp, tm time)"); - PreparedStatement prepStatement = connection.prepareStatement(insertTime); - - long currentMillis = System.currentTimeMillis(); - Date currentDate = new Date(currentMillis); - Timestamp currentTS = new Timestamp(currentMillis); - Time currentTime = new Time(currentMillis); - - prepStatement.setDate(1, currentDate); - prepStatement.setTimestamp(2, currentTS); - prepStatement.setTime(3, currentTime); - - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select ts::date = d from datetime"); - resultSet.next(); - assertTrue(resultSet.getBoolean(1)); - resultSet = statement.executeQuery("select ts::time = tm from datetime"); - resultSet.next(); - assertTrue(resultSet.getBoolean(1)); - - statement.execute("drop 
table if exists datetime"); - connection.close(); + try (Connection connection = init()) { + + assertFalse(connection.createStatement().execute("alter session set TIMEZONE='UTC'")); + + try (Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table datetime (d date, ts timestamp, tm time)"); + try (PreparedStatement prepStatement = connection.prepareStatement(insertTime)) { + + long currentMillis = System.currentTimeMillis(); + Date currentDate = new Date(currentMillis); + Timestamp currentTS = new Timestamp(currentMillis); + Time currentTime = new Time(currentMillis); + + prepStatement.setDate(1, currentDate); + prepStatement.setTimestamp(2, currentTS); + prepStatement.setTime(3, currentTime); + + prepStatement.execute(); + + try (ResultSet resultSet = + statement.executeQuery("select ts::date = d from datetime")) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(1)); + } + try (ResultSet resultSet = + statement.executeQuery("select ts::time = tm from datetime")) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(1)); + } + } + } finally { + statement.execute("drop table if exists datetime"); + } + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testBindTimestampTZ() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table testBindTimestampTZ(" + "cola int, colb timestamp_tz)"); - - long millSeconds = System.currentTimeMillis(); - Timestamp ts = new Timestamp(millSeconds); - PreparedStatement prepStatement = - connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)"); - prepStatement.setInt(1, 123); - prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("UTC"))); - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select cola, colb from 
testBindTimestampTz"); - resultSet.next(); - assertThat("integer", resultSet.getInt(1), equalTo(123)); - assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); - - statement.execute("drop table if exists testBindTimestampTZ"); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table testBindTimestampTZ(" + "cola int, colb timestamp_tz)"); + + long millSeconds = System.currentTimeMillis(); + Timestamp ts = new Timestamp(millSeconds); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)")) { + prepStatement.setInt(1, 123); + prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("UTC"))); + prepStatement.execute(); + } + + try (ResultSet resultSet = + statement.executeQuery("select cola, colb from testBindTimestampTz")) { + assertTrue(resultSet.next()); + assertThat("integer", resultSet.getInt(1), equalTo(123)); + assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); + } + } finally { + statement.execute("drop table if exists testBindTimestampTZ"); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetOldDate() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table testOldDate(d date)"); - statement.execute( - "insert into testOldDate values ('0001-01-01'), " - + "(to_date('1000-01-01')), ('1300-01-01'), ('1400-02-02'), " - + "('1500-01-01'), ('1600-02-03')"); - - ResultSet resultSet = statement.executeQuery("select * from testOldDate order by d"); - resultSet.next(); - assertEquals("0001-01-01", resultSet.getString(1)); - assertEquals(Date.valueOf("0001-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1000-01-01", resultSet.getString(1)); - 
assertEquals(Date.valueOf("1000-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1300-01-01", resultSet.getString(1)); - assertEquals(Date.valueOf("1300-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1400-02-02", resultSet.getString(1)); - assertEquals(Date.valueOf("1400-02-02"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1500-01-01", resultSet.getString(1)); - assertEquals(Date.valueOf("1500-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1600-02-03", resultSet.getString(1)); - assertEquals(Date.valueOf("1600-02-03"), resultSet.getDate(1)); - - resultSet.close(); - statement.execute("drop table if exists testOldDate"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table testOldDate(d date)"); + statement.execute( + "insert into testOldDate values ('0001-01-01'), " + + "(to_date('1000-01-01')), ('1300-01-01'), ('1400-02-02'), " + + "('1500-01-01'), ('1600-02-03')"); + + try (ResultSet resultSet = statement.executeQuery("select * from testOldDate order by d")) { + assertTrue(resultSet.next()); + assertEquals("0001-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("0001-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1000-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("1000-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1300-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("1300-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1400-02-02", resultSet.getString(1)); + assertEquals(Date.valueOf("1400-02-02"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1500-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("1500-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + 
assertEquals("1600-02-03", resultSet.getString(1)); + assertEquals(Date.valueOf("1600-02-03"), resultSet.getDate(1)); + } + } finally { + statement.execute("drop table if exists testOldDate"); + } + } } @Test public void testGetStringForDates() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - String expectedDate1 = "2020-08-01"; - String expectedDate2 = "1920-11-11"; - ResultSet rs = statement.executeQuery("SELECT '" + expectedDate1 + "'::DATE as D1"); - rs.next(); - assertEquals(expectedDate1, rs.getString(1)); - rs = statement.executeQuery("SELECT '" + expectedDate2 + "'::DATE as D1"); - rs.next(); - assertEquals(expectedDate2, rs.getString(1)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + String expectedDate1 = "2020-08-01"; + String expectedDate2 = "1920-11-11"; + try (ResultSet rs = statement.executeQuery("SELECT '" + expectedDate1 + "'::DATE as D1")) { + rs.next(); + assertEquals(expectedDate1, rs.getString(1)); + } + try (ResultSet rs = statement.executeQuery("SELECT '" + expectedDate2 + "'::DATE as D1")) { + rs.next(); + assertEquals(expectedDate2, rs.getString(1)); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDateTimeRelatedTypeConversion() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table testDateTime" - + "(colDate DATE, colTS timestamp_ltz, colTime TIME, colString string)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into testDateTime values(?, ?, ?, ?)"); - - Timestamp ts = buildTimestamp(2016, 3, 20, 3, 25, 45, 67800000); - Date date = buildDate(2016, 3, 20); - Time time = new Time(12345678); // 03:25:45.678 - - preparedStatement.setDate(1, date); - preparedStatement.setTimestamp(2, ts); - preparedStatement.setTime(3, time); - 
preparedStatement.setString(4, "aaa"); - - preparedStatement.execute(); - ResultSet resultSet = statement.executeQuery("select * from testDateTime"); - resultSet.next(); - - // ResultSet.getDate() - assertEquals(date, resultSet.getDate("COLDATE")); - try { - resultSet.getDate("COLTIME"); - fail(); - } catch (SnowflakeSQLException e) { - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); - } - - // ResultSet.getTimestamp() - assertEquals(new Timestamp(date.getTime()), resultSet.getTimestamp("COLDATE")); - assertEquals(ts, resultSet.getTimestamp("COLTS")); - assertEquals(new Timestamp(time.getTime()), resultSet.getTimestamp("COLTIME")); - try { - resultSet.getTimestamp("COLSTRING"); - fail(); - } catch (SnowflakeSQLException e) { - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); - } - - // ResultSet.getTime() - try { - resultSet.getTime("COLDATE"); - fail(); - } catch (SnowflakeSQLException e) { - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table testDateTime" + + "(colDate DATE, colTS timestamp_ltz, colTime TIME, colString string)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testDateTime values(?, ?, ?, ?)")) { + Timestamp ts = buildTimestamp(2016, 3, 20, 3, 25, 45, 67800000); + Date date = buildDate(2016, 3, 20); + Time time = new Time(12345678); // 03:25:45.678 + + preparedStatement.setDate(1, date); + preparedStatement.setTimestamp(2, ts); + preparedStatement.setTime(3, time); + preparedStatement.setString(4, "aaa"); + + 
preparedStatement.execute(); + try (ResultSet resultSet = statement.executeQuery("select * from testDateTime")) { + assertTrue(resultSet.next()); + + // ResultSet.getDate() + assertEquals(date, resultSet.getDate("COLDATE")); + try { + resultSet.getDate("COLTIME"); + fail(); + } catch (SnowflakeSQLException e) { + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + } + + // ResultSet.getTimestamp() + assertEquals(new Timestamp(date.getTime()), resultSet.getTimestamp("COLDATE")); + assertEquals(ts, resultSet.getTimestamp("COLTS")); + assertEquals(new Timestamp(time.getTime()), resultSet.getTimestamp("COLTIME")); + try { + resultSet.getTimestamp("COLSTRING"); + fail(); + } catch (SnowflakeSQLException e) { + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + } + + // ResultSet.getTime() + try { + resultSet.getTime("COLDATE"); + fail(); + } catch (SnowflakeSQLException e) { + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + } + assertEquals(time, resultSet.getTime("COLTIME")); + assertEquals(new Time(ts.getTime()), resultSet.getTime("COLTS")); + } + } + } finally { + statement.execute("drop table if exists testDateTime"); + } } - assertEquals(time, resultSet.getTime("COLTIME")); - assertEquals(new Time(ts.getTime()), resultSet.getTime("COLTS")); - - statement.execute("drop table if exists testDateTime"); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetOldTimestamp() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute("create or replace table testOldTs(cola timestamp_ntz)"); - 
statement.execute( - "insert into testOldTs values ('1582-06-22 17:00:00'), " + "('1000-01-01 17:00:00')"); - - ResultSet resultSet = statement.executeQuery("select * from testOldTs"); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table testOldTs(cola timestamp_ntz)"); + statement.execute( + "insert into testOldTs values ('1582-06-22 17:00:00'), " + "('1000-01-01 17:00:00')"); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from testOldTs")) { - assertThat(resultSet.getTimestamp(1).toString(), equalTo("1582-06-22 17:00:00.0")); - assertThat(resultSet.getString(1), equalTo("Fri, 22 Jun 1582 17:00:00 Z")); + assertTrue(resultSet.next()); - resultSet.next(); - assertThat(resultSet.getTimestamp(1).toString(), equalTo("1000-01-01 17:00:00.0")); - assertThat(resultSet.getString(1), equalTo("Mon, 01 Jan 1000 17:00:00 Z")); + assertThat(resultSet.getTimestamp(1).toString(), equalTo("1582-06-22 17:00:00.0")); + assertThat(resultSet.getString(1), equalTo("Fri, 22 Jun 1582 17:00:00 Z")); - statement.execute("drop table if exists testOldTs"); - statement.close(); - con.close(); + assertTrue(resultSet.next()); + assertThat(resultSet.getTimestamp(1).toString(), equalTo("1000-01-01 17:00:00.0")); + assertThat(resultSet.getString(1), equalTo("Mon, 01 Jan 1000 17:00:00 Z")); + } + } finally { + statement.execute("drop table if exists testOldTs"); + } + } } @Test @@ -437,31 +469,26 @@ public void testGetOldTimestamp() throws SQLException { public void testPrepareOldTimestamp() throws SQLException { TimeZone origTz = TimeZone.getDefault(); TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - try { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute("create or replace table testPrepOldTs(cola timestamp_ntz, colb date)"); - statement.execute("alter session set client_timestamp_type_mapping=timestamp_ntz"); - PreparedStatement ps 
= con.prepareStatement("insert into testPrepOldTs values (?, ?)"); - - ps.setTimestamp(1, Timestamp.valueOf("0001-01-01 08:00:00")); - ps.setDate(2, Date.valueOf("0001-01-01")); - ps.executeUpdate(); - - ResultSet resultSet = statement.executeQuery("select * from testPrepOldTs"); - - resultSet.next(); - assertThat(resultSet.getTimestamp(1).toString(), equalTo("0001-01-01 08:00:00.0")); - assertThat(resultSet.getDate(2).toString(), equalTo("0001-01-01")); - - statement.execute("drop table if exists testPrepOldTs"); - - statement.close(); - - con.close(); - } finally { - TimeZone.setDefault(origTz); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table testPrepOldTs(cola timestamp_ntz, colb date)"); + statement.execute("alter session set client_timestamp_type_mapping=timestamp_ntz"); + PreparedStatement ps = con.prepareStatement("insert into testPrepOldTs values (?, ?)"); + + ps.setTimestamp(1, Timestamp.valueOf("0001-01-01 08:00:00")); + ps.setDate(2, Date.valueOf("0001-01-01")); + ps.executeUpdate(); + + ResultSet resultSet = statement.executeQuery("select * from testPrepOldTs"); + + assertTrue(resultSet.next()); + assertThat(resultSet.getTimestamp(1).toString(), equalTo("0001-01-01 08:00:00.0")); + assertThat(resultSet.getDate(2).toString(), equalTo("0001-01-01")); + } finally { + statement.execute("drop table if exists testPrepOldTs"); + TimeZone.setDefault(origTz); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java index 3720b9ae5..06a253b95 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java @@ -1,6 +1,7 @@ package net.snowflake.client.jdbc; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import 
java.sql.Connection; import java.sql.Date; @@ -55,19 +56,17 @@ public ResultSetMultiTimeZoneLatestIT(String queryResultFormat, String timeZone) public Connection init() throws SQLException { Connection connection = BaseJDBCTest.getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); - connection - .createStatement() - .execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return connection; } @@ -79,27 +78,29 @@ public Connection init() throws SQLException { */ @Test public void testTimesWithGetTimestamp() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - String timeStringValue = "10:30:50.123456789"; - String timestampStringValue = "1970-01-01 " + timeStringValue; - int length = timestampStringValue.length(); - statement.execute( - "create or replace table SRC_DATE_TIME (C2_TIME_3 TIME(3), C3_TIME_5 TIME(5), C4_TIME" - + " TIME(9))"); - statement.execute( - "insert into 
SRC_DATE_TIME values ('" - + timeStringValue - + "','" - + timeStringValue - + "','" - + timeStringValue - + "')"); - ResultSet rs = statement.executeQuery("select * from SRC_DATE_TIME"); - rs.next(); - assertEquals(timestampStringValue.substring(0, length - 6), rs.getTimestamp(1).toString()); - assertEquals(timestampStringValue.substring(0, length - 4), rs.getTimestamp(2).toString()); - assertEquals(timestampStringValue, rs.getTimestamp(3).toString()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + String timeStringValue = "10:30:50.123456789"; + String timestampStringValue = "1970-01-01 " + timeStringValue; + int length = timestampStringValue.length(); + statement.execute( + "create or replace table SRC_DATE_TIME (C2_TIME_3 TIME(3), C3_TIME_5 TIME(5), C4_TIME" + + " TIME(9))"); + statement.execute( + "insert into SRC_DATE_TIME values ('" + + timeStringValue + + "','" + + timeStringValue + + "','" + + timeStringValue + + "')"); + try (ResultSet rs = statement.executeQuery("select * from SRC_DATE_TIME")) { + assertTrue(rs.next()); + assertEquals(timestampStringValue.substring(0, length - 6), rs.getTimestamp(1).toString()); + assertEquals(timestampStringValue.substring(0, length - 4), rs.getTimestamp(2).toString()); + assertEquals(timestampStringValue, rs.getTimestamp(3).toString()); + } + } } /** @@ -112,17 +113,16 @@ public void testTimesWithGetTimestamp() throws SQLException { */ @Test public void testTimestampNTZWithDaylightSavings() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set TIMESTAMP_TYPE_MAPPING='TIMESTAMP_NTZ'," + "TIMEZONE='Europe/London'"); - ResultSet rs = statement.executeQuery("select TIMESTAMP '2011-09-04 00:00:00'"); - rs.next(); - Timestamp expected = Timestamp.valueOf("2011-09-04 00:00:00"); - assertEquals(expected, rs.getTimestamp(1)); - rs.close(); - statement.close(); - connection.close(); 
+ try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter session set TIMESTAMP_TYPE_MAPPING='TIMESTAMP_NTZ'," + "TIMEZONE='Europe/London'"); + try (ResultSet rs = statement.executeQuery("select TIMESTAMP '2011-09-04 00:00:00'")) { + assertTrue(rs.next()); + Timestamp expected = Timestamp.valueOf("2011-09-04 00:00:00"); + assertEquals(expected, rs.getTimestamp(1)); + } + } } /** @@ -132,57 +132,63 @@ public void testTimestampNTZWithDaylightSavings() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDateAndTimestampWithTimezone() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); - ResultSet rs = - statement.executeQuery( - "SELECT DATE '1970-01-02 00:00:00' as datefield, " - + "TIMESTAMP '1970-01-02 00:00:00' as timestampfield"); - rs.next(); - - // Set a timezone for results to be returned in and set a format for date and timestamp objects - Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - sdf.setTimeZone(cal.getTimeZone()); - - // Date object and calendar object should return the same timezone offset with calendar - Date dateWithZone = rs.getDate(1, cal); - Timestamp timestampWithZone = rs.getTimestamp(2, cal); - assertEquals(sdf.format(dateWithZone), sdf.format(timestampWithZone)); - - // When fetching Date object with getTimestamp versus Timestamp object with getTimestamp, - // results should match - assertEquals(rs.getTimestamp(1, cal), rs.getTimestamp(2, cal)); + Calendar cal = null; + SimpleDateFormat sdf = null; - // When fetching Timestamp object with getDate versus Date object with getDate, results should - // match - assertEquals(rs.getDate(1, cal), rs.getDate(2, cal)); - - // getDate() 
without Calendar offset called on Date type should return the same date with no - // timezone offset - assertEquals("1970-01-02 00:00:00", sdf.format(rs.getDate(1))); - // getDate() without Calendar offset called on Timestamp type returns date with timezone offset - assertEquals("1970-01-02 08:00:00", sdf.format(rs.getDate(2))); - - // getTimestamp() without Calendar offset called on Timestamp type should return the timezone - // offset - assertEquals("1970-01-02 08:00:00", sdf.format(rs.getTimestamp(2))); - // getTimestamp() without Calendar offset called on Date type should not return the timezone - // offset - assertEquals("1970-01-02 00:00:00", sdf.format(rs.getTimestamp(1))); - - // test that session parameter functions as expected. When false, getDate() has same behavior - // with or without Calendar input - statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=false"); - rs = statement.executeQuery("SELECT DATE '1945-05-10 00:00:00' as datefield"); - rs.next(); - assertEquals(rs.getDate(1, cal), rs.getDate(1)); - assertEquals("1945-05-10 00:00:00", sdf.format(rs.getDate(1, cal))); - - rs.close(); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); + try (ResultSet rs = + statement.executeQuery( + "SELECT DATE '1970-01-02 00:00:00' as datefield, " + + "TIMESTAMP '1970-01-02 00:00:00' as timestampfield")) { + assertTrue(rs.next()); + + // Set a timezone for results to be returned in and set a format for date and timestamp + // objects + cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(cal.getTimeZone()); + + // Date object and calendar object should return the same timezone offset with calendar + Date dateWithZone = rs.getDate(1, cal); + Timestamp timestampWithZone = rs.getTimestamp(2, cal); + 
assertEquals(sdf.format(dateWithZone), sdf.format(timestampWithZone)); + + // When fetching Date object with getTimestamp versus Timestamp object with getTimestamp, + // results should match + assertEquals(rs.getTimestamp(1, cal), rs.getTimestamp(2, cal)); + + // When fetching Timestamp object with getDate versus Date object with getDate, results + // should + // match + assertEquals(rs.getDate(1, cal), rs.getDate(2, cal)); + + // getDate() without Calendar offset called on Date type should return the same date with no + // timezone offset + assertEquals("1970-01-02 00:00:00", sdf.format(rs.getDate(1))); + // getDate() without Calendar offset called on Timestamp type returns date with timezone + // offset + assertEquals("1970-01-02 08:00:00", sdf.format(rs.getDate(2))); + + // getTimestamp() without Calendar offset called on Timestamp type should return the + // timezone + // offset + assertEquals("1970-01-02 08:00:00", sdf.format(rs.getTimestamp(2))); + // getTimestamp() without Calendar offset called on Date type should not return the timezone + // offset + assertEquals("1970-01-02 00:00:00", sdf.format(rs.getTimestamp(1))); + } + // test that session parameter functions as expected. 
When false, getDate() has same behavior + // with or without Calendar input + statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=false"); + try (ResultSet rs = + statement.executeQuery("SELECT DATE '1945-05-10 00:00:00' as datefield")) { + assertTrue(rs.next()); + assertEquals(rs.getDate(1, cal), rs.getDate(1)); + assertEquals("1945-05-10 00:00:00", sdf.format(rs.getDate(1, cal))); + } + } } /** @@ -226,139 +232,152 @@ public void testUseSessionTimeZoneOverrides() throws SQLException { * @throws SQLException */ private void testUseSessionTimeZoneHelper(boolean useDefaultParamSettings) throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - // create table with all timestamp types, time, and date - statement.execute( - "create or replace table datetimetypes(colA timestamp_ltz, colB timestamp_ntz, colC" - + " timestamp_tz, colD time, colE date)"); - // Enable session parameter JDBC_USE_SESSION_TIMEZONE - statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=true"); - if (!useDefaultParamSettings) { - // these are 3 other session params that also alter the session display behavior - statement.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); - statement.execute("alter session set CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ=false"); - statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + // create table with all timestamp types, time, and date + statement.execute( + "create or replace table datetimetypes(colA timestamp_ltz, colB timestamp_ntz, colC" + + " timestamp_tz, colD time, colE date)"); + // Enable session parameter JDBC_USE_SESSION_TIMEZONE + statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=true"); + if (!useDefaultParamSettings) { + // these are 3 other session params that also alter the session display behavior + 
statement.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); + statement.execute("alter session set CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ=false"); + statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); + } + + String expectedTimestamp = "2019-01-01 17:17:17.6"; + String expectedTime = "17:17:17"; + String expectedDate = "2019-01-01"; + String expectedTimestamp2 = "1943-12-31 01:01:33.0"; + String expectedTime2 = "01:01:33"; + String expectedDate2 = "1943-12-31"; + try (PreparedStatement prepSt = + connection.prepareStatement("insert into datetimetypes values(?, ?, ?, ?, ?)")) { + prepSt.setString(1, expectedTimestamp); + prepSt.setString(2, expectedTimestamp); + prepSt.setString(3, expectedTimestamp); + prepSt.setString(4, expectedTime); + prepSt.setString(5, expectedDate); + prepSt.execute(); + prepSt.setString(1, expectedTimestamp2); + prepSt.setString(2, expectedTimestamp2); + prepSt.setString(3, expectedTimestamp2); + prepSt.setString(4, expectedTime2); + prepSt.setString(5, expectedDate2); + prepSt.execute(); + } + // Results differ depending on whether flag JDBC_USE_SESSION_TIMEZONE=true. If true, the + // returned ResultSet value should match the value inserted into the table with no offset + // (with + // exceptions for getTimestamp() on date and time objects). + try (ResultSet rs = statement.executeQuery("select * from datetimetypes")) { + assertTrue(rs.next()); + // Assert date has no offset. When flag is false, timestamp_ltz and timestamp_ntz will + // show + // offset. + assertEquals(expectedDate, rs.getDate("COLA").toString()); + // always true since timezone_ntz doesn't add time offset + assertEquals(expectedDate, rs.getDate("COLB").toString()); + assertEquals(expectedDate, rs.getDate("COLC").toString()); + // cannot getDate() for Time column (ColD) + // always true since Date objects don't have timezone offsets + assertEquals(expectedDate, rs.getDate("COLE").toString()); + + // Assert timestamp has no offset. 
When flag is false, timestamp_ltz and timestamp_ntz + // will + // show + // offset. + assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); + // always true since timezone_ntz doesn't add time offset + assertEquals(expectedTimestamp, rs.getTimestamp("COLB").toString()); + assertEquals(expectedTimestamp, rs.getTimestamp("COLC").toString()); + // Getting timestamp from Time column will default to epoch start date so date portion is + // different than input date of the timestamp + assertEquals("1970-01-01 17:17:17.0", rs.getTimestamp("COLD").toString()); + // Getting timestamp from Date column will default to wallclock time of 0 so time portion + // is + // different than input time of the timestamp + assertEquals("2019-01-01 00:00:00.0", rs.getTimestamp("COLE").toString()); + + // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will + // show + // offset. + assertEquals(expectedTime, rs.getTime("COLA").toString()); + assertEquals(expectedTime, rs.getTime("COLB").toString()); + assertEquals(expectedTime, rs.getTime("COLC").toString()); + assertEquals(expectedTime, rs.getTime("COLD").toString()); + // Cannot getTime() for Date column (colE) + + assertTrue(rs.next()); + // Assert date has no offset. Offset will never be seen regardless of flag because + // 01:01:33 + // is + // too early for any timezone to round it to the next day. + assertEquals(expectedDate2, rs.getDate("COLA").toString()); + assertEquals(expectedDate2, rs.getDate("COLB").toString()); + assertEquals(expectedDate2, rs.getDate("COLC").toString()); + // cannot getDate() for Time column (ColD) + assertEquals(expectedDate2, rs.getDate("COLE").toString()); + + // Assert timestamp has no offset. When flag is false, timestamp_ltz and timestamp_ntz + // will + // show + // offset. 
+ assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); + assertEquals(expectedTimestamp2, rs.getTimestamp("COLB").toString()); + assertEquals(expectedTimestamp2, rs.getTimestamp("COLC").toString()); + // Getting timestamp from Time column will default to epoch start date + assertEquals("1970-01-01 01:01:33.0", rs.getTimestamp("COLD").toString()); + // Getting timestamp from Date column will default to wallclock time of 0 + assertEquals("1943-12-31 00:00:00.0", rs.getTimestamp("COLE").toString()); + + // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will + // show + // offset. + assertEquals(expectedTime2, rs.getTime("COLA").toString()); + assertEquals(expectedTime2, rs.getTime("COLB").toString()); + assertEquals(expectedTime2, rs.getTime("COLC").toString()); + assertEquals(expectedTime2, rs.getTime("COLD").toString()); + // Cannot getTime() for Date column (colE) + } + // Test special case for timestamp_tz (offset added) + // create table with of type timestamp_tz + statement.execute("create or replace table tabletz (colA timestamp_tz)"); + try (PreparedStatement prepSt = + connection.prepareStatement("insert into tabletz values(?), (?)")) { + // insert 2 timestamp values, but add an offset of a few hours on the end of each value + prepSt.setString( + 1, expectedTimestamp + " +0500"); // inserted value is 2019-01-01 17:17:17.6 +0500 + prepSt.setString( + 2, expectedTimestamp2 + " -0200"); // inserted value is 1943-12-31 01:01:33.0 -0200 + prepSt.execute(); + + try (ResultSet rs = statement.executeQuery("select * from tabletz")) { + assertTrue(rs.next()); + // Assert timestamp is displayed with no offset when flag is true. 
Timestamp should look + // identical to inserted value + assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); + // Time value looks identical to the time portion of inserted timestamp_tz value + assertEquals(expectedTime, rs.getTime("COLA").toString()); + // Date value looks identical to the date portion of inserted timestamp_tz value + assertEquals(expectedDate, rs.getDate("COLA").toString()); + assertTrue(rs.next()); + // Test that the same results occur for 2nd timestamp_tz value + assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); + assertEquals(expectedTime2, rs.getTime("COLA").toString()); + assertEquals(expectedDate2, rs.getDate("COLA").toString()); + } + } + } finally { + // clean up + statement.execute("alter session unset JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC"); + statement.execute("alter session unset CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ"); + statement.execute("alter session unset JDBC_FORMAT_DATE_WITH_TIMEZONE"); + statement.execute("alter session unset JDBC_USE_SESSION_TIMEZONE"); + } } - - String expectedTimestamp = "2019-01-01 17:17:17.6"; - String expectedTime = "17:17:17"; - String expectedDate = "2019-01-01"; - String expectedTimestamp2 = "1943-12-31 01:01:33.0"; - String expectedTime2 = "01:01:33"; - String expectedDate2 = "1943-12-31"; - PreparedStatement prepSt = - connection.prepareStatement("insert into datetimetypes values(?, ?, ?, ?, ?)"); - prepSt.setString(1, expectedTimestamp); - prepSt.setString(2, expectedTimestamp); - prepSt.setString(3, expectedTimestamp); - prepSt.setString(4, expectedTime); - prepSt.setString(5, expectedDate); - prepSt.execute(); - prepSt.setString(1, expectedTimestamp2); - prepSt.setString(2, expectedTimestamp2); - prepSt.setString(3, expectedTimestamp2); - prepSt.setString(4, expectedTime2); - prepSt.setString(5, expectedDate2); - prepSt.execute(); - - // Results differ depending on whether flag JDBC_USE_SESSION_TIMEZONE=true. 
If true, the - // returned ResultSet value should match the value inserted into the table with no offset (with - // exceptions for getTimestamp() on date and time objects). - ResultSet rs = statement.executeQuery("select * from datetimetypes"); - rs.next(); - // Assert date has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. - assertEquals(expectedDate, rs.getDate("COLA").toString()); - // always true since timezone_ntz doesn't add time offset - assertEquals(expectedDate, rs.getDate("COLB").toString()); - assertEquals(expectedDate, rs.getDate("COLC").toString()); - // cannot getDate() for Time column (ColD) - // always true since Date objects don't have timezone offsets - assertEquals(expectedDate, rs.getDate("COLE").toString()); - - // Assert timestamp has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. - assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); - // always true since timezone_ntz doesn't add time offset - assertEquals(expectedTimestamp, rs.getTimestamp("COLB").toString()); - assertEquals(expectedTimestamp, rs.getTimestamp("COLC").toString()); - // Getting timestamp from Time column will default to epoch start date so date portion is - // different than input date of the timestamp - assertEquals("1970-01-01 17:17:17.0", rs.getTimestamp("COLD").toString()); - // Getting timestamp from Date column will default to wallclock time of 0 so time portion is - // different than input time of the timestamp - assertEquals("2019-01-01 00:00:00.0", rs.getTimestamp("COLE").toString()); - - // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. 
- assertEquals(expectedTime, rs.getTime("COLA").toString()); - assertEquals(expectedTime, rs.getTime("COLB").toString()); - assertEquals(expectedTime, rs.getTime("COLC").toString()); - assertEquals(expectedTime, rs.getTime("COLD").toString()); - // Cannot getTime() for Date column (colE) - - rs.next(); - // Assert date has no offset. Offset will never be seen regardless of flag because 01:01:33 is - // too early for any timezone to round it to the next day. - assertEquals(expectedDate2, rs.getDate("COLA").toString()); - assertEquals(expectedDate2, rs.getDate("COLB").toString()); - assertEquals(expectedDate2, rs.getDate("COLC").toString()); - // cannot getDate() for Time column (ColD) - assertEquals(expectedDate2, rs.getDate("COLE").toString()); - - // Assert timestamp has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. - assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); - assertEquals(expectedTimestamp2, rs.getTimestamp("COLB").toString()); - assertEquals(expectedTimestamp2, rs.getTimestamp("COLC").toString()); - // Getting timestamp from Time column will default to epoch start date - assertEquals("1970-01-01 01:01:33.0", rs.getTimestamp("COLD").toString()); - // Getting timestamp from Date column will default to wallclock time of 0 - assertEquals("1943-12-31 00:00:00.0", rs.getTimestamp("COLE").toString()); - - // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. 
- assertEquals(expectedTime2, rs.getTime("COLA").toString()); - assertEquals(expectedTime2, rs.getTime("COLB").toString()); - assertEquals(expectedTime2, rs.getTime("COLC").toString()); - assertEquals(expectedTime2, rs.getTime("COLD").toString()); - // Cannot getTime() for Date column (colE) - - // Test special case for timestamp_tz (offset added) - // create table with of type timestamp_tz - statement.execute("create or replace table tabletz (colA timestamp_tz)"); - prepSt = connection.prepareStatement("insert into tabletz values(?), (?)"); - // insert 2 timestamp values, but add an offset of a few hours on the end of each value - prepSt.setString( - 1, expectedTimestamp + " +0500"); // inserted value is 2019-01-01 17:17:17.6 +0500 - prepSt.setString( - 2, expectedTimestamp2 + " -0200"); // inserted value is 1943-12-31 01:01:33.0 -0200 - prepSt.execute(); - - rs = statement.executeQuery("select * from tabletz"); - rs.next(); - // Assert timestamp is displayed with no offset when flag is true. 
Timestamp should look - // identical to inserted value - assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); - // Time value looks identical to the time portion of inserted timestamp_tz value - assertEquals(expectedTime, rs.getTime("COLA").toString()); - // Date value looks identical to the date portion of inserted timestamp_tz value - assertEquals(expectedDate, rs.getDate("COLA").toString()); - rs.next(); - // Test that the same results occur for 2nd timestamp_tz value - assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); - assertEquals(expectedTime2, rs.getTime("COLA").toString()); - assertEquals(expectedDate2, rs.getDate("COLA").toString()); - - // clean up - statement.execute("alter session unset JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC"); - statement.execute("alter session unset CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ"); - statement.execute("alter session unset JDBC_FORMAT_DATE_WITH_TIMEZONE"); - statement.execute("alter session unset JDBC_USE_SESSION_TIMEZONE"); - - rs.close(); - statement.close(); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java index 5af26db35..bbc145516 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java @@ -151,10 +151,59 @@ public void testGetFloatVectorFromTable() throws SQLException { } } + /** Added in > 3.16.1 */ + @Test + public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + enforceQueryResultFormat(stmt); + Integer[] intVector = {-1, 5}; + Float[] floatVector = {-1.2f, 5.1f, 15.87f}; + try (ResultSet resultSet = + stmt.executeQuery( + "select " + + vectorToString(intVector, "int") + + ", " + + vectorToString(floatVector, "float") + + ", " + + 
nullVectorToString("int") + + ", " + + nullVectorToString("float"))) { + + assertTrue(resultSet.next()); + assertGetObjectAndGetStringBeTheSame(resultSet, "[-1,5]", 1); + String floatArrayRepresentation = + "json".equals(queryResultFormat) + // in json we have slightly different format that we accept in the result + ? "[-1.200000,5.100000,15.870000]" + : "[-1.2,5.1,15.87]"; + assertGetObjectAndGetStringBeTheSame(resultSet, floatArrayRepresentation, 2); + assertGetObjectAndGetStringAreNull(resultSet, 3); + assertGetObjectAndGetStringAreNull(resultSet, 4); + } + } + } + + private static void assertGetObjectAndGetStringBeTheSame( + ResultSet resultSet, String intArrayRepresentation, int columnIndex) throws SQLException { + assertEquals(intArrayRepresentation, resultSet.getString(columnIndex)); + assertEquals(intArrayRepresentation, resultSet.getObject(columnIndex)); + } + + private static void assertGetObjectAndGetStringAreNull(ResultSet resultSet, int columnIndex) + throws SQLException { + assertNull(resultSet.getString(columnIndex)); + assertNull(resultSet.getObject(columnIndex)); + } + private String vectorToString(T[] vector, String vectorType) { return Arrays.toString(vector) + "::vector(" + vectorType + ", " + vector.length + ")"; } + private String nullVectorToString(String vectorType) { + return "null::vector(" + vectorType + ", 2)"; + } + private void enforceQueryResultFormat(Statement stmt) throws SQLException { String sql = String.format( diff --git a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java index f32af5470..bd51ef533 100644 --- a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java @@ -127,16 +127,21 @@ public void testAddServiceNameToRequestHeader() throws Throwable { props.setProperty(SFSessionProperty.USER.getPropertyKey(), "fakeuser"); props.setProperty(SFSessionProperty.PASSWORD.getPropertyKey(), 
"fakepassword"); props.setProperty(SFSessionProperty.INSECURE_MODE.getPropertyKey(), Boolean.TRUE.toString()); - SnowflakeConnectionV1 con = + try (SnowflakeConnectionV1 con = new SnowflakeConnectionV1( - "jdbc:snowflake://http://fakeaccount.snowflakecomputing.com", props); - assertThat(con.getSfSession().getServiceName(), is(INITIAL_SERVICE_NAME)); + "jdbc:snowflake://http://fakeaccount.snowflakecomputing.com", props)) { + assertThat(con.getSfSession().getServiceName(), is(INITIAL_SERVICE_NAME)); - SnowflakeStatementV1 stmt = (SnowflakeStatementV1) con.createStatement(); - stmt.execute("SELECT 1"); - assertThat( - stmt.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession().getServiceName(), - is(NEW_SERVICE_NAME)); + try (SnowflakeStatementV1 stmt = (SnowflakeStatementV1) con.createStatement()) { + stmt.execute("SELECT 1"); + assertThat( + stmt.getConnection() + .unwrap(SnowflakeConnectionV1.class) + .getSfSession() + .getServiceName(), + is(NEW_SERVICE_NAME)); + } + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java index 76c3b0466..b597c4dd0 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java @@ -37,24 +37,25 @@ public void testChunkDownloaderRetry() throws SQLException, InterruptedException SnowflakeChunkDownloader snowflakeChunkDownloaderSpy = null; - try (Connection connection = getConnection(properties)) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { // execute a query that will require chunk downloading - ResultSet resultSet = + try (ResultSet resultSet = statement.executeQuery( - "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))"); - List 
resultSetSerializables = - ((SnowflakeResultSet) resultSet).getResultSetSerializables(100 * 1024 * 1024); - SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); - SnowflakeChunkDownloader downloader = - new SnowflakeChunkDownloader((SnowflakeResultSetSerializableV1) resultSetSerializable); - snowflakeChunkDownloaderSpy = Mockito.spy(downloader); - snowflakeChunkDownloaderSpy.getNextChunkToConsume(); + "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))")) { + List resultSetSerializables = + ((SnowflakeResultSet) resultSet).getResultSetSerializables(100 * 1024 * 1024); + SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); + SnowflakeChunkDownloader downloader = + new SnowflakeChunkDownloader((SnowflakeResultSetSerializableV1) resultSetSerializable); + snowflakeChunkDownloaderSpy = Mockito.spy(downloader); + snowflakeChunkDownloaderSpy.getNextChunkToConsume(); + } } catch (SnowflakeSQLException exception) { // verify that request was retried twice before reaching max retries Mockito.verify(snowflakeChunkDownloaderSpy, Mockito.times(2)).getResultStreamProvider(); - assertTrue(exception.getMessage().contains("Max retry reached for the download of #chunk0")); - assertTrue(exception.getMessage().contains("retry=2")); + assertTrue(exception.getMessage().contains("Max retry reached for the download of chunk#0")); + assertTrue(exception.getMessage().contains("retry: 2")); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java index 7d9cc5f05..161e9c939 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java @@ -4,6 +4,8 @@ package net.snowflake.client.jdbc; +import static org.junit.Assert.assertNotNull; + import 
java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; @@ -88,7 +90,7 @@ private static void connectAndQuery(int num_queries) throws SQLException { try (ResultSet resultSet = statement.executeQuery(QUERY)) { while (resultSet.next()) { final String user = resultSet.getString(1); - assert user != null; + assertNotNull(user); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java index aa82813f0..a66dd4c4a 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -116,27 +116,26 @@ public static void setUp() throws Throwable { @AfterClass public static void tearDown() throws SQLException { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.execute("drop table if exists clustered_jdbc"); statement.execute("drop table if exists orders_jdbc"); - statement.close(); } } public static Connection getConnection(int injectSocketTimeout) throws SQLException { Connection connection = AbstractDriverIT.getConnection(injectSocketTimeout); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + 
"TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + } return connection; } @@ -149,33 +148,38 @@ public static Connection getConnection() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testOauthConnection() throws SQLException { Map params = getConnectionParameters(); - Connection con = getConnection("s3testaccount"); - Statement statement = con.createStatement(); - statement.execute("use role accountadmin"); - statement.execute( - "create or replace security integration jdbc_oauth_integration\n" - + " type=oauth\n" - + " oauth_client=CUSTOM\n" - + " oauth_client_type=CONFIDENTIAL\n" - + " oauth_redirect_uri='https://localhost.com/oauth'\n" - + " oauth_issue_refresh_tokens=true\n" - + " enabled=true oauth_refresh_token_validity=86400;"); - String role = params.get("role"); - ResultSet rs = - statement.executeQuery( - "select system$it('create_oauth_access_token', 'JDBC_OAUTH_INTEGRATION', '" - + role - + "')"); - rs.next(); - String token = rs.getString(1); - con.close(); + String role = null; + String token = null; + + try (Connection con = getConnection("s3testaccount"); + Statement statement = con.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute( + "create or replace security integration jdbc_oauth_integration\n" + + " type=oauth\n" + + " oauth_client=CUSTOM\n" + + " oauth_client_type=CONFIDENTIAL\n" + + " oauth_redirect_uri='https://localhost.com/oauth'\n" + + " oauth_issue_refresh_tokens=true\n" + + " enabled=true oauth_refresh_token_validity=86400;"); + role = params.get("role"); + try (ResultSet rs = + statement.executeQuery( + "select system$it('create_oauth_access_token', 'JDBC_OAUTH_INTEGRATION', '" + + role + + "')")) { + 
assertTrue(rs.next()); + token = rs.getString(1); + } + } Properties props = new Properties(); props.put("authenticator", ClientAuthnDTO.AuthenticatorType.OAUTH.name()); props.put("token", token); props.put("role", role); - con = getConnection("s3testaccount", props); - con.createStatement().execute("select 1"); - con.close(); + try (Connection con = getConnection("s3testaccount", props); + Statement statement = con.createStatement()) { + statement.execute("select 1"); + } } @Ignore @@ -195,16 +199,10 @@ public void testConnections() throws Throwable { futures.add( executorService.submit( () -> { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - ResultSetMetaData resultSetMetaData; - - try { - connection = getConnection(); - statement = connection.createStatement(); - resultSet = statement.executeQuery("SELECT system$sleep(10) % 1"); - resultSetMetaData = resultSet.getMetaData(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT system$sleep(10) % 1")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); // assert column count assertEquals(1, resultSetMetaData.getColumnCount()); @@ -220,10 +218,6 @@ public void testConnections() throws Throwable { } logger.info("Query " + queryIdx + " passed "); - - statement.close(); - } finally { - closeSQLObjects(resultSet, statement, connection); } return true; })); @@ -239,18 +233,11 @@ public void testConnections() throws Throwable { /** Test show columns */ @Test public void testShowColumns() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - Properties paramProperties = new Properties(); - connection = getConnection(paramProperties); - statement = connection.createStatement(); - resultSet = statement.executeQuery("show columns in clustered_jdbc"); + Properties paramProperties = new 
Properties(); + try (Connection connection = getConnection(paramProperties); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("show columns in clustered_jdbc")) { assertEquals("number of columns", 2, countRows(resultSet)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @@ -264,51 +251,39 @@ private int countRows(ResultSet rset) throws Throwable { @Test public void testRowsPerResultset() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - try { - connection = getConnection(); + try (Connection connection = getConnection()) { connection.createStatement().execute("alter session set rows_per_resultset=2048"); - statement = connection.createStatement(); - resultSet = statement.executeQuery("SELECT * FROM orders_jdbc"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - int numColumns = resultSetMetaData.getColumnCount(); - assertEquals(9, numColumns); - assertEquals("number of columns", 73, countRows(resultSet)); - statement.close(); - } finally { - closeSQLObjects(resultSet, statement, connection); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT * FROM orders_jdbc")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + int numColumns = resultSetMetaData.getColumnCount(); + assertEquals(9, numColumns); + assertEquals("number of columns", 73, countRows(resultSet)); + } } } @Test public void testDDLs() throws Throwable { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - - statement.execute("CREATE OR REPLACE TABLE testDDLs(version number, name string)"); - - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR 
REPLACE TABLE testDDLs(version number, name string)"); + } finally { statement.execute("DROP TABLE testDDLs"); } - closeSQLObjects(statement, connection); } } private long getCurrentTransaction(Connection connection) throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute(getCurrenTransactionStmt); - ResultSet rs = statement.getResultSet(); - if (rs.next()) { - String txnId = rs.getString(1); - return txnId != null ? Long.valueOf(txnId) : 0L; + try (ResultSet rs = statement.getResultSet()) { + if (rs.next()) { + String txnId = rs.getString(1); + return txnId != null ? Long.valueOf(txnId) : 0L; + } } } @@ -318,57 +293,51 @@ private long getCurrentTransaction(Connection connection) throws SQLException { /** Tests autocommit */ @Test public void testAutocommit() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // 1. test commit - connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); - assertEquals(Connection.TRANSACTION_READ_COMMITTED, connection.getTransactionIsolation()); - connection.setAutoCommit(false); // disable autocommit - assertFalse(connection.getAutoCommit()); - - assertEquals(0, getCurrentTransaction(connection)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // 1. 
test commit + connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); + assertEquals(Connection.TRANSACTION_READ_COMMITTED, connection.getTransactionIsolation()); + connection.setAutoCommit(false); // disable autocommit + assertFalse(connection.getAutoCommit()); - // create a table, this should not start a transaction - statement.executeUpdate("CREATE OR REPLACE TABLE AUTOCOMMIT_API_TEST (i int)"); - assertEquals(0, getCurrentTransaction(connection)); + assertEquals(0, getCurrentTransaction(connection)); - // insert into it this should start a transaction. - statement.executeUpdate("INSERT INTO AUTOCOMMIT_API_TEST VALUES (1)"); - assertNotEquals(0, getCurrentTransaction(connection)); + // create a table, this should not start a transaction + statement.executeUpdate("CREATE OR REPLACE TABLE AUTOCOMMIT_API_TEST (i int)"); + assertEquals(0, getCurrentTransaction(connection)); - // commit it using the api - connection.commit(); - assertFalse(connection.getAutoCommit()); - assertEquals(0, getCurrentTransaction(connection)); - resultSet = statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1"); - assertTrue(resultSet.next()); - assertEquals(1, resultSet.getInt(1)); - resultSet.close(); - - // 2. test rollback == - // delete from the table, should start a transaction. - statement.executeUpdate("DELETE FROM AUTOCOMMIT_API_TEST"); - assertNotEquals(0, getCurrentTransaction(connection)); - - // roll it back using the api - connection.rollback(); - assertFalse(connection.getAutoCommit()); - assertEquals(0, getCurrentTransaction(connection)); - resultSet = statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1"); - assertTrue(resultSet.next()); - assertEquals(1, resultSet.getInt(1)); - } finally { - if (statement != null) { + // insert into it this should start a transaction. 
+ statement.executeUpdate("INSERT INTO AUTOCOMMIT_API_TEST VALUES (1)"); + assertNotEquals(0, getCurrentTransaction(connection)); + + // commit it using the api + connection.commit(); + assertFalse(connection.getAutoCommit()); + assertEquals(0, getCurrentTransaction(connection)); + try (ResultSet resultSet = + statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1")) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + // 2. test rollback == + // delete from the table, should start a transaction. + statement.executeUpdate("DELETE FROM AUTOCOMMIT_API_TEST"); + assertNotEquals(0, getCurrentTransaction(connection)); + + // roll it back using the api + connection.rollback(); + assertFalse(connection.getAutoCommit()); + assertEquals(0, getCurrentTransaction(connection)); + try (ResultSet resultSet = + statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1")) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + } finally { statement.execute("DROP TABLE AUTOCOMMIT_API_TEST"); } - closeSQLObjects(resultSet, statement, connection); } } @@ -405,396 +374,362 @@ private void assertConstraintResults( @Test public void testBoolean() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - - DatabaseMetaData metadata = connection.getMetaData(); - - // Create a table with boolean columns - statement.execute("create or replace table testBooleanT1(c1 boolean)"); - - // Insert values into the table - statement.execute("insert into testBooleanT1 values(true), (false), (null)"); - - // Get values from the table - PreparedStatement preparedStatement = - connection.prepareStatement("select c1 from testBooleanT1"); - - // I. 
Test ResultSetMetaData interface - resultSet = preparedStatement.executeQuery(); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // Verify the column type is Boolean - assertEquals(Types.BOOLEAN, resultSetMetaData.getColumnType(1)); - - // II. Test DatabaseMetadata interface - ResultSet columnMetaDataResultSet = - metadata.getColumns( - null, // catalog - null, // schema - "TESTBOOLEANT1", // table - null // column - ); - - resultSetMetaData = columnMetaDataResultSet.getMetaData(); - - // assert column count - assertEquals(24, resultSetMetaData.getColumnCount()); - - assertTrue(columnMetaDataResultSet.next()); - assertEquals(Types.BOOLEAN, columnMetaDataResultSet.getInt(5)); - } finally // cleanup - { - // drop the table - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); + + DatabaseMetaData metadata = connection.getMetaData(); + + // Create a table with boolean columns + statement.execute("create or replace table testBooleanT1(c1 boolean)"); + + // Insert values into the table + statement.execute("insert into testBooleanT1 values(true), (false), (null)"); + + // Get values from the table + try (PreparedStatement preparedStatement = + connection.prepareStatement("select c1 from testBooleanT1")) { + + // I. Test ResultSetMetaData interface + try (ResultSet resultSet = preparedStatement.executeQuery()) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // Verify the column type is Boolean + assertEquals(Types.BOOLEAN, resultSetMetaData.getColumnType(1)); + + // II. 
Test DatabaseMetadata interface + try (ResultSet columnMetaDataResultSet = + metadata.getColumns( + null, // catalog + null, // schema + "TESTBOOLEANT1", // table + null // column + )) { + resultSetMetaData = columnMetaDataResultSet.getMetaData(); + // assert column count + assertEquals(24, resultSetMetaData.getColumnCount()); + + assertTrue(columnMetaDataResultSet.next()); + assertEquals(Types.BOOLEAN, columnMetaDataResultSet.getInt(5)); + } + } + } + } finally { statement.execute("drop table testBooleanT1"); } - closeSQLObjects(resultSet, statement, connection); } } @Test public void testConstraints() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - - DatabaseMetaData metadata = connection.getMetaData(); - - // Create primary key tables - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsP1(c1 number unique, c2 " - + "number, constraint cons0 primary key (c1, c2))"); - - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsP2(c1 number " - + "constraint cons1 primary key, c2 number)"); - - // Create foreign key tables - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsF1(c1 number, c2 number, " - + "constraint cons3 foreign key (c1, c2) references " - + "testConstraintsP1(c1, c2))"); + ResultSet manualResultSet = null; - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsF2(c1 number, c2 number, " - + "constraint cons4 foreign key (c1, c2) references " - + "testConstraintsP1(c1, c2), constraint cons5 " - + "foreign key (c2) references testConstraintsP2(c1))"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - // show primary keys - 
resultSet = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP1"); + DatabaseMetaData metadata = connection.getMetaData(); - // primary key for testConstraintsP1 should contain two rows - assertConstraintResults(resultSet, 2, 6, "testConstraintsP1", null); + // Create primary key tables + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsP1(c1 number unique, c2 " + + "number, constraint cons0 primary key (c1, c2))"); - resultSet = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP2"); + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsP2(c1 number " + + "constraint cons1 primary key, c2 number)"); - // primary key for testConstraintsP2 contains 1 row - assertConstraintResults(resultSet, 1, 6, "testConstraintsP2", null); - resultSet.close(); - resultSet.next(); + // Create foreign key tables + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsF1(c1 number, c2 number, " + + "constraint cons3 foreign key (c1, c2) references " + + "testConstraintsP1(c1, c2))"); - // Show imported keys - resultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF1"); + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsF2(c1 number, c2 number, " + + "constraint cons4 foreign key (c1, c2) references " + + "testConstraintsP1(c1, c2), constraint cons5 " + + "foreign key (c2) references testConstraintsP2(c1))"); - assertConstraintResults(resultSet, 2, 14, null, "testConstraintsF1"); + // show primary keys + try (ResultSet resultSet = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP1")) { - resultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF2"); + // primary key for testConstraintsP1 should contain two rows + assertConstraintResults(resultSet, 2, 6, "testConstraintsP1", null); + } - assertConstraintResults(resultSet, 3, 14, null, "testConstraintsF2"); - resultSet.close(); - resultSet.next(); + ResultSet resultSet1 = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP2"); - // show exported keys - resultSet = 
metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP1"); + // primary key for testConstraintsP2 contains 1 row + assertConstraintResults(resultSet1, 1, 6, "testConstraintsP2", null); + resultSet1.close(); + assertFalse(resultSet1.next()); - assertConstraintResults(resultSet, 4, 14, "testConstraintsP1", null); + // Show imported keys + try (ResultSet resultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF1")) { + assertConstraintResults(resultSet, 2, 14, null, "testConstraintsF1"); + } - resultSet = metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP2"); + manualResultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF2"); - assertConstraintResults(resultSet, 1, 14, "testConstraintsP2", null); - resultSet.close(); - resultSet.next(); + assertConstraintResults(manualResultSet, 3, 14, null, "testConstraintsF2"); + manualResultSet.close(); + assertFalse(manualResultSet.next()); - // show cross references - resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF1"); + // show exported keys + try (ResultSet resultSet = metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP1")) { + assertConstraintResults(resultSet, 4, 14, "testConstraintsP1", null); + } - assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF1"); + manualResultSet = metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP2"); - resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF2"); + assertConstraintResults(manualResultSet, 1, 14, "testConstraintsP2", null); + manualResultSet.close(); + assertFalse(manualResultSet.next()); - assertConstraintResults(resultSet, 1, 14, "testConstraintsP2", "testConstraintsF2"); + // show cross references + try (ResultSet resultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF1")) { + assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF1"); + } 
- resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF2"); + try (ResultSet resultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF2")) { + assertConstraintResults(resultSet, 1, 14, "testConstraintsP2", "testConstraintsF2"); + } - assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF2"); + try (ResultSet resultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF2")) { + assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF2"); + } - resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF1"); + manualResultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF1"); - assertFalse( - "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty", - resultSet.next()); - resultSet.close(); - resultSet.next(); - } finally { - if (statement != null) { + assertFalse( + "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty", + manualResultSet.next()); + manualResultSet.close(); + assertFalse(manualResultSet.next()); + } finally { statement.execute("DROP TABLE TESTCONSTRAINTSF1"); statement.execute("DROP TABLE TESTCONSTRAINTSF2"); statement.execute("DROP TABLE TESTCONSTRAINTSP1"); statement.execute("DROP TABLE TESTCONSTRAINTSP2"); } - closeSQLObjects(resultSet, statement, connection); } } @Test public void testQueryWithMaxRows() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; final int maxRows = 30; - - try { - connection = getConnection(); - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.setMaxRows(maxRows); - resultSet = statement.executeQuery("SELECT * FROM 
orders_jdbc"); + try (ResultSet resultSet = statement.executeQuery("SELECT * FROM orders_jdbc")) { - // assert column count - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals(9, resultSetMetaData.getColumnCount()); - assertEquals(maxRows, countRows(resultSet)); - } finally { - closeSQLObjects(resultSet, statement, connection); + // assert column count + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + assertEquals(9, resultSetMetaData.getColumnCount()); + assertEquals(maxRows, countRows(resultSet)); + } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCancelQueryBySystemFunction() throws Throwable { - Statement statement = null; - ResultSet resultSet = null; - - final Connection connection = getConnection(); - - try { - // Get the current session identifier - Statement getSessionIdStmt = connection.createStatement(); + try (Connection connection = getConnection(); + Statement getSessionIdStmt = connection.createStatement()) { getSessionIdStmt.setMaxRows(30); - resultSet = getSessionIdStmt.executeQuery("SELECT current_session()"); - assertTrue(resultSet.next()); - final long sessionId = resultSet.getLong(1); - Timer timer = new Timer(); - timer.schedule( - new TimerTask() { - @Override - public void run() { - try { - PreparedStatement cancelAll; - cancelAll = connection.prepareStatement("call system$cancel_all_queries(?)"); - - // bind integer - cancelAll.setLong(1, sessionId); - cancelAll.executeQuery(); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "Cancel failed with exception {}", ex); + try (ResultSet resultSet = getSessionIdStmt.executeQuery("SELECT current_session()")) { + assertTrue(resultSet.next()); + final long sessionId = resultSet.getLong(1); + Timer timer = new Timer(); + timer.schedule( + new TimerTask() { + @Override + public void run() { + try { + PreparedStatement cancelAll; + cancelAll = connection.prepareStatement("call 
system$cancel_all_queries(?)"); + + // bind integer + cancelAll.setLong(1, sessionId); + cancelAll.executeQuery(); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Cancel failed with exception {}", ex); + } } - } - }, - 5000); - + }, + 5000); + } // execute a query for 120s - statement = connection.createStatement(); - statement.setMaxRows(30); - - resultSet = statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))"); - + try (Statement statement = connection.createStatement()) { + statement.setMaxRows(30); + try (ResultSet resultSet = + statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))")) {} + } fail("should raise an exception"); } catch (SQLException ex) { // assert the sqlstate is what we expect (QUERY CANCELLED) assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testDBMetadata() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - + int cnt = 0; + try (Connection connection = getConnection()) { + try (Statement statement = connection.createStatement()) { + statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); + } // get database metadata DatabaseMetaData metaData = connection.getMetaData(); // the following will issue - ResultSet databaseSet = metaData.getCatalogs(); - assertTrue("databases shouldn't be empty", databaseSet.next()); - - // "show schemas in [databaseName]" - ResultSet schemaSet = metaData.getSchemas(connection.getCatalog(), connection.getSchema()); - assertTrue("schemas shouldn't be empty", schemaSet.next()); - assertTrue( - "database should be " + connection.getCatalog(), - 
connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); - assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); - - // snow tables in a schema - ResultSet tableSet = - metaData.getTables( - connection.getCatalog(), connection.getSchema(), ORDERS_JDBC, null); // types - assertTrue( - String.format( - "table %s should exists in db: %s, schema: %s", - ORDERS_JDBC, connection.getCatalog(), connection.getSchema()), - tableSet.next()); - assertTrue( - "database should be " + connection.getCatalog(), - connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); - assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); - assertTrue( - "table should be orders_jdbc", ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3))); - - ResultSet tableMetaDataResultSet = + try (ResultSet databaseSet = metaData.getCatalogs()) { + assertTrue("databases shouldn't be empty", databaseSet.next()); + + // "show schemas in [databaseName]" + ResultSet schemaSet = metaData.getSchemas(connection.getCatalog(), connection.getSchema()); + assertTrue("schemas shouldn't be empty", schemaSet.next()); + assertTrue( + "database should be " + connection.getCatalog(), + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + assertTrue( + "schema should be " + connection.getSchema(), + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + // snow tables in a schema + try (ResultSet tableSet = + metaData.getTables( + connection.getCatalog(), connection.getSchema(), ORDERS_JDBC, null)) { // types + assertTrue( + String.format( + "table %s should exists in db: %s, schema: %s", + ORDERS_JDBC, connection.getCatalog(), connection.getSchema()), + tableSet.next()); + assertTrue( + "database should be " + connection.getCatalog(), + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + assertTrue( + "schema should 
be " + connection.getSchema(), + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + assertTrue( + "table should be orders_jdbc", ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3))); + } + } + + try (ResultSet tableMetaDataResultSet = metaData.getTables( null, // catalog null, // schema ORDERS_JDBC, // table - null); // types + null)) { // types - ResultSetMetaData resultSetMetaData = tableMetaDataResultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = tableMetaDataResultSet.getMetaData(); - assertEquals(10, resultSetMetaData.getColumnCount()); + assertEquals(10, resultSetMetaData.getColumnCount()); - // assert we get 1 rows - int cnt = 0; - while (tableMetaDataResultSet.next()) { - assertTrue(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))); - ++cnt; + // assert we get 1 rows + cnt = 0; + while (tableMetaDataResultSet.next()) { + assertTrue(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))); + ++cnt; + } + assertEquals("number of tables", 1, cnt); } - assertEquals("number of tables", 1, cnt); - - tableMetaDataResultSet.close(); - // test pattern - tableMetaDataResultSet = + try (ResultSet tableMetaDataResultSet = metaData.getTables( null, // catalog null, // schema "%", // table - null); // types - - resultSetMetaData = tableMetaDataResultSet.getMetaData(); + null)) { // types - // assert column count - assertEquals(10, resultSetMetaData.getColumnCount()); + ResultSetMetaData resultSetMetaData = tableMetaDataResultSet.getMetaData(); - // assert we get orders_jdbc - boolean found = false; - while (tableMetaDataResultSet.next()) { - // assert the table name - if (ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))) { - found = true; - break; + // assert column count + assertEquals(10, resultSetMetaData.getColumnCount()); + + // assert we get orders_jdbc + boolean found = false; + while (tableMetaDataResultSet.next()) { + // assert the table name + if 
(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))) { + found = true; + break; + } } + assertTrue("orders_jdbc not found", found); } - assertTrue("orders_jdbc not found", found); - - tableMetaDataResultSet.close(); // get column metadata - ResultSet columnMetaDataResultSet = metaData.getColumns(null, null, ORDERS_JDBC, null); + try (ResultSet columnMetaDataResultSet = metaData.getColumns(null, null, ORDERS_JDBC, null)) { - resultSetMetaData = columnMetaDataResultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = columnMetaDataResultSet.getMetaData(); - // assert column count - assertEquals(24, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(24, resultSetMetaData.getColumnCount()); - // assert we get 9 rows - cnt = 0; - while (columnMetaDataResultSet.next()) { - // SNOW-16881: assert database name - assertTrue(connection.getCatalog().equalsIgnoreCase(columnMetaDataResultSet.getString(1))); + // assert we get 9 rows + cnt = 0; + while (columnMetaDataResultSet.next()) { + // SNOW-16881: assert database name + assertTrue( + connection.getCatalog().equalsIgnoreCase(columnMetaDataResultSet.getString(1))); - // assert the table name and column name, data type and type name - assertTrue(ORDERS_JDBC.equalsIgnoreCase(columnMetaDataResultSet.getString(3))); + // assert the table name and column name, data type and type name + assertTrue(ORDERS_JDBC.equalsIgnoreCase(columnMetaDataResultSet.getString(3))); - assertTrue(columnMetaDataResultSet.getString(4).startsWith("C")); + assertTrue(columnMetaDataResultSet.getString(4).startsWith("C")); - assertEquals(Types.VARCHAR, columnMetaDataResultSet.getInt(5)); + assertEquals(Types.VARCHAR, columnMetaDataResultSet.getInt(5)); - assertTrue("VARCHAR".equalsIgnoreCase(columnMetaDataResultSet.getString(6))); + assertTrue("VARCHAR".equalsIgnoreCase(columnMetaDataResultSet.getString(6))); - if (cnt == 0) { - // assert comment - assertEquals("JDBC", columnMetaDataResultSet.getString(12)); 
+ if (cnt == 0) { + // assert comment + assertEquals("JDBC", columnMetaDataResultSet.getString(12)); - // assert nullable - assertEquals(DatabaseMetaData.columnNoNulls, columnMetaDataResultSet.getInt(11)); + // assert nullable + assertEquals(DatabaseMetaData.columnNoNulls, columnMetaDataResultSet.getInt(11)); - // assert is_nullable - assertEquals("NO", columnMetaDataResultSet.getString(18)); + // assert is_nullable + assertEquals("NO", columnMetaDataResultSet.getString(18)); + } + ++cnt; } - ++cnt; + assertEquals(9, cnt); } - assertEquals(9, cnt); - - columnMetaDataResultSet.close(); // create a table with mix cases - statement = connection.createStatement(); - statement.execute("create or replace table \"testDBMetadata\" (a timestamp_ltz)"); - columnMetaDataResultSet = metaData.getColumns(null, null, "testDBMetadata", null); + try (Statement statement = connection.createStatement()) { + statement.execute("create or replace table \"testDBMetadata\" (a timestamp_ltz)"); + try (ResultSet columnMetaDataResultSet = + metaData.getColumns(null, null, "testDBMetadata", null)) { - // assert we get 1 row - cnt = 0; - while (columnMetaDataResultSet.next()) { - // assert the table name and column name, data type and type name - assertTrue("testDBMetadata".equalsIgnoreCase(columnMetaDataResultSet.getString(3))); + // assert we get 1 row + cnt = 0; + while (columnMetaDataResultSet.next()) { + // assert the table name and column name, data type and type name + assertTrue("testDBMetadata".equalsIgnoreCase(columnMetaDataResultSet.getString(3))); - assertEquals(Types.TIMESTAMP, columnMetaDataResultSet.getInt(5)); + assertEquals(Types.TIMESTAMP, columnMetaDataResultSet.getInt(5)); - assertTrue(columnMetaDataResultSet.getString(4).equalsIgnoreCase("a")); - cnt++; - } - assertEquals(1, cnt); - } finally { - if (statement != null) { - statement.execute("DROP TABLE IF EXISTS \"testDBMetadata\""); + assertTrue(columnMetaDataResultSet.getString(4).equalsIgnoreCase("a")); + cnt++; + } + 
assertEquals(1, cnt); + } } - closeSQLObjects(statement, connection); + connection.createStatement().execute("DROP TABLE IF EXISTS \"testDBMetadata\""); } } @@ -804,57 +739,53 @@ public void testPutWithWildcardGCP() throws Throwable { Properties _connectionProperties = new Properties(); _connectionProperties.put("inject_wait_in_put", 5); _connectionProperties.put("ssl", "off"); - Connection connection = - getConnection( - DONT_INJECT_SOCKET_TIMEOUT, _connectionProperties, false, false, "gcpaccount"); - Statement statement = connection.createStatement(); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - // replace file name with wildcard character - sourceFilePath = sourceFilePath.replace("orders_100.csv", "orders_10*.csv"); - - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + try (Connection connection = + getConnection( + DONT_INJECT_SOCKET_TIMEOUT, _connectionProperties, false, false, "gcpaccount"); + Statement statement = connection.createStatement()) { + try { + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + // replace file name with wildcard character + sourceFilePath = sourceFilePath.replace("orders_100.csv", "orders_10*.csv"); - try { - statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); - statement.execute("CREATE OR REPLACE STAGE wildcard_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage")); + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + statement.execute("CREATE OR REPLACE STAGE wildcard_stage"); + assertTrue( + "Failed to 
put a file", + statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage")); - findFile(statement, "ls @wildcard_stage/"); + findFile(statement, "ls @wildcard_stage/"); - assertTrue( - "Failed to get files", - statement.execute( - "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + assertTrue( + "Failed to get files", + statement.execute( + "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - File downloaded; - // download the files we just uploaded to stage - for (int i = 0; i < fileNames.length; i++) { - // Make sure that the downloaded file exists, it should be gzip compressed - downloaded = new File(destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); - assert (downloaded.exists()); + File downloaded; + // download the files we just uploaded to stage + for (int i = 0; i < fileNames.length; i++) { + // Make sure that the downloaded file exists, it should be gzip compressed + downloaded = new File(destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); + assertTrue(downloaded.exists()); - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); - p.waitFor(); + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); + p.waitFor(); - String individualFilePath = sourceFilePath.replace("orders_10*.csv", fileNames[i]); + String individualFilePath = sourceFilePath.replace("orders_10*.csv", fileNames[i]); - File original = new File(individualFilePath); - File unzipped = new File(destFolderCanonicalPathWithSeparator + fileNames[i]); - assert (original.length() == unzipped.length()); - assert (FileUtils.contentEquals(original, unzipped)); + File original = new File(individualFilePath); + File unzipped = new File(destFolderCanonicalPathWithSeparator + fileNames[i]); + assertEquals(original.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(original, 
unzipped)); + } + } finally { + statement.execute("DROP STAGE IF EXISTS wildcard_stage"); } - - } finally { - statement.execute("DROP STAGE IF EXISTS wildcard_stage"); - statement.close(); - connection.close(); } } @@ -868,110 +799,104 @@ public void testPutWithWildcardGCP() throws Throwable { private void copyContentFrom(File file1, File file2) throws Exception { FileInputStream inputStream = new FileInputStream(file1); FileOutputStream outputStream = new FileOutputStream(file2); - FileChannel fIn = inputStream.getChannel(); - FileChannel fOut = outputStream.getChannel(); - fOut.transferFrom(fIn, 0, fIn.size()); - fIn.position(0); - fOut.transferFrom(fIn, fIn.size(), fIn.size()); - fOut.close(); - fIn.close(); + try (FileChannel fIn = inputStream.getChannel(); + FileChannel fOut = outputStream.getChannel()) { + fOut.transferFrom(fIn, 0, fIn.size()); + fIn.position(0); + fOut.transferFrom(fIn, fIn.size(), fIn.size()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGetLargeFileGCP() throws Throwable { - Connection connection = getConnection("gcpaccount"); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { + try { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + File largeTempFile = tmpFolder.newFile("largeFile.csv"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { + bw.write("Creating large test file for GCP PUT/GET test"); + bw.write(System.lineSeparator()); + bw.write("Creating large test file for GCP 
PUT/GET test"); + bw.write(System.lineSeparator()); + } + File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - File largeTempFile = tmpFolder.newFile("largeFile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile)); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.close(); - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - - String sourceFilePath = largeTempFile.getCanonicalPath(); - - try { - // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, - // which is a large file - for (int i = 0; i < 12; i++) { - copyContentFrom(largeTempFile, largeTempFile2); - copyContentFrom(largeTempFile2, largeTempFile); - } + String sourceFilePath = largeTempFile.getCanonicalPath(); + + // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, + // which is a large file + for (int i = 0; i < 12; i++) { + copyContentFrom(largeTempFile, largeTempFile2); + copyContentFrom(largeTempFile2, largeTempFile); + } - statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE largefile_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE largefile_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @largefile_stage/"); + // check that file exists in stage after PUT + findFile(statement, "ls @largefile_stage/"); - // create a new table with 
columns matching CSV file - statement.execute("create or replace table large_table (colA string)"); - // copy rows from file into table - statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); - // copy back from table into different stage - statement.execute("create or replace stage extra_stage"); - statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); + // create a new table with columns matching CSV file + statement.execute("create or replace table large_table (colA string)"); + // copy rows from file into table + statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); + // copy back from table into different stage + statement.execute("create or replace stage extra_stage"); + statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); - - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - p.waitFor(); - - // compare the original file with the file that's been uploaded, copied into a table, copied - // back into a stage, - // downloaded, and unzipped - File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); - } finally { - statement.execute("DROP STAGE IF EXISTS largefile_stage"); - statement.execute("DROP STAGE IF EXISTS extra_stage"); - statement.execute("DROP TABLE IF EXISTS large_table"); - statement.close(); - connection.close(); + // get file from new stage + 
assertTrue( + "Failed to get files", + statement.execute( + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + assertTrue(downloaded.exists()); + + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + p.waitFor(); + + // compare the original file with the file that's been uploaded, copied into a table, copied + // back into a stage, + // downloaded, and unzipped + File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); + assertEquals(largeTempFile.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(largeTempFile, unzipped)); + } finally { + statement.execute("DROP STAGE IF EXISTS largefile_stage"); + statement.execute("DROP STAGE IF EXISTS extra_stage"); + statement.execute("DROP TABLE IF EXISTS large_table"); + } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutOverwrite() throws Throwable { - Connection connection = null; - Statement statement = null; - // create 2 files: an original, and one that will overwrite the original File file1 = tmpFolder.newFile("testfile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(file1)); - bw.write("Writing original file content. This should get overwritten."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { + bw.write("Writing original file content. This should get overwritten."); + } File file2 = tmpFolder2.newFile("testfile.csv"); - bw = new BufferedWriter(new FileWriter(file2)); - bw.write("This is all new! This should be the result of the overwriting."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { + bw.write("This is all new! 
This should be the result of the overwriting."); + } String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); @@ -982,51 +907,49 @@ public void testPutOverwrite() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE testing_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE testing_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); + // check that file exists in stage after PUT + findFile(statement, "ls @testing_stage/"); - // put another file in same stage with same filename with overwrite = true - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true")); + // put another file in same stage with same filename with overwrite = true + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true")); - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); + // check that file exists in stage after PUT + 
findFile(statement, "ls @testing_stage/"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - assert (downloaded.exists()); + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + assertTrue(downloaded.exists()); - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - p.waitFor(); + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + p.waitFor(); - File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); - assert (FileUtils.contentEqualsIgnoreEOL(file2, unzipped, null)); - } finally { - statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); - statement.close(); + File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); + assertTrue(FileUtils.contentEqualsIgnoreEOL(file2, unzipped, null)); + } finally { + statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); + } } } } @@ -1034,20 +957,14 @@ public void testPutOverwrite() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPut() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - 
connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - // load file test - // create a unique data file name by using current timestamp in millis + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { try { + // load file test + // create a unique data file name by using current timestamp in millis statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); // test external table load statement.execute("CREATE OR REPLACE TABLE testLoadToLocalFS(a number)"); @@ -1060,361 +977,293 @@ public void testPut() throws Throwable { + getFullPathFileInResource(TEST_DATA_FILE) + " @%testLoadToLocalFS/orders parallel=10")); - resultSet = statement.getResultSet(); + try (ResultSet resultSet = statement.getResultSet()) { - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertTrue(resultSetMetaData.getColumnCount() > 0); - - assertTrue(resultSet.next()); // one row - assertFalse(resultSet.next()); + // assert column count + assertTrue(resultSetMetaData.getColumnCount() > 0); + assertTrue(resultSet.next()); // one row + assertFalse(resultSet.next()); + } findFile( statement, "ls @%testLoadToLocalFS/ pattern='.*orders/" + TEST_DATA_FILE + ".g.*'"); // remove files - resultSet = + try (ResultSet resultSet = statement.executeQuery( - "rm @%testLoadToLocalFS/ pattern='.*orders/" + TEST_DATA_FILE + ".g.*'"); - - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertTrue(resultSetMetaData.getColumnCount() >= 1); + "rm @%testLoadToLocalFS/ pattern='.*orders/" + TEST_DATA_FILE + ".g.*'")) { + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + + // assert column count + assertTrue(resultSetMetaData.getColumnCount() >= 1); + + // assert we get 1 row for the file we copied + assertTrue(resultSet.next()); + 
assertNotNull(resultSet.getString(1)); + assertFalse(resultSet.next()); + try { + resultSet.getString(1); // no more row + fail("must fail"); + } catch (SQLException ex) { + assertEquals( + (int) ErrorCode.COLUMN_DOES_NOT_EXIST.getMessageCode(), ex.getErrorCode()); + } - // assert we get 1 row for the file we copied - assertTrue(resultSet.next()); - assertNotNull(resultSet.getString(1)); - assertFalse(resultSet.next()); - try { - resultSet.getString(1); // no more row - fail("must fail"); - } catch (SQLException ex) { - assertEquals((int) ErrorCode.COLUMN_DOES_NOT_EXIST.getMessageCode(), ex.getErrorCode()); + Thread.sleep(100); } - - Thread.sleep(100); - // show files again - resultSet = statement.executeQuery("ls @%testLoadToLocalFS/ pattern='.*orders/orders.*'"); - - // assert we get 0 row - assertFalse(resultSet.next()); + try (ResultSet resultSet = + statement.executeQuery("ls @%testLoadToLocalFS/ pattern='.*orders/orders.*'")) { + // assert we get 0 row + assertFalse(resultSet.next()); + } } finally { statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); - statement.close(); } - - } finally { - closeSQLObjects(resultSet, statement, connection); } } } static void findFile(Statement statement, String checkSQL) throws Throwable { boolean fileFound = false; - ResultSet resultSet = null; // tolerate at most 60 tries for the following loop for (int numSecs = 0; numSecs <= 60; numSecs++) { // show files - resultSet = statement.executeQuery(checkSQL); + try (ResultSet resultSet = statement.executeQuery(checkSQL)) { - if (resultSet.next()) { - fileFound = true; - break; + if (resultSet.next()) { + fileFound = true; + break; + } + // give enough time for s3 eventual consistency for US region + Thread.sleep(1000); + assertTrue("Could not find a file", fileFound); + + // assert the first column not null + assertNotNull("Null result", resultSet.getString(1)); } - // give enough time for s3 eventual consistency for US region - Thread.sleep(1000); } - 
assertTrue("Could not find a file", fileFound); - - // assert the first column not null - assertNotNull("Null result", resultSet.getString(1)); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSQLError42S02() throws SQLException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // execute a bad query - try { - resultSet = statement.executeQuery("SELECT * FROM nonexistence"); - + try (ResultSet resultSet = statement.executeQuery("SELECT * FROM nonexistence")) { fail("SQL exception not raised"); } catch (SQLException ex1) { // assert the sqlstate "42S02" which means BASE_TABLE_OR_VIEW_NOT_FOUND assertEquals("sqlstate mismatch", "42S02", ex1.getSQLState()); } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testExplainPlan() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); - // test explain plan: sorry not available for general but debugging purpose only - resultSet = statement.executeQuery("EXPLAIN PLAN FOR SELECT c1 FROM orders_jdbc"); + // test explain plan: sorry not available for general but debugging purpose only + ResultSet resultSet = + statement.executeQuery("EXPLAIN PLAN FOR SELECT c1 FROM orders_jdbc")) { ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); assertTrue("must return more than 4 columns", resultSetMetaData.getColumnCount() >= 4); assertTrue("must return more than 3 rows", 
countRows(resultSet) > 3); - - statement.close(); - - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testTimestampParsing() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - resultSet = - statement.executeQuery( - "select to_timestamp('2013-05-08T15:39:20.123-07:00') from orders_jdbc"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select to_timestamp('2013-05-08T15:39:20.123-07:00') from orders_jdbc")) { assertTrue(resultSet.next()); assertEquals("Wed, 08 May 2013 15:39:20 -0700", resultSet.getString(1)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testDateParsing() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - statement = connection.createStatement(); - resultSet = statement.executeQuery("select to_date('0001-01-01')"); - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select to_date('0001-01-01')")) { assertTrue(resultSet.next()); assertEquals("0001-01-01", resultSet.getString(1)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testTimeParsing() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - statement = connection.createStatement(); - resultSet = statement.executeQuery("select to_time('15:39:20.123') from orders_jdbc"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + 
statement.executeQuery("select to_time('15:39:20.123') from orders_jdbc")) { assertTrue(resultSet.next()); assertEquals("15:39:20", resultSet.getString(1)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testClientSideSorting() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); + ResultSetMetaData resultSetMetaData = null; - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // turn on sorting mode statement.execute("set-sf-property sort on"); - resultSet = statement.executeQuery("SELECT c3 FROM orders_jdbc"); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = statement.executeQuery("SELECT c3 FROM orders_jdbc")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert the values for the first 5 rows - for (int i = 0; i < 5; i++) { - assertTrue(resultSet.next()); + // assert the values for the first 5 rows + for (int i = 0; i < 5; i++) { + assertTrue(resultSet.next()); - // assert each column is 'F' - assertEquals("F", resultSet.getString(1)); + // assert each column is 'F' + assertEquals("F", resultSet.getString(1)); + } } - // turn off sorting mode statement.execute("set-sf-property sort off"); - resultSet = statement.executeQuery("SELECT c3 FROM orders_jdbc order by c3 desc"); + try (ResultSet resultSet = + statement.executeQuery("SELECT c3 FROM orders_jdbc order by c3 desc")) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, 
resultSetMetaData.getColumnCount()); - // assert the values for the first 4 rows - for (int i = 0; i < 4; i++) { - assertTrue(resultSet.next()); + // assert the values for the first 4 rows + for (int i = 0; i < 4; i++) { + assertTrue(resultSet.next()); - // assert each column is 'P' - assertEquals("P", resultSet.getString(1)); + // assert each column is 'P' + assertEquals("P", resultSet.getString(1)); + } } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testUpdateCount() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // create test table - statement.execute("CREATE OR REPLACE TABLE testUpdateCount(version number, name string)"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // create test table + statement.execute("CREATE OR REPLACE TABLE testUpdateCount(version number, name string)"); - // insert two rows - int numRows = - statement.executeUpdate("INSERT INTO testUpdateCount values (1, 'a'), (2, 'b')"); + // insert two rows + int numRows = + statement.executeUpdate("INSERT INTO testUpdateCount values (1, 'a'), (2, 'b')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 2, numRows); - } finally { - if (statement != null) { + assertEquals("Unexpected number of rows inserted: " + numRows, 2, numRows); + } finally { statement.execute("DROP TABLE if exists testUpdateCount"); } - closeSQLObjects(null, statement, connection); } } @Test public void testSnow4245() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - // set timestamp format - statement.execute("alter session set timestamp_input_format = 'YYYY-MM-DD HH24:MI:SS';"); - - // create test table with different time zone 
flavors - String createSQL = - "create or replace table testSnow4245(t timestamp with local time " - + "zone,ntz timestamp without time zone,tz timestamp with time zone)"; - statement.execute(createSQL); - - // populate - int numRows = - statement.executeUpdate( - "insert into testSnow4245 values(NULL,NULL,NULL)," - + "('2013-06-04 01:00:04','2013-06-04 01:00:04','2013-06-04 01:00:04')," - + "('2013-06-05 23:00:05','2013-06-05 23:00:05','2013-06-05 23:00:05')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 3, numRows); - - // query the data - resultSet = - statement.executeQuery( - "SELECT * FROM testSnow4245 order by 1 " - + "nulls first, 2 nulls first, 3 nulls first"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // set timestamp format + statement.execute("alter session set timestamp_input_format = 'YYYY-MM-DD HH24:MI:SS';"); + + // create test table with different time zone flavors + String createSQL = + "create or replace table testSnow4245(t timestamp with local time " + + "zone,ntz timestamp without time zone,tz timestamp with time zone)"; + statement.execute(createSQL); + + // populate + int numRows = + statement.executeUpdate( + "insert into testSnow4245 values(NULL,NULL,NULL)," + + "('2013-06-04 01:00:04','2013-06-04 01:00:04','2013-06-04 01:00:04')," + + "('2013-06-05 23:00:05','2013-06-05 23:00:05','2013-06-05 23:00:05')"); + assertEquals("Unexpected number of rows inserted: " + numRows, 3, numRows); + + // query the data + try (ResultSet resultSet = + statement.executeQuery( + "SELECT * FROM testSnow4245 order by 1 " + + "nulls first, 2 nulls first, 3 nulls first")) { - int i = 0; - // assert we get 3 rows + int i = 0; + // assert we get 3 rows - while (resultSet.next()) { - // assert each column is not null except the first row + while (resultSet.next()) { + // assert each column is not null except the first row - if (i == 0) { - for (int j = 1; j < 4; j++) { - 
assertNull(resultSet.getString(j), resultSet.getString(j)); - } - } else { - for (int j = 1; j < 4; j++) { - assertNotNull(resultSet.getString(j), resultSet.getString(j)); + if (i == 0) { + for (int j = 1; j < 4; j++) { + assertNull(resultSet.getString(j), resultSet.getString(j)); + } + } else { + for (int j = 1; j < 4; j++) { + assertNotNull(resultSet.getString(j), resultSet.getString(j)); + } + } + i = i + 1; } } - i = i + 1; - } - } finally { - if (statement != null) { + } finally { statement.execute("drop table testSnow4245"); } - closeSQLObjects(resultSet, statement, connection); } } /** SNOW-4394 - Four bytes UTF-8 characters are not returned correctly. */ @Test public void testSnow4394() throws Throwable { - Connection connection = null; - Statement statement = null; - String tableName = String.format("snow4394_%s", UUID.randomUUID().toString()).replaceAll("-", "_"); - try { - connection = getConnection(); - - statement = connection.createStatement(); - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // create test table - statement.execute(String.format("CREATE OR REPLACE TABLE %s(str string)", tableName)); - - String data = "What is \ud83d\ude12?"; - // insert two rows - int numRows = - statement.executeUpdate( - String.format("INSERT INTO %s(str) values('%s')", tableName, data)); - assertEquals("Unexpected number of rows inserted: " + numRows, 1, numRows); - - ResultSet rset = statement.executeQuery(String.format("SELECT str FROM %s", tableName)); - String ret = null; - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret, data, ret); - } finally { - if (statement != null) { + try { + statement.execute(String.format("CREATE OR REPLACE TABLE %s(str string)", tableName)); + + String data = "What is \ud83d\ude12?"; + // insert two rows + int numRows = + statement.executeUpdate( + String.format("INSERT INTO %s(str) values('%s')", tableName, 
data)); + assertEquals("Unexpected number of rows inserted: " + numRows, 1, numRows); + + try (ResultSet rset = + statement.executeQuery(String.format("SELECT str FROM %s", tableName))) { + String ret = null; + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret, data, ret); + } + } finally { statement.execute(String.format("DROP TABLE if exists %s", tableName)); - statement.close(); } - closeSQLObjects(null, statement, connection); } } @@ -1447,1116 +1296,1016 @@ private void addBindBatch(PreparedStatement preparedStatement, java.sql.Date sql @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void test31448() throws Throwable { - Connection connection = getConnection(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - Statement statement = connection.createStatement(); - - statement.execute("alter session set enable_fix_31448_2=2, " + "error_on_generic_pruner=true;"); - - statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); - - statement.execute("create or replace table " + "bug56658(iv number, tsv timestamp_ntz)"); - statement.execute( - "insert into bug56658 select seq8(), " - + "timestampadd(day, seq8(), '1970-01-13 00:00:00'::timestamp_ntz)\n" - + "from table(generator(rowcount=>20))"); - - connection - .unwrap(SnowflakeConnectionV1.class) - .getSfSession() - .setTimestampMappedType(SnowflakeType.TIMESTAMP_NTZ); - Timestamp ts = buildTimestamp(1970, 0, 15, 10, 14, 30, 0); - PreparedStatement preparedStatement = - connection.prepareStatement( - "select iv, tsv from bug56658 where tsv" + " >= ? and tsv <= ? 
order by iv;"); - statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); - Timestamp ts2 = buildTimestamp(1970, 0, 18, 10, 14, 30, 0); - preparedStatement.setTimestamp(1, ts); - preparedStatement.setTimestamp(2, ts2); - preparedStatement.executeQuery(); + statement.execute( + "alter session set enable_fix_31448_2=2, " + "error_on_generic_pruner=true;"); + + statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); + + statement.execute("create or replace table " + "bug56658(iv number, tsv timestamp_ntz)"); + statement.execute( + "insert into bug56658 select seq8(), " + + "timestampadd(day, seq8(), '1970-01-13 00:00:00'::timestamp_ntz)\n" + + "from table(generator(rowcount=>20))"); + + connection + .unwrap(SnowflakeConnectionV1.class) + .getSfSession() + .setTimestampMappedType(SnowflakeType.TIMESTAMP_NTZ); + Timestamp ts = buildTimestamp(1970, 0, 15, 10, 14, 30, 0); + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "select iv, tsv from bug56658 where tsv" + " >= ? and tsv <= ? 
order by iv;")) { + statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); + Timestamp ts2 = buildTimestamp(1970, 0, 18, 10, 14, 30, 0); + preparedStatement.setTimestamp(1, ts); + preparedStatement.setTimestamp(2, ts2); + preparedStatement.executeQuery(); + } + } } @Test public void testBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - preparedStatement = connection.prepareStatement("SELECT ?, ?"); - - // bind integer - preparedStatement.setInt(1, 1); - preparedStatement.setString(2, "hello"); - resultSet = preparedStatement.executeQuery(); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - - // assert we get 1 rows - assertTrue(resultSet.next()); - - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - - // bind float - preparedStatement.setDouble(1, 1.2); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(1)); - - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("double", 1.2, resultSet.getDouble(1), 0); - assertEquals("string", "hello", resultSet.getString(2)); - - // bind string - preparedStatement.setString(1, "hello"); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = null; + Timestamp ts = null; + Time tm = null; + java.sql.Date sqlDate = null; + int[] updateCounts; + try (Connection connection = 
getConnection()) { + try (PreparedStatement preparedStatement = connection.prepareStatement("SELECT ?, ?")) { - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + // bind integer + preparedStatement.setInt(1, 1); + preparedStatement.setString(2, "hello"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("string1", "hello", resultSet.getString(1)); - assertEquals("string2", "hello", resultSet.getString(2)); + resultSetMetaData = resultSet.getMetaData(); - // bind date - java.sql.Date sqlDate = java.sql.Date.valueOf("2014-08-26"); - preparedStatement.setDate(1, sqlDate); - resultSet = preparedStatement.executeQuery(); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - resultSetMetaData = resultSet.getMetaData(); + // assert we get 1 rows + assertTrue(resultSet.next()); - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(1)); + assertEquals("integer", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind float + preparedStatement.setDouble(1, 1.2); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("string", "2014-08-26", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(1)); - // bind timestamp - Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); - preparedStatement.setTimestamp(1, ts); 
- resultSet = preparedStatement.executeQuery(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("double", 1.2, resultSet.getDouble(1), 0); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind string + preparedStatement.setString(1, "hello"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - resultSetMetaData = resultSet.getMetaData(); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(1)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("string1", "hello", resultSet.getString(1)); + assertEquals("string2", "hello", resultSet.getString(2)); + } + // bind date + sqlDate = java.sql.Date.valueOf("2014-08-26"); + preparedStatement.setDate(1, sqlDate); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals( - "Incorrect timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(1)); - // bind time - Time tm = new Time(12345678); // 03:25:45.678 - preparedStatement.setTime(1, tm); - resultSet = preparedStatement.executeQuery(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("string", "2014-08-26", resultSet.getString(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind timestamp + ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); + preparedStatement.setTimestamp(1, ts); + try (ResultSet resultSet = 
preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.TIME, resultSetMetaData.getColumnType(1)); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("Incorrect time", "03:25:45", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals( + "Incorrect timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind time + tm = new Time(12345678); // 03:25:45.678 + preparedStatement.setTime(1, tm); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - preparedStatement.close(); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.TIME, resultSetMetaData.getColumnType(1)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("Incorrect time", "03:25:45", resultSet.getString(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + } // bind in where clause - preparedStatement = - connection.prepareStatement("SELECT * FROM orders_jdbc WHERE to_number(c1) = ?"); - - preparedStatement.setInt(1, 100); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * FROM orders_jdbc WHERE to_number(c1) = ?")) { - // assert column count - assertEquals(9, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, 
resultSetMetaData.getColumnType(2)); + preparedStatement.setInt(1, 100); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("c1", "100", resultSet.getString(1)); - assertEquals("c2", "147004", resultSet.getString(2)); + // assert column count + assertEquals(9, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - preparedStatement.close(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("c1", "100", resultSet.getString(1)); + assertEquals("c2", "147004", resultSet.getString(2)); + } + } // bind in insert statement // create a test table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate( - "create or replace table testBind(a int, b string, c double, d date, " - + "e timestamp, f time, g date)"); - - preparedStatement = - connection.prepareStatement( - "insert into testBind(a, b, c, d, e, f) values(?, ?, ?, ?, ?, ?)"); - - preparedStatement.setInt(1, 1); - preparedStatement.setString(2, "hello"); - preparedStatement.setDouble(3, 1.2); - preparedStatement.setDate(4, sqlDate); - preparedStatement.setTimestamp(5, ts); - preparedStatement.setTime(6, tm); - int rowCount = preparedStatement.executeUpdate(); - - // update count should be 1 - assertEquals("update count", 1, rowCount); - - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBind"); - - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - 
assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); - - // bind in update statement - preparedStatement = connection.prepareStatement("update testBind set b=? where a=?"); + try (Statement regularStatement = connection.createStatement()) { + regularStatement.executeUpdate( + "create or replace table testBind(a int, b string, c double, d date, " + + "e timestamp, f time, g date)"); - preparedStatement.setString(1, "world"); - preparedStatement.setInt(2, 1); - preparedStatement.execute(); - - preparedStatement.close(); + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testBind(a, b, c, d, e, f) values(?, ?, ?, ?, ?, ?)")) { + + preparedStatement.setInt(1, 1); + preparedStatement.setString(2, "hello"); + preparedStatement.setDouble(3, 1.2); + preparedStatement.setDate(4, sqlDate); + preparedStatement.setTimestamp(5, ts); + preparedStatement.setTime(6, tm); + int rowCount = preparedStatement.executeUpdate(); + + // update count should be 1 + assertEquals("update count", 1, rowCount); + + // test the inserted rows + try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { + + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + assertNull("date", resultSet.getString(7)); + } + } + // bind in update statement + try (PreparedStatement preparedStatement = + connection.prepareStatement("update testBind set b=? 
where a=?")) { + preparedStatement.setString(1, "world"); + preparedStatement.setInt(2, 1); + preparedStatement.execute(); + } - // test the updated rows - resultSet = regularStatement.executeQuery("select * from testBind"); + // test the updated rows + try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 1, resultSet.getInt(1)); + assertEquals("string", "world", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + assertNull("date", resultSet.getString(7)); + } + // array bind for insert + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testBind (a, b, c, d, e, f, g) " + + "values(?, ?, ?, ?, ?, ?, current_date())")) { + + preparedStatement.setInt(1, 2); + preparedStatement.setString(2, "hello"); + preparedStatement.setDouble(3, 1.2); + preparedStatement.setDate(4, sqlDate); + preparedStatement.setTimestamp(5, ts); + preparedStatement.setTime(6, tm); + preparedStatement.addBatch(); + + preparedStatement.setInt(1, 3); + preparedStatement.setString(2, "hello"); + preparedStatement.setDouble(3, 1.2); + preparedStatement.setDate(4, sqlDate); + preparedStatement.setTimestamp(5, ts); + preparedStatement.setTime(6, tm); + preparedStatement.addBatch(); + + updateCounts = preparedStatement.executeBatch(); + + // GS optimizes this into one insert execution, but we expand the + // return count into an array + assertEquals("Number of update counts", 2, updateCounts.length); + + // update count should be 1 for each + assertEquals("update count", 1, updateCounts[0]); + assertEquals("update count", 1, updateCounts[1]); + } + // test the inserted rows + try (ResultSet resultSet = + 
regularStatement.executeQuery("select * from testBind where a = 2")) { - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "world", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); - - // array bind for insert - preparedStatement = - connection.prepareStatement( - "insert into testBind (a, b, c, d, e, f, g) " - + "values(?, ?, ?, ?, ?, ?, current_date())"); - - preparedStatement.setInt(1, 2); - preparedStatement.setString(2, "hello"); - preparedStatement.setDouble(3, 1.2); - preparedStatement.setDate(4, sqlDate); - preparedStatement.setTimestamp(5, ts); - preparedStatement.setTime(6, tm); - preparedStatement.addBatch(); - - preparedStatement.setInt(1, 3); - preparedStatement.setString(2, "hello"); - preparedStatement.setDouble(3, 1.2); - preparedStatement.setDate(4, sqlDate); - preparedStatement.setTimestamp(5, ts); - preparedStatement.setTime(6, tm); - preparedStatement.addBatch(); - - int[] updateCounts = preparedStatement.executeBatch(); - - // GS optimizes this into one insert execution, but we expand the - // return count into an array - assertEquals("Number of update counts", 2, updateCounts.length); - - // update count should be 1 for each - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); - - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBind where a = 2"); - - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 2, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", 
"2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 2, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + } - resultSet = regularStatement.executeQuery("select * from testBind where a = 3"); + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBind where a = 3")) { - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 3, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - - // describe mode - preparedStatement = - connection.prepareStatement("select * from testBind WHERE to_number(a) = ?"); - - resultSetMetaData = preparedStatement.getMetaData(); - assertEquals(7, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(3)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(4)); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(5)); - assertEquals(Types.TIME, resultSetMetaData.getColumnType(6)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(7)); - - preparedStatement.close(); - preparedStatement = 
connection.prepareStatement("select ?, ?"); - - resultSetMetaData = preparedStatement.getMetaData(); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - - preparedStatement.close(); - preparedStatement = connection.prepareStatement("select ?, ?"); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 3, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + } - preparedStatement.setInt(1, 1); - preparedStatement.setString(2, "hello"); - ResultSet result = preparedStatement.executeQuery(); + // describe mode + try (PreparedStatement preparedStatement = + connection.prepareStatement("select * from testBind WHERE to_number(a) = ?")) { + + resultSetMetaData = preparedStatement.getMetaData(); + assertEquals(7, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(3)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(4)); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(5)); + assertEquals(Types.TIME, resultSetMetaData.getColumnType(6)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(7)); + } - resultSetMetaData = result.getMetaData(); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + try (PreparedStatement preparedStatement = connection.prepareStatement("select ?, ?")) { 
+ resultSetMetaData = preparedStatement.getMetaData(); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + } - preparedStatement.close(); + try (PreparedStatement preparedStatement = connection.prepareStatement("select ?, ?")) { - // test null binding - preparedStatement = connection.prepareStatement("select ?"); + preparedStatement.setInt(1, 1); + preparedStatement.setString(2, "hello"); + ResultSet result = preparedStatement.executeQuery(); - preparedStatement.setNull(1, Types.VARCHAR); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = result.getMetaData(); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + } - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + // test null binding + try (PreparedStatement preparedStatement = connection.prepareStatement("select ?")) { - // assert we get 1 rows - assertTrue(resultSet.next()); - assertNull(resultSet.getObject(1)); + preparedStatement.setNull(1, Types.VARCHAR); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - preparedStatement.setNull(1, Types.INTEGER); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(1)); + } + preparedStatement.setNull(1, Types.INTEGER); + try 
(ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertNull(resultSet.getObject(1)); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - preparedStatement.close(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(1)); + } + } + } // bind in insert statement // create a test table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate( - "create or replace table testBind1(c1 double, c2 string, c3 date, " - + "c4 date, c5 string, c6 date, c7 string, c8 string, " - + "c9 string, c10 string, c11 string, c12 date, c13 string, " - + "c14 float, c15 string, c16 string, c17 string, c18 string," - + "c19 string, c20 date, c21 string)"); - - // array bind for insert - preparedStatement = - connection.prepareStatement( - "insert into testBind1 (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, " - + "c12, c13, c14, c15, c16, c17, c18, c19, c20, c21) values " - + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?," - + " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); + try (Statement regularStatement = connection.createStatement()) { + regularStatement.executeUpdate( + "create or replace table testBind1(c1 double, c2 string, c3 date, " + + "c4 date, c5 string, c6 date, c7 string, c8 string, " + + "c9 string, c10 string, c11 string, c12 date, c13 string, " + + "c14 float, c15 string, c16 string, c17 string, c18 string," + + "c19 string, c20 date, c21 string)"); + + // array bind for insert + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testBind1 (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, " + + "c12, c13, c14, c15, c16, c17, c18, c19, c20, c21) values " + + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?," + + " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { - for (int idx = 0; idx < 16; idx++) { - addBindBatch(preparedStatement, sqlDate); - } + for (int idx = 0; idx < 16; 
idx++) { + addBindBatch(preparedStatement, sqlDate); + } - updateCounts = preparedStatement.executeBatch(); + updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 16, updateCounts.length); + // GS optimizes this into one insert execution + assertEquals("Number of update counts", 16, updateCounts.length); - for (int idx = 0; idx < 16; idx++) { - assertEquals("update count", 1, updateCounts[idx]); - } - } finally { - if (regularStatement != null) { - regularStatement.execute("DROP TABLE testBind"); - regularStatement.close(); + for (int idx = 0; idx < 16; idx++) { + assertEquals("update count", 1, updateCounts[idx]); + } + } } - - closeSQLObjects(resultSet, preparedStatement, connection); + connection.createStatement().execute("DROP TABLE testBind"); } } @Test public void testTableBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - // select * from table(?) - preparedStatement = connection.prepareStatement("SELECT * from table(?)"); - ResultSetMetaData resultSetMetaData = preparedStatement.getMetaData(); - // we do not have any metadata, without a specified table - assertEquals(0, resultSetMetaData.getColumnCount()); - - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(9, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); - - preparedStatement.close(); - - // select * from table(?) where c1 = 1 - preparedStatement = connection.prepareStatement("SELECT * from table(?) 
where c1 = 1"); - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - assertEquals(9, resultSetMetaData.getColumnCount()); - assertTrue(resultSet.next()); - assertFalse(resultSet.next()); - - preparedStatement.close(); - - // select * from table(?) where c1 = 2 order by c3 - preparedStatement = connection.prepareStatement("SELECT * from table(?) order by c3"); - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - assertEquals(9, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); - - preparedStatement.close(); - - regularStatement = connection.createStatement(); - regularStatement.execute("create or replace table testTableBind(c integer, d string)"); - - // insert into table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate("insert into testTableBind (c, d) values (1, 'one')"); - - // select c1, c from table(?), testTableBind - preparedStatement = connection.prepareStatement("SELECT * from table(?), testTableBind"); - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - assertEquals(11, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); - - preparedStatement.close(); + ResultSetMetaData resultSetMetaData = null; - // select * from table(?), table(?) 
- preparedStatement = connection.prepareStatement("SELECT * from table(?), table(?)"); - preparedStatement.setString(1, ORDERS_JDBC); - preparedStatement.setString(2, "testTableBind"); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + // select * from table(?) + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?)")) { + resultSetMetaData = preparedStatement.getMetaData(); + // we do not have any metadata, without a specified table + assertEquals(0, resultSetMetaData.getColumnCount()); + + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(9, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - assertEquals(11, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); + // select * from table(?) where c1 = 1 + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?) where c1 = 1")) { + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - preparedStatement.close(); + assertEquals(9, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.next()); + assertFalse(resultSet.next()); + } + } - // select tab1.c1, tab2.c from table(?) as a, table(?) as b - preparedStatement = - connection.prepareStatement("SELECT a.c1, b.c from table(?) as a, table(?) 
as b"); - preparedStatement.setString(1, ORDERS_JDBC); - preparedStatement.setString(2, "testTableBind"); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + // select * from table(?) where c1 = 2 order by c3 + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?) order by c3")) { + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(9, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - assertEquals(2, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); + regularStatement.execute("create or replace table testTableBind(c integer, d string)"); + // insert into table + regularStatement.executeUpdate("insert into testTableBind (c, d) values (1, 'one')"); + // select c1, c from table(?), testTableBind + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?), testTableBind")) { + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(11, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - preparedStatement.close(); + // select * from table(?), table(?) 
+ try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?), table(?)")) { + preparedStatement.setString(1, ORDERS_JDBC); + preparedStatement.setString(2, "testTableBind"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(11, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - } finally { - if (regularStatement != null) { + // select tab1.c1, tab2.c from table(?) as a, table(?) as b + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT a.c1, b.c from table(?) as a, table(?) as b")) { + preparedStatement.setString(1, ORDERS_JDBC); + preparedStatement.setString(2, "testTableBind"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(2, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } + } finally { regularStatement.execute("DROP TABLE testTableBind"); } - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test public void testBindInWithClause() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - // create a test table - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table testBind2(a int, b string, c double, " - + "d date, e timestamp, f time, g date)"); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + // create a test table + regularStatement.execute( + "create or replace table testBind2(a int, b string, c 
double, " + + "d date, e timestamp, f time, g date)"); - // bind in where clause - preparedStatement = - connection.prepareStatement( - "WITH V AS (SELECT * FROM testBind2 WHERE a = ?) " + "SELECT count(*) FROM V"); + // bind in where clause + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "WITH V AS (SELECT * FROM testBind2 WHERE a = ?) " + "SELECT count(*) FROM V")) { - preparedStatement.setInt(1, 100); - resultSet = preparedStatement.executeQuery(); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + preparedStatement.setInt(1, 100); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 rows - assertTrue(resultSet.next()); - preparedStatement.close(); - } finally { - if (regularStatement != null) { + // assert we get 1 rows + assertTrue(resultSet.next()); + } + } + } finally { regularStatement.execute("DROP TABLE testBind2"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test public void testBindTimestampNTZ() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - // create a test table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate( - "create or replace table testBindTimestampNTZ(a timestamp_ntz)"); - - regularStatement.execute("alter session set client_timestamp_type_mapping='timestamp_ntz'"); - - // bind in where clause - preparedStatement = connection.prepareStatement("insert into testBindTimestampNTZ values(?)"); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + 
try { + // create a test table + regularStatement.executeUpdate( + "create or replace table testBindTimestampNTZ(a timestamp_ntz)"); - Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); - preparedStatement.setTimestamp(1, ts); + regularStatement.execute("alter session set client_timestamp_type_mapping='timestamp_ntz'"); - int updateCount = preparedStatement.executeUpdate(); + // bind in where clause + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testBindTimestampNTZ values(?)")) { - // update count should be 1 - assertEquals("update count", 1, updateCount); + Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); + preparedStatement.setTimestamp(1, ts); - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBindTimestampNTZ"); + int updateCount = preparedStatement.executeUpdate(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("timestamp", "Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1)); + // update count should be 1 + assertEquals("update count", 1, updateCount); - regularStatement.executeUpdate("truncate table testBindTimestampNTZ"); + // test the inserted rows + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBindTimestampNTZ")) { - preparedStatement.setTimestamp( - 1, ts, Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"))); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("timestamp", "Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1)); - updateCount = preparedStatement.executeUpdate(); + regularStatement.executeUpdate("truncate table testBindTimestampNTZ"); - // update count should be 1 - assertEquals("update count", 1, updateCount); + preparedStatement.setTimestamp( + 1, ts, Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"))); - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBindTimestampNTZ"); + updateCount = 
preparedStatement.executeUpdate(); - // assert we get 1 rows - assertTrue(resultSet.next()); + // update count should be 1 + assertEquals("update count", 1, updateCount); + } + // test the inserted rows + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBindTimestampNTZ")) { - preparedStatement.close(); - } finally { - if (regularStatement != null) { + // assert we get 1 rows + assertTrue(resultSet.next()); + } + } + } finally { regularStatement.execute("DROP TABLE testBindTimestampNTZ"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test public void testNullBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - try { - connection = getConnection(); - - regularStatement = connection.createStatement(); - regularStatement.execute("create or replace table testNullBind(a double)"); - - // array bind with nulls - preparedStatement = connection.prepareStatement("insert into testNullBind (a) values(?)"); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table testNullBind(a double)"); - preparedStatement.setDouble(1, 1.2); - preparedStatement.addBatch(); + // array bind with nulls + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testNullBind (a) values(?)")) { + preparedStatement.setDouble(1, 1.2); + preparedStatement.addBatch(); - preparedStatement.setObject(1, null); - preparedStatement.addBatch(); + preparedStatement.setObject(1, null); + preparedStatement.addBatch(); - int[] updateCounts = preparedStatement.executeBatch(); + int[] updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + // GS optimizes this into one insert execution + 
assertEquals("Number of update counts", 2, updateCounts.length); - // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + // update count should be 1 + assertEquals("update count", 1, updateCounts[0]); + assertEquals("update count", 1, updateCounts[1]); - preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - preparedStatement.setObject(1, null); - preparedStatement.addBatch(); + preparedStatement.setObject(1, null); + preparedStatement.addBatch(); - preparedStatement.setDouble(1, 1.2); - preparedStatement.addBatch(); + preparedStatement.setDouble(1, 1.2); + preparedStatement.addBatch(); - updateCounts = preparedStatement.executeBatch(); + updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + // GS optimizes this into one insert execution + assertEquals("Number of update counts", 2, updateCounts.length); - // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + // update count should be 1 + assertEquals("update count", 1, updateCounts[0]); + assertEquals("update count", 1, updateCounts[1]); - preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - preparedStatement.setObject(1, null); - preparedStatement.addBatch(); + preparedStatement.setObject(1, null); + preparedStatement.addBatch(); - updateCounts = preparedStatement.executeBatch(); + updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 1, updateCounts.length); + // GS optimizes this into one insert execution + assertEquals("Number of update counts", 1, updateCounts.length); - // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); + // update count should be 1 + assertEquals("update count", 1, updateCounts[0]); 
- preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - // this test causes query count in GS not to be decremented because - // the exception is thrown before registerQC. Discuss with Johnston - // to resolve the issue before enabling the test. - try { - preparedStatement.setObject(1, "Null", Types.DOUBLE); - preparedStatement.addBatch(); - preparedStatement.executeBatch(); - fail("must fail in executeBatch()"); - } catch (SnowflakeSQLException ex) { - assertEquals(2086, ex.getErrorCode()); - } + // this test causes query count in GS not to be decremented because + // the exception is thrown before registerQC. Discuss with Johnston + // to resolve the issue before enabling the test. + try { + preparedStatement.setObject(1, "Null", Types.DOUBLE); + preparedStatement.addBatch(); + preparedStatement.executeBatch(); + fail("must fail in executeBatch()"); + } catch (SnowflakeSQLException ex) { + assertEquals(2086, ex.getErrorCode()); + } - preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - try { - preparedStatement.setString(1, "hello"); - preparedStatement.addBatch(); + try { + preparedStatement.setString(1, "hello"); + preparedStatement.addBatch(); - preparedStatement.setDouble(1, 1.2); - preparedStatement.addBatch(); - fail("must fail"); - } catch (SnowflakeSQLException ex) { - assertEquals( - (int) ErrorCode.ARRAY_BIND_MIXED_TYPES_NOT_SUPPORTED.getMessageCode(), - ex.getErrorCode()); - } - } finally { - if (regularStatement != null) { + preparedStatement.setDouble(1, 1.2); + preparedStatement.addBatch(); + fail("must fail"); + } catch (SnowflakeSQLException ex) { + assertEquals( + (int) ErrorCode.ARRAY_BIND_MIXED_TYPES_NOT_SUPPORTED.getMessageCode(), + ex.getErrorCode()); + } + } + } finally { regularStatement.execute("DROP TABLE testNullBind"); - regularStatement.close(); } - - closeSQLObjects(preparedStatement, connection); } } @Test public void testSnow12603() throws Throwable { - Connection connection = null; - PreparedStatement 
preparedStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); + ResultSetMetaData resultSetMetaData = null; + try (Connection connection = getConnection()) { + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT ?, ?, ?, ?, ?, ?")) { - preparedStatement = connection.prepareStatement("SELECT ?, ?, ?, ?, ?, ?"); + java.sql.Date sqlDate = java.sql.Date.valueOf("2014-08-26"); - java.sql.Date sqlDate = java.sql.Date.valueOf("2014-08-26"); + Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); - Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); + preparedStatement.setObject(1, 1); + preparedStatement.setObject(2, "hello"); + preparedStatement.setObject(3, new BigDecimal("1.3")); + preparedStatement.setObject(4, Float.valueOf("1.3")); + preparedStatement.setObject(5, sqlDate); + preparedStatement.setObject(6, ts); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - preparedStatement.setObject(1, 1); - preparedStatement.setObject(2, "hello"); - preparedStatement.setObject(3, new BigDecimal("1.3")); - preparedStatement.setObject(4, Float.valueOf("1.3")); - preparedStatement.setObject(5, sqlDate); - preparedStatement.setObject(6, ts); - resultSet = preparedStatement.executeQuery(); + resultSetMetaData = resultSet.getMetaData(); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // assert column count + assertEquals(6, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); + + // assert we get 1 rows + assertTrue(resultSet.next()); - // assert column count - assertEquals(6, 
resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); + assertEquals("integer", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); + assertEquals("double", 1.3, resultSet.getDouble(4), 0); + assertEquals("date", "2014-08-26", resultSet.getString(5)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + + preparedStatement.setObject(1, 1, Types.INTEGER); + preparedStatement.setObject(2, "hello", Types.VARCHAR); + preparedStatement.setObject(3, new BigDecimal("1.3"), Types.DECIMAL); + preparedStatement.setObject(4, Float.valueOf("1.3"), Types.DOUBLE); + preparedStatement.setObject(5, sqlDate, Types.DATE); + preparedStatement.setObject(6, ts, Types.TIMESTAMP); + } + try (ResultSet resultSet = preparedStatement.executeQuery()) { - // assert we get 1 rows - assertTrue(resultSet.next()); + resultSetMetaData = resultSet.getMetaData(); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); - - preparedStatement.setObject(1, 1, Types.INTEGER); - preparedStatement.setObject(2, "hello", Types.VARCHAR); - preparedStatement.setObject(3, new BigDecimal("1.3"), Types.DECIMAL); - preparedStatement.setObject(4, 
Float.valueOf("1.3"), Types.DOUBLE); - preparedStatement.setObject(5, sqlDate, Types.DATE); - preparedStatement.setObject(6, ts, Types.TIMESTAMP); - - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(6, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); - - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert column count + assertEquals(6, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); + + // assert we get 1 rows + assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); - } finally { - closeSQLObjects(resultSet, preparedStatement, connection); + assertEquals("integer", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); + 
assertEquals("double", 1.3, resultSet.getDouble(4), 0); + assertEquals("date", "2014-08-26", resultSet.getString(5)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + } + } } } /** SNOW-6290: timestamp value is shifted by local timezone */ @Test public void testSnow6290() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // create test table - statement.execute("CREATE OR REPLACE TABLE testSnow6290(ts timestamp)"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // create test table + statement.execute("CREATE OR REPLACE TABLE testSnow6290(ts timestamp)"); - PreparedStatement preparedStatement = - connection.prepareStatement("INSERT INTO testSnow6290(ts) values(?)"); + PreparedStatement preparedStatement = + connection.prepareStatement("INSERT INTO testSnow6290(ts) values(?)"); - Timestamp ts = new Timestamp(System.currentTimeMillis()); + Timestamp ts = new Timestamp(System.currentTimeMillis()); - preparedStatement.setTimestamp(1, ts); - preparedStatement.executeUpdate(); + preparedStatement.setTimestamp(1, ts); + preparedStatement.executeUpdate(); - ResultSet res = statement.executeQuery("select ts from testSnow6290"); + ResultSet res = statement.executeQuery("select ts from testSnow6290"); - assertTrue("expect a row", res.next()); + assertTrue("expect a row", res.next()); - Timestamp tsFromDB = res.getTimestamp(1); + Timestamp tsFromDB = res.getTimestamp(1); - assertEquals("timestamp mismatch", ts.getTime(), tsFromDB.getTime()); - } finally { - if (statement != null) { + assertEquals("timestamp mismatch", ts.getTime(), tsFromDB.getTime()); + } finally { statement.execute("DROP TABLE if exists testSnow6290"); - statement.close(); } - closeSQLObjects(statement, connection); } } /** SNOW-6986: null sql shouldn't be allowed */ @Test public 
void testInvalidSQL() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // execute DDLs statement.executeQuery(null); - statement.close(); fail("expected exception, but no exception"); } catch (SnowflakeSQLException ex) { assertEquals((int) ErrorCode.INVALID_SQL.getMessageCode(), ex.getErrorCode()); - } finally { - closeSQLObjects(statement, connection); } } @Test public void testGetObject() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; ResultSetMetaData resultSetMetaData; - try { - connection = getConnection(); - - preparedStatement = connection.prepareStatement("SELECT ?"); - + try (Connection connection = getConnection(); + PreparedStatement preparedStatement = connection.prepareStatement("SELECT ?")) { // bind integer preparedStatement.setInt(1, 1); - resultSet = preparedStatement.executeQuery(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - assertEquals( - "column class name=BigDecimal", - Long.class.getName(), - resultSetMetaData.getColumnClassName(1)); - - // assert we get 1 rows - assertTrue(resultSet.next()); + assertEquals( + "column class name=BigDecimal", + Long.class.getName(), + resultSetMetaData.getColumnClassName(1)); - assertTrue("integer", resultSet.getObject(1) instanceof Long); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertTrue("integer", resultSet.getObject(1) instanceof Long); + } preparedStatement.setString(1, "hello"); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = 
resultSet.getMetaData(); - assertEquals( - "column class name=String", - String.class.getName(), - resultSetMetaData.getColumnClassName(1)); + assertEquals( + "column class name=String", + String.class.getName(), + resultSetMetaData.getColumnClassName(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert we get 1 rows + assertTrue(resultSet.next()); - assertTrue("string", resultSet.getObject(1) instanceof String); + assertTrue("string", resultSet.getObject(1) instanceof String); + } preparedStatement.setDouble(1, 1.2); - resultSet = preparedStatement.executeQuery(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - assertEquals( - "column class name=Double", - Double.class.getName(), - resultSetMetaData.getColumnClassName(1)); + assertEquals( + "column class name=Double", + Double.class.getName(), + resultSetMetaData.getColumnClassName(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert we get 1 rows + assertTrue(resultSet.next()); - assertTrue("double", resultSet.getObject(1) instanceof Double); + assertTrue("double", resultSet.getObject(1) instanceof Double); + } preparedStatement.setTimestamp(1, new Timestamp(0)); - resultSet = preparedStatement.executeQuery(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - assertEquals( - "column class name=Timestamp", - Timestamp.class.getName(), - resultSetMetaData.getColumnClassName(1)); + assertEquals( + "column class name=Timestamp", + Timestamp.class.getName(), + resultSetMetaData.getColumnClassName(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert we get 1 rows + assertTrue(resultSet.next()); - assertTrue("timestamp", resultSet.getObject(1) instanceof Timestamp); + assertTrue("timestamp", resultSet.getObject(1) instanceof 
Timestamp); + } preparedStatement.setDate(1, new java.sql.Date(0)); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); - - assertEquals( - "column class name=Date", - java.sql.Date.class.getName(), - resultSetMetaData.getColumnClassName(1)); - - // assert we get 1 rows - assertTrue(resultSet.next()); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - assertTrue("date", resultSet.getObject(1) instanceof java.sql.Date); + assertEquals( + "column class name=Date", + java.sql.Date.class.getName(), + resultSetMetaData.getColumnClassName(1)); - preparedStatement.close(); + // assert we get 1 rows + assertTrue(resultSet.next()); - } finally { - closeSQLObjects(resultSet, preparedStatement, connection); + assertTrue("date", resultSet.getObject(1) instanceof java.sql.Date); + } } } @Test public void testGetDoubleForNull() throws Throwable { - Connection connection = null; - Statement stmt = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - stmt = connection.createStatement(); - resultSet = stmt.executeQuery("select cast(null as int) as null_int"); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement(); + ResultSet resultSet = stmt.executeQuery("select cast(null as int) as null_int")) { assertTrue(resultSet.next()); assertEquals("0 for null", 0, resultSet.getDouble(1), 0.0001); - } finally { - closeSQLObjects(resultSet, stmt, connection); } } // SNOW-27438 @Test public void testGetDoubleForNaN() throws Throwable { - Connection connection = null; - Statement stmt = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - stmt = connection.createStatement(); - resultSet = stmt.executeQuery("select 'nan'::float"); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement(); + ResultSet resultSet = stmt.executeQuery("select 'nan'::float")) { 
assertTrue(resultSet.next()); assertThat("NaN for NaN", resultSet.getDouble(1), equalTo(Double.NaN)); - } finally { - closeSQLObjects(resultSet, stmt, connection); } } @Test public void testPutViaExecuteQuery() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // load file test - // create a unique data file name by using current timestamp in millis + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { try { + // load file test + // create a unique data file name by using current timestamp in millis // test external table load statement.execute("CREATE OR REPLACE TABLE testPutViaExecuteQuery(a number)"); // put files - resultSet = + try (ResultSet resultSet = statement.executeQuery( "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) - + " @%testPutViaExecuteQuery/orders parallel=10"); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + + " @%testPutViaExecuteQuery/orders parallel=10")) { - // assert column count - assertTrue(resultSetMetaData.getColumnCount() > 0); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - for (int i = 0; i < 1; i++) { - assertTrue(resultSet.next()); + // assert column count + assertTrue(resultSetMetaData.getColumnCount() > 0); + // assert we get 1 rows + for (int i = 0; i < 1; i++) { + assertTrue(resultSet.next()); + } } } finally { statement.execute("DROP TABLE IF EXISTS testPutViaExecuteQuery"); - statement.close(); } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Ignore("takes 7 min. 
enable this for long running tests") @Test public void testSnow16332() throws Throwable { - Connection conn = null; - Connection connWithNwError = null; - Statement stmt = null; - Statement stmtWithNwError = null; - - try { - // use v1 query request API and inject 200ms socket timeout for first - // http request to simulate network failure - conn = getConnection(); - stmt = conn.createStatement(); - - // create a table - stmt.execute("CREATE OR REPLACE TABLE SNOW16332 (i int)"); - - // make sure QC is JIT optimized. Change the GS JVM args to include - // -Xcomp or -XX:CompileThreshold = < a number smaller than the - // stmtCounter - - int stmtCounter = 2000; - while (stmtCounter > 0) { - // insert into it this should start a transaction. - stmt.executeUpdate("INSERT INTO SNOW16332 VALUES (" + stmtCounter + ")"); - --stmtCounter; - } + // use v1 query request API and inject 200ms socket timeout for first + // http request to simulate network failure + try (Connection conn = getConnection(); + Statement stmt = conn.createStatement()) { + try { + // create a table + stmt.execute("CREATE OR REPLACE TABLE SNOW16332 (i int)"); + + // make sure QC is JIT optimized. Change the GS JVM args to include + // -Xcomp or -XX:CompileThreshold = < a number smaller than the + // stmtCounter + + int stmtCounter = 2000; + while (stmtCounter > 0) { + // insert into it this should start a transaction. 
+ stmt.executeUpdate("INSERT INTO SNOW16332 VALUES (" + stmtCounter + ")"); + --stmtCounter; + } - connWithNwError = getConnection(500); // inject socket timeout in ms - stmtWithNwError = connWithNwError.createStatement(); + try (Connection connWithNwError = getConnection(500)) { // inject socket timeout in ms + try (Statement stmtWithNwError = connWithNwError.createStatement()) { - // execute dml - stmtWithNwError.executeUpdate( - "INSERT INTO SNOW16332 " + "SELECT seq8() " + "FROM table(generator(timeLimit => 1))"); + // execute dml + stmtWithNwError.executeUpdate( + "INSERT INTO SNOW16332 " + + "SELECT seq8() " + + "FROM table(generator(timeLimit => 1))"); - // and execute another dml - stmtWithNwError.executeUpdate( - "INSERT INTO SNOW16332 " + "SELECT seq8() " + "FROM table(generator(timeLimit => 1))"); - } finally { - if (stmt != null) { + // and execute another dml + stmtWithNwError.executeUpdate( + "INSERT INTO SNOW16332 " + + "SELECT seq8() " + + "FROM table(generator(timeLimit => 1))"); + } + } + } finally { stmt.executeQuery("DROP TABLE SNOW16332"); } - closeSQLObjects(stmt, conn); - closeSQLObjects(stmtWithNwError, connWithNwError); } } @Test public void testV1Query() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - // use v1 query request API and inject 200ms socket timeout for first - // http request to simulate network failure - connection = getConnection(200); // inject socket timeout = 200ms - - statement = connection.createStatement(); + ResultSetMetaData resultSetMetaData = null; + // use v1 query request API and inject 200ms socket timeout for first + // http request to simulate network failure + try (Connection connection = getConnection(200); // inject socket timeout = 200m + Statement statement = connection.createStatement()) { // execute query - resultSet = - statement.executeQuery("SELECT count(*) FROM table(generator(rowCount => 100000000))"); - ResultSetMetaData 
resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = + statement.executeQuery("SELECT count(*) FROM table(generator(rowCount => 100000000))")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 row - for (int i = 0; i < 1; i++) { - assertTrue(resultSet.next()); - assertTrue(resultSet.getInt(1) > 0); + // assert we get 1 row + for (int i = 0; i < 1; i++) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getInt(1) > 0); + } } // Test parsing for timestamp with timezone value that has new encoding // where timezone index follows timestamp value - resultSet = statement.executeQuery("SELECT 'Fri, 23 Oct 2015 12:35:38 -0700'::timestamp_tz"); - resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = + statement.executeQuery("SELECT 'Fri, 23 Oct 2015 12:35:38 -0700'::timestamp_tz")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 row - for (int i = 0; i < 1; i++) { - assertTrue(resultSet.next()); - assertEquals("Fri, 23 Oct 2015 12:35:38 -0700", resultSet.getString(1)); + // assert we get 1 row + for (int i = 0; i < 1; i++) { + assertTrue(resultSet.next()); + assertEquals("Fri, 23 Oct 2015 12:35:38 -0700", resultSet.getString(1)); + } } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testCancelQuery() throws Throwable { - ResultSet resultSet = null; - - final Connection connection = getConnection(); - - final Statement statement = connection.createStatement(); - - // schedule a cancel in 5 seconds - try { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + // schedule a cancel in 
5 seconds Timer timer = new Timer(); timer.schedule( new TimerTask() { @@ -2572,162 +2321,143 @@ public void run() { 5000); // now run a query for 120 seconds - resultSet = statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))"); - fail("should be canceled"); - } catch (SQLException ex) { - // assert the sqlstate is what we expect (QUERY CANCELLED) - assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); - } finally { - closeSQLObjects(resultSet, statement, connection); + try (ResultSet resultSet = + statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))")) { + fail("should be canceled"); + } catch (SQLException ex) { + // assert the sqlstate is what we expect (QUERY CANCELLED) + assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); + } } } /** SNOW-14774: timestamp_ntz value should use client time zone to adjust the epoch time. */ @Test public void testSnow14774() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - + Calendar calendar = null; + Timestamp tsInUTC = null; + Timestamp tsInLA = null; + SimpleDateFormat sdf = null; + String tsStrInLA = null; + String tsStrInUTC = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // 30 minutes past daylight saving change (from 2am to 3am) - ResultSet res = statement.executeQuery("select '2015-03-08 03:30:00'::timestamp_ntz"); + try (ResultSet res = statement.executeQuery("select '2015-03-08 03:30:00'::timestamp_ntz")) { - res.next(); + assertTrue(res.next()); - // get timestamp in UTC - Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); - Timestamp tsInUTC = res.getTimestamp(1, calendar); + // get timestamp in UTC + calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + tsInUTC = res.getTimestamp(1, calendar); - 
SimpleDateFormat sdf = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss"); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - String tsStrInUTC = sdf.format(tsInUTC); + sdf = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss"); + sdf.setTimeZone(TimeZone.getTimeZone("UTC")); + tsStrInUTC = sdf.format(tsInUTC); - // get timestamp in LA timezone - calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - Timestamp tsInLA = res.getTimestamp(1, calendar); + // get timestamp in LA timezone + calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsInLA = res.getTimestamp(1, calendar); - sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - String tsStrInLA = sdf.format(tsInLA); - - // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsStrInLA = sdf.format(tsInLA); + // the timestamp in LA and in UTC should be the same + assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + } // 30 minutes before daylight saving change - res = statement.executeQuery("select '2015-03-08 01:30:00'::timestamp_ntz"); + try (ResultSet res = statement.executeQuery("select '2015-03-08 01:30:00'::timestamp_ntz")) { - res.next(); + assertTrue(res.next()); - // get timestamp in UTC - calendar.setTimeZone(TimeZone.getTimeZone("UTC")); - tsInUTC = res.getTimestamp(1, calendar); + // get timestamp in UTC + calendar.setTimeZone(TimeZone.getTimeZone("UTC")); + tsInUTC = res.getTimestamp(1, calendar); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - tsStrInUTC = sdf.format(tsInUTC); + sdf.setTimeZone(TimeZone.getTimeZone("UTC")); + tsStrInUTC = sdf.format(tsInUTC); - // get timestamp in LA timezone - calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - tsInLA = res.getTimestamp(1, calendar); + // get timestamp in LA timezone + calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsInLA = 
res.getTimestamp(1, calendar); - sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - tsStrInLA = sdf.format(tsInLA); + sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsStrInLA = sdf.format(tsInLA); - // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); - } finally { - closeSQLObjects(null, statement, connection); + // the timestamp in LA and in UTC should be the same + assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + } } } /** SNOW-19172: getMoreResults should return false after executeQuery */ @Test public void testSnow19172() throws SQLException { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.executeQuery("select 1"); assertTrue(!statement.getMoreResults()); - - } finally { - closeSQLObjects(statement, connection); } } @Test public void testSnow19819() throws Throwable { - Connection connection; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - connection = getConnection(); - try { - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table testSnow19819(\n" - + "s string,\n" - + "v variant,\n" - + "t timestamp_ltz)\n"); - - preparedStatement = - connection.prepareStatement( - "insert into testSnow19819 (s, v, t)\n" + "select ?, parse_json(?), to_timestamp(?)"); - - preparedStatement.setString(1, "foo"); - preparedStatement.setString(2, "{ }"); - preparedStatement.setString(3, "2016-05-12 12:15:00"); - preparedStatement.addBatch(); - - preparedStatement.setString(1, "foo2"); - preparedStatement.setString(2, "{ \"a\": 1 }"); - preparedStatement.setString(3, "2016-05-12 12:16:00"); - preparedStatement.addBatch(); - - 
preparedStatement.executeBatch(); - - resultSet = - connection.createStatement().executeQuery("SELECT s, v, t FROM testSnow19819 ORDER BY 1"); - assertThat("next result", resultSet.next()); - assertThat("String", resultSet.getString(1), equalTo("foo")); - assertThat("Variant", resultSet.getString(2), equalTo("{}")); - assertThat("next result", resultSet.next()); - assertThat("String", resultSet.getString(1), equalTo("foo2")); - assertThat("Variant", resultSet.getString(2), equalTo("{\n \"a\": 1\n}")); - assertThat("no more result", !resultSet.next()); - } finally { - if (regularStatement != null) { - regularStatement.execute("DROP TABLE testSnow19819"); + try (Connection connection = getConnection()) { + try (Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute( + "create or replace table testSnow19819(\n" + + "s string,\n" + + "v variant,\n" + + "t timestamp_ltz)\n"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testSnow19819 (s, v, t)\n" + + "select ?, parse_json(?), to_timestamp(?)")) { + + preparedStatement.setString(1, "foo"); + preparedStatement.setString(2, "{ }"); + preparedStatement.setString(3, "2016-05-12 12:15:00"); + preparedStatement.addBatch(); + + preparedStatement.setString(1, "foo2"); + preparedStatement.setString(2, "{ \"a\": 1 }"); + preparedStatement.setString(3, "2016-05-12 12:16:00"); + preparedStatement.addBatch(); + + preparedStatement.executeBatch(); + + try (ResultSet resultSet = + connection + .createStatement() + .executeQuery("SELECT s, v, t FROM testSnow19819 ORDER BY 1")) { + assertThat("next result", resultSet.next()); + assertThat("String", resultSet.getString(1), equalTo("foo")); + assertThat("Variant", resultSet.getString(2), equalTo("{}")); + assertThat("next result", resultSet.next()); + assertThat("String", resultSet.getString(1), equalTo("foo2")); + assertThat("Variant", resultSet.getString(2), equalTo("{\n \"a\": 1\n}")); + 
assertThat("no more result", !resultSet.next()); + } + } + } finally { + regularStatement.execute("DROP TABLE testSnow19819"); + } } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) public void testClientInfo() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet res = null; - - try { - System.setProperty( - "snowflake.client.info", - "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\", \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\", \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); - - connection = getConnection(); - - statement = connection.createStatement(); - - res = statement.executeQuery("select current_session_client_info()"); + System.setProperty( + "snowflake.client.info", + "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\", \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\", \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet res = statement.executeQuery("select current_session_client_info()")) { assertTrue("result expected", res.next()); @@ -2746,115 +2476,87 @@ public void testClientInfo() throws Throwable { "SnowflakeSourceSuite", clientInfoJSON.get("spark.app.name").asText()); - } finally { - System.clearProperty("snowflake.client.info"); closeSQLObjects(res, statement, connection); } + System.clearProperty("snowflake.client.info"); } @Test public void testLargeResultSet() throws Throwable { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - // create statement - statement = connection.createStatement(); - + try (Connection connection = getConnection(); + // create statement + Statement statement = 
connection.createStatement()) { String sql = "SELECT random()||random(), randstr(1000, random()) FROM table(generator(rowcount =>" + " 10000))"; - ResultSet result = statement.executeQuery(sql); - - int cnt = 0; - while (result.next()) { - ++cnt; + try (ResultSet result = statement.executeQuery(sql)) { + int cnt = 0; + while (result.next()) { + ++cnt; + } + assertEquals(10000, cnt); } - assertEquals(10000, cnt); - } finally { - closeSQLObjects(null, statement, connection); } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSnow26503() throws Throwable { - Connection connection = null; - Connection snowflakeConnection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - Statement regularStatementSF = null; - ResultSet resultSet = null; ResultSetMetaData resultSetMetaData; + String queryId = null; + try (Connection connection = getConnection(); + // create a test table + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute( + "create or replace table testBind2(a int) as select * from values(1),(2),(8),(10)"); + + // test binds in BETWEEN predicate + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * FROM testBind2 WHERE a between ? 
and ?")) { + preparedStatement.setInt(1, 3); + preparedStatement.setInt(2, 9); + // test that the query succeeds; used to fail with incident + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); + + // assert we get 1 row + assertTrue(resultSet.next()); + } + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT last_query_id()"); + ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + queryId = resultSet.getString(1); + } - try { - connection = getConnection(); - - // create a test table - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table testBind2(a int) as select * from values(1),(2),(8),(10)"); - - // test binds in BETWEEN predicate - preparedStatement = - connection.prepareStatement("SELECT * FROM testBind2 WHERE a between ? 
and ?"); - - preparedStatement.setInt(1, 3); - preparedStatement.setInt(2, 9); - // test that the query succeeds; used to fail with incident - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); - - // assert we get 1 row - assertTrue(resultSet.next()); - - resultSet.close(); - preparedStatement.close(); - preparedStatement = connection.prepareStatement("SELECT last_query_id()"); - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - String queryId = resultSet.getString(1); - - resultSet.close(); - preparedStatement.close(); - - // check that the bind values can be retrieved using system$get_bind_values - snowflakeConnection = getSnowflakeAdminConnection(); - - regularStatementSF = snowflakeConnection.createStatement(); - regularStatementSF.execute("create or replace warehouse wh26503 warehouse_size=xsmall"); - - preparedStatement = - snowflakeConnection.prepareStatement( - "select bv:\"1\":\"value\"::string, bv:\"2\":\"value\"::string from (select" - + " parse_json(system$get_bind_values(?)) bv)"); - preparedStatement.setString(1, queryId); - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - - // check that the bind values are correct - assertEquals(3, resultSet.getInt(1)); - assertEquals(9, resultSet.getInt(2)); - - } finally { - if (regularStatement != null) { + // check that the bind values can be retrieved using system$get_bind_values + try (Connection snowflakeConnection = getSnowflakeAdminConnection()) { + try (Statement regularStatementSF = snowflakeConnection.createStatement()) { + regularStatementSF.execute("create or replace warehouse wh26503 warehouse_size=xsmall"); + + try (PreparedStatement preparedStatement = + snowflakeConnection.prepareStatement( + "select bv:\"1\":\"value\"::string, bv:\"2\":\"value\"::string from (select" + + " parse_json(system$get_bind_values(?)) bv)")) { + 
preparedStatement.setString(1, queryId); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + + // check that the bind values are correct + assertEquals(3, resultSet.getInt(1)); + assertEquals(9, resultSet.getInt(2)); + } + } + } + snowflakeConnection.createStatement().execute("DROP warehouse wh26503"); + } + } finally { regularStatement.execute("DROP TABLE testBind2"); - regularStatement.close(); - } - - if (regularStatementSF != null) { - regularStatementSF.execute("DROP warehouse wh26503"); - regularStatementSF.close(); - } - - closeSQLObjects(resultSet, preparedStatement, connection); - - if (snowflakeConnection != null) { - snowflakeConnection.close(); } } } @@ -2865,60 +2567,49 @@ public void testSnow26503() throws Throwable { */ @Test public void testSnow28530() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - - try { - connection = getConnection(); - regularStatement = connection.createStatement(); - regularStatement.execute("create or replace table t(a number, b number)"); - - ///////////////////////////////////////// - // bind variables in a view definition + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { try { - preparedStatement = - connection.prepareStatement("create or replace view v as select * from t where a=?"); - preparedStatement.setInt(1, 1); - preparedStatement.execute(); - - // we shouldn't reach here - fail("Bind variable in view definition did not cause a user error"); - } catch (SnowflakeSQLException e) { - assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); - } + regularStatement.execute("create or replace table t(a number, b number)"); + + ///////////////////////////////////////// + // bind variables in a view definition + try (PreparedStatement preparedStatement = + connection.prepareStatement("create or 
replace view v as select * from t where a=?")) { + preparedStatement.setInt(1, 1); + preparedStatement.execute(); + + // we shouldn't reach here + fail("Bind variable in view definition did not cause a user error"); + } catch (SnowflakeSQLException e) { + assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); + } - ///////////////////////////////////////////// - // bind variables in a scalar UDF definition - try { - preparedStatement = + ///////////////////////////////////////////// + // bind variables in a scalar UDF definition + try (PreparedStatement preparedStatement = connection.prepareStatement( - "create or replace function f(n number) returns number as " + "'n + ?'"); - preparedStatement.execute(); - fail("Bind variable in scalar UDF definition did not cause a user " + "error"); - } catch (SnowflakeSQLException e) { - assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); - } + "create or replace function f(n number) returns number as " + "'n + ?'")) { + preparedStatement.execute(); + fail("Bind variable in scalar UDF definition did not cause a user " + "error"); + } catch (SnowflakeSQLException e) { + assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); + } - /////////////////////////////////////////// - // bind variables in a table UDF definition - try { - preparedStatement = + /////////////////////////////////////////// + // bind variables in a table UDF definition + try (PreparedStatement preparedStatement = connection.prepareStatement( "create or replace function tf(n number) returns table(b number) as" - + " 'select b from t where a=?'"); - preparedStatement.execute(); - fail("Bind variable in table UDF definition did not cause a user " + "error"); - } catch (SnowflakeSQLException e) { - assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); - } - } finally { - if (regularStatement != null) { + + " 'select b from t 
where a=?'")) { + preparedStatement.execute(); + fail("Bind variable in table UDF definition did not cause a user " + "error"); + } catch (SnowflakeSQLException e) { + assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); + } + } finally { regularStatement.execute("drop table t"); - regularStatement.close(); } - - closeSQLObjects(null, preparedStatement, connection); } } @@ -2928,54 +2619,47 @@ public void testSnow28530() throws Throwable { */ @Test public void testSnow31104() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - Properties paramProperties = new Properties(); - paramProperties.put("TYPESYSTEM_WIDEN_CONSTANTS_EXACTLY", Boolean.TRUE.toString()); - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); + Properties paramProperties = new Properties(); + paramProperties.put("TYPESYSTEM_WIDEN_CONSTANTS_EXACTLY", Boolean.TRUE.toString()); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { // Repeat a couple of test cases from snow-31104.sql // We don't need to repeat all of them; we just need to verify // that string bind refs and null bind refs are treated the same as // string and null constants. + try { + regularStatement.execute("create or replace table t(n number)"); - regularStatement.execute("create or replace table t(n number)"); - - regularStatement.executeUpdate( - "insert into t values (1), (90000000000000000000000000000000000000)"); - - preparedStatement = connection.prepareStatement("select n, n > ? 
from t order by 1"); - preparedStatement.setString(1, "1"); - - // this should not produce a user error - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - assertFalse(resultSet.getBoolean(2)); - resultSet.next(); - assertTrue(resultSet.getBoolean(2)); + regularStatement.executeUpdate( + "insert into t values (1), (90000000000000000000000000000000000000)"); - preparedStatement = - connection.prepareStatement("select n, '1' in (?, '256', n, 10) from t order by 1"); - preparedStatement.setString(1, null); + try (PreparedStatement preparedStatement = + connection.prepareStatement("select n, n > ? from t order by 1")) { + preparedStatement.setString(1, "1"); - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - assertTrue(resultSet.getBoolean(2)); - resultSet.next(); - assertNull(resultSet.getObject(2)); - } finally { - if (regularStatement != null) { + // this should not produce a user error + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + assertFalse(resultSet.getBoolean(2)); + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(2)); + } + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("select n, '1' in (?, '256', n, 10) from t order by 1")) { + preparedStatement.setString(1, null); + + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(2)); + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(2)); + } + } + } finally { regularStatement.execute("drop table t"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @@ -2983,22 +2667,17 @@ public void testSnow31104() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGet() throws Throwable { - Connection connection = null; - Statement statement = null; List accounts = Arrays.asList(null, 
"s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - try { statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); @@ -3016,7 +2695,7 @@ public void testPutGet() throws Throwable { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -3025,13 +2704,10 @@ public void testPutGet() throws Throwable { File original = new File(sourceFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testGetPut_stage"); - statement.close(); } - } finally { - closeSQLObjects(null, statement, connection); } } } @@ -3046,22 +2722,17 @@ public void testPutGet() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGetToUnencryptedStage() 
throws Throwable { - Connection connection = null; - Statement statement = null; List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - try { statement.execute("alter session set ENABLE_UNENCRYPTED_INTERNAL_STAGES=true"); statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); statement.execute( @@ -3083,7 +2754,7 @@ public void testPutGetToUnencryptedStage() throws Throwable { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -3092,13 +2763,10 @@ public void testPutGetToUnencryptedStage() throws Throwable { File original = new File(sourceFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testPutGet_unencstage"); - statement.close(); } - } finally { - closeSQLObjects(null, statement, connection); } } 
} @@ -3114,39 +2782,33 @@ public void testNotClosedSession() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testToTimestampNullBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - - try { - connection = getConnection(); - - preparedStatement = - connection.prepareStatement( - "select 3 where to_timestamp_ltz(?, 3) = '1970-01-01 00:00:12.345" - + " +000'::timestamp_ltz"); - + try (Connection connection = getConnection(); + PreparedStatement preparedStatement = + connection.prepareStatement( + "select 3 where to_timestamp_ltz(?, 3) = '1970-01-01 00:00:12.345" + + " +000'::timestamp_ltz")) { // First test, normal usage. preparedStatement.setInt(1, 12345); - ResultSet resultSet = preparedStatement.executeQuery(); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // Assert column count. - assertEquals(1, resultSetMetaData.getColumnCount()); - // Assert this returned a 3. - assertTrue(resultSet.next()); - assertEquals(3, resultSet.getInt(1)); - assertFalse(resultSet.next()); - - // Second test, input is null. - preparedStatement.setNull(1, Types.INTEGER); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // Assert column count. + assertEquals(1, resultSetMetaData.getColumnCount()); + // Assert this returned a 3. + assertTrue(resultSet.next()); + assertEquals(3, resultSet.getInt(1)); + assertFalse(resultSet.next()); - resultSet = preparedStatement.executeQuery(); - // Assert no rows returned. - assertFalse(resultSet.next()); - } finally { - closeSQLObjects(preparedStatement, connection); + // Second test, input is null. + preparedStatement.setNull(1, Types.INTEGER); + } + try (ResultSet resultSet = preparedStatement.executeQuery()) { + // Assert no rows returned. + assertFalse(resultSet.next()); + } } + // NOTE: Don't add new tests here. 
Instead, add it to other appropriate test class or create a + // new + // one. This class is too large to have more tests. } - // NOTE: Don't add new tests here. Instead, add it to other appropriate test class or create a new - // one. This class is too large to have more tests. } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index e76e5c60e..05df191d5 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -54,6 +54,7 @@ import net.snowflake.client.jdbc.cloud.storage.StageInfo; import net.snowflake.client.jdbc.cloud.storage.StorageClientFactory; import net.snowflake.client.jdbc.cloud.storage.StorageObjectMetadata; +import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.common.core.SqlState; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; @@ -106,40 +107,37 @@ public void testStaticVersionMatchesManifest() { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) public void testClientInfoConnectionProperty() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet res = null; - - try { - Properties props = new Properties(); - props.put( - "snowflakeClientInfo", - "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\"," - + " \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\"," - + " \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); - connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); - statement = connection.createStatement(); - res = statement.executeQuery("select current_session_client_info()"); + String clientInfoJSONStr = null; + JsonNode clientInfoJSON = null; + Properties props = new Properties(); + props.put( + "snowflakeClientInfo", + 
"{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\"," + + " \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\"," + + " \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); + try (Connection connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); + Statement statement = connection.createStatement(); + ResultSet res = statement.executeQuery("select current_session_client_info()")) { assertTrue(res.next()); - String clientInfoJSONStr = res.getString(1); - JsonNode clientInfoJSON = mapper.readTree(clientInfoJSONStr); + clientInfoJSONStr = res.getString(1); + clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spart version and spark app are found assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); assertEquals( "spark app mismatch", "SnowflakeSourceSuite", clientInfoJSON.get("spark.app.name").asText()); - connection.close(); - - // Test that when session property is set, connection parameter overrides it - System.setProperty( - "snowflake.client.info", - "{\"spark.version\":\"fake\", \"spark.snowflakedb.version\":\"fake\"," - + " \"spark.app.name\":\"fake\", \"scala.version\":\"fake\"," - + " \"java.version\":\"fake\", \"snowflakedb.jdbc.version\":\"fake\"}"); - connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); - statement = connection.createStatement(); - res = statement.executeQuery("select current_session_client_info()"); + } + + // Test that when session property is set, connection parameter overrides it + System.setProperty( + "snowflake.client.info", + "{\"spark.version\":\"fake\", \"spark.snowflakedb.version\":\"fake\"," + + " \"spark.app.name\":\"fake\", \"scala.version\":\"fake\"," + + " \"java.version\":\"fake\", \"snowflakedb.jdbc.version\":\"fake\"}"); + try (Connection connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); + Statement statement = 
connection.createStatement(); + ResultSet res = statement.executeQuery("select current_session_client_info()")) { assertTrue(res.next()); clientInfoJSONStr = res.getString(1); clientInfoJSON = mapper.readTree(clientInfoJSONStr); @@ -149,21 +147,19 @@ public void testClientInfoConnectionProperty() throws Throwable { "spark app mismatch", "SnowflakeSourceSuite", clientInfoJSON.get("spark.app.name").asText()); - - } finally { - System.clearProperty("snowflake.client.info"); - closeSQLObjects(res, statement, connection); } + System.clearProperty("snowflake.client.info"); } @Test public void testGetSessionID() throws Throwable { - Connection con = getConnection(); - String sessionID = con.unwrap(SnowflakeConnection.class).getSessionID(); - Statement statement = con.createStatement(); - ResultSet rset = statement.executeQuery("select current_session()"); - rset.next(); - assertEquals(sessionID, rset.getString(1)); + try (Connection con = getConnection(); + Statement statement = con.createStatement(); + ResultSet rset = statement.executeQuery("select current_session()")) { + String sessionID = con.unwrap(SnowflakeConnection.class).getSessionID(); + assertTrue(rset.next()); + assertEquals(sessionID, rset.getString(1)); + } } @Test @@ -172,32 +168,33 @@ public void testPutThreshold() throws SQLException { try (Connection connection = getConnection()) { // assert that threshold equals default 200 from server side SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - Statement statement = connection.createStatement(); - SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); - statement.execute("CREATE OR REPLACE STAGE PUTTHRESHOLDSTAGE"); - String command = - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @PUTTHRESHOLDSTAGE"; - SnowflakeFileTransferAgent agent = - new SnowflakeFileTransferAgent(command, sfSession, sfStatement); - assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); - // 
assert that setting threshold via put statement directly sets the big file threshold - // appropriately - String commandWithPut = command + " threshold=314572800"; - agent = new SnowflakeFileTransferAgent(commandWithPut, sfSession, sfStatement); - assertEquals(314572800, agent.getBigFileThreshold()); - // assert that after put statement, threshold goes back to previous session threshold - agent = new SnowflakeFileTransferAgent(command, sfSession, sfStatement); - assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); - // Attempt to set threshold to an invalid value such as a negative number - String commandWithInvalidThreshold = command + " threshold=-1"; - try { - agent = new SnowflakeFileTransferAgent(commandWithInvalidThreshold, sfSession, sfStatement); - } - // assert invalid value causes exception to be thrown of type INVALID_PARAMETER_VALUE - catch (SQLException e) { - assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); + try (Statement statement = connection.createStatement()) { + SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); + statement.execute("CREATE OR REPLACE STAGE PUTTHRESHOLDSTAGE"); + String command = + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @PUTTHRESHOLDSTAGE"; + SnowflakeFileTransferAgent agent = + new SnowflakeFileTransferAgent(command, sfSession, sfStatement); + assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); + // assert that setting threshold via put statement directly sets the big file threshold + // appropriately + String commandWithPut = command + " threshold=314572800"; + agent = new SnowflakeFileTransferAgent(commandWithPut, sfSession, sfStatement); + assertEquals(314572800, agent.getBigFileThreshold()); + // assert that after put statement, threshold goes back to previous session threshold + agent = new SnowflakeFileTransferAgent(command, sfSession, sfStatement); + assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); + // Attempt to 
set threshold to an invalid value such as a negative number + String commandWithInvalidThreshold = command + " threshold=-1"; + try { + agent = + new SnowflakeFileTransferAgent(commandWithInvalidThreshold, sfSession, sfStatement); + } + // assert invalid value causes exception to be thrown of type INVALID_PARAMETER_VALUE + catch (SQLException e) { + assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); + } } - statement.close(); } catch (SQLException ex) { throw ex; } @@ -207,99 +204,12 @@ public void testPutThreshold() throws SQLException { @Test @Ignore public void testGCPFileTransferMetadataWithOneFile() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); - try { - connection = getConnection("gcpaccount"); - Statement statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // Test put file with internal compression - String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); - List metadatas1 = sfAgent1.getFileTransferMetadatas(); - - String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { - InputStream inputStream = new FileInputStream(srcPath1); - - assert (oneMetadata.isForOneFile()); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .build()); - } - - // Test Put file with external compression - String putCommand2 = "put 
file:///dummy/path/file2.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); - List metadatas2 = sfAgent2.getFileTransferMetadatas(); - - String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { - String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; - Process p = - Runtime.getRuntime() - .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); - p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); - - InputStream gzInputStream = new FileInputStream(gzfilePath); - assert (oneMetadata.isForOneFile()); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(gzInputStream) - .setRequireCompress(false) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .build()); - } - // Download two files and verify their content. 
- assertTrue( - "Failed to get files", - statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); - - // Make sure that the downloaded files are EQUAL, - // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); - } - } - } - - /** Test API for Kafka connector for FileTransferMetadata */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { - Connection connection = null; - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - - List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); - for (String accountName : supportedAccounts) { + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { try { - connection = getConnection(accountName); - Statement statement = connection.createStatement(); - // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE " + testStageName); @@ -315,6 +225,7 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { InputStream inputStream = new FileInputStream(srcPath1); + assertTrue(oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -322,8 +233,6 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { .setRequireCompress(true) .setNetworkTimeoutInMilli(0) .setOcspMode(OCSPMode.FAIL_OPEN) - 
.setSFSession(sfSession) - .setCommand(putCommand1) .build()); } @@ -344,7 +253,7 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { p.waitFor(); InputStream gzInputStream = new FileInputStream(gzfilePath); - + assertTrue(oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -352,8 +261,6 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { .setRequireCompress(false) .setNetworkTimeoutInMilli(0) .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand2) .build()); } @@ -365,12 +272,103 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { // Make sure that the downloaded files are EQUAL, // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + assertTrue( + isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assertTrue( + isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + statement.execute("DROP STAGE if exists " + testStageName); + } + } + } + + /** Test API for Kafka connector for FileTransferMetadata */ + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + + List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); + for (String accountName : supportedAccounts) { + try (Connection connection = getConnection(accountName); + 
Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // Test put file with internal compression + String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); + List metadatas1 = sfAgent1.getFileTransferMetadatas(); + + String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { + InputStream inputStream = new FileInputStream(srcPath1); + + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand1) + .build()); + } + + // Test Put file with external compression + String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); + List metadatas2 = sfAgent2.getFileTransferMetadatas(); + + String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { + String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; + Process p = + Runtime.getRuntime() + .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); + p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); + + InputStream gzInputStream = new FileInputStream(gzfilePath); + + SnowflakeFileTransferAgent.uploadWithoutConnection( + 
SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(gzInputStream) + .setRequireCompress(false) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand2) + .build()); + } + + // Download two files and verify their content. + assertTrue( + "Failed to get files", + statement.execute( + "GET @" + + testStageName + + " 'file://" + + destFolderCanonicalPath + + "/' parallel=8")); + + // Make sure that the downloaded files are EQUAL, + // they should be gzip compressed + assertTrue( + isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assertTrue( + isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -380,45 +378,42 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGCPFileTransferMetadataNegativeOnlySupportPut() throws Throwable { - Connection connection = null; int expectExceptionCount = 1; int actualExceptionCount = -1; - try { - connection = getConnection("gcpaccount"); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + // Put one file to the stage + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + statement.execute("put file://" + srcPath + " @" + testStageName); - // Put one file to the stage - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - statement.execute("put file://" + srcPath + " @" + testStageName); 
+ SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String getCommand = "get @" + testStageName + " file://" + destFolderCanonicalPath; - String getCommand = "get @" + testStageName + " file://" + destFolderCanonicalPath; + // The GET can be executed in normal way. + statement.execute(getCommand); - // The GET can be executed in normal way. - statement.execute(getCommand); + // Start negative test for GET. + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); - // Start negative test for GET. - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); + // Below function call should fail. + actualExceptionCount = 0; + sfAgent.getFileTransferMetadatas(); + fail("Above function should raise exception for GET"); - // Below function call should fail. 
- actualExceptionCount = 0; - sfAgent.getFileTransferMetadatas(); - fail("Above function should raise exception for GET"); - } catch (Exception ex) { - System.out.println("Negative test to hit expected exception: " + ex.getMessage()); - actualExceptionCount++; - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + } catch (Exception ex) { + System.out.println("Negative test to hit expected exception: " + ex.getMessage()); + actualExceptionCount++; + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } assertEquals(expectExceptionCount, actualExceptionCount); @@ -494,20 +489,17 @@ public void testGetPropertyInfo() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutOverwriteFalseNoDigest() throws Throwable { - Connection connection = null; - Statement statement = null; // create 2 files: an original, and one that will overwrite the original File file1 = tmpFolder.newFile("testfile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(file1)); - bw.write("Writing original file content. This should get overwritten."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { + bw.write("Writing original file content. This should get overwritten."); + } File file2 = tmpFolder2.newFile("testfile.csv"); - bw = new BufferedWriter(new FileWriter(file2)); - bw.write("This is all new! This should be the result of the overwriting."); - bw.close(); - + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { + bw.write("This is all new! 
This should be the result of the overwriting."); + } String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); @@ -520,50 +512,48 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i), paramProperties); - - statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE testing_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); - - // put another file in same stage with same filename with overwrite = true - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false")); - - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); - - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - assertTrue(downloaded.exists()); - - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - p.waitFor(); + try (Connection connection = getConnection(accounts.get(i), paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE testing_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + 
sourceFilePathOriginal + " @testing_stage")); + // check that file exists in stage after PUT + findFile(statement, "ls @testing_stage/"); + + // put another file in same stage with same filename with overwrite = true + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false")); + + // check that file exists in stage after PUT + findFile(statement, "ls @testing_stage/"); + + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + assertTrue(downloaded.exists()); + + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + p.waitFor(); - // 2nd file should never be uploaded - File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); - assertTrue(FileUtils.contentEqualsIgnoreEOL(file1, unzipped, null)); - } finally { - statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); - statement.close(); + // 2nd file should never be uploaded + File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); + assertTrue(FileUtils.contentEqualsIgnoreEOL(file1, unzipped, null)); + } finally { + statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); + } } } } @@ -576,14 +566,12 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutDisable() throws Throwable { - Connection connection = null; - Statement statement = null; // create a file File file = tmpFolder.newFile("testfile99.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(file)); - bw.write("This content won't be 
uploaded as PUT is disabled."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) { + bw.write("This content won't be uploaded as PUT is disabled."); + } String sourceFilePathOriginal = file.getCanonicalPath(); @@ -592,19 +580,14 @@ public void testPutDisable() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i), paramProperties); - - statement = connection.createStatement(); - + try (Connection connection = getConnection(accounts.get(i), paramProperties); + Statement statement = connection.createStatement()) { statement.execute("PUT file://" + sourceFilePathOriginal + " @testPutGet_disable_stage"); assertTrue("Shouldn't come here", false); } catch (Exception ex) { // Expected assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); - } finally { - statement.close(); } } } @@ -617,8 +600,6 @@ public void testPutDisable() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetDisable() throws Throwable { - Connection connection = null; - Statement statement = null; // create a folder File destFolder = tmpFolder.newFolder(); @@ -629,10 +610,8 @@ public void testGetDisable() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i), paramProperties); - - statement = connection.createStatement(); + try (Connection connection = getConnection(accounts.get(i), paramProperties); + Statement statement = connection.createStatement()) { statement.execute( "GET @testPutGet_disable_stage 'file://" + destFolderCanonicalPath + "' parallel=8"); @@ -641,8 +620,6 @@ public void testGetDisable() throws Throwable { } catch (Exception ex) { // Expected 
assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); - } finally { - statement.close(); } } } @@ -653,161 +630,164 @@ public void testGetDisable() throws Throwable { */ @Test public void testSnow76376() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table t(a int) as select * from values" + "(1),(2),(8),(10)"); - - preparedStatement = - connection.prepareStatement("SELECT * FROM t " + "ORDER BY a LIMIT " + "? OFFSET ?"); - - //////////////////////////// - // both NULL - preparedStatement.setNull(1, 4); // int - preparedStatement.setNull(2, 4); // int - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET and LIMIT set " + "to NULL"); - } - - //////////////////////////// - // both empty string - preparedStatement.setString(1, ""); - preparedStatement.setString(2, ""); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET and LIMIT set " + "to empty string"); - } + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute( + 
"create or replace table t(a int) as select * from values" + "(1),(2),(8),(10)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * FROM t " + "ORDER BY a LIMIT " + "? OFFSET ?")) { + + //////////////////////////// + // both NULL + preparedStatement.setNull(1, 4); // int + preparedStatement.setNull(2, 4); // int + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with OFFSET and LIMIT set " + "to NULL"); + } - //////////////////////////// - // only LIMIT NULL - preparedStatement.setNull(1, 4); // int - preparedStatement.setInt(2, 2); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with LIMIT set to NULL"); - } + //////////////////////////// + // both empty string + preparedStatement.setString(1, ""); + preparedStatement.setString(2, ""); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail( + "Could not execute preparedStatement with OFFSET and LIMIT set " + + "to empty string"); + } - //////////////////////////// - // only LIMIT empty string - 
preparedStatement.setString(1, ""); - preparedStatement.setInt(2, 2); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with LIMIT set to empty " + "string"); - } + //////////////////////////// + // only LIMIT NULL + preparedStatement.setNull(1, 4); // int + preparedStatement.setInt(2, 2); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with LIMIT set to NULL"); + } - //////////////////////////// - // only OFFSET NULL - preparedStatement.setInt(1, 3); // int - preparedStatement.setNull(2, 4); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET set to NULL"); - } + //////////////////////////// + // only LIMIT empty string + preparedStatement.setString(1, ""); + preparedStatement.setInt(2, 2); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with LIMIT set to empty " + "string"); + } - //////////////////////////// - // only OFFSET empty string - preparedStatement.setInt(1, 3); // int - preparedStatement.setNull(2, 4); - - if (preparedStatement.execute()) { - resultSet = 
preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET set to empty " + "string"); - } + //////////////////////////// + // only OFFSET NULL + preparedStatement.setInt(1, 3); // int + preparedStatement.setNull(2, 4); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with OFFSET set to NULL"); + } - //////////////////////////// - // OFFSET and LIMIT NULL for constant select query - preparedStatement = - connection.prepareStatement("SELECT 1 FROM t " + "ORDER BY a LIMIT " + "? 
OFFSET ?"); - preparedStatement.setNull(1, 4); // int - preparedStatement.setNull(2, 4); // int - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - for (int i = 0; i < 4; i++) { - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); + //////////////////////////// + // only OFFSET empty string + preparedStatement.setInt(1, 3); // int + preparedStatement.setNull(2, 4); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with OFFSET set to empty " + "string"); + } } - } else { - fail("Could not execute constant preparedStatement with OFFSET and " + "LIMIT set to NULL"); - } + //////////////////////////// + // OFFSET and LIMIT NULL for constant select query + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT 1 FROM t " + "ORDER BY a LIMIT " + "? 
OFFSET ?")) { + preparedStatement.setNull(1, 4); // int + preparedStatement.setNull(2, 4); // int + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + for (int i = 0; i < 4; i++) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + } + } else { + fail( + "Could not execute constant preparedStatement with OFFSET and " + + "LIMIT set to NULL"); + } - //////////////////////////// - // OFFSET and LIMIT empty string for constant select query - preparedStatement.setString(1, ""); // int - preparedStatement.setString(2, ""); // int - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - for (int i = 0; i < 4; i++) { - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); + //////////////////////////// + // OFFSET and LIMIT empty string for constant select query + preparedStatement.setString(1, ""); // int + preparedStatement.setString(2, ""); // int + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + for (int i = 0; i < 4; i++) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + } + } else { + fail( + "Could not execute constant preparedStatement with OFFSET and " + + "LIMIT set to empty string"); + } } - } else { - fail( - "Could not execute constant preparedStatement with OFFSET and " - + "LIMIT set to empty string"); - } - - } finally { - if (regularStatement != null) { + } finally { regularStatement.execute("drop table t"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @@ -820,46 +800,37 @@ public void testSnow76376() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeoOutputTypes() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - 
paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); - paramProperties.put("ENABLE_GEOGRAPHY_TYPE", true); - - connection = getConnection(paramProperties); + Properties paramProperties = new Properties(); - regularStatement = connection.createStatement(); + paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); + paramProperties.put("ENABLE_GEOGRAPHY_TYPE", true); - regularStatement.execute("create or replace table t_geo(geo geography);"); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo(geo geography);"); - regularStatement.execute("insert into t_geo values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); + regularStatement.execute( + "insert into t_geo values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); - testGeoOutputTypeSingle( - regularStatement, false, "geoJson", "OBJECT", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, false, "geoJson", "OBJECT", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle( - regularStatement, true, "geoJson", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, true, "geoJson", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle( - regularStatement, false, "wkt", "VARCHAR", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, false, "wkt", "VARCHAR", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle( - regularStatement, true, "wkt", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, true, "wkt", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle(regularStatement, false, "wkb", "BINARY", "[B", Types.BINARY); + testGeoOutputTypeSingle(regularStatement, false, "wkb", "BINARY", "[B", Types.BINARY); - testGeoOutputTypeSingle(regularStatement, true, "wkb", 
"GEOGRAPHY", "[B", Types.BINARY); - } finally { - if (regularStatement != null) { + testGeoOutputTypeSingle(regularStatement, true, "wkb", "GEOGRAPHY", "[B", Types.BINARY); + } finally { regularStatement.execute("drop table t_geo"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -872,16 +843,13 @@ private void testGeoOutputTypeSingle( String expectedColumnClassName, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - - regularStatement.execute( - "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - resultSet = regularStatement.executeQuery("select * from t_geo"); + regularStatement.execute( + "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); + try (ResultSet resultSet = regularStatement.executeQuery("select * from t_geo")) { ResultSetMetaData metadata = resultSet.getMetaData(); assertEquals(1, metadata.getColumnCount()); @@ -890,51 +858,34 @@ private void testGeoOutputTypeSingle( assertEquals(expectedColumnTypeName, metadata.getColumnTypeName(1)); assertEquals(expectedColumnClassName, metadata.getColumnClassName(1)); assertEquals(expectedColumnType, metadata.getColumnType(1)); - - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeoMetadata() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - - paramProperties.put("ENABLE_FIX_182763", true); - - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); + Properties paramProperties = new Properties(); - regularStatement.execute("create or 
replace table t_geo(geo geography);"); + paramProperties.put("ENABLE_FIX_182763", true); - testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo(geo geography);"); - testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); + testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); + testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); + testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); - } finally { - if (regularStatement != null) { + testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); + } finally { regularStatement.execute("drop table t_geo"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -945,13 +896,11 @@ private void testGeoMetadataSingle( String outputFormat, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - DatabaseMetaData md = connection.getMetaData(); - resultSet = md.getColumns(null, null, "T_GEO", null); + DatabaseMetaData md = connection.getMetaData(); + try (ResultSet resultSet = md.getColumns(null, null, 
"T_GEO", null)) { ResultSetMetaData metadata = resultSet.getMetaData(); assertEquals(24, metadata.getColumnCount()); @@ -960,48 +909,32 @@ private void testGeoMetadataSingle( assertEquals(expectedColumnType, resultSet.getInt(5)); assertEquals("GEOGRAPHY", resultSet.getString(6)); - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeometryOutputTypes() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - - paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); - paramProperties.put("ENABLE_GEOMETRY_TYPE", true); - - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); + Properties paramProperties = new Properties(); - regularStatement.execute("create or replace table t_geo2(geo geometry);"); + paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); + paramProperties.put("ENABLE_GEOMETRY_TYPE", true); - regularStatement.execute( - "insert into t_geo2 values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo2(geo geometry);"); - testGeometryOutputTypeSingle( - regularStatement, true, "geoJson", "GEOMETRY", "java.lang.String", Types.VARCHAR); + regularStatement.execute( + "insert into t_geo2 values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); - testGeometryOutputTypeSingle( - regularStatement, true, "wkt", "GEOMETRY", "java.lang.String", Types.VARCHAR); + testGeometryOutputTypeSingle( + regularStatement, true, "geoJson", "GEOMETRY", "java.lang.String", Types.VARCHAR); - } finally { - if (regularStatement != null) { + testGeometryOutputTypeSingle( + regularStatement, true, "wkt", "GEOMETRY", "java.lang.String", 
Types.VARCHAR); + } finally { regularStatement.execute("drop table t_geo2"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -1014,15 +947,13 @@ private void testGeometryOutputTypeSingle( String expectedColumnClassName, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - regularStatement.execute( - "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); + regularStatement.execute( + "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); - resultSet = regularStatement.executeQuery("select * from t_geo2"); + try (ResultSet resultSet = regularStatement.executeQuery("select * from t_geo2")) { ResultSetMetaData metadata = resultSet.getMetaData(); @@ -1032,41 +963,25 @@ private void testGeometryOutputTypeSingle( assertEquals(expectedColumnTypeName, metadata.getColumnTypeName(1)); assertEquals(expectedColumnClassName, metadata.getColumnClassName(1)); assertEquals(expectedColumnType, metadata.getColumnType(1)); - - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeometryMetadata() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); - - regularStatement.execute("create or replace table t_geo2(geo geometry);"); + Properties paramProperties = new Properties(); - testGeometryMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = 
connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo2(geo geometry);"); - testGeometryMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + testGeometryMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); - } finally { - if (regularStatement != null) { + testGeometryMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + } finally { regularStatement.execute("drop table t_geo2"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -1077,13 +992,11 @@ private void testGeometryMetadataSingle( String outputFormat, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - DatabaseMetaData md = connection.getMetaData(); - resultSet = md.getColumns(null, null, "T_GEO2", null); + DatabaseMetaData md = connection.getMetaData(); + try (ResultSet resultSet = md.getColumns(null, null, "T_GEO2", null)) { ResultSetMetaData metadata = resultSet.getMetaData(); assertEquals(24, metadata.getColumnCount()); @@ -1092,10 +1005,6 @@ private void testGeometryMetadataSingle( assertEquals(expectedColumnType, resultSet.getInt(5)); assertEquals("GEOMETRY", resultSet.getString(6)); - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @@ -1153,7 +1062,7 @@ private void putAndGetFile(Statement statement) throws Throwable { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE_2 + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -1166,7 +1075,7 @@ private void putAndGetFile(Statement statement) throws Throwable { "Original file: " + original.getAbsolutePath() + ", size: " 
+ original.length()); System.out.println( "Unzipped file: " + unzipped.getAbsolutePath() + ", size: " + unzipped.length()); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testGetPut_stage"); } @@ -1183,77 +1092,75 @@ private void putAndGetFile(Statement statement) throws Throwable { public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - Connection connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - - File largeTempFile = tmpFolder.newFile("largeFile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile)); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.close(); - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + + File largeTempFile = tmpFolder.newFile("largeFile.csv"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { + bw.write("Creating large test file for GCP PUT/GET test"); + bw.write(System.lineSeparator()); + bw.write("Creating large test file for GCP PUT/GET test"); + bw.write(System.lineSeparator()); + } + File 
largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - String sourceFilePath = largeTempFile.getCanonicalPath(); + String sourceFilePath = largeTempFile.getCanonicalPath(); - try { - // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, - // which is a large file - for (int i = 0; i < 12; i++) { - copyContentFrom(largeTempFile, largeTempFile2); - copyContentFrom(largeTempFile2, largeTempFile); - } + // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, + // which is a large file + for (int i = 0; i < 12; i++) { + copyContentFrom(largeTempFile, largeTempFile2); + copyContentFrom(largeTempFile2, largeTempFile); + } - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE largefile_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE largefile_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @largefile_stage/"); + // check that file exists in stage after PUT + findFile(statement, "ls @largefile_stage/"); - // create a new table with columns matching CSV file - statement.execute("create or replace table large_table (colA string)"); - // copy rows from file into table - statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); - // copy back from table into different stage - statement.execute("create or replace stage extra_stage"); - statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); + // create a new table with columns matching CSV file + statement.execute("create or replace table large_table (colA string)"); + // copy rows from file into table + statement.execute("copy into large_table from 
@largefile_stage/largeFile.csv.gz"); + // copy back from table into different stage + statement.execute("create or replace stage extra_stage"); + statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + assertTrue(downloaded.exists()); - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - p.waitFor(); + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + p.waitFor(); - // compare the original file with the file that's been uploaded, copied into a table, copied - // back into a stage, - // downloaded, and unzipped - File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); - } finally { - statement.execute("DROP STAGE IF EXISTS largefile_stage"); - statement.execute("DROP STAGE IF EXISTS extra_stage"); - statement.execute("DROP TABLE IF EXISTS large_table"); - statement.close(); - connection.close(); + // compare the original file with the file that's been uploaded, copied into a table, copied + // back into a stage, + // downloaded, and unzipped + File 
unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); + assertEquals(largeTempFile.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(largeTempFile, unzipped)); + } finally { + statement.execute("DROP STAGE IF EXISTS largefile_stage"); + statement.execute("DROP STAGE IF EXISTS extra_stage"); + statement.execute("DROP TABLE IF EXISTS large_table"); + } } } @@ -1261,77 +1168,75 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGetLargeFileAzure() throws Throwable { Properties paramProperties = new Properties(); - Connection connection = getConnection("azureaccount", paramProperties); - Statement statement = connection.createStatement(); - - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - - File largeTempFile = tmpFolder.newFile("largeFile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile)); - bw.write("Creating large test file for Azure PUT/GET test"); - bw.write(System.lineSeparator()); - bw.write("Creating large test file for Azure PUT/GET test"); - bw.write(System.lineSeparator()); - bw.close(); - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + try (Connection connection = getConnection("azureaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + + File largeTempFile = tmpFolder.newFile("largeFile.csv"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { + bw.write("Creating large test file for Azure PUT/GET test"); + bw.write(System.lineSeparator()); 
+ bw.write("Creating large test file for Azure PUT/GET test"); + bw.write(System.lineSeparator()); + } + File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - String sourceFilePath = largeTempFile.getCanonicalPath(); + String sourceFilePath = largeTempFile.getCanonicalPath(); - try { - // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, - // which is a large file - for (int i = 0; i < 12; i++) { - copyContentFrom(largeTempFile, largeTempFile2); - copyContentFrom(largeTempFile2, largeTempFile); - } + // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, + // which is a large file + for (int i = 0; i < 12; i++) { + copyContentFrom(largeTempFile, largeTempFile2); + copyContentFrom(largeTempFile2, largeTempFile); + } - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE largefile_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE largefile_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @largefile_stage/"); + // check that file exists in stage after PUT + findFile(statement, "ls @largefile_stage/"); - // create a new table with columns matching CSV file - statement.execute("create or replace table large_table (colA string)"); - // copy rows from file into table - statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); - // copy back from table into different stage - statement.execute("create or replace stage extra_stage"); - statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); + // create a new table with columns matching CSV file + statement.execute("create or replace table large_table (colA 
string)"); + // copy rows from file into table + statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); + // copy back from table into different stage + statement.execute("create or replace stage extra_stage"); + statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + assertTrue(downloaded.exists()); - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - p.waitFor(); + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + p.waitFor(); - // compare the original file with the file that's been uploaded, copied into a table, copied - // back into a stage, - // downloaded, and unzipped - File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); - } finally { - statement.execute("DROP STAGE IF EXISTS largefile_stage"); - statement.execute("DROP STAGE IF EXISTS extra_stage"); - statement.execute("DROP TABLE IF EXISTS large_table"); - statement.close(); - connection.close(); + // compare the original file with the file that's been 
uploaded, copied into a table, copied + // back into a stage, + // downloaded, and unzipped + File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); + assertEquals(largeTempFile.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(largeTempFile, unzipped)); + } finally { + statement.execute("DROP STAGE IF EXISTS largefile_stage"); + statement.execute("DROP STAGE IF EXISTS extra_stage"); + statement.execute("DROP TABLE IF EXISTS large_table"); + } } } @@ -1345,115 +1250,116 @@ public void testPutGetLargeFileAzure() throws Throwable { private void copyContentFrom(File file1, File file2) throws Exception { FileInputStream inputStream = new FileInputStream(file1); FileOutputStream outputStream = new FileOutputStream(file2); - FileChannel fIn = inputStream.getChannel(); - FileChannel fOut = outputStream.getChannel(); - fOut.transferFrom(fIn, 0, fIn.size()); - fIn.position(0); - fOut.transferFrom(fIn, fIn.size(), fIn.size()); - fOut.close(); - fIn.close(); + try (FileChannel fIn = inputStream.getChannel(); + FileChannel fOut = outputStream.getChannel()) { + fOut.transferFrom(fIn, 0, fIn.size()); + fIn.position(0); + fOut.transferFrom(fIn, fIn.size(), fIn.size()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutS3RegionalUrl() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); - Statement statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection connection = getConnection(accountName); + Statement 
statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // Test put file with internal compression - String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); - List metadatas1 = sfAgent1.getFileTransferMetadatas(); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); + // Test put file with internal compression + String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); + List metadatas1 = sfAgent1.getFileTransferMetadatas(); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { - InputStream inputStream = new FileInputStream(srcPath1); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand1) - .setUseS3RegionalUrl(false) - .build()); - } + String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { - InputStream inputStream = new FileInputStream(srcPath1); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand1) - .setUseS3RegionalUrl(true) - .build()); - } + for 
(SnowflakeFileTransferMetadata oneMetadata : metadatas1) { + InputStream inputStream = new FileInputStream(srcPath1); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand1) + .setUseS3RegionalUrl(false) + .build()); + } - // Test Put file with external compression - String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); - List metadatas2 = sfAgent2.getFileTransferMetadatas(); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { + InputStream inputStream = new FileInputStream(srcPath1); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand1) + .setUseS3RegionalUrl(true) + .build()); + } - String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { - String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; - Process p = - Runtime.getRuntime() - .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); - p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); + // Test Put file with external compression + String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); + List 
metadatas2 = sfAgent2.getFileTransferMetadatas(); - InputStream gzInputStream = new FileInputStream(gzfilePath); + String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { + String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; + Process p = + Runtime.getRuntime() + .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); + p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(gzInputStream) - .setRequireCompress(false) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand2) - .build()); - } + InputStream gzInputStream = new FileInputStream(gzfilePath); - // Download two files and verify their content. - assertTrue( - "Failed to get files", - statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(gzInputStream) + .setRequireCompress(false) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand2) + .build()); + } - // Make sure that the downloaded files are EQUAL, - // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + // Download two files and verify their content. 
+ assertTrue( + "Failed to get files", + statement.execute( + "GET @" + + testStageName + + " 'file://" + + destFolderCanonicalPath + + "/' parallel=8")); + + // Make sure that the downloaded files are EQUAL, + // they should be gzip compressed + assertTrue( + isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assertTrue( + isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -1466,66 +1372,62 @@ public void testPutS3RegionalUrl() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { - Connection connection = null; String clientName = "clientName"; String clientKey = "clientKey"; List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(accountName); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); - // Test put file with internal compression - String putCommand = "put file:///dummy/path/file1.gz @" + 
testStageName; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - List metadata = sfAgent.getFileTransferMetadatas(); + String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath1); - String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadata) { - InputStream inputStream = new FileInputStream(srcPath1); - - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand) - .setStreamingIngestClientName(clientName) - .setStreamingIngestClientKey(clientKey) - .build()); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand) + .setStreamingIngestClientName(clientName) + .setStreamingIngestClientKey(clientKey) + .build()); - SnowflakeStorageClient client = - StorageClientFactory.getFactory() - .createClient( - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), - 1, - null, - /* session= */ null); - - String location = - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + "file1.gz"; - StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); - - // Verify that we are able to fetch 
the metadata - assertEquals(clientName, client.getStreamingIngestClientName(meta)); - assertEquals(clientKey, client.getStreamingIngestClientKey(meta)); - } - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + SnowflakeStorageClient client = + StorageClientFactory.getFactory() + .createClient( + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), + 1, + null, + /* session= */ null); + + String location = + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + "file1.gz"; + StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); + + // Verify that we are able to fetch the metadata + assertEquals(clientName, client.getStreamingIngestClientName(meta)); + assertEquals(clientKey, client.getStreamingIngestClientKey(meta)); + } + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -1533,38 +1435,36 @@ public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { @Test(expected = SnowflakeSQLException.class) public void testNoSpaceLeftOnDeviceException() throws SQLException { - Connection connection = null; List supportedAccounts = Arrays.asList("gcpaccount", "s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); + try (Connection connection = getConnection(accountName)) { SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - Statement statement = connection.createStatement(); - SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); - statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " 
@testPutGet_stage"); - String command = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file:///tmp'"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, sfStatement); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeStorageClient client = - StorageClientFactory.getFactory().createClient(info, 1, null, /* session= */ null); - - client.handleStorageException( - new StorageException( + try (Statement statement = connection.createStatement()) { + try { + SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); + statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testPutGet_stage"); + String command = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file:///tmp'"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, sfStatement); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeStorageClient client = + StorageClientFactory.getFactory().createClient(info, 1, null, /* session= */ null); + + client.handleStorageException( + new StorageException( + client.getMaxRetries(), + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), client.getMaxRetries(), - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - client.getMaxRetries(), - "download", - null, - command, - null); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testPutGet_stage"); - connection.close(); + "download", + null, + command, + null); + } finally { + statement.execute("DROP STAGE if exists testPutGet_stage"); + } } } } @@ -1573,51 +1473,47 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void 
testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); - try { - // set parameter for presignedUrl upload instead of downscoped token - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", false); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); + // set parameter for presignedUrl upload instead of downscoped token + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", false); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); - // Test put file with internal compression - String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - List metadata = sfAgent.getFileTransferMetadatas(); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadata) { - InputStream inputStream = new FileInputStream(srcPath); - 
- assert (oneMetadata.isForOneFile()); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .build()); - } + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath); - assertTrue( - "Failed to get files", - statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); - assert (isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + assertTrue(oneMetadata.isForOneFile()); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .build()); + } + + assertTrue( + "Failed to get files", + statement.execute( + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + assertTrue(isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -1785,39 +1681,47 @@ public void testHTAPStatementParameterCaching() throws SQLException { + TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT") + " set ENABLE_SNOW_654741_FOR_TESTING=true"); } - Connection con = getConnection(); - Statement statement = con.createStatement(); - // Set up a test table with time, date, and timestamp values - statement.execute("create or replace table timetable (t1 time, t2 timestamp, t3 date)"); 
- statement.execute( - "insert into timetable values ('13:53:11', '2023-08-17 13:53:33', '2023-08-17')"); - // Set statement- level parameters that will affect the output (set output format params) - statement - .unwrap(SnowflakeStatement.class) - .setParameter("TIME_OUTPUT_FORMAT", "HH12:MI:SS.FF AM"); - statement.unwrap(SnowflakeStatement.class).setParameter("DATE_OUTPUT_FORMAT", "DD-MON-YYYY"); - statement - .unwrap(SnowflakeStatement.class) - .setParameter("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS"); - ResultSet resultSet = statement.executeQuery("select * from timetable"); - resultSet.next(); - // Assert that the values match the format of the specified statement parameter output format - // values - assertEquals("01:53:11.000000000 PM", resultSet.getString(1)); - assertEquals("2023-08-17T13:53:33", resultSet.getString(2)); - assertEquals("17-Aug-2023", resultSet.getString(3)); - // Set a different statement parameter value for DATE_OUTPUT_FORMAT - statement.unwrap(SnowflakeStatement.class).setParameter("DATE_OUTPUT_FORMAT", "MM/DD/YYYY"); - resultSet = statement.executeQuery("select * from timetable"); - resultSet.next(); - // Verify it matches the new statement parameter specified output format - assertEquals("08/17/2023", resultSet.getString(3)); - statement.execute("drop table if exists timetable"); - statement.close(); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + // Set up a test table with time, date, and timestamp values + try { + statement.execute("create or replace table timetable (t1 time, t2 timestamp, t3 date)"); + statement.execute( + "insert into timetable values ('13:53:11', '2023-08-17 13:53:33', '2023-08-17')"); + // Set statement- level parameters that will affect the output (set output format params) + statement + .unwrap(SnowflakeStatement.class) + .setParameter("TIME_OUTPUT_FORMAT", "HH12:MI:SS.FF AM"); + statement + .unwrap(SnowflakeStatement.class) + 
.setParameter("DATE_OUTPUT_FORMAT", "DD-MON-YYYY"); + statement + .unwrap(SnowflakeStatement.class) + .setParameter("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS"); + try (ResultSet resultSet = statement.executeQuery("select * from timetable")) { + assertTrue(resultSet.next()); + // Assert that the values match the format of the specified statement parameter output + // format + // values + assertEquals("01:53:11.000000000 PM", resultSet.getString(1)); + assertEquals("2023-08-17T13:53:33", resultSet.getString(2)); + assertEquals("17-Aug-2023", resultSet.getString(3)); + } + + // Set a different statement parameter value for DATE_OUTPUT_FORMAT + statement.unwrap(SnowflakeStatement.class).setParameter("DATE_OUTPUT_FORMAT", "MM/DD/YYYY"); + try (ResultSet resultSet = statement.executeQuery("select * from timetable")) { + assertTrue(resultSet.next()); + // Verify it matches the new statement parameter specified output format + assertEquals("08/17/2023", resultSet.getString(3)); + } + } finally { + statement.execute("drop table if exists timetable"); + } + } // cleanup - try (Connection con2 = getSnowflakeAdminConnection()) { - statement = con2.createStatement(); + try (Connection con2 = getSnowflakeAdminConnection(); + Statement statement = con2.createStatement()) { statement.execute( "alter account " + TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT") @@ -1828,41 +1732,46 @@ public void testHTAPStatementParameterCaching() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testS3PutInGS() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); - try { - Properties paramProperties = new Properties(); - connection = getConnection("s3testaccount", paramProperties); - Statement statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE 
STAGE " + testStageName); - - // put file using GS system commmand, this is internal GS behavior - final String fileName = "testFile.json"; - final String content = "testName: testS3PutInGs"; - String putSystemCall = - String.format( - "call system$it('PUT_FILE_TO_STAGE', '%s', '%s', '%s', '%s')", - testStageName, fileName, content, "false"); - statement.execute(putSystemCall); - - // get file using jdbc - String getCall = - String.format("GET @%s 'file://%s/'", testStageName, destFolderCanonicalPath); - statement.execute(getCall); - - InputStream downloadedFileStream = - new FileInputStream(destFolderCanonicalPath + "/" + fileName); - String downloadedFile = IOUtils.toString(downloadedFileStream, StandardCharsets.UTF_8); - assertTrue( - "downloaded content does not equal uploaded content", content.equals(downloadedFile)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + Properties paramProperties = new Properties(); + try (Connection connection = getConnection("s3testaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); + + // put file using GS system commmand, this is internal GS behavior + final String fileName = "testFile.json"; + final String content = "testName: testS3PutInGs"; + String putSystemCall = + String.format( + "call system$it('PUT_FILE_TO_STAGE', '%s', '%s', '%s', '%s')", + testStageName, fileName, content, "false"); + statement.execute(putSystemCall); + + // get file using jdbc + String getCall = + String.format("GET @%s 'file://%s/'", testStageName, destFolderCanonicalPath); + statement.execute(getCall); + + InputStream downloadedFileStream = + new FileInputStream(destFolderCanonicalPath + "/" + fileName); + String downloadedFile = IOUtils.toString(downloadedFileStream, StandardCharsets.UTF_8); + assertTrue( + 
"downloaded content does not equal uploaded content", content.equals(downloadedFile)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } + + /** Added in > 3.17.0 */ + @Test + public void shouldLoadDriverWithDisabledTelemetryOob() throws ClassNotFoundException { + Class.forName("net.snowflake.client.jdbc.SnowflakeDriver"); + + assertFalse(TelemetryService.getInstance().isEnabled()); + assertFalse(TelemetryService.getInstance().isHTAPEnabled()); + } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java index 20f986542..f9c2bb66d 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java @@ -56,17 +56,16 @@ public Connection init() throws SQLException { public Connection init(@Nullable Properties properties) throws SQLException { Connection conn = BaseJDBCTest.getConnection(properties); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - - // Set up theses parameters as smaller values in order to generate - // multiple file chunks with small data volumes. - stmt.execute("alter session set result_first_chunk_max_size = 512"); - stmt.execute("alter session set result_min_chunk_size = 512"); - stmt.execute("alter session set arrow_result_rb_flush_size = 512"); - stmt.execute("alter session set result_chunk_size_multiplier = 1.2"); - stmt.close(); - + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + + // Set up theses parameters as smaller values in order to generate + // multiple file chunks with small data volumes. 
+ stmt.execute("alter session set result_first_chunk_max_size = 512"); + stmt.execute("alter session set result_min_chunk_size = 512"); + stmt.execute("alter session set arrow_result_rb_flush_size = 512"); + stmt.execute("alter session set result_chunk_size_multiplier = 1.2"); + } return conn; } @@ -123,12 +122,11 @@ private List serializeResultSet( // Write object to file String tmpFileName = tmpFolder.getRoot().getPath() + "_result_" + i + "." + fileNameAppendix; - FileOutputStream fo = new FileOutputStream(tmpFileName); - ObjectOutputStream so = new ObjectOutputStream(fo); - so.writeObject(entry); - so.flush(); - so.close(); - + try (FileOutputStream fo = new FileOutputStream(tmpFileName); + ObjectOutputStream so = new ObjectOutputStream(fo)) { + so.writeObject(entry); + so.flush(); + } result.add(tmpFileName); } @@ -161,67 +159,68 @@ private String deserializeResultSetWithProperties(List files, Properties for (String filename : files) { // Read Object from file - FileInputStream fi = new FileInputStream(filename); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 resultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); + try (FileInputStream fi = new FileInputStream(filename); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 resultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); - if (developPrint) { - System.out.println( - "\nFormat: " - + resultSetChunk.getQueryResultFormat() - + " UncompChunksize: " - + resultSetChunk.getUncompressedDataSizeInBytes() - + " firstChunkContent: " - + (resultSetChunk.getFirstChunkStringData() == null ? 
" null " : " not null ")); - for (SnowflakeResultSetSerializableV1.ChunkFileMetadata chunkFileMetadata : - resultSetChunk.chunkFileMetadatas) { + if (developPrint) { System.out.println( - "RowCount=" - + chunkFileMetadata.getRowCount() - + ", cpsize=" - + chunkFileMetadata.getCompressedByteSize() - + ", uncpsize=" - + chunkFileMetadata.getUncompressedByteSize() - + ", URL= " - + chunkFileMetadata.getFileURL()); + "\nFormat: " + + resultSetChunk.getQueryResultFormat() + + " UncompChunksize: " + + resultSetChunk.getUncompressedDataSizeInBytes() + + " firstChunkContent: " + + (resultSetChunk.getFirstChunkStringData() == null ? " null " : " not null ")); + for (SnowflakeResultSetSerializableV1.ChunkFileMetadata chunkFileMetadata : + resultSetChunk.chunkFileMetadatas) { + System.out.println( + "RowCount=" + + chunkFileMetadata.getRowCount() + + ", cpsize=" + + chunkFileMetadata.getCompressedByteSize() + + ", uncpsize=" + + chunkFileMetadata.getUncompressedByteSize() + + ", URL= " + + chunkFileMetadata.getFileURL()); + } } - } - // Read data from object - ResultSet rs = - resultSetChunk.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(props) - .setSfFullURL(sfFullURL) - .build()); - - // print result set meta data - ResultSetMetaData metadata = rs.getMetaData(); - int colCount = metadata.getColumnCount(); - if (developPrint) { - for (int j = 1; j <= colCount; j++) { - System.out.print(" table: " + metadata.getTableName(j)); - System.out.print(" schema: " + metadata.getSchemaName(j)); - System.out.print(" type: " + metadata.getColumnTypeName(j)); - System.out.print(" name: " + metadata.getColumnName(j)); - System.out.print(" precision: " + metadata.getPrecision(j)); - System.out.println(" scale:" + metadata.getScale(j)); - } - } + // Read data from object + try (ResultSet rs = + resultSetChunk.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + 
.setProxyProperties(props) + .setSfFullURL(sfFullURL) + .build())) { + + // print result set meta data + ResultSetMetaData metadata = rs.getMetaData(); + int colCount = metadata.getColumnCount(); + if (developPrint) { + for (int j = 1; j <= colCount; j++) { + System.out.print(" table: " + metadata.getTableName(j)); + System.out.print(" schema: " + metadata.getSchemaName(j)); + System.out.print(" type: " + metadata.getColumnTypeName(j)); + System.out.print(" name: " + metadata.getColumnName(j)); + System.out.print(" precision: " + metadata.getPrecision(j)); + System.out.println(" scale:" + metadata.getScale(j)); + } + } - // Print and count data - while (rs.next()) { - for (int i = 1; i <= colCount; i++) { - rs.getObject(i); - if (rs.wasNull()) { - builder.append("\"").append("null").append("\","); - } else { - builder.append("\"").append(rs.getString(i)).append("\","); + // Print and count data + while (rs.next()) { + for (int i = 1; i <= colCount; i++) { + rs.getObject(i); + if (rs.wasNull()) { + builder.append("\"").append("null").append("\","); + } else { + builder.append("\"").append(rs.getString(i)).append("\","); + } + } + builder.append("\n"); } } - builder.append("\n"); } } @@ -275,15 +274,15 @@ private void testBasicTableHarness( } String sqlSelect = "select * from table_basic " + whereClause; - ResultSet rs = + try (ResultSet rs = async ? 
statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(sqlSelect) - : statement.executeQuery(sqlSelect); + : statement.executeQuery(sqlSelect)) { - fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); + fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); - originalResultCSVString = generateCSVResult(rs); - rs.close(); + originalResultCSVString = generateCSVResult(rs); + } } String chunkResultString = deserializeResultSet(fileNameList); @@ -370,25 +369,14 @@ private void testTimestampHarness( throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init()) { - connection - .createStatement() - .execute("alter session set DATE_OUTPUT_FORMAT = '" + format_date + "'"); - connection - .createStatement() - .execute("alter session set TIME_OUTPUT_FORMAT = '" + format_time + "'"); - connection - .createStatement() - .execute("alter session set TIMESTAMP_NTZ_OUTPUT_FORMAT = '" + format_ntz + "'"); - connection - .createStatement() - .execute("alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT = '" + format_ltz + "'"); - connection - .createStatement() - .execute("alter session set TIMESTAMP_TZ_OUTPUT_FORMAT = '" + format_tz + "'"); - connection.createStatement().execute("alter session set TIMEZONE = '" + timezone + "'"); - - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set DATE_OUTPUT_FORMAT = '" + format_date + "'"); + statement.execute("alter session set TIME_OUTPUT_FORMAT = '" + format_time + "'"); + statement.execute("alter session set TIMESTAMP_NTZ_OUTPUT_FORMAT = '" + format_ntz + "'"); + statement.execute("alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT = '" + format_ltz + "'"); + statement.execute("alter session set TIMESTAMP_TZ_OUTPUT_FORMAT = '" + format_tz + "'"); + statement.execute("alter session set TIMEZONE = 
'" + timezone + "'"); statement.execute( "Create or replace table all_timestamps (" @@ -403,30 +391,28 @@ private void testTimestampHarness( + ")"); if (rowCount > 0) { - connection - .createStatement() - .execute( - "insert into all_timestamps " - + "select seq4(), '2015-10-25' , " - + "'23:59:59.123456789', '23:59:59', '23:59:59.123', '23:59:59.123456', " - + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," - + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," - + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," - + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," - + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," - + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'" - + " from table(generator(rowcount=>" - + rowCount - + "))"); + statement.execute( + "insert into all_timestamps " + + "select seq4(), '2015-10-25' , " + + "'23:59:59.123456789', '23:59:59', '23:59:59.123', '23:59:59.123456', " + + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," + + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," + + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," + + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," + + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," + + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'" + + " from table(generator(rowcount=>" + + rowCount + + "))"); } String sqlSelect = "select * from all_timestamps " + whereClause; - ResultSet rs = statement.executeQuery(sqlSelect); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { - fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); + fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); - originalResultCSVString = generateCSVResult(rs); - rs.close(); + originalResultCSVString = generateCSVResult(rs); + } } String chunkResultString = deserializeResultSet(fileNameList); @@ -464,9 +450,8 @@ public void testTimestamp() 
throws Throwable { public void testBasicTableWithSerializeObjectsAfterReadResultSet() throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute("create or replace schema testschema"); statement.execute( @@ -481,16 +466,15 @@ public void testBasicTableWithSerializeObjectsAfterReadResultSet() throws Throwa + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - - originalResultCSVString = generateCSVResult(rs); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { - // In previous test, the serializable objects are serialized before - // reading the ResultSet. This test covers the case that serializes the - // object after reading the result set. - fileNameList = serializeResultSet((SnowflakeResultSet) rs, 1 * 1024 * 1024, "txt"); + originalResultCSVString = generateCSVResult(rs); - rs.close(); + // In previous test, the serializable objects are serialized before + // reading the ResultSet. This test covers the case that serializes the + // object after reading the result set. 
+ fileNameList = serializeResultSet((SnowflakeResultSet) rs, 1 * 1024 * 1024, "txt"); + } } String chunkResultString = deserializeResultSet(fileNameList); @@ -511,29 +495,29 @@ private synchronized List splitResultSetSerializables( for (String filename : files) { // Read Object from file - FileInputStream fi = new FileInputStream(filename); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 resultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); - - // Get ResultSet from object - ResultSet rs = - resultSetChunk.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(new Properties()) - .setSfFullURL(sfFullURL) - .build()); - - String[] filePathParts = filename.split(File.separator); - String appendix = filePathParts[filePathParts.length - 1]; - - List thisFileList = - serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, appendix); - for (int i = 0; i < thisFileList.size(); i++) { - resultFileList.add(thisFileList.get(i)); + try (FileInputStream fi = new FileInputStream(filename); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 resultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); + + // Get ResultSet from object + try (ResultSet rs = + resultSetChunk.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + .setProxyProperties(new Properties()) + .setSfFullURL(sfFullURL) + .build())) { + + String[] filePathParts = filename.split(File.separator); + String appendix = filePathParts[filePathParts.length - 1]; + + List thisFileList = + serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, appendix); + for (int i = 0; i < thisFileList.size(); i++) { + resultFileList.add(thisFileList.get(i)); + } + } } - rs.close(); } if (developPrint) { @@ -550,8 +534,8 @@ public void testSplitResultSetSerializable() throws Throwable { List fileNameList = 
null; String originalResultCSVString = null; int rowCount = 90000; - try (Connection connection = init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -565,12 +549,12 @@ public void testSplitResultSetSerializable() throws Throwable { + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { - fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); + fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); - originalResultCSVString = generateCSVResult(rs); - rs.close(); + originalResultCSVString = generateCSVResult(rs); + } } // Split deserializedResultSet by 3M, the result should be the same @@ -613,28 +597,30 @@ private void hackToSetupWrongURL(List resultSetS @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCloseUnconsumedResultSet() throws Throwable { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table table_basic " + " (int_c int, string_c string(128))"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table table_basic " + " (int_c int, string_c string(128))"); - int rowCount = 100000; - statement.execute( - "insert into table_basic select " - + "seq4(), " - + "'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" - + " from table(generator(rowcount=>" - + rowCount - + "))"); + int rowCount = 100000; + statement.execute( + "insert into table_basic select " + + "seq4(), " + + 
"'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" + + " from table(generator(rowcount=>" + + rowCount + + "))"); - int testCount = 5; - while (testCount-- > 0) { - String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - rs.close(); + int testCount = 5; + while (testCount-- > 0) { + String sqlSelect = "select * from table_basic "; + try (ResultSet rs = statement.executeQuery(sqlSelect)) {} + ; + } + } finally { + statement.execute("drop table if exists table_basic"); } - statement.execute("drop table if exists table_basic"); } } @@ -645,50 +631,50 @@ public void testNegativeWithChunkFileNotExist() throws Throwable { Properties properties = new Properties(); properties.put("networkTimeout", 10000); // 10000 millisec try (Connection connection = init(properties)) { - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table table_basic " + " (int_c int, string_c string(128))"); - - int rowCount = 300; - statement.execute( - "insert into table_basic select " - + "seq4(), " - + "'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" - + " from table(generator(rowcount=>" - + rowCount - + "))"); - - String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - - // Test case 1: Generate one Serializable object - List resultSetSerializables = - ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024); - - hackToSetupWrongURL(resultSetSerializables); - - // Expected to hit credential issue when access the result. 
- assertEquals(resultSetSerializables.size(), 1); - try { - SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); + try (Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table table_basic " + " (int_c int, string_c string(128))"); - ResultSet resultSet = - resultSetSerializable.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(new Properties()) - .setSfFullURL(sfFullURL) - .build()); + int rowCount = 300; + statement.execute( + "insert into table_basic select " + + "seq4(), " + + "'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" + + " from table(generator(rowcount=>" + + rowCount + + "))"); - while (resultSet.next()) { - resultSet.getString(1); + String sqlSelect = "select * from table_basic "; + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // Test case 1: Generate one Serializable object + List resultSetSerializables = + ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024); + + hackToSetupWrongURL(resultSetSerializables); + + // Expected to hit credential issue when access the result. 
+ assertEquals(resultSetSerializables.size(), 1); + try { + SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); + + ResultSet resultSet = + resultSetSerializable.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + .setProxyProperties(new Properties()) + .setSfFullURL(sfFullURL) + .build()); + + while (resultSet.next()) { + resultSet.getString(1); + } + fail( + "error should happen when accessing the data because the " + + "file URL is corrupted."); + } catch (SQLException ex) { + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), ex.getErrorCode()); + } } - fail("error should happen when accessing the data because the " + "file URL is corrupted."); - } catch (SQLException ex) { - assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), ex.getErrorCode()); } - - rs.close(); } } @@ -790,8 +776,8 @@ public void testCustomProxyWithFiles() throws Throwable { } private void generateTestFiles() throws Throwable { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -806,10 +792,11 @@ private void generateTestFiles() throws Throwable { + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - developPrint = true; - serializeResultSet((SnowflakeResultSet) rs, 2 * 1024 * 1024, "txt"); - System.exit(-1); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + developPrint = true; + serializeResultSet((SnowflakeResultSet) rs, 2 * 1024 * 1024, "txt"); + System.exit(-1); + } } } @@ -821,8 +808,8 @@ public void testRetrieveMetadata() throws Throwable { long expectedTotalRowCount = 0; long expectedTotalCompressedSize = 0; long expectedTotalUncompressedSize = 0; - try (Connection connection = 
init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -836,34 +823,31 @@ public void testRetrieveMetadata() throws Throwable { + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - - // Split deserializedResultSet by 3M - fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); - - // Only one serializable object is generated with 100M data. - assertEquals(fileNameList.size(), 1); - - FileInputStream fi = new FileInputStream(fileNameList.get(0)); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 wholeResultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); - expectedTotalRowCount = wholeResultSetChunk.getRowCount(); - expectedTotalCompressedSize = wholeResultSetChunk.getCompressedDataSizeInBytes(); - expectedTotalUncompressedSize = wholeResultSetChunk.getUncompressedDataSizeInBytes(); - - if (developPrint) { - System.out.println( - "Total statistic: RowCount=" - + expectedTotalRowCount - + " CompSize=" - + expectedTotalCompressedSize - + " UncompSize=" - + expectedTotalUncompressedSize); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // Split deserializedResultSet by 3M + fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); + + // Only one serializable object is generated with 100M data. 
+ assertEquals(fileNameList.size(), 1); + + try (FileInputStream fi = new FileInputStream(fileNameList.get(0)); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 wholeResultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); + expectedTotalRowCount = wholeResultSetChunk.getRowCount(); + expectedTotalCompressedSize = wholeResultSetChunk.getCompressedDataSizeInBytes(); + expectedTotalUncompressedSize = wholeResultSetChunk.getUncompressedDataSizeInBytes(); + } + if (developPrint) { + System.out.println( + "Total statistic: RowCount=" + + expectedTotalRowCount + + " CompSize=" + + expectedTotalCompressedSize + + " UncompSize=" + + expectedTotalUncompressedSize); + } } - - rs.close(); } assertEquals(expectedTotalRowCount, rowCount); assertThat(expectedTotalCompressedSize, greaterThan((long) 0)); @@ -938,56 +922,55 @@ private boolean isMetadataConsistent( for (String filename : files) { // Read Object from file - FileInputStream fi = new FileInputStream(filename); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 resultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); - - // Accumulate statistic from metadata - actualRowCountFromMetadata += resultSetChunk.getRowCount(); - actualTotalCompressedSize += resultSetChunk.getCompressedDataSizeInBytes(); - actualTotalUncompressedSize += resultSetChunk.getUncompressedDataSizeInBytes(); - chunkFileCount += resultSetChunk.chunkFileCount; - - // Get actual row count from result set. - // sfFullURL is used to support private link URL. - // This test case is not for private link env, so just use a valid URL for testing purpose. - ResultSet rs = - resultSetChunk.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(props) - .setSfFullURL(sfFullURL) - .build()); - - // Accumulate the actual row count from result set. 
- while (rs.next()) { - actualRowCount++; - } - } + try (FileInputStream fi = new FileInputStream(filename); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 resultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); + + // Accumulate statistic from metadata + actualRowCountFromMetadata += resultSetChunk.getRowCount(); + actualTotalCompressedSize += resultSetChunk.getCompressedDataSizeInBytes(); + actualTotalUncompressedSize += resultSetChunk.getUncompressedDataSizeInBytes(); + chunkFileCount += resultSetChunk.chunkFileCount; + + // Get actual row count from result set. + // sfFullURL is used to support private link URL. + // This test case is not for private link env, so just use a valid URL for testing purpose. + try (ResultSet rs = + resultSetChunk.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + .setProxyProperties(props) + .setSfFullURL(sfFullURL) + .build())) { - if (developPrint) { - System.out.println( - "isMetadataConsistent: FileCount=" - + files.size() - + " RowCounts=" - + expectedTotalRowCount - + " " - + actualRowCountFromMetadata - + " (" - + actualRowCount - + ") CompSize=" - + expectedTotalCompressedSize - + " " - + actualTotalCompressedSize - + " UncompSize=" - + expectedTotalUncompressedSize - + " " - + actualTotalUncompressedSize - + " chunkFileCount=" - + chunkFileCount); + // Accumulate the actual row count from result set. 
+ while (rs.next()) { + actualRowCount++; + } + } + } + if (developPrint) { + System.out.println( + "isMetadataConsistent: FileCount=" + + files.size() + + " RowCounts=" + + expectedTotalRowCount + + " " + + actualRowCountFromMetadata + + " (" + + actualRowCount + + ") CompSize=" + + expectedTotalCompressedSize + + " " + + actualTotalCompressedSize + + " UncompSize=" + + expectedTotalUncompressedSize + + " " + + actualTotalUncompressedSize + + " chunkFileCount=" + + chunkFileCount); + } } - return actualRowCount == expectedTotalRowCount && actualRowCountFromMetadata == expectedTotalRowCount && actualTotalCompressedSize == expectedTotalCompressedSize diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java new file mode 100644 index 000000000..29c58b787 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java @@ -0,0 +1,108 @@ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.jdbc.SnowflakeType.convertStringToType; +import static net.snowflake.client.jdbc.SnowflakeType.getJavaType; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import java.math.BigDecimal; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Types; +import org.junit.Test; + +public class SnowflakeTypeTest { + + @Test + public void testSnowflakeType() { + assertEquals(getJavaType(SnowflakeType.CHAR, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals(getJavaType(SnowflakeType.INTEGER, false), SnowflakeType.JavaDataType.JAVA_LONG); + assertEquals( + getJavaType(SnowflakeType.FIXED, false), SnowflakeType.JavaDataType.JAVA_BIGDECIMAL); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals(getJavaType(SnowflakeType.TIME, false), 
SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_LTZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_NTZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_TZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals(getJavaType(SnowflakeType.DATE, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.BOOLEAN, false), SnowflakeType.JavaDataType.JAVA_BOOLEAN); + assertEquals(getJavaType(SnowflakeType.VECTOR, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals(getJavaType(SnowflakeType.BINARY, false), SnowflakeType.JavaDataType.JAVA_BYTES); + assertEquals(getJavaType(SnowflakeType.ANY, false), SnowflakeType.JavaDataType.JAVA_OBJECT); + assertEquals(getJavaType(SnowflakeType.OBJECT, true), SnowflakeType.JavaDataType.JAVA_OBJECT); + assertEquals(getJavaType(SnowflakeType.OBJECT, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals( + getJavaType(SnowflakeType.GEOMETRY, false), SnowflakeType.JavaDataType.JAVA_STRING); + } + + @Test + public void testConvertStringToType() { + assertEquals(convertStringToType(null), Types.NULL); + assertEquals(convertStringToType("decimal"), Types.DECIMAL); + assertEquals(convertStringToType("int"), Types.INTEGER); + assertEquals(convertStringToType("integer"), Types.INTEGER); + assertEquals(convertStringToType("byteint"), Types.INTEGER); + assertEquals(convertStringToType("smallint"), Types.SMALLINT); + assertEquals(convertStringToType("bigint"), Types.BIGINT); + assertEquals(convertStringToType("double"), Types.DOUBLE); + assertEquals(convertStringToType("double precision"), Types.DOUBLE); + assertEquals(convertStringToType("real"), Types.REAL); + assertEquals(convertStringToType("char"), Types.CHAR); + assertEquals(convertStringToType("character"), Types.CHAR); + 
assertEquals(convertStringToType("varbinary"), Types.VARBINARY); + assertEquals(convertStringToType("boolean"), Types.BOOLEAN); + assertEquals(convertStringToType("date"), Types.DATE); + assertEquals(convertStringToType("time"), Types.TIME); + assertEquals(convertStringToType("timestamp"), Types.TIMESTAMP); + assertEquals(convertStringToType("datetime"), Types.TIMESTAMP); + assertEquals(convertStringToType("timestamp_ntz"), Types.TIMESTAMP); + assertEquals(convertStringToType("timestamp_ltz"), Types.TIMESTAMP_WITH_TIMEZONE); + assertEquals(convertStringToType("timestamp_tz"), Types.TIMESTAMP_WITH_TIMEZONE); + assertEquals(convertStringToType("variant"), Types.OTHER); + assertEquals(convertStringToType("object"), Types.JAVA_OBJECT); + assertEquals(convertStringToType("vector"), SnowflakeUtil.EXTRA_TYPES_VECTOR); + assertEquals(convertStringToType("array"), Types.ARRAY); + assertEquals(convertStringToType("default"), Types.OTHER); + } + + @Test + public void testJavaSQLTypeFind() { + assertNull(SnowflakeType.JavaSQLType.find(200000)); + } + + @Test + public void testJavaSQLTypeLexicalValue() { + assertEquals(SnowflakeType.lexicalValue(1.0f, null, null, null, null), "0x1.0p0"); + assertEquals(SnowflakeType.lexicalValue(new BigDecimal(100.0), null, null, null, null), "100"); + assertEquals( + SnowflakeType.lexicalValue("random".getBytes(), null, null, null, null), "72616E646F6D"); + } + + @Test + public void testJavaTypeToSFType() throws SnowflakeSQLException { + assertEquals(SnowflakeType.javaTypeToSFType(0, null), SnowflakeType.ANY); + assertThrows( + SnowflakeSQLLoggedException.class, + () -> { + SnowflakeType.javaTypeToSFType(2000000, null); + }); + } + + @Test + public void testJavaTypeToClassName() throws SQLException { + assertEquals(SnowflakeType.javaTypeToClassName(Types.DECIMAL), BigDecimal.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), java.sql.Time.class.getName()); + 
assertEquals(SnowflakeType.javaTypeToClassName(Types.BOOLEAN), Boolean.class.getName()); + assertThrows( + SQLFeatureNotSupportedException.class, + () -> { + SnowflakeType.javaTypeToClassName(-2000000); + }); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 23b96dc6c..6e61d82dc 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -5,7 +5,9 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -33,7 +35,7 @@ public void testCreateMetadata() throws Throwable { fields.add(fieldOne); JsonNode fieldTwo = createFieldNode("name2", 5, 128, 2, "real", true, "collation", 256); fields.add(fieldTwo); - rootNode.put("fields", fields); + rootNode.putIfAbsent("fields", fields); SnowflakeColumnMetadata expectedColumnMetadata = createExpectedMetadata(rootNode, fieldOne, fieldTwo); // when @@ -46,6 +48,40 @@ public void testCreateMetadata() throws Throwable { OBJECT_MAPPER.writeValueAsString(columnMetadata)); } + @Test + public void testCreateFieldsMetadataForObject() throws Throwable { + // given + ObjectNode rootNode = createRootNode(); + ArrayNode fields = OBJECT_MAPPER.createArrayNode(); + fields.add( + OBJECT_MAPPER.readTree( + "{\"fieldName\":\"name1\", \"fieldType\": {\"type\":\"text\",\"precision\":null,\"length\":256,\"scale\":null,\"nullable\":false}}")); + fields.add( + OBJECT_MAPPER.readTree( + "{\"fieldName\":\"name2\", \"fieldType\": {\"type\":\"real\",\"precision\":5,\"length\":128,\"scale\":null,\"nullable\":true}}")); + rootNode.putIfAbsent("fields", fields); + + // when 
+ SnowflakeColumnMetadata columnMetadata = + SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + // then + assertNotNull(columnMetadata); + assertEquals("OBJECT", columnMetadata.getTypeName()); + + FieldMetadata firstField = columnMetadata.getFields().get(0); + assertEquals("name1", firstField.getName()); + assertEquals(SnowflakeType.TEXT, firstField.getBase()); + assertEquals(256, firstField.getByteLength()); + assertFalse(firstField.isNullable()); + + FieldMetadata secondField = columnMetadata.getFields().get(1); + assertEquals("name2", secondField.getName()); + assertEquals(SnowflakeType.REAL, secondField.getBase()); + assertEquals(128, secondField.getByteLength()); + assertEquals(5, secondField.getPrecision()); + assertTrue(secondField.isNullable()); + } + private static SnowflakeColumnMetadata createExpectedMetadata( JsonNode rootNode, JsonNode fieldOne, JsonNode fieldTwo) throws SnowflakeSQLLoggedException { ColumnTypeInfo columnTypeInfo = @@ -93,7 +129,8 @@ private static SnowflakeColumnMetadata createExpectedMetadata( rootNode.path("database").asText(), rootNode.path("schema").asText(), rootNode.path("table").asText(), - false); + false, + rootNode.path("dimension").asInt()); return expectedColumnMetadata; } diff --git a/src/test/java/net/snowflake/client/jdbc/StatementIT.java b/src/test/java/net/snowflake/client/jdbc/StatementIT.java index 82c9725b4..be5f65a56 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementIT.java @@ -41,9 +41,9 @@ public class StatementIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + 
"'"); + } return conn; } @@ -51,209 +51,205 @@ public static Connection getConnection() throws SQLException { @Test public void testFetchDirection() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(ResultSet.FETCH_FORWARD, statement.getFetchDirection()); - try { - statement.setFetchDirection(ResultSet.FETCH_REVERSE); - } catch (SQLFeatureNotSupportedException e) { - assertTrue(true); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + assertEquals(ResultSet.FETCH_FORWARD, statement.getFetchDirection()); + try { + statement.setFetchDirection(ResultSet.FETCH_REVERSE); + } catch (SQLFeatureNotSupportedException e) { + assertTrue(true); + } } - statement.close(); - connection.close(); } @Ignore("Not working for setFetchSize") @Test public void testFetchSize() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(50, statement.getFetchSize()); - statement.setFetchSize(1); - ResultSet rs = statement.executeQuery("select * from JDBC_STATEMENT"); - assertEquals(1, getSizeOfResultSet(rs)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + assertEquals(50, statement.getFetchSize()); + statement.setFetchSize(1); + ResultSet rs = statement.executeQuery("select * from JDBC_STATEMENT"); + assertEquals(1, getSizeOfResultSet(rs)); + } } @Test public void testMaxRows() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; - assertEquals(0, statement.getMaxRows()); - - // statement.setMaxRows(1); - // assertEquals(1, statement.getMaxRows()); - ResultSet rs = statement.executeQuery(sqlSelect); - int resultSizeCount = 
getSizeOfResultSet(rs); - // assertEquals(1, resultSizeCount); - - statement.setMaxRows(0); - rs = statement.executeQuery(sqlSelect); - // assertEquals(3, getSizeOfResultSet(rs)); - - statement.setMaxRows(-1); - rs = statement.executeQuery(sqlSelect); - // assertEquals(3, getSizeOfResultSet(rs)); - statement.close(); - - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; + assertEquals(0, statement.getMaxRows()); + + // statement.setMaxRows(1); + // assertEquals(1, statement.getMaxRows()); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + int resultSizeCount = getSizeOfResultSet(rs); + // assertEquals(1, resultSizeCount); + } + statement.setMaxRows(0); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // assertEquals(3, getSizeOfResultSet(rs)); + } + statement.setMaxRows(-1); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // assertEquals(3, getSizeOfResultSet(rs)); + } + } } @Test public void testQueryTimeOut() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(0, statement.getQueryTimeout()); - statement.setQueryTimeout(5); - assertEquals(5, statement.getQueryTimeout()); - try { - statement.executeQuery("select count(*) from table(generator(timeLimit => 100))"); - } catch (SQLException e) { - assertTrue(true); - assertEquals(SqlState.QUERY_CANCELED, e.getSQLState()); - assertEquals("SQL execution canceled", e.getMessage()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + assertEquals(0, statement.getQueryTimeout()); + statement.setQueryTimeout(5); + assertEquals(5, statement.getQueryTimeout()); + try { + statement.executeQuery("select count(*) from table(generator(timeLimit => 100))"); + } catch (SQLException e) { + assertTrue(true); + 
assertEquals(SqlState.QUERY_CANCELED, e.getSQLState()); + assertEquals("SQL execution canceled", e.getMessage()); + } } - statement.close(); - connection.close(); } @Test public void testStatementClose() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(connection, statement.getConnection()); - assertTrue(!statement.isClosed()); - statement.close(); - assertTrue(statement.isClosed()); - connection.close(); + try (Connection connection = getConnection()) { + Statement statement = connection.createStatement(); + assertEquals(connection, statement.getConnection()); + assertTrue(!statement.isClosed()); + statement.close(); + assertTrue(statement.isClosed()); + } } @Test public void testExecuteSelect() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; - boolean success = statement.execute(sqlSelect); - assertTrue(success); - String queryID1 = statement.unwrap(SnowflakeStatement.class).getQueryID(); - assertNotNull(queryID1); - - ResultSet rs = statement.getResultSet(); - assertEquals(3, getSizeOfResultSet(rs)); - assertEquals(-1, statement.getUpdateCount()); - assertEquals(-1L, statement.getLargeUpdateCount()); - String queryID2 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - assertEquals(queryID2, queryID1); - - rs = statement.executeQuery(sqlSelect); - assertEquals(3, getSizeOfResultSet(rs)); - String queryID4 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - assertNotEquals(queryID4, queryID1); - rs.close(); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; + boolean success = statement.execute(sqlSelect); + assertTrue(success); + String queryID1 = 
statement.unwrap(SnowflakeStatement.class).getQueryID(); + assertNotNull(queryID1); + + try (ResultSet rs = statement.getResultSet()) { + assertEquals(3, getSizeOfResultSet(rs)); + assertEquals(-1, statement.getUpdateCount()); + assertEquals(-1L, statement.getLargeUpdateCount()); + String queryID2 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + assertEquals(queryID2, queryID1); + } + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + assertEquals(3, getSizeOfResultSet(rs)); + String queryID4 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + assertNotEquals(queryID4, queryID1); + } + } } @Test public void testExecuteInsert() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_insert(cola number)"); - - String insertSQL = "insert into test_insert values(2),(3)"; - int updateCount; - boolean success; - updateCount = statement.executeUpdate(insertSQL); - assertEquals(2, updateCount); - - success = statement.execute(insertSQL); - assertFalse(success); - assertEquals(2, statement.getUpdateCount()); - assertEquals(2L, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); - - ResultSet rs = statement.executeQuery("select count(*) from test_insert"); - rs.next(); - assertEquals(4, rs.getInt(1)); - rs.close(); - - assertTrue(statement.execute("select 1")); - ResultSet rs0 = statement.getResultSet(); - rs0.next(); - assertEquals(rs0.getInt(1), 1); - rs0.close(); - - statement.execute("drop table if exists test_insert"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_insert(cola number)"); + + String insertSQL = "insert into test_insert values(2),(3)"; + int updateCount; + boolean success; + updateCount = statement.executeUpdate(insertSQL); + assertEquals(2, 
updateCount); + + success = statement.execute(insertSQL); + assertFalse(success); + assertEquals(2, statement.getUpdateCount()); + assertEquals(2L, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + + try (ResultSet rs = statement.executeQuery("select count(*) from test_insert")) { + assertTrue(rs.next()); + assertEquals(4, rs.getInt(1)); + } + + assertTrue(statement.execute("select 1")); + try (ResultSet rs0 = statement.getResultSet()) { + assertTrue(rs0.next()); + assertEquals(rs0.getInt(1), 1); + } + } finally { + statement.execute("drop table if exists test_insert"); + } + } } @Test public void testExecuteUpdateAndDelete() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table test_update(cola number, colb string) " + "as select 1, 'str1'"); - - statement.execute("insert into test_update values(2, 'str2')"); - - int updateCount; - boolean success; - updateCount = statement.executeUpdate("update test_update set COLB = 'newStr' where COLA = 1"); - assertEquals(1, updateCount); - - success = statement.execute("update test_update set COLB = 'newStr' where COLA = 2"); - assertFalse(success); - assertEquals(1, statement.getUpdateCount()); - assertEquals(1L, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); - - updateCount = statement.executeUpdate("delete from test_update where colA = 1"); - assertEquals(1, updateCount); - - success = statement.execute("delete from test_update where colA = 2"); - assertFalse(success); - assertEquals(1, statement.getUpdateCount()); - assertEquals(1L, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); - - statement.execute("drop table if exists test_update"); - statement.close(); - - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or 
replace table test_update(cola number, colb string) " + + "as select 1, 'str1'"); + + statement.execute("insert into test_update values(2, 'str2')"); + + int updateCount; + boolean success; + updateCount = + statement.executeUpdate("update test_update set COLB = 'newStr' where COLA = 1"); + assertEquals(1, updateCount); + + success = statement.execute("update test_update set COLB = 'newStr' where COLA = 2"); + assertFalse(success); + assertEquals(1, statement.getUpdateCount()); + assertEquals(1L, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + + updateCount = statement.executeUpdate("delete from test_update where colA = 1"); + assertEquals(1, updateCount); + + success = statement.execute("delete from test_update where colA = 2"); + assertFalse(success); + assertEquals(1, statement.getUpdateCount()); + assertEquals(1L, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + } finally { + statement.execute("drop table if exists test_update"); + } + } } @Test public void testExecuteMerge() throws SQLException { - Connection connection = getConnection(); - String mergeSQL = - "merge into target using source on target.id = source.id " - + "when matched and source.sb =22 then update set ta = 'newStr' " - + "when not matched then insert (ta, tb) values (source.sa, source.sb)"; - Statement statement = connection.createStatement(); - statement.execute("create or replace table target(id integer, ta string, tb integer)"); - statement.execute("create or replace table source(id integer, sa string, sb integer)"); - statement.execute("insert into target values(1, 'str', 1)"); - statement.execute("insert into target values(2, 'str', 2)"); - statement.execute("insert into target values(3, 'str', 3)"); - statement.execute("insert into source values(1, 'str1', 11)"); - statement.execute("insert into source values(2, 'str2', 22)"); - statement.execute("insert into source values(3, 'str3', 33)"); - - int updateCount = 
statement.executeUpdate(mergeSQL); - - assertEquals(1, updateCount); - - statement.execute("drop table if exists target"); - statement.execute("drop table if exists source"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String mergeSQL = + "merge into target using source on target.id = source.id " + + "when matched and source.sb =22 then update set ta = 'newStr' " + + "when not matched then insert (ta, tb) values (source.sa, source.sb)"; + try { + statement.execute("create or replace table target(id integer, ta string, tb integer)"); + statement.execute("create or replace table source(id integer, sa string, sb integer)"); + statement.execute("insert into target values(1, 'str', 1)"); + statement.execute("insert into target values(2, 'str', 2)"); + statement.execute("insert into target values(3, 'str', 3)"); + statement.execute("insert into source values(1, 'str1', 11)"); + statement.execute("insert into source values(2, 'str2', 22)"); + statement.execute("insert into source values(3, 'str3', 33)"); + + int updateCount = statement.executeUpdate(mergeSQL); + + assertEquals(1, updateCount); + } finally { + statement.execute("drop table if exists target"); + statement.execute("drop table if exists source"); + } + } } /** @@ -263,8 +259,8 @@ public void testExecuteMerge() throws SQLException { */ @Test public void testAutogenerateKey() throws Throwable { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.execute("create or replace table t(c1 int)"); statement.execute("insert into t values(1)", Statement.NO_GENERATED_KEYS); try { @@ -274,154 +270,161 @@ public void testAutogenerateKey() throws Throwable { // nop } // empty result - ResultSet rset = statement.getGeneratedKeys(); - assertFalse(rset.next()); 
- rset.close(); + try (ResultSet rset = statement.getGeneratedKeys()) { + assertFalse(rset.next()); + } } } @Test public void testExecuteMultiInsert() throws SQLException { - Connection connection = getConnection(); - String multiInsertionSQL = - " insert all " - + "into foo " - + "into foo1 " - + "into bar (b1, b2, b3) values (s3, s2, s1) " - + "select s1, s2, s3 from source"; - - Statement statement = connection.createStatement(); - assertFalse( - statement.execute("create or replace table foo (f1 integer, f2 integer, f3 integer)")); - assertFalse( - statement.execute("create or replace table foo1 (f1 integer, f2 integer, f3 integer)")); - assertFalse( - statement.execute("create or replace table bar (b1 integer, b2 integer, b3 integer)")); - assertFalse( - statement.execute("create or replace table source(s1 integer, s2 integer, s3 integer)")); - assertFalse(statement.execute("insert into source values(1, 2, 3)")); - assertFalse(statement.execute("insert into source values(11, 22, 33)")); - assertFalse(statement.execute("insert into source values(111, 222, 333)")); - - int updateCount = statement.executeUpdate(multiInsertionSQL); - assertEquals(9, updateCount); - - statement.execute("drop table if exists foo"); - statement.execute("drop table if exists foo1"); - statement.execute("drop table if exists bar"); - statement.execute("drop table if exists source"); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiInsertionSQL = + " insert all " + + "into foo " + + "into foo1 " + + "into bar (b1, b2, b3) values (s3, s2, s1) " + + "select s1, s2, s3 from source"; + + try { + assertFalse( + statement.execute("create or replace table foo (f1 integer, f2 integer, f3 integer)")); + assertFalse( + statement.execute("create or replace table foo1 (f1 integer, f2 integer, f3 integer)")); + assertFalse( + statement.execute("create or replace table bar (b1 integer, b2 
integer, b3 integer)")); + assertFalse( + statement.execute( + "create or replace table source(s1 integer, s2 integer, s3 integer)")); + assertFalse(statement.execute("insert into source values(1, 2, 3)")); + assertFalse(statement.execute("insert into source values(11, 22, 33)")); + assertFalse(statement.execute("insert into source values(111, 222, 333)")); + + int updateCount = statement.executeUpdate(multiInsertionSQL); + assertEquals(9, updateCount); + } finally { + statement.execute("drop table if exists foo"); + statement.execute("drop table if exists foo1"); + statement.execute("drop table if exists bar"); + statement.execute("drop table if exists source"); + } + } } @Test public void testExecuteBatch() throws Exception { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - connection.setAutoCommit(false); - // mixed of ddl/dml in batch - statement.addBatch("create or replace table test_batch(a string, b integer)"); - statement.addBatch("insert into test_batch values('str1', 1), ('str2', 2)"); - statement.addBatch( - "update test_batch set test_batch.b = src.b + 5 from " - + "(select 'str1' as a, 2 as b) src where test_batch.a = src.a"); - - int[] rowCounts = statement.executeBatch(); - connection.commit(); - - assertThat(rowCounts.length, is(3)); - assertThat(rowCounts[0], is(0)); - assertThat(rowCounts[1], is(2)); - assertThat(rowCounts[2], is(1)); - - List batchQueryIDs = statement.unwrap(SnowflakeStatement.class).getBatchQueryIDs(); - assertEquals(3, batchQueryIDs.size()); - assertEquals(statement.unwrap(SnowflakeStatement.class).getQueryID(), batchQueryIDs.get(2)); - - ResultSet resultSet = statement.executeQuery("select * from test_batch order by b asc"); - resultSet.next(); - assertThat(resultSet.getInt("B"), is(2)); - resultSet.next(); - assertThat(resultSet.getInt("B"), is(7)); - statement.clearBatch(); - - // one of the batch is query instead of ddl/dml - // it should continuing processing - try { - 
statement.addBatch("insert into test_batch values('str3', 3)"); - statement.addBatch("select * from test_batch"); - statement.addBatch("select * from test_batch_not_exist"); - statement.addBatch("insert into test_batch values('str4', 4)"); - statement.executeBatch(); - fail(); - } catch (BatchUpdateException e) { - rowCounts = e.getUpdateCounts(); - assertThat(e.getErrorCode(), is(ERROR_CODE_DOMAIN_OBJECT_DOES_NOT_EXIST)); - assertThat(rowCounts[0], is(1)); - assertThat(rowCounts[1], is(Statement.SUCCESS_NO_INFO)); - assertThat(rowCounts[2], is(Statement.EXECUTE_FAILED)); - assertThat(rowCounts[3], is(1)); - - connection.rollback(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + connection.setAutoCommit(false); + // mixed of ddl/dml in batch + statement.addBatch("create or replace table test_batch(a string, b integer)"); + statement.addBatch("insert into test_batch values('str1', 1), ('str2', 2)"); + statement.addBatch( + "update test_batch set test_batch.b = src.b + 5 from " + + "(select 'str1' as a, 2 as b) src where test_batch.a = src.a"); + + int[] rowCounts = statement.executeBatch(); + connection.commit(); + + assertThat(rowCounts.length, is(3)); + assertThat(rowCounts[0], is(0)); + assertThat(rowCounts[1], is(2)); + assertThat(rowCounts[2], is(1)); + + List batchQueryIDs = statement.unwrap(SnowflakeStatement.class).getBatchQueryIDs(); + assertEquals(3, batchQueryIDs.size()); + assertEquals(statement.unwrap(SnowflakeStatement.class).getQueryID(), batchQueryIDs.get(2)); + + try (ResultSet resultSet = + statement.executeQuery("select * from test_batch order by b asc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt("B"), is(2)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt("B"), is(7)); + statement.clearBatch(); + + // one of the batch is query instead of ddl/dml + // it should continuing processing + try { + statement.addBatch("insert into test_batch 
values('str3', 3)"); + statement.addBatch("select * from test_batch"); + statement.addBatch("select * from test_batch_not_exist"); + statement.addBatch("insert into test_batch values('str4', 4)"); + statement.executeBatch(); + fail(); + } catch (BatchUpdateException e) { + rowCounts = e.getUpdateCounts(); + assertThat(e.getErrorCode(), is(ERROR_CODE_DOMAIN_OBJECT_DOES_NOT_EXIST)); + assertThat(rowCounts[0], is(1)); + assertThat(rowCounts[1], is(Statement.SUCCESS_NO_INFO)); + assertThat(rowCounts[2], is(Statement.EXECUTE_FAILED)); + assertThat(rowCounts[3], is(1)); + + connection.rollback(); + } + + statement.clearBatch(); + + statement.addBatch( + "put file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @%test_batch auto_compress=false"); + File tempFolder = tmpFolder.newFolder("test_downloads_folder"); + statement.addBatch("get @%test_batch file://" + tempFolder.getCanonicalPath()); + + rowCounts = statement.executeBatch(); + assertThat(rowCounts.length, is(2)); + assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); + assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); + statement.clearBatch(); + } + } finally { + statement.execute("drop table if exists test_batch"); + } } - - statement.clearBatch(); - - statement.addBatch( - "put file://" - + getFullPathFileInResource(TEST_DATA_FILE) - + " @%test_batch auto_compress=false"); - File tempFolder = tmpFolder.newFolder("test_downloads_folder"); - statement.addBatch("get @%test_batch file://" + tempFolder); - - rowCounts = statement.executeBatch(); - assertThat(rowCounts.length, is(2)); - assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); - assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); - statement.clearBatch(); - - statement.execute("drop table if exists test_batch"); - statement.close(); - connection.close(); } @Test public void testExecuteLargeBatch() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - /** - * 
Generate a table with several rows and 1 column named test_large_batch Note: to truly test - * that executeLargeBatch works with a number of rows greater than MAX_INT, replace rowcount => - * 15 in next code line with rowcount => 2147483648, or some other number larger than MAX_INT. - * Test will take about 15 minutes to run. - */ - statement.execute( - "create or replace table test_large_batch (a number) as (select * from (select 5 from table" - + "(generator(rowcount => 15)) v));"); - // update values in table so that all rows are updated - statement.addBatch("update test_large_batch set a = 7 where a = 5;"); - long[] rowsUpdated = statement.executeLargeBatch(); - assertThat(rowsUpdated.length, is(1)); - long testVal = 15L; - assertThat(rowsUpdated[0], is(testVal)); - statement.clearBatch(); - /** - * To test SQLException for integer overflow when using executeBatch() for row updates of larger - * than MAX_INT, uncomment the following lines of code. Test will take about 15 minutes to run. - * - *

statement.execute("create or replace table test_large_batch (a number) as (select * from - * (select 5 from table" + "(generator(rowcount => 2147483648)) v));"); - * statement.addBatch("update test_large_batch set a = 7 where a = 5;"); try { int[] rowsUpdated - * = statement.executeBatch(); fail(); } catch (SnowflakeSQLException e) { assertEquals((int) - * ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getMessageCode(), e.getErrorCode()); - * assertEquals(ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getSqlState(), e.getSQLState()); } - * statement.clearBatch(); - */ - statement.execute("drop table if exists test_large_batch"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + /** + * Generate a table with several rows and 1 column named test_large_batch Note: to truly test + * that executeLargeBatch works with a number of rows greater than MAX_INT, replace rowcount + * => 15 in next code line with rowcount => 2147483648, or some other number larger than + * MAX_INT. Test will take about 15 minutes to run. + */ + try { + statement.execute( + "create or replace table test_large_batch (a number) as (select * from (select 5 from table" + + "(generator(rowcount => 15)) v));"); + // update values in table so that all rows are updated + statement.addBatch("update test_large_batch set a = 7 where a = 5;"); + long[] rowsUpdated = statement.executeLargeBatch(); + assertThat(rowsUpdated.length, is(1)); + long testVal = 15L; + assertThat(rowsUpdated[0], is(testVal)); + statement.clearBatch(); + + /** + * To test SQLException for integer overflow when using executeBatch() for row updates of + * larger than MAX_INT, uncomment the following lines of code. Test will take about 15 + * minutes to run. + * + *

statement.execute("create or replace table test_large_batch (a number) as (select * + * from (select 5 from table" + "(generator(rowcount => 2147483648)) v));"); + * statement.addBatch("update test_large_batch set a = 7 where a = 5;"); try { int[] + * rowsUpdated = statement.executeBatch(); fail(); } catch (SnowflakeSQLException e) { + * assertEquals((int) ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getMessageCode(), + * e.getErrorCode()); assertEquals(ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getSqlState(), + * e.getSQLState()); } statement.clearBatch(); + */ + } finally { + statement.execute("drop table if exists test_large_batch"); + } + } } /** @@ -433,7 +436,6 @@ public void testExecuteLargeBatch() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testExecuteUpdateZeroCount() throws SQLException { try (Connection connection = getConnection()) { - String[] testCommands = { "use role accountadmin", "use database testdb", @@ -457,25 +459,26 @@ public void testExecuteUpdateZeroCount() throws SQLException { }; try { for (String testCommand : testCommands) { - Statement statement = connection.createStatement(); - int updateCount = statement.executeUpdate(testCommand); - assertThat(updateCount, is(0)); - statement.close(); + try (Statement statement = connection.createStatement()) { + int updateCount = statement.executeUpdate(testCommand); + assertThat(updateCount, is(0)); + } } } finally { - Statement statement = connection.createStatement(); - statement.execute("use role accountadmin"); - statement.execute("drop table if exists testExecuteUpdate"); - statement.execute("drop role if exists testrole"); - statement.execute("drop user if exists testuser"); + try (Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute("drop table if exists testExecuteUpdate"); + statement.execute("drop role if exists testrole"); + statement.execute("drop user if 
exists testuser"); + } } } } @Test public void testExecuteUpdateFail() throws Exception { - try (Connection connection = getConnection()) { - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { String[] testCommands = { "list @~", "ls @~", @@ -488,7 +491,6 @@ public void testExecuteUpdateFail() throws Exception { for (String testCommand : testCommands) { try { - Statement statement = connection.createStatement(); statement.executeUpdate(testCommand); fail("TestCommand: " + testCommand + " is expected to be failed to execute"); } catch (SQLException e) { @@ -503,30 +505,29 @@ public void testExecuteUpdateFail() throws Exception { @Test public void testTelemetryBatch() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - ResultSet rs; - String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; - statement.execute(sqlSelect); - - rs = statement.getResultSet(); - assertEquals(3, getSizeOfResultSet(rs)); - assertEquals(-1, statement.getUpdateCount()); - assertEquals(-1L, statement.getLargeUpdateCount()); + Telemetry telemetryClient = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - rs = statement.executeQuery(sqlSelect); - assertEquals(3, getSizeOfResultSet(rs)); - rs.close(); + String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; + statement.execute(sqlSelect); - Telemetry telemetryClient = - ((SnowflakeStatementV1) statement).connection.getSfSession().getTelemetryClient(); + try (ResultSet rs = statement.getResultSet()) { + assertEquals(3, getSizeOfResultSet(rs)); + assertEquals(-1, statement.getUpdateCount()); + assertEquals(-1L, statement.getLargeUpdateCount()); + } - // there should be logs ready to be sent - assertTrue(((TelemetryClient) telemetryClient).bufferSize() > 0); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + assertEquals(3, 
getSizeOfResultSet(rs)); + } - statement.close(); + telemetryClient = + ((SnowflakeStatementV1) statement).connection.getSfSession().getTelemetryClient(); + // there should be logs ready to be sent + assertTrue(((TelemetryClient) telemetryClient).bufferSize() > 0); + } // closing the statement should flush the buffer, however, flush is async, // sleep some time before check buffer size try { @@ -534,54 +535,50 @@ public void testTelemetryBatch() throws SQLException { } catch (Throwable e) { } assertEquals(((TelemetryClient) telemetryClient).bufferSize(), 0); - connection.close(); } @Test public void testMultiStmtNotEnabled() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; - try { - statement.execute(multiStmtQuery); - fail("Using a multi-statement query without the parameter set should fail"); - } catch (SnowflakeSQLException ex) { - assertEquals(SqlState.FEATURE_NOT_SUPPORTED, ex.getSQLState()); + try { + statement.execute(multiStmtQuery); + fail("Using a multi-statement query without the parameter set should fail"); + } catch (SnowflakeSQLException ex) { + assertEquals(SqlState.FEATURE_NOT_SUPPORTED, ex.getSQLState()); + } } - - statement.close(); - connection.close(); } @Test public void testCallStoredProcedure() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace procedure SP()\n" - + "returns string not 
null\n" - + "language javascript\n" - + "as $$\n" - + " snowflake.execute({sqlText:'select seq4() from table(generator(rowcount=>5))'});\n" - + " return 'done';\n" - + "$$"); - - assertTrue(statement.execute("call SP()")); - ResultSet rs = statement.getResultSet(); - assertNotNull(rs); - assertTrue(rs.next()); - assertEquals("done", rs.getString(1)); - assertFalse(rs.next()); - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - assertEquals(-1L, statement.getLargeUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace procedure SP()\n" + + "returns string not null\n" + + "language javascript\n" + + "as $$\n" + + " snowflake.execute({sqlText:'select seq4() from table(generator(rowcount=>5))'});\n" + + " return 'done';\n" + + "$$"); + + assertTrue(statement.execute("call SP()")); + try (ResultSet rs = statement.getResultSet()) { + assertNotNull(rs); + assertTrue(rs.next()); + assertEquals("done", rs.getString(1)); + assertFalse(rs.next()); + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + assertEquals(-1L, statement.getLargeUpdateCount()); + } + } } @Test @@ -612,8 +609,8 @@ public void testCreateStatementWithParameters() throws Throwable { @Test public void testUnwrapper() throws Throwable { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { if (statement.isWrapperFor(SnowflakeStatementV1.class)) { statement.execute("select 1"); SnowflakeStatement sfstatement = statement.unwrap(SnowflakeStatement.class); @@ -632,10 +629,9 @@ public void testUnwrapper() throws Throwable { @Test public void testQueryIdIsNullOnFreshStatement() throws SQLException { - try (Connection con = 
getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java index cdd3527f7..d041b1694 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java @@ -13,18 +13,21 @@ public class StatementLargeUpdateIT extends BaseJDBCTest { @Test public void testLargeUpdate() throws Throwable { - try (Connection con = getConnection()) { + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { long expectedUpdateRows = (long) Integer.MAX_VALUE + 10L; - con.createStatement().execute("create or replace table test_large_update(c1 boolean)"); - Statement st = con.createStatement(); - long updatedRows = - st.executeLargeUpdate( - "insert into test_large_update select true from table(generator(rowcount=>" - + expectedUpdateRows - + "))"); - assertEquals(expectedUpdateRows, updatedRows); - assertEquals(expectedUpdateRows, st.getLargeUpdateCount()); - con.createStatement().execute("drop table if exists test_large_update"); + try { + statement.execute("create or replace table test_large_update(c1 boolean)"); + long updatedRows = + statement.executeLargeUpdate( + "insert into test_large_update select true from table(generator(rowcount=>" + + expectedUpdateRows + + "))"); + assertEquals(expectedUpdateRows, updatedRows); + assertEquals(expectedUpdateRows, statement.getLargeUpdateCount()); + } finally { + statement.execute("drop table if exists test_large_update"); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java index 0a003957c..d37d88118 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java @@ -46,9 +46,9 @@ public class StatementLatestIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @@ -56,111 +56,119 @@ public static Connection getConnection() throws SQLException { @Test public void testExecuteCreateAndDrop() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - boolean success = statement.execute("create or replace table test_create(colA integer)"); - assertFalse(success); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - int rowCount = statement.executeUpdate("create or replace table test_create_2(colA integer)"); - assertEquals(0, rowCount); - assertEquals(0, statement.getUpdateCount()); + boolean success = statement.execute("create or replace table test_create(colA integer)"); + assertFalse(success); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); - success = statement.execute("drop table if exists TEST_CREATE"); - assertFalse(success); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); 
+ int rowCount = statement.executeUpdate("create or replace table test_create_2(colA integer)"); + assertEquals(0, rowCount); + assertEquals(0, statement.getUpdateCount()); - rowCount = statement.executeUpdate("drop table if exists TEST_CREATE_2"); - assertEquals(0, rowCount); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); + success = statement.execute("drop table if exists TEST_CREATE"); + assertFalse(success); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); - statement.close(); - connection.close(); + rowCount = statement.executeUpdate("drop table if exists TEST_CREATE_2"); + assertEquals(0, rowCount); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCopyAndUpload() throws Exception { - - Connection connection = null; - Statement statement = null; File tempFolder = tmpFolder.newFolder("test_downloads_folder"); List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { String fileName = "test_copy.csv"; URL resource = StatementIT.class.getResource(fileName); - connection = getConnection(accounts.get(i)); - statement = connection.createStatement(); - - statement.execute("create or replace table test_copy(c1 number, c2 number, c3 string)"); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - - String path = resource.getFile(); - - // put files - ResultSet rset = statement.executeQuery("PUT file://" + path + " @%test_copy"); - try { - rset.getString(1); - fail("Should raise No row found exception, because no next() is called."); - } catch (SQLException ex) { - assertThat( - 
"No row found error", ex.getErrorCode(), equalTo(ROW_DOES_NOT_EXIST.getMessageCode())); - } - int cnt = 0; - while (rset.next()) { - assertThat("uploaded file name", rset.getString(1), equalTo(fileName)); - ++cnt; + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_copy(c1 number, c2 number, c3 string)"); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + + String path = resource.getFile(); + + // put files + try (ResultSet rset = statement.executeQuery("PUT file://" + path + " @%test_copy")) { + try { + rset.getString(1); + fail("Should raise No row found exception, because no next() is called."); + } catch (SQLException ex) { + assertThat( + "No row found error", + ex.getErrorCode(), + equalTo(ROW_DOES_NOT_EXIST.getMessageCode())); + } + int cnt = 0; + while (rset.next()) { + assertThat("uploaded file name", rset.getString(1), equalTo(fileName)); + ++cnt; + } + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertThat("number of files", cnt, equalTo(1)); + int numRows = statement.executeUpdate("copy into test_copy"); + assertEquals(2, numRows); + assertEquals(2, statement.getUpdateCount()); + assertEquals(2L, statement.getLargeUpdateCount()); + + // get files + statement.executeQuery( + "get @%test_copy 'file://" + tempFolder.getCanonicalPath() + "' parallel=8"); + + // Make sure that the downloaded file exists, it should be gzip compressed + File downloaded = + new File(tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); + assertTrue(downloaded.exists()); + } + // unzip the new file + Process p = + Runtime.getRuntime() + .exec( + "gzip -d " + + tempFolder.getCanonicalPath() + + File.separator + + fileName + + ".gz"); + p.waitFor(); + File newCopy = new File(tempFolder.getCanonicalPath() + File.separator + fileName); + // 
check that the get worked by uploading new file again to a different table and + // comparing it + // to original table + statement.execute("create or replace table test_copy_2(c1 number, c2 number, c3 string)"); + + // put copy of file + statement.executeQuery("PUT file://" + newCopy.getPath() + " @%test_copy_2"); + // assert that the result set is empty when you subtract each table from the other + try (ResultSet rset = + statement.executeQuery( + "select * from @%test_copy minus select * from @%test_copy_2")) { + assertFalse(rset.next()); + } + try (ResultSet rset = + statement.executeQuery( + "select * from @%test_copy_2 minus select * from @%test_copy")) { + assertFalse(rset.next()); + } + } finally { + statement.execute("drop table if exists test_copy"); + statement.execute("drop table if exists test_copy_2"); + } } - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertThat("number of files", cnt, equalTo(1)); - int numRows = statement.executeUpdate("copy into test_copy"); - assertEquals(2, numRows); - assertEquals(2, statement.getUpdateCount()); - assertEquals(2L, statement.getLargeUpdateCount()); - - // get files - statement.executeQuery( - "get @%test_copy 'file://" + tempFolder.getCanonicalPath() + "' parallel=8"); - - // Make sure that the downloaded file exists, it should be gzip compressed - File downloaded = new File(tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); - assert (downloaded.exists()); - - // unzip the new file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); - p.waitFor(); - File newCopy = new File(tempFolder.getCanonicalPath() + File.separator + fileName); - - // check that the get worked by uploading new file again to a different table and comparing it - // to original table - statement.execute("create or replace table test_copy_2(c1 number, c2 number, c3 string)"); - - // put copy of file - 
rset = statement.executeQuery("PUT file://" + newCopy.getPath() + " @%test_copy_2"); - // assert that the result set is empty when you subtract each table from the other - rset = statement.executeQuery("select * from @%test_copy minus select * from @%test_copy_2"); - assertFalse(rset.next()); - rset = statement.executeQuery("select * from @%test_copy_2 minus select * from @%test_copy"); - assertFalse(rset.next()); - - statement.execute("drop table if exists test_copy"); - statement.execute("drop table if exists test_copy_2"); } - - statement.close(); - connection.close(); } /** @@ -170,36 +178,34 @@ public void testCopyAndUpload() throws Exception { */ @Test public void testExecuteOpenResultSets() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - ResultSet resultSet; + try (Connection con = getConnection()) { + try (Statement statement = con.createStatement()) { + for (int i = 0; i < 10; i++) { + statement.execute("select 1"); + statement.getResultSet(); + } - for (int i = 0; i < 10; i++) { - statement.execute("select 1"); - statement.getResultSet(); - } + assertEquals(9, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); + } - assertEquals(9, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); - statement.close(); + try (Statement statement = con.createStatement()) { + for (int i = 0; i < 10; i++) { + statement.execute("select 1"); + ResultSet resultSet = statement.getResultSet(); + resultSet.close(); + } - statement = con.createStatement(); - for (int i = 0; i < 10; i++) { - statement.execute("select 1"); - resultSet = statement.getResultSet(); - resultSet.close(); + assertEquals(0, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); + } } - - assertEquals(0, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); - - statement.close(); - con.close(); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnGithubAction.class) public void testPreparedStatementLogging() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + try { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); sfSession.setPreparedStatementLogging(true); @@ -229,7 +235,7 @@ public void testPreparedStatementLogging() throws SQLException { int bindValues = BindUploader.arrayBindValueCount(bindings); assertEquals(8008, bindValues); pstatement.executeBatch(); - + } finally { stmt.execute("drop table if exists mytab"); } } @@ -240,29 +246,27 @@ public void testSchemaWith255CharactersDoesNotCauseException() throws SQLExcepti String schemaName = TestUtil.GENERATED_SCHEMA_PREFIX + SnowflakeUtil.randomAlphaNumeric(255 - TestUtil.GENERATED_SCHEMA_PREFIX.length()); - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - stmt.execute("create schema " + schemaName); - stmt.execute("use schema " + schemaName); - stmt.execute("drop schema " + schemaName); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + stmt.execute("create schema " + schemaName); + stmt.execute("use schema " + schemaName); + stmt.execute("drop schema " + schemaName); } } /** Added in > 3.14.4 */ @Test public void testQueryIdIsSetOnFailedQueryExecute() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - try { - stmt.execute("use database not_existing_database"); - fail("Statement should fail with exception"); - } catch (SnowflakeSQLException e) { - String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); - TestUtil.assertValidQueryId(queryID); - assertEquals(queryID, e.getQueryId()); - } + try (Connection con = getConnection(); + Statement stmt = 
con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); + try { + stmt.execute("use database not_existing_database"); + fail("Statement should fail with exception"); + } catch (SnowflakeSQLException e) { + String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); + TestUtil.assertValidQueryId(queryID); + assertEquals(queryID, e.getQueryId()); } } } @@ -270,17 +274,16 @@ public void testQueryIdIsSetOnFailedQueryExecute() throws SQLException { /** Added in > 3.14.4 */ @Test public void testQueryIdIsSetOnFailedExecuteUpdate() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - try { - stmt.executeUpdate("update not_existing_table set a = 1 where id = 42"); - fail("Statement should fail with exception"); - } catch (SnowflakeSQLException e) { - String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); - TestUtil.assertValidQueryId(queryID); - assertEquals(queryID, e.getQueryId()); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); + try { + stmt.executeUpdate("update not_existing_table set a = 1 where id = 42"); + fail("Statement should fail with exception"); + } catch (SnowflakeSQLException e) { + String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); + TestUtil.assertValidQueryId(queryID); + assertEquals(queryID, e.getQueryId()); } } } @@ -288,17 +291,16 @@ public void testQueryIdIsSetOnFailedExecuteUpdate() throws SQLException { /** Added in > 3.14.4 */ @Test public void testQueryIdIsSetOnFailedExecuteQuery() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - try { - stmt.executeQuery("select * from not_existing_table"); - fail("Statement 
should fail with exception"); - } catch (SnowflakeSQLException e) { - String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); - TestUtil.assertValidQueryId(queryID); - assertEquals(queryID, e.getQueryId()); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); + try { + stmt.executeQuery("select * from not_existing_table"); + fail("Statement should fail with exception"); + } catch (SnowflakeSQLException e) { + String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); + TestUtil.assertValidQueryId(queryID); + assertEquals(queryID, e.getQueryId()); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StreamIT.java b/src/test/java/net/snowflake/client/jdbc/StreamIT.java index f36fd5d34..d1762904d 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamIT.java @@ -32,40 +32,32 @@ public class StreamIT extends BaseJDBCTest { @Test public void testUploadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); - - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); - - // select from the file to make sure the data is uploaded - ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX); - - String ret = null; - - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", 
"hello", ret); - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); + + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX)) { + String ret = null; + + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } @@ -80,38 +72,37 @@ public void testUploadStream() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDownloadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); - statement = connection.createStatement(); - ResultSet rset = - statement.executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~/" + DEST_PREFIX); - assertTrue(rset.next()); - assertEquals("UPLOADED", rset.getString(7)); - - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); - StringWriter writer = new StringWriter(); - IOUtils.copy(out, writer, "UTF-8"); - String output = 
writer.toString(); - // the first 2 characters - assertEquals("1|", output.substring(0, 2)); - - // the number of lines - String[] lines = output.split("\n"); - assertEquals(28, lines.length); - } finally { - if (statement != null) { + try (Connection connection = getConnection(accountName); + Statement statement = connection.createStatement()) { + try { + try (ResultSet rset = + statement.executeQuery( + "PUT file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @~/" + + DEST_PREFIX)) { + assertTrue(rset.next()); + assertEquals("UPLOADED", rset.getString(7)); + + InputStream out = + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + // the first 2 characters + assertEquals("1|", output.substring(0, 2)); + + // the number of lines + String[] lines = output.split("\n"); + assertEquals(28, lines.length); + } + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } } @@ -119,42 +110,34 @@ public void testDownloadStream() throws Throwable { @Test public void testCompressAndUploadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/" + "testCompressAndUploadStream"; - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); - - // upload the data to user stage under testCompressAndUploadStream - // with name hello.txt - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnectionV1.class) - .uploadStream( - "~", DEST_PREFIX, 
outputStream.asByteSource().openStream(), "hello.txt", true); - - // select from the file to make sure the data is uploaded - ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); + + // upload the data to user stage under testCompressAndUploadStream + // with name hello.txt + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnectionV1.class) + .uploadStream( + "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", true); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX)) { + + String ret = null; + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } - String ret = null; - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); - } finally { - if (statement != null) { + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(resultSet, statement, connection); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java index f7e3d0d74..3ab179b70 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java @@ -47,58 +47,56 @@ public class StreamLatestIT extends BaseJDBCTest { */ @Test public void testUnusualStageName() throws Throwable { - Connection connection = getConnection(); - Statement statement = 
connection.createStatement(); + String ret = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - try { - statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); + try { + statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "'@%\"ice cream (nice)\"'", - null, outputStream.asByteSource().openStream(), "hello.txt", false); - - // select from the file to make sure the data is uploaded - ResultSet rset = statement.executeQuery("SELECT $1 FROM '@%\"ice cream (nice)\"/'"); - - String ret = null; - - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); - - statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); - - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "$$@%\"ice cream (nice)\"$$", - null, outputStream.asByteSource().openStream(), "hello.txt", false); - - // select from the file to make sure the data is uploaded - rset = statement.executeQuery("SELECT $1 FROM $$@%\"ice cream (nice)\"/$$"); - - ret = null; - - while (rset.next()) { - ret = rset.getString(1); + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "'@%\"ice cream (nice)\"'", + null, 
outputStream.asByteSource().openStream(), "hello.txt", false); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = statement.executeQuery("SELECT $1 FROM '@%\"ice cream (nice)\"/'")) { + ret = null; + + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } + statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); + + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "$$@%\"ice cream (nice)\"$$", + null, outputStream.asByteSource().openStream(), "hello.txt", false); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = + statement.executeQuery("SELECT $1 FROM $$@%\"ice cream (nice)\"/$$")) { + + ret = null; + + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } + } finally { + statement.execute("DROP TABLE IF EXISTS \"ice cream (nice)\""); } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); - - } finally { - statement.execute("DROP TABLE IF EXISTS \"ice cream (nice)\""); - statement.close(); - connection.close(); } } @@ -106,28 +104,25 @@ public void testUnusualStageName() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - statement = connection.createStatement(); - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", 
DEST_PREFIX + "/abc.gz", true); - fail("should throw a storage provider exception for blob not found"); - } catch (Exception ex) { - assertTrue(ex instanceof SQLException); - assertTrue( - "Wrong exception message: " + ex.getMessage(), - ex.getMessage().matches(".*Blob.*not found in bucket.*")); - } finally { - if (statement != null) { + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + + try { + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("~", DEST_PREFIX + "/abc.gz", true); + fail("should throw a storage provider exception for blob not found"); + } catch (Exception ex) { + assertTrue(ex instanceof SQLException); + assertTrue( + "Wrong exception message: " + ex.getMessage(), + ex.getMessage().matches(".*Blob.*not found in bucket.*")); + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } @@ -135,121 +130,111 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { @Ignore public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOException { final String DEST_PREFIX = "testUploadStream"; - Connection connection = null; - Statement statement = null; - connection = getConnection("gcpaccount"); - statement = connection.createStatement(); - statement.execute("create or replace stage testgcpstage"); - ResultSet rset = - statement.executeQuery( - "PUT file://" - + getFullPathFileInResource(TEST_DATA_FILE) - + " @testgcpstage/" - + DEST_PREFIX); - assertTrue(rset.next()); - assertEquals("Error message:" + rset.getString(8), "UPLOADED", rset.getString(7)); - - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("@testgcpstage", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); - StringWriter writer = new StringWriter(); - 
IOUtils.copy(out, writer, "UTF-8"); - String output = writer.toString(); - // the first 2 characters - assertEquals("1|", output.substring(0, 2)); - // the number of lines - String[] lines = output.split("\n"); - assertEquals(28, lines.length); - - statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); - closeSQLObjects(statement, connection); + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { + statement.execute("create or replace stage testgcpstage"); + try (ResultSet rset = + statement.executeQuery( + "PUT file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @testgcpstage/" + + DEST_PREFIX)) { + assertTrue(rset.next()); + assertEquals("Error message:" + rset.getString(8), "UPLOADED", rset.getString(7)); + + InputStream out = + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("@testgcpstage", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + // the first 2 characters + assertEquals("1|", output.substring(0, 2)); + + // the number of lines + String[] lines = output.split("\n"); + assertEquals(28, lines.length); + } + statement.execute("rm @~/" + DEST_PREFIX); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDownloadToStreamGCS() throws SQLException, IOException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - try { - connection = getConnection("gcpaccount", paramProperties); - statement = connection.createStatement(); - ResultSet rset = - statement.executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~/" + DEST_PREFIX); - assertTrue(rset.next()); - assertEquals("UPLOADED", 
rset.getString(7)); - - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); - StringWriter writer = new StringWriter(); - IOUtils.copy(out, writer, "UTF-8"); - String output = writer.toString(); - // the first 2 characters - assertEquals("1|", output.substring(0, 2)); - // the number of lines - String[] lines = output.split("\n"); - assertEquals(28, lines.length); - } finally { - if (statement != null) { + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement(); + ResultSet rset = + statement.executeQuery( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~/" + DEST_PREFIX)) { + try { + assertTrue(rset.next()); + assertEquals("UPLOADED", rset.getString(7)); + + InputStream out = + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + // the first 2 characters + assertEquals("1|", output.substring(0, 2)); + + // the number of lines + String[] lines = output.split("\n"); + assertEquals(28, lines.length); + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } @Test public void testSpecialCharactersInFileName() throws SQLException, IOException { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - statement = connection.createStatement(); - - // Create a temporary file with special characters in the name and write to it - File specialCharFile = tmpFolder.newFile("(special char@).txt"); - BufferedWriter bw = new BufferedWriter(new FileWriter(specialCharFile)); - bw.write("Creating test file for downloadStream test"); - bw.close(); - - String sourceFilePath = 
specialCharFile.getCanonicalPath(); - String sourcePathEscaped; - if (System.getProperty("file.separator").equals("\\")) { - // windows separator needs to be escaped because of quotes - sourcePathEscaped = sourceFilePath.replace("\\", "\\\\"); - } else { - sourcePathEscaped = sourceFilePath; - } - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE downloadStream_stage"); - statement.execute( - "PUT 'file://" + sourcePathEscaped + "' @~/downloadStream_stage auto_compress=false"); - - // download file stream - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", "/downloadStream_stage/" + specialCharFile.getName(), false); - - // Read file stream and check the result - StringWriter writer = new StringWriter(); - IOUtils.copy(out, writer, "UTF-8"); - String output = writer.toString(); - assertEquals("Creating test file for downloadStream test", output); - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // Create a temporary file with special characters in the name and write to it + File specialCharFile = tmpFolder.newFile("(special char@).txt"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(specialCharFile))) { + bw.write("Creating test file for downloadStream test"); + } + + String sourceFilePath = specialCharFile.getCanonicalPath(); + String sourcePathEscaped; + if (System.getProperty("file.separator").equals("\\")) { + // windows separator needs to be escaped because of quotes + sourcePathEscaped = sourceFilePath.replace("\\", "\\\\"); + } else { + sourcePathEscaped = sourceFilePath; + } + + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE downloadStream_stage"); + statement.execute( + "PUT 'file://" + sourcePathEscaped + "' @~/downloadStream_stage auto_compress=false"); + + // download file stream + try (InputStream out = + connection + 
.unwrap(SnowflakeConnection.class) + .downloadStream("~", "/downloadStream_stage/" + specialCharFile.getName(), false)) { + + // Read file stream and check the result + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + assertEquals("Creating test file for downloadStream test", output); + } + } finally { statement.execute("DROP STAGE IF EXISTS downloadStream_stage"); - statement.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java index c667b7a3f..93539005a 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java @@ -1,8 +1,11 @@ package net.snowflake.client.jdbc.cloud.storage; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; +import static org.mockito.Mockito.spy; +import com.microsoft.azure.storage.blob.ListBlobItem; import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.ConditionalIgnoreRule; @@ -17,7 +20,6 @@ import org.junit.Test; public class SnowflakeAzureClientLatestIT extends BaseJDBCTest { - @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { @@ -37,4 +39,14 @@ public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { } } } + + @Test + public void testCloudExceptionTest() { + Iterable mockList = null; + AzureObjectSummariesIterator iterator = new AzureObjectSummariesIterator(mockList); + AzureObjectSummariesIterator spyIterator = spy(iterator); + UnsupportedOperationException ex = + assertThrows(UnsupportedOperationException.class, () -> 
spyIterator.remove()); + assertEquals(ex.getMessage(), "remove() method not supported"); + } } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java index ab6276ec3..de241162f 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java @@ -72,26 +72,29 @@ public void testS3Client256Encryption() throws SQLException { @Test @Ignore public void testS3ConnectionWithProxyEnvVariablesSet() throws SQLException { - Connection connection = null; String testStageName = "s3TestStage"; - try { - connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery("select 1"); - assertTrue(resultSet.next()); - statement.execute("create or replace stage " + testStageName); - resultSet = - connection - .createStatement() - .executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @" + testStageName); - while (resultSet.next()) { - assertEquals("UPLOADED", resultSet.getString("status")); + + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try (ResultSet resultSet = statement.executeQuery("select 1")) { + assertTrue(resultSet.next()); } - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + try { + statement.execute("create or replace stage " + testStageName); + try (ResultSet resultSet = + connection + .createStatement() + .executeQuery( + "PUT file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @" + + testStageName)) { + while (resultSet.next()) { + assertEquals("UPLOADED", resultSet.getString("status")); + } + } + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } 
} } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java new file mode 100644 index 000000000..3daddf3df --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.jdbc.cloud.storage; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class SnowflakeS3ClientTest { + + @Test + public void shouldDetermineDomainForRegion() { + assertEquals("amazonaws.com", SnowflakeS3Client.getDomainSuffixForRegionalUrl("us-east-1")); + assertEquals( + "amazonaws.com.cn", SnowflakeS3Client.getDomainSuffixForRegionalUrl("cn-northwest-1")); + assertEquals( + "amazonaws.com.cn", SnowflakeS3Client.getDomainSuffixForRegionalUrl("CN-NORTHWEST-1")); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java new file mode 100644 index 000000000..042c6b0f4 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java @@ -0,0 +1,356 @@ +package net.snowflake.client.jdbc.diagnostic; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import net.snowflake.client.category.TestCategoryDiagnostic; +import net.snowflake.client.core.SFSessionProperty; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(TestCategoryDiagnostic.class) 
+public class DiagnosticContextLatestIT { + + private static final String HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts"; + private static final String HTTP_PROXY_HOST = "http.proxyHost"; + private static final String HTTP_PROXY_PORT = "http.proxyPort"; + private static final String HTTPS_PROXY_HOST = "https.proxyHost"; + private static final String HTTPS_PROXY_PORT = "https.proxyPort"; + + private static String oldJvmNonProxyHosts; + private static String oldJvmHttpProxyHost; + private static String oldJvmHttpProxyPort; + private static String oldJvmHttpsProxyHost; + private static String oldJvmHttpsProxyPort; + + @BeforeClass + public static void init() { + oldJvmNonProxyHosts = System.getProperty(HTTP_NON_PROXY_HOSTS); + oldJvmHttpProxyHost = System.getProperty(HTTP_PROXY_HOST); + oldJvmHttpProxyPort = System.getProperty(HTTP_PROXY_PORT); + oldJvmHttpsProxyHost = System.getProperty(HTTPS_PROXY_HOST); + oldJvmHttpsProxyPort = System.getProperty(HTTPS_PROXY_PORT); + } + + @Before + public void clearJvmProperties() { + System.clearProperty(HTTP_NON_PROXY_HOSTS); + System.clearProperty(HTTP_PROXY_HOST); + System.clearProperty(HTTP_PROXY_PORT); + System.clearProperty(HTTPS_PROXY_HOST); + System.clearProperty(HTTPS_PROXY_PORT); + } + /** + * Check that all the mock Snowflake Endpoints we manually created exist in the array returned to + * us by the DiagnosticContext class which it generated after it parsed the allowlist.json file + * during initialization. + * + *

Test added in version > 3.16.1 + */ + @Test + public void parseAllowListFileTest() { + Map connectionPropertiesMap = new HashMap<>(); + File allowlistFile = new File("src/test/resources/allowlist.json"); + + DiagnosticContext diagnosticContext = + new DiagnosticContext(allowlistFile.getAbsolutePath(), connectionPropertiesMap); + List endpointsFromTestFile = diagnosticContext.getEndpoints(); + List mockEndpoints = new ArrayList<>(); + + mockEndpoints.add( + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint( + "SNOWFLAKE_DEPLOYMENT_REGIONLESS", "org-account_name.snowflakecomputing.com", 443)); + mockEndpoints.add(new SnowflakeEndpoint("STAGE", "stage-bucket.s3.amazonaws.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint("STAGE", "stage-bucket.s3.us-west-2.amazonaws.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint("STAGE", "stage-bucket.s3-us-west-2.amazonaws.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint("SNOWSQL_REPO", "snowsql_repo.snowflakecomputing.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint( + "OUT_OF_BAND_TELEMETRY", "out_of_band_telemetry.snowflakecomputing.com", 443)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("DUO_SECURITY", "duo_security.duosecurity.com", 443)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.rootg2.amazontrust.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "o.ss2.us", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.sca1b.amazontrust.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.r2m01.amazontrust.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.rootca1.amazontrust.com", 80)); + mockEndpoints.add( + new SnowflakeEndpoint("SNOWSIGHT_DEPLOYMENT", "snowsight_deployment.snowflake.com", 443)); + 
mockEndpoints.add( + new SnowflakeEndpoint("SNOWSIGHT_DEPLOYMENT", "snowsight_deployment_2.snowflake.com", 443)); + + String testFailedMessage = + "The lists of SnowflakeEndpoints in mockEndpoints and endpointsFromTestFile should be identical"; + assertTrue(testFailedMessage, endpointsFromTestFile.containsAll(mockEndpoints)); + } + + /** + * Test that we correctly determine that proxy settings are absent from both the JVM and the + * connections parameters (i.e. empty strings for hostnames, or -1 for ports). + * + *

Test added in version > 3.16.1 + */ + @Test + public void testEmptyProxyConfig() { + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + assertFalse("Proxy configurations should be empty", diagnosticContext.isProxyEnabled()); + assertTrue( + "getHttpProxyHost() must return an empty string in the absence of proxy configuration", + diagnosticContext.getHttpProxyHost().isEmpty()); + assertEquals( + "getHttpProxyPort() must return -1 in the absence of proxy configuration", + -1, + diagnosticContext.getHttpProxyPort()); + assertTrue( + "getHttpsProxyHost() must return an empty string in the absence of proxy configuration", + diagnosticContext.getHttpsProxyHost().isEmpty()); + assertEquals( + "getHttpsProxyPort() must return -1 in the absence of proxy configuration", + -1, + diagnosticContext.getHttpsProxyPort()); + assertTrue( + "getHttpNonProxyHosts() must return an empty string in the absence of proxy configuration", + diagnosticContext.getHttpNonProxyHosts().isEmpty()); + } + + /** Test added in version > 3.16.1 */ + @Test + public void testProxyConfigSetOnJvm() { + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_NON_PROXY_HOSTS, "*.domain.com|localhost"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + assertTrue(diagnosticContext.isProxyEnabled()); + assertTrue(diagnosticContext.isProxyEnabledOnJvm()); + assertEquals(diagnosticContext.getHttpProxyHost(), "http.proxyHost.com"); + assertEquals(diagnosticContext.getHttpProxyPort(), 8080); + assertEquals(diagnosticContext.getHttpsProxyHost(), "https.proxyHost.com"); + assertEquals(diagnosticContext.getHttpsProxyPort(), 8083); + 
assertEquals(diagnosticContext.getHttpNonProxyHosts(), "*.domain.com|localhost"); + } + + /** + * If Proxy settings are passed using JVM arguments and connection parameters then the connection + * parameters take precedence. + * + *

Test added in version > 3.16.1 + */ + @Test + public void testProxyOverrideWithConnectionParameter() { + + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_NON_PROXY_HOSTS, "*.domain.com|localhost"); + + Map connectionPropertiesMap = new HashMap<>(); + + connectionPropertiesMap.put(SFSessionProperty.PROXY_HOST, "override.proxyHost.com"); + connectionPropertiesMap.put(SFSessionProperty.PROXY_PORT, "80"); + connectionPropertiesMap.put(SFSessionProperty.NON_PROXY_HOSTS, "*.new_domain.com|localhost"); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + assertTrue(diagnosticContext.isProxyEnabled()); + assertFalse(diagnosticContext.isProxyEnabledOnJvm()); + assertEquals(diagnosticContext.getHttpProxyHost(), "override.proxyHost.com"); + assertEquals(diagnosticContext.getHttpProxyPort(), 80); + assertEquals(diagnosticContext.getHttpsProxyHost(), "override.proxyHost.com"); + assertEquals(diagnosticContext.getHttpsProxyPort(), 80); + assertEquals(diagnosticContext.getHttpNonProxyHosts(), "*.new_domain.com|localhost"); + } + + /** Test added in version > 3.16.1 */ + @Test + public void testGetProxy() { + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_NON_PROXY_HOSTS, "*.domain.com|localhost|*.snowflakecomputing.com"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + String httpProxyHost = diagnosticContext.getHttpProxyHost(); + int httpProxyPort = diagnosticContext.getHttpProxyPort(); + String httpsProxyHost = diagnosticContext.getHttpsProxyHost(); + int 
httpsProxyPort = diagnosticContext.getHttpsProxyPort(); + + SnowflakeEndpoint httpsHostBypassingProxy = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint httpHostBypassingProxy = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + SnowflakeEndpoint hostWithHttpProxy = + new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.rootg2.amazontrust.com", 80); + SnowflakeEndpoint hostWithHttpsProxy = + new SnowflakeEndpoint("STAGE", "stage-bucket.s3-us-west-2.amazonaws.com", 443); + + Proxy byPassProxy = Proxy.NO_PROXY; + Proxy httpProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpProxyHost, httpProxyPort)); + Proxy httpsProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpsProxyHost, httpsProxyPort)); + + assertEquals(byPassProxy, diagnosticContext.getProxy(httpsHostBypassingProxy)); + assertEquals(byPassProxy, diagnosticContext.getProxy(httpHostBypassingProxy)); + assertEquals(httpProxy, diagnosticContext.getProxy(hostWithHttpProxy)); + assertEquals(httpsProxy, diagnosticContext.getProxy(hostWithHttpsProxy)); + } + + /** + * Test that we correctly create direct HTTPS connections and only route HTTP requests through a + * proxy server when we set only the -Dhttp.proxyHost and -Dhttp.proxyPort arguments + * + *

Test added in version > 3.16.1 + */ + @Test + public void testGetHttpProxyOnly() { + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + System.clearProperty(HTTP_PROXY_HOST); + System.clearProperty(HTTP_PROXY_PORT); + + String httpProxyHost = diagnosticContext.getHttpProxyHost(); + int httpProxyPort = diagnosticContext.getHttpProxyPort(); + + Proxy noProxy = Proxy.NO_PROXY; + Proxy httpProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpProxyHost, httpProxyPort)); + + SnowflakeEndpoint httpsHostDirectConnection = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint httpHostProxy = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + + assertEquals(noProxy, diagnosticContext.getProxy(httpsHostDirectConnection)); + assertEquals(httpProxy, diagnosticContext.getProxy(httpHostProxy)); + } + + /** + * Test that we correctly create direct HTTP connections and only route HTTPS through a proxy + * server when we set only the -Dhttps.proxyHost and -Dhttps.proxyPort parameters + * + *

Test added in version > 3.16.1 + */ + @Test + public void testGetHttpsProxyOnly() { + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + String httpsProxyHost = diagnosticContext.getHttpsProxyHost(); + int httpsProxyPort = diagnosticContext.getHttpsProxyPort(); + + Proxy noProxy = Proxy.NO_PROXY; + Proxy httpsProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpsProxyHost, httpsProxyPort)); + + SnowflakeEndpoint httpsHostProxy = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint httpHostDirectConnection = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + + assertEquals(noProxy, diagnosticContext.getProxy(httpHostDirectConnection)); + assertEquals(httpsProxy, diagnosticContext.getProxy(httpsHostProxy)); + } + + /** + * Test that we create a direct connection to every host even though the JVM arguments are set. We + * override the JVM arguments with the nonProxyHosts connection parameter. + * + *

Test added in version > 3.16.1 + */ + @Test + public void testgetNoProxyAfterOverridingJvm() { + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + + Map connectionPropertiesMap = new HashMap<>(); + + connectionPropertiesMap.put(SFSessionProperty.PROXY_HOST, "override.proxyHost.com"); + connectionPropertiesMap.put(SFSessionProperty.PROXY_PORT, "80"); + connectionPropertiesMap.put(SFSessionProperty.NON_PROXY_HOSTS, "*"); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + Proxy noProxy = Proxy.NO_PROXY; + + SnowflakeEndpoint host1 = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint host2 = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + SnowflakeEndpoint host3 = + new SnowflakeEndpoint( + "SNOWFLAKE_DEPLOYMENT", "account_name.privatelink.snowflakecomputing.com", 443); + SnowflakeEndpoint host4 = + new SnowflakeEndpoint("STAGE", "stage-bucket.s3-us-west-2.amazonaws.com", 443); + + assertEquals(noProxy, diagnosticContext.getProxy(host1)); + assertEquals(noProxy, diagnosticContext.getProxy(host2)); + assertEquals(noProxy, diagnosticContext.getProxy(host3)); + assertEquals(noProxy, diagnosticContext.getProxy(host4)); + } + + @After + public void restoreJvmArguments() { + System.clearProperty(HTTP_NON_PROXY_HOSTS); + System.clearProperty(HTTP_PROXY_HOST); + System.clearProperty(HTTP_PROXY_PORT); + System.clearProperty(HTTPS_PROXY_HOST); + System.clearProperty(HTTPS_PROXY_PORT); + + if (oldJvmNonProxyHosts != null) { + System.setProperty(HTTP_NON_PROXY_HOSTS, oldJvmNonProxyHosts); + } + if (oldJvmHttpProxyHost != null) { + System.setProperty(HTTP_PROXY_HOST, oldJvmHttpProxyHost); + } + if (oldJvmHttpProxyPort != null) { + System.setProperty(HTTP_PROXY_PORT, 
oldJvmHttpProxyPort); + } + if (oldJvmHttpsProxyHost != null) { + System.setProperty(HTTPS_PROXY_HOST, oldJvmHttpsProxyHost); + } + if (oldJvmHttpsProxyPort != null) { + System.setProperty(HTTPS_PROXY_PORT, oldJvmHttpsProxyPort); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java new file mode 100644 index 000000000..a926a649e --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java @@ -0,0 +1,28 @@ +package net.snowflake.client.jdbc.diagnostic; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Test; + +public class SnowflakeEndpointTest { + + @Test + public void shouldDetectPrivateLinkEndpoint() { + Map hostsToPrivateLinks = new HashMap<>(); + hostsToPrivateLinks.put("snowhouse.snowflakecomputing.com", false); + hostsToPrivateLinks.put("snowhouse.privatelink.snowflakecomputing.com", true); + hostsToPrivateLinks.put("snowhouse.snowflakecomputing.cn", false); + hostsToPrivateLinks.put("snowhouse.PRIVATELINK.snowflakecomputing.cn", true); + + hostsToPrivateLinks.forEach( + (host, expectedToBePrivateLink) -> { + SnowflakeEndpoint endpoint = new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", host, 443); + assertEquals( + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), + expectedToBePrivateLink, + endpoint.isPrivateLink()); + }); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java index 2eaaa5e39..b1da95b99 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java @@ -6,6 +6,7 @@ import static 
org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.math.BigDecimal; @@ -29,12 +30,15 @@ import net.snowflake.client.category.TestCategoryResultSet; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.BaseJDBCTest; +import net.snowflake.client.jdbc.ResultSetFormatType; import net.snowflake.client.jdbc.SnowflakeBaseResultSet; import net.snowflake.client.jdbc.SnowflakeResultSetMetaData; import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.NestedStructSqlData; import net.snowflake.client.jdbc.structuredtypes.sqldata.NullableFieldsSqlData; import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; +import net.snowflake.client.jdbc.structuredtypes.sqldata.StringClass; +import org.junit.After; import org.junit.Assume; import org.junit.Before; import org.junit.Test; @@ -61,6 +65,22 @@ public ResultSetStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { this.queryResultFormat = queryResultFormat; } + @Before + public void setup() { + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); + SnowflakeObjectTypeFactories.register(NullableFieldsSqlData.class, NullableFieldsSqlData::new); + } + + @After + public void clean() { + SnowflakeObjectTypeFactories.unregister(StringClass.class); + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + SnowflakeObjectTypeFactories.unregister(NullableFieldsSqlData.class); + } + public Connection init() throws SQLException { Connection conn = 
BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { @@ -79,12 +99,6 @@ public Connection init() throws SQLException { return conn; } - @Before - public void clean() throws Exception { - SnowflakeObjectTypeFactories.unregister(SimpleClass.class); - SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); - } - @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapStructToObjectWithFactory() throws SQLException { @@ -100,21 +114,17 @@ public void testMapStructToObjectWithReflection() throws SQLException { private void testMapJson(boolean registerFactory) throws SQLException { if (registerFactory) { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); + } else { + SnowflakeObjectTypeFactories.unregister(StringClass.class); } withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR)", (resultSet) -> { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); + StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); }); - } - - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructAllTypes() throws SQLException { - testMapAllTypes(false); - testMapAllTypes(true); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } @Test @@ -123,17 +133,14 @@ public void testMapNullStruct() throws SQLException { withFirstRow( "select null::OBJECT(string VARCHAR)", (resultSet) -> { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); + StringClass object = resultSet.getObject(1, StringClass.class); assertNull(object); }); } - private void testMapAllTypes(boolean registerFactory) throws SQLException { - if (registerFactory) { - SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); - 
} else { - SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); - } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapStructAllTypes() throws SQLException { try (Connection connection = init(); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); @@ -155,7 +162,7 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { + "'date': '2023-12-24'::DATE, " + "'time': '12:34:56'::TIME, " + "'binary': TO_BINARY('616263', 'HEX'), " - + "'simpleClass': {'string': 'b'}" + + "'simpleClass': {'string': 'b', 'intValue': 2}" + "}::OBJECT(" + "string VARCHAR, " + "b TINYINT, " @@ -172,7 +179,7 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { + "date DATE, " + "time TIME, " + "binary BINARY, " - + "simpleClass OBJECT(string VARCHAR)" + + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + ")"); ) { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); @@ -205,19 +212,119 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { assertArrayEquals(new byte[] {'a', 'b', 'c'}, object.getBinary()); assertTrue(object.getBool()); assertEquals("b", object.getSimpleClass().getString()); + assertEquals(Integer.valueOf(2), object.getSimpleClass().getIntValue()); } } } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException { + Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='Europe/Warsaw'," + + "TIME_OUTPUT_FORMAT = 'HH24:MI:SS'," + + "DATE_OUTPUT_FORMAT = 'YYYY-MM-DD'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + 
"TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3'"); + + try (ResultSet resultSet = + statement.executeQuery( + "select {" + + "'string': 'a', " + + "'b': 1, " + + "'s': 2, " + + "'i': 3, " + + "'l': 4, " + + "'f': 1.1, " + + "'d': 2.2, " + + "'bd': 3.3, " + + "'bool': true, " + + "'timestamp_ltz': '2021-12-22 09:43:44'::TIMESTAMP_LTZ, " + + "'timestamp_ntz': '2021-12-23 09:44:44'::TIMESTAMP_NTZ, " + + "'timestamp_tz': '2021-12-24 09:45:45 +0800'::TIMESTAMP_TZ, " + + "'date': '2023-12-24'::DATE, " + + "'time': '12:34:56'::TIME, " + + "'binary': TO_BINARY('616263', 'HEX'), " + + "'simpleClass': {'string': 'b', 'intValue': 2}" + + "}::OBJECT(" + + "string VARCHAR, " + + "b TINYINT, " + + "s SMALLINT, " + + "i INTEGER, " + + "l BIGINT, " + + "f FLOAT, " + + "d DOUBLE, " + + "bd DOUBLE, " + + "bool BOOLEAN, " + + "timestamp_ltz TIMESTAMP_LTZ, " + + "timestamp_ntz TIMESTAMP_NTZ, " + + "timestamp_tz TIMESTAMP_TZ, " + + "date DATE, " + + "time TIME, " + + "binary BINARY, " + + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + + ")"); ) { + resultSet.next(); + String object = (String) resultSet.getObject(1); + String expected = + "{\n" + + " \"string\": \"a\",\n" + + " \"b\": 1,\n" + + " \"s\": 2,\n" + + " \"i\": 3,\n" + + " \"l\": 4,\n" + + " \"f\": 1.100000000000000e+00,\n" + + " \"d\": 2.200000000000000e+00,\n" + + " \"bd\": 3.300000000000000e+00,\n" + + " \"bool\": true,\n" + + " \"timestamp_ltz\": \"2021-12-22 09:43:44.000 +0100\",\n" + + " \"timestamp_ntz\": \"2021-12-23 09:44:44.000\",\n" + + " \"timestamp_tz\": \"2021-12-24 09:45:45.000 +0800\",\n" + + " \"date\": \"2023-12-24\",\n" + + " \"time\": \"12:34:56\",\n" + + " \"binary\": \"616263\",\n" + + " \"simpleClass\": {\n" + + " \"string\": \"b\",\n" + + " \"intValue\": 2\n" + + " }\n" + + "}"; + 
assertEquals(expected, object); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testThrowingGettingObjectIfTypeWasNotIndicatedAndFormatNativeArrow() + throws SQLException { + Assume.assumeTrue(queryResultFormat == ResultSetFormatType.NATIVE_ARROW); + withFirstRow( + "select {'string':'a'}::OBJECT(string VARCHAR)", + (resultSet) -> { + assertThrows(SQLException.class, () -> resultSet.getObject(1)); + }); + withFirstRow( + "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", + (resultSet) -> { + assertThrows(SQLException.class, () -> resultSet.getObject(1, Map.class)); + }); + } + @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsArrayOfSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); withFirstRow( "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", (resultSet) -> { - SimpleClass[] resultArray = - resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, SimpleClass.class); + StringClass[] resultArray = + resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, StringClass.class); assertEquals("one", resultArray[0].getString()); assertEquals("two", resultArray[1].getString()); assertEquals("three", resultArray[2].getString()); @@ -227,7 +334,6 @@ public void testReturnAsArrayOfSqlData() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(NullableFieldsSqlData.class, NullableFieldsSqlData::new); withFirstRow( "SELECT OBJECT_CONSTRUCT_KEEP_NULL('string', null, 'nullableIntValue', null, 'nullableLongValue', null, " + "'date', null, 'bd', null, 'bytes', null, 'longValue', null)" @@ 
-250,16 +356,15 @@ public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullsForAllTpesInSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); try (Connection connection = init(); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); try (ResultSet resultSet = statement.executeQuery( "SELECT OBJECT_CONSTRUCT_KEEP_NULL('string', null, 'b', null, 's', null, 'i', null, 'l', null, 'f', null,'d', null, 'bd', null, 'bool', null," - + " 'timestamp_ltz', null, 'timestamp_ntz', null, 'timestamp_tz', null, 'date', null, 'time', null, 'binary', null, 'simpleClass', null)" + + " 'timestamp_ltz', null, 'timestamp_ntz', null, 'timestamp_tz', null, 'date', null, 'time', null, 'binary', null, 'StringClass', null)" + "::OBJECT(string VARCHAR, b TINYINT, s SMALLINT, i INTEGER, l BIGINT, f FLOAT, d DOUBLE, bd DOUBLE, bool BOOLEAN, timestamp_ltz TIMESTAMP_LTZ, " - + "timestamp_ntz TIMESTAMP_NTZ, timestamp_tz TIMESTAMP_TZ, date DATE, time TIME, binary BINARY, simpleClass OBJECT(string VARCHAR))"); ) { + + "timestamp_ntz TIMESTAMP_NTZ, timestamp_tz TIMESTAMP_TZ, date DATE, time TIME, binary BINARY, StringClass OBJECT(string VARCHAR))"); ) { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); assertNull(object.getString()); @@ -368,27 +473,39 @@ public void testReturnAsListOfDouble() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMap() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { - Map map = - 
resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertEquals("one", map.get("x").getString()); assertEquals("two", map.get("y").getString()); assertEquals("three", map.get("z").getString()); }); } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnAsMapByGetObject() throws SQLException { + Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + withFirstRow( + "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", + (resultSet) -> { + Map> map = resultSet.getObject(1, Map.class); + assertEquals("one", map.get("x").get("string")); + assertEquals("two", map.get("y").get("string")); + assertEquals("three", map.get("z").get("string")); + }); + } + @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMapWithNullableValues() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); withFirstRow( "select {'x':{'string':'one'},'y':null,'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { - Map map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertEquals("one", map.get("x").getString()); assertNull(map.get("y")); assertEquals("three", map.get("z").getString()); @@ -398,7 +515,6 @@ public void testReturnAsMapWithNullableValues() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullAsObjectOfTypeMap() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> 
{ @@ -411,12 +527,11 @@ public void testReturnNullAsObjectOfTypeMap() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullAsMap() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { - Map map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertNull(map); }); } @@ -521,12 +636,11 @@ public void testReturnAsMapOfBoolean() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsList() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); withFirstRow( "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", (resultSet) -> { - List map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, SimpleClass.class); + List map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, StringClass.class); assertEquals("one", map.get(0).getString()); assertEquals("two", map.get(1).getString()); }); @@ -539,7 +653,7 @@ public void testMapStructsFromChunks() throws SQLException { "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))", (resultSet) -> { while (resultSet.next()) { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); + StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); } }); @@ -736,17 +850,17 @@ public void testMapArrayOfArrays() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapNestedStructures() throws SQLException { withFirstRow( - "SELECT {'simpleClass': {'string': 'a'}, " - + "'simpleClasses': 
ARRAY_CONSTRUCT({'string': 'a'}, {'string': 'b'}), " - + "'arrayOfSimpleClasses': ARRAY_CONSTRUCT({'string': 'a'}, {'string': 'b'}), " - + "'mapOfSimpleClasses':{'x':{'string': 'c'}, 'y':{'string': 'd'}}," + "SELECT {'simpleClass': {'string': 'a', 'intValue': 2}, " + + "'simpleClasses': ARRAY_CONSTRUCT({'string': 'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " + + "'arrayOfSimpleClasses': ARRAY_CONSTRUCT({'string': 'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " + + "'mapOfSimpleClasses':{'x':{'string': 'c', 'intValue': 2}, 'y':{'string': 'd', 'intValue': 2}}," + "'texts': ARRAY_CONSTRUCT('string', 'a'), " + "'arrayOfDates': ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD')), " + "'mapOfIntegers':{'x':3, 'y':4}}" - + "::OBJECT(simpleClass OBJECT(string VARCHAR), " - + "simpleClasses ARRAY(OBJECT(string VARCHAR))," - + "arrayOfSimpleClasses ARRAY(OBJECT(string VARCHAR))," - + "mapOfSimpleClasses MAP(VARCHAR, OBJECT(string VARCHAR))," + + "::OBJECT(simpleClass OBJECT(string VARCHAR, intValue INTEGER), " + + "simpleClasses ARRAY(OBJECT(string VARCHAR, intValue INTEGER))," + + "arrayOfSimpleClasses ARRAY(OBJECT(string VARCHAR, intValue INTEGER))," + + "mapOfSimpleClasses MAP(VARCHAR, OBJECT(string VARCHAR, intValue INTEGER))," + "texts ARRAY(VARCHAR)," + "arrayOfDates ARRAY(DATE)," + "mapOfIntegers MAP(VARCHAR, INTEGER))", @@ -755,15 +869,30 @@ public void testMapNestedStructures() throws SQLException { resultSet.getObject(1, NestedStructSqlData.class); ; assertEquals("a", nestedStructSqlData.getSimpleClass().getString()); + assertEquals(Integer.valueOf(2), nestedStructSqlData.getSimpleClass().getIntValue()); assertEquals("a", nestedStructSqlData.getSimpleClassses().get(0).getString()); + assertEquals( + Integer.valueOf(2), nestedStructSqlData.getSimpleClassses().get(0).getIntValue()); assertEquals("b", nestedStructSqlData.getSimpleClassses().get(1).getString()); + assertEquals( + Integer.valueOf(2), 
nestedStructSqlData.getSimpleClassses().get(1).getIntValue()); assertEquals("a", nestedStructSqlData.getArrayOfSimpleClasses()[0].getString()); + assertEquals( + Integer.valueOf(2), nestedStructSqlData.getArrayOfSimpleClasses()[0].getIntValue()); assertEquals("b", nestedStructSqlData.getArrayOfSimpleClasses()[1].getString()); + assertEquals( + Integer.valueOf(2), nestedStructSqlData.getArrayOfSimpleClasses()[1].getIntValue()); assertEquals("c", nestedStructSqlData.getMapOfSimpleClasses().get("x").getString()); + assertEquals( + Integer.valueOf(2), + nestedStructSqlData.getMapOfSimpleClasses().get("x").getIntValue()); assertEquals("d", nestedStructSqlData.getMapOfSimpleClasses().get("y").getString()); + assertEquals( + Integer.valueOf(2), + nestedStructSqlData.getMapOfSimpleClasses().get("y").getIntValue()); assertEquals("string", nestedStructSqlData.getTexts().get(0)); assertEquals("a", nestedStructSqlData.getTexts().get(1)); @@ -834,16 +963,4 @@ private void withFirstRow(String sqlText, ThrowingConsumer parameters = getConnectionParameters(); String testUser = parameters.get("user"); - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("use role accountadmin"); - String pathfile = getFullPathFileInResource("rsa_key.pub"); - String pubKey = new String(Files.readAllBytes(Paths.get(pathfile))); - pubKey = pubKey.replace("-----BEGIN PUBLIC KEY-----", ""); - pubKey = pubKey.replace("-----END PUBLIC KEY-----", ""); - statement.execute(String.format("alter user %s set rsa_public_key='%s'", testUser, pubKey)); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + String pathfile = getFullPathFileInResource("rsa_key.pub"); + String pubKey = new String(Files.readAllBytes(Paths.get(pathfile))); + pubKey = pubKey.replace("-----BEGIN PUBLIC KEY-----", ""); + pubKey = pubKey.replace("-----END 
PUBLIC KEY-----", ""); + statement.execute(String.format("alter user %s set rsa_public_key='%s'", testUser, pubKey)); + } } // Helper function to create a sessionless telemetry using OAuth @@ -279,26 +279,28 @@ private TelemetryClient createOAuthSessionlessTelemetry() // Helper function to set up and get OAuth token private String getOAuthToken() throws SQLException { Map parameters = getConnectionParameters(); - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("use role accountadmin"); - statement.execute( - "create or replace security integration telemetry_oauth_integration\n" - + " type=oauth\n" - + " oauth_client=CUSTOM\n" - + " oauth_client_type=CONFIDENTIAL\n" - + " oauth_redirect_uri='https://localhost.com/oauth'\n" - + " oauth_issue_refresh_tokens=true\n" - + " enabled=true oauth_refresh_token_validity=86400;"); - String role = parameters.get("role"); - ResultSet resultSet = - statement.executeQuery( - "select system$it('create_oauth_access_token', 'TELEMETRY_OAUTH_INTEGRATION', '" - + role - + "')"); - resultSet.next(); - String token = resultSet.getString(1); - connection.close(); + String token = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute( + "create or replace security integration telemetry_oauth_integration\n" + + " type=oauth\n" + + " oauth_client=CUSTOM\n" + + " oauth_client_type=CONFIDENTIAL\n" + + " oauth_redirect_uri='https://localhost.com/oauth'\n" + + " oauth_issue_refresh_tokens=true\n" + + " enabled=true oauth_refresh_token_validity=86400;"); + String role = parameters.get("role"); + try (ResultSet resultSet = + statement.executeQuery( + "select system$it('create_oauth_access_token', 'TELEMETRY_OAUTH_INTEGRATION', '" + + role + + "')")) { + assertTrue(resultSet.next()); + token = resultSet.getString(1); + } + } return token; } } diff --git 
a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java index 573579e38..86f8caf5a 100644 --- a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java +++ b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java @@ -6,6 +6,7 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -31,23 +32,23 @@ public class FlatfileReadMultithreadIT { @BeforeClass public static void setUpClass() throws Throwable { - Connection testConnection = AbstractDriverIT.getConnection(); - // NOTE: the stage object must be created right after the connection - // because the Loader API assumes the stage object exists in the default - // namespace of the connection. - testConnection - .createStatement() - .execute(String.format("CREATE OR REPLACE STAGE %s", TARGET_STAGE)); - TARGET_SCHEMA = testConnection.getSchema(); - TARGET_DB = testConnection.getCatalog(); + try (Connection testConnection = AbstractDriverIT.getConnection(); + // NOTE: the stage object must be created right after the connection + // because the Loader API assumes the stage object exists in the default + // namespace of the connection. 
+ Statement statement = testConnection.createStatement()) { + statement.execute(String.format("CREATE OR REPLACE STAGE %s", TARGET_STAGE)); + TARGET_SCHEMA = testConnection.getSchema(); + TARGET_DB = testConnection.getCatalog(); + } } @AfterClass public static void tearDownClass() throws Throwable { - Connection testConnection = AbstractDriverIT.getConnection(); - testConnection - .createStatement() - .execute(String.format("DROP STAGE IF EXISTS %s", TARGET_STAGE)); + try (Connection testConnection = AbstractDriverIT.getConnection(); + Statement statement = testConnection.createStatement()) { + statement.execute(String.format("DROP STAGE IF EXISTS %s", TARGET_STAGE)); + } } /** @@ -58,220 +59,215 @@ public static void tearDownClass() throws Throwable { @Test public void testIssueSimpleDateFormat() throws Throwable { final String targetTable = "TABLE_ISSUE_SIMPLEDATEFORMAT"; - Connection testConnection = AbstractDriverIT.getConnection(); - testConnection - .createStatement() - .execute( + try (Connection testConnection = AbstractDriverIT.getConnection(); + Statement statement = testConnection.createStatement()) { + try { + statement.execute( String.format( "CREATE OR REPLACE TABLE %s.%s.%s (" + "ID int, " + "C1 timestamp)", TARGET_DB, TARGET_SCHEMA, targetTable)); - try { - Thread t1 = - new Thread( - new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); - Thread t2 = - new Thread( - new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); - - t1.start(); - t2.start(); - t1.join(); - t2.join(); - ResultSet rs = - testConnection - .createStatement() - .executeQuery( - String.format( - "select count(*) from %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable)); - rs.next(); - assertThat("total number of records", rs.getInt(1), equalTo(NUM_RECORDS * 2)); - - } finally { - testConnection - .createStatement() - .execute( - String.format( - "DROP TABLE IF EXISTS %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable)); + 
Thread t1 = + new Thread( + new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); + Thread t2 = + new Thread( + new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); + + t1.start(); + t2.start(); + t1.join(); + t2.join(); + try (ResultSet rs = + statement.executeQuery( + String.format( + "select count(*) from %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable))) { + rs.next(); + assertThat("total number of records", rs.getInt(1), equalTo(NUM_RECORDS * 2)); + } + + } finally { + statement.execute( + String.format("DROP TABLE IF EXISTS %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable)); + } } } -} -class FlatfileRead implements Runnable { - private final int totalRows; - private final String dbName; - private final String schemaName; - private final String tableName; - private final String stageName; - - FlatfileRead( - int totalRows, String dbName, String schemaName, String stageName, String tableName) { - this.totalRows = totalRows; - this.dbName = dbName; - this.schemaName = schemaName; - this.stageName = stageName; - this.tableName = tableName; - } - - @Override - public void run() { - Connection testConnection = null; - Connection putConnection = null; - try { - testConnection = AbstractDriverIT.getConnection(); - putConnection = AbstractDriverIT.getConnection(); - } catch (SQLException e) { - e.printStackTrace(); + class FlatfileRead implements Runnable { + private final int totalRows; + private final String dbName; + private final String schemaName; + private final String tableName; + private final String stageName; + + FlatfileRead( + int totalRows, String dbName, String schemaName, String stageName, String tableName) { + this.totalRows = totalRows; + this.dbName = dbName; + this.schemaName = schemaName; + this.stageName = stageName; + this.tableName = tableName; } - ResultListener _resultListener = new ResultListener(); - - // init properties - Map prop = new HashMap<>(); - 
prop.put(LoaderProperty.tableName, this.tableName); - prop.put(LoaderProperty.schemaName, this.schemaName); - prop.put(LoaderProperty.databaseName, this.dbName); - prop.put(LoaderProperty.remoteStage, this.stageName); - prop.put(LoaderProperty.operation, Operation.INSERT); - - StreamLoader underTest = - (StreamLoader) LoaderFactory.createLoader(prop, putConnection, testConnection); - underTest.setProperty(LoaderProperty.startTransaction, true); - underTest.setProperty(LoaderProperty.truncateTable, false); - - underTest.setProperty(LoaderProperty.columns, Arrays.asList("ID", "C1")); - - underTest.setListener(_resultListener); - underTest.start(); - - Random rnd = new Random(); - for (int i = 0; i < this.totalRows; ++i) { - Object[] row = new Object[2]; - row[0] = i; - // random timestamp data - long ms = -946771200000L + (Math.abs(rnd.nextLong()) % (70L * 365 * 24 * 60 * 60 * 1000)); - row[1] = new Date(ms); - underTest.submitRow(row); + @Override + public void run() { + try (Connection testConnection = AbstractDriverIT.getConnection(); + Connection putConnection = AbstractDriverIT.getConnection()) { + + ResultListener _resultListener = new ResultListener(); + + // init properties + Map prop = new HashMap<>(); + prop.put(LoaderProperty.tableName, this.tableName); + prop.put(LoaderProperty.schemaName, this.schemaName); + prop.put(LoaderProperty.databaseName, this.dbName); + prop.put(LoaderProperty.remoteStage, this.stageName); + prop.put(LoaderProperty.operation, Operation.INSERT); + + StreamLoader underTest = + (StreamLoader) LoaderFactory.createLoader(prop, putConnection, testConnection); + underTest.setProperty(LoaderProperty.startTransaction, true); + underTest.setProperty(LoaderProperty.truncateTable, false); + + underTest.setProperty(LoaderProperty.columns, Arrays.asList("ID", "C1")); + + underTest.setListener(_resultListener); + underTest.start(); + + Random rnd = new Random(); + for (int i = 0; i < this.totalRows; ++i) { + Object[] row = new Object[2]; + 
row[0] = i; + // random timestamp data + long ms = -946771200000L + (Math.abs(rnd.nextLong()) % (70L * 365 * 24 * 60 * 60 * 1000)); + row[1] = new Date(ms); + underTest.submitRow(row); + } + + try { + underTest.finish(); + } catch (Exception e) { + e.printStackTrace(); + } + underTest.close(); + assertThat("must be no error", _resultListener.getErrorCount(), equalTo(0)); + assertThat( + "total number of rows", + _resultListener.getSubmittedRowCount(), + equalTo(this.totalRows)); + } catch (SQLException e) { + e.printStackTrace(); + } } - try { - underTest.finish(); - } catch (Exception e) { - e.printStackTrace(); + class ResultListener implements LoadResultListener { + + private final List errors = new ArrayList<>(); + + private final AtomicInteger errorCount = new AtomicInteger(0); + private final AtomicInteger errorRecordCount = new AtomicInteger(0); + + private final AtomicInteger counter = new AtomicInteger(0); + private final AtomicInteger processed = new AtomicInteger(0); + private final AtomicInteger deleted = new AtomicInteger(0); + private final AtomicInteger updated = new AtomicInteger(0); + private final AtomicInteger submittedRowCount = new AtomicInteger(0); + + private Object[] lastRecord = null; + + public boolean throwOnError = false; // should not trigger rollback + + @Override + public boolean needErrors() { + return true; + } + + @Override + public boolean needSuccessRecords() { + return true; + } + + @Override + public void addError(LoadingError error) { + errors.add(error); + } + + @Override + public boolean throwOnError() { + return throwOnError; + } + + public List getErrors() { + return errors; + } + + @Override + public void recordProvided(Operation op, Object[] record) { + lastRecord = record; + } + + @Override + public void addProcessedRecordCount(Operation op, int i) { + processed.addAndGet(i); + } + + @Override + public void addOperationRecordCount(Operation op, int i) { + counter.addAndGet(i); + if (op == Operation.DELETE) { + 
deleted.addAndGet(i); + } else if (op == Operation.MODIFY || op == Operation.UPSERT) { + updated.addAndGet(i); + } + } + + public Object[] getLastRecord() { + return lastRecord; + } + + @Override + public int getErrorCount() { + return errorCount.get(); + } + + @Override + public int getErrorRecordCount() { + return errorRecordCount.get(); + } + + @Override + public void resetErrorCount() { + errorCount.set(0); + } + + @Override + public void resetErrorRecordCount() { + errorRecordCount.set(0); + } + + @Override + public void addErrorCount(int count) { + errorCount.addAndGet(count); + } + + @Override + public void addErrorRecordCount(int count) { + errorRecordCount.addAndGet(count); + } + + @Override + public void resetSubmittedRowCount() { + submittedRowCount.set(0); + } + + @Override + public void addSubmittedRowCount(int count) { + submittedRowCount.addAndGet(count); + } + + @Override + public int getSubmittedRowCount() { + return submittedRowCount.get(); + } } - underTest.close(); - assertThat("must be no error", _resultListener.getErrorCount(), equalTo(0)); - assertThat( - "total number of rows", _resultListener.getSubmittedRowCount(), equalTo(this.totalRows)); - } -} - -class ResultListener implements LoadResultListener { - - private final List errors = new ArrayList<>(); - - private final AtomicInteger errorCount = new AtomicInteger(0); - private final AtomicInteger errorRecordCount = new AtomicInteger(0); - - private final AtomicInteger counter = new AtomicInteger(0); - private final AtomicInteger processed = new AtomicInteger(0); - private final AtomicInteger deleted = new AtomicInteger(0); - private final AtomicInteger updated = new AtomicInteger(0); - private final AtomicInteger submittedRowCount = new AtomicInteger(0); - - private Object[] lastRecord = null; - - public boolean throwOnError = false; // should not trigger rollback - - @Override - public boolean needErrors() { - return true; - } - - @Override - public boolean needSuccessRecords() { - 
return true; - } - - @Override - public void addError(LoadingError error) { - errors.add(error); - } - - @Override - public boolean throwOnError() { - return throwOnError; - } - - public List getErrors() { - return errors; - } - - @Override - public void recordProvided(Operation op, Object[] record) { - lastRecord = record; - } - - @Override - public void addProcessedRecordCount(Operation op, int i) { - processed.addAndGet(i); - } - - @Override - public void addOperationRecordCount(Operation op, int i) { - counter.addAndGet(i); - if (op == Operation.DELETE) { - deleted.addAndGet(i); - } else if (op == Operation.MODIFY || op == Operation.UPSERT) { - updated.addAndGet(i); - } - } - - public Object[] getLastRecord() { - return lastRecord; - } - - @Override - public int getErrorCount() { - return errorCount.get(); - } - - @Override - public int getErrorRecordCount() { - return errorRecordCount.get(); - } - - @Override - public void resetErrorCount() { - errorCount.set(0); - } - - @Override - public void resetErrorRecordCount() { - errorRecordCount.set(0); - } - - @Override - public void addErrorCount(int count) { - errorCount.addAndGet(count); - } - - @Override - public void addErrorRecordCount(int count) { - errorRecordCount.addAndGet(count); - } - - @Override - public void resetSubmittedRowCount() { - submittedRowCount.set(0); - } - - @Override - public void addSubmittedRowCount(int count) { - submittedRowCount.addAndGet(count); - } - - @Override - public int getSubmittedRowCount() { - return submittedRowCount.get(); } } diff --git a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java index 6ac489300..e10a606d4 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java @@ -4,10 +4,12 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.sql.PreparedStatement; import java.sql.ResultSet; +import java.sql.Statement; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -52,29 +54,30 @@ public void testLoaderUpsert() throws Exception { assertThat("error count", listener.getErrorCount(), equalTo(0)); assertThat("error record count", listener.getErrorRecordCount(), equalTo(0)); - ResultSet rs = + try (ResultSet rs = testConnection .createStatement() .executeQuery( String.format( - "SELECT C1, C4, C3" + " FROM \"%s\" WHERE ID=10001", TARGET_TABLE_NAME)); + "SELECT C1, C4, C3" + " FROM \"%s\" WHERE ID=10001", TARGET_TABLE_NAME))) { - rs.next(); - assertThat("C1 is not correct", rs.getString("C1"), equalTo("inserted\\,")); + assertTrue(rs.next()); + assertThat("C1 is not correct", rs.getString("C1"), equalTo("inserted\\,")); - long l = rs.getTimestamp("C4").getTime(); - assertThat("C4 is not correct", l, equalTo(d.getTime())); - assertThat( - "C3 is not correct", Double.toHexString((rs.getDouble("C3"))), equalTo("0x1.044ccp4")); - - rs = + long l = rs.getTimestamp("C4").getTime(); + assertThat("C4 is not correct", l, equalTo(d.getTime())); + assertThat( + "C3 is not correct", Double.toHexString((rs.getDouble("C3"))), equalTo("0x1.044ccp4")); + } + try (ResultSet rs = testConnection .createStatement() .executeQuery( - String.format("SELECT C1 AS N" + " FROM \"%s\" WHERE ID=39", TARGET_TABLE_NAME)); + String.format("SELECT C1 AS N" + " FROM \"%s\" WHERE ID=39", TARGET_TABLE_NAME))) { - rs.next(); - assertThat("N is not correct", rs.getString("N"), equalTo("modified")); + assertTrue(rs.next()); + assertThat("N is not correct", rs.getString("N"), equalTo("modified")); + } } @Test @@ -82,76 +85,79 @@ public void testLoaderUpsertWithErrorAndRollback() throws Exception { TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); 
tdcb.populate(); - PreparedStatement pstmt = + try (PreparedStatement pstmt = testConnection.prepareStatement( String.format( "INSERT INTO \"%s\"(ID,C1,C2,C3,C4,C5)" + " SELECT column1, column2, column3, column4," + " column5, parse_json(column6)" + " FROM VALUES(?,?,?,?,?,?)", - TARGET_TABLE_NAME)); - pstmt.setInt(1, 10001); - pstmt.setString(2, "inserted\\,"); - pstmt.setString(3, "something"); - pstmt.setDouble(4, 0x4.11_33p2); - pstmt.setDate(5, new java.sql.Date(new Date().getTime())); - pstmt.setObject(6, "{}"); - pstmt.execute(); - testConnection.commit(); - - TestDataConfigBuilder tdcbUpsert = new TestDataConfigBuilder(testConnection, putConnection); - tdcbUpsert - .setOperation(Operation.UPSERT) - .setTruncateTable(false) - .setStartTransaction(true) - .setPreserveStageFile(true) - .setColumns(Arrays.asList("ID", "C1", "C2", "C3", "C4", "C5")) - .setKeys(Collections.singletonList("ID")); - StreamLoader loader = tdcbUpsert.getStreamLoader(); - TestDataConfigBuilder.ResultListener listener = tdcbUpsert.getListener(); - listener.throwOnError = true; // should trigger rollback - loader.start(); - try { - - Object[] noerr = new Object[] {"10001", "inserted", "something", "42", new Date(), "{}"}; - loader.submitRow(noerr); - - Object[] err = new Object[] {"10002-", "inserted", "something", "42-", new Date(), "{}"}; - loader.submitRow(err); - - loader.finish(); - - fail("Test must raise Loader.DataError exception"); - } catch (Loader.DataError e) { - // we are good - assertThat( - "error message", - e.getMessage(), - allOf(containsString("10002-"), containsString("not recognized"))); + TARGET_TABLE_NAME))) { + pstmt.setInt(1, 10001); + pstmt.setString(2, "inserted\\,"); + pstmt.setString(3, "something"); + pstmt.setDouble(4, 0x4.11_33p2); + pstmt.setDate(5, new java.sql.Date(new Date().getTime())); + pstmt.setObject(6, "{}"); + pstmt.execute(); + testConnection.commit(); + + TestDataConfigBuilder tdcbUpsert = new TestDataConfigBuilder(testConnection, 
putConnection); + tdcbUpsert + .setOperation(Operation.UPSERT) + .setTruncateTable(false) + .setStartTransaction(true) + .setPreserveStageFile(true) + .setColumns(Arrays.asList("ID", "C1", "C2", "C3", "C4", "C5")) + .setKeys(Collections.singletonList("ID")); + StreamLoader loader = tdcbUpsert.getStreamLoader(); + TestDataConfigBuilder.ResultListener listener = tdcbUpsert.getListener(); + listener.throwOnError = true; // should trigger rollback + loader.start(); + try { + + Object[] noerr = new Object[] {"10001", "inserted", "something", "42", new Date(), "{}"}; + loader.submitRow(noerr); + + Object[] err = new Object[] {"10002-", "inserted", "something", "42-", new Date(), "{}"}; + loader.submitRow(err); + + loader.finish(); + + fail("Test must raise Loader.DataError exception"); + } catch (Loader.DataError e) { + // we are good + assertThat( + "error message", + e.getMessage(), + allOf(containsString("10002-"), containsString("not recognized"))); + } + + assertThat("processed", listener.processed.get(), equalTo(0)); + assertThat("submitted row", listener.getSubmittedRowCount(), equalTo(2)); + assertThat("updated/inserted", listener.updated.get(), equalTo(0)); + assertThat("error count", listener.getErrorCount(), equalTo(2)); + assertThat("error record count", listener.getErrorRecordCount(), equalTo(1)); + + try (ResultSet rs = + testConnection + .createStatement() + .executeQuery(String.format("SELECT COUNT(*) AS N FROM \"%s\"", TARGET_TABLE_NAME))) { + assertTrue(rs.next()); + assertThat("N", rs.getInt("N"), equalTo(10001)); + } + try (ResultSet rs = + testConnection + .createStatement() + .executeQuery( + String.format("SELECT C3 FROM \"%s\" WHERE id=10001", TARGET_TABLE_NAME))) { + assertTrue(rs.next()); + assertThat( + "C3. 
No commit should happen", + Double.toHexString((rs.getDouble("C3"))), + equalTo("0x1.044ccp4")); + } } - - assertThat("processed", listener.processed.get(), equalTo(0)); - assertThat("submitted row", listener.getSubmittedRowCount(), equalTo(2)); - assertThat("updated/inserted", listener.updated.get(), equalTo(0)); - assertThat("error count", listener.getErrorCount(), equalTo(2)); - assertThat("error record count", listener.getErrorRecordCount(), equalTo(1)); - - ResultSet rs = - testConnection - .createStatement() - .executeQuery(String.format("SELECT COUNT(*) AS N FROM \"%s\"", TARGET_TABLE_NAME)); - rs.next(); - assertThat("N", rs.getInt("N"), equalTo(10001)); - - rs = - testConnection - .createStatement() - .executeQuery(String.format("SELECT C3 FROM \"%s\" WHERE id=10001", TARGET_TABLE_NAME)); - rs.next(); - assertThat( - "C3. No commit should happen", - Double.toHexString((rs.getDouble("C3"))), - equalTo("0x1.044ccp4")); } /** @@ -163,44 +169,43 @@ public void testLoaderUpsertWithErrorAndRollback() throws Exception { @Test public void testKeyClusteringTable() throws Exception { String targetTableName = "CLUSTERED_TABLE"; + try (Statement statement = testConnection.createStatement()) { + // create table with spaces in column names + statement.execute( + String.format( + "CREATE OR REPLACE TABLE \"%s\" (" + + "ID int, " + + "\"Column1\" varchar(255), " + + "\"Column2\" varchar(255))", + targetTableName)); + // Add the clustering key; all columns clustered together + statement.execute( + String.format( + "alter table %s cluster by (ID, \"Column1\", \"Column2\")", targetTableName)); + TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); + // Only submit data for 2 columns out of 3 in the table so that 1 column will be dropped in + // temp + // table + tdcb.setTableName(targetTableName).setColumns(Arrays.asList("ID", "Column1")); + StreamLoader loader = tdcb.getStreamLoader(); + loader.start(); + + for (int i = 0; i < 5; ++i) { + 
Object[] row = new Object[] {i, "foo_" + i}; + loader.submitRow(row); + } + loader.finish(); - // create table with spaces in column names - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE \"%s\" (" - + "ID int, " - + "\"Column1\" varchar(255), " - + "\"Column2\" varchar(255))", - targetTableName)); - // Add the clustering key; all columns clustered together - testConnection - .createStatement() - .execute( - String.format( - "alter table %s cluster by (ID, \"Column1\", \"Column2\")", targetTableName)); - TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); - // Only submit data for 2 columns out of 3 in the table so that 1 column will be dropped in temp - // table - tdcb.setTableName(targetTableName).setColumns(Arrays.asList("ID", "Column1")); - StreamLoader loader = tdcb.getStreamLoader(); - loader.start(); + try (ResultSet rs = + testConnection + .createStatement() + .executeQuery( + String.format("SELECT * FROM \"%s\" ORDER BY \"Column1\"", targetTableName))) { - for (int i = 0; i < 5; ++i) { - Object[] row = new Object[] {i, "foo_" + i}; - loader.submitRow(row); + assertTrue(rs.next()); + assertThat("The first id", rs.getInt(1), equalTo(0)); + assertThat("The first str", rs.getString(2), equalTo("foo_0")); + } } - loader.finish(); - - ResultSet rs = - testConnection - .createStatement() - .executeQuery( - String.format("SELECT * FROM \"%s\" ORDER BY \"Column1\"", targetTableName)); - - rs.next(); - assertThat("The first id", rs.getInt(1), equalTo(0)); - assertThat("The first str", rs.getString(2), equalTo("foo_0")); } } diff --git a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java index d616598d0..859533686 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java @@ -5,8 +5,10 @@ import static 
org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import java.sql.ResultSet; +import java.sql.Statement; import java.util.List; import net.snowflake.client.category.TestCategoryLoader; import org.junit.Test; @@ -17,48 +19,46 @@ public class LoaderMultipleBatchIT extends LoaderBase { @Test public void testLoaderMultipleBatch() throws Exception { String refTableName = "LOADER_TEST_TABLE_REF"; - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE \"%s\" (" - + "ID int, " - + "C1 varchar(255), " - + "C2 varchar(255) DEFAULT 'X', " - + "C3 double, " - + "C4 timestamp, " - + "C5 variant)", - refTableName)); + try (Statement statement = testConnection.createStatement()) { + statement.execute( + String.format( + "CREATE OR REPLACE TABLE \"%s\" (" + + "ID int, " + + "C1 varchar(255), " + + "C2 varchar(255) DEFAULT 'X', " + + "C3 double, " + + "C4 timestamp, " + + "C5 variant)", + refTableName)); - try { - TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); - List dataSet = tdcb.populateReturnData(); + try { + TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); + List dataSet = tdcb.populateReturnData(); - TestDataConfigBuilder tdcbRef = new TestDataConfigBuilder(testConnection, putConnection); - tdcbRef - .setDataSet(dataSet) - .setTableName(refTableName) - .setCsvFileBucketSize(2) - .setCsvFileSize(30000) - .populate(); + TestDataConfigBuilder tdcbRef = new TestDataConfigBuilder(testConnection, putConnection); + tdcbRef + .setDataSet(dataSet) + .setTableName(refTableName) + .setCsvFileBucketSize(2) + .setCsvFileSize(30000) + .populate(); - ResultSet rsReference = - testConnection - .createStatement() - .executeQuery(String.format("SELECT hash_agg(*) FROM \"%s\"", TARGET_TABLE_NAME)); - rsReference.next(); - long hashValueReference = rsReference.getLong(1); - ResultSet rsTarget = - 
testConnection - .createStatement() - .executeQuery(String.format("SELECT hash_agg(*) FROM \"%s\"", refTableName)); - rsTarget.next(); - long hashValueTarget = rsTarget.getLong(1); - assertThat("hash values", hashValueTarget, equalTo(hashValueReference)); - } finally { - testConnection - .createStatement() - .execute(String.format("DROP TABLE IF EXISTS %s", refTableName)); + try (ResultSet rsReference = + statement.executeQuery( + String.format("SELECT hash_agg(*) FROM \"%s\"", TARGET_TABLE_NAME))) { + assertTrue(rsReference.next()); + long hashValueReference = rsReference.getLong(1); + try (ResultSet rsTarget = + statement.executeQuery( + String.format("SELECT hash_agg(*) FROM \"%s\"", refTableName))) { + assertTrue(rsTarget.next()); + long hashValueTarget = rsTarget.getLong(1); + assertThat("hash values", hashValueTarget, equalTo(hashValueReference)); + } + } + } finally { + statement.execute(String.format("DROP TABLE IF EXISTS %s", refTableName)); + } } } } diff --git a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java index 352a3a3d1..790249e96 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java @@ -5,8 +5,10 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import java.sql.ResultSet; +import java.sql.Statement; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Arrays; @@ -23,44 +25,44 @@ public void testLoadTimestamp() throws Exception { final String targetTableName = "LOADER_TEST_TIMESTAMP"; // create table including TIMESTAMP_NTZ - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE %s (" - + "ID int, " - + "C1 varchar(255), " - + "C2 timestamp_ntz)", - targetTableName)); - - // Binding java.util.Date, Timestamp and 
java.sql.Date with TIMESTAMP - // datatype. No java.sql.Time binding is supported for TIMESTAMP. - // For java.sql.Time, the target data type must be TIME. - Object[] testData = - new Object[] { - new Date(), - java.sql.Timestamp.valueOf("0001-01-01 08:00:00"), - java.sql.Date.valueOf("2001-01-02") - }; - - for (int i = 0; i < 2; ++i) { - boolean useLocalTimezone = false; - TimeZone originalTimeZone; - TimeZone targetTimeZone; - - if (i == 0) { - useLocalTimezone = true; - originalTimeZone = TimeZone.getDefault(); - targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); - } else { - originalTimeZone = TimeZone.getTimeZone("UTC"); - targetTimeZone = TimeZone.getTimeZone("UTC"); - } - - // input timestamp associated with the target timezone, America/Los_Angeles - for (Object testTs : testData) { - _testLoadTimestamp( - targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, false); + try (Statement statement = testConnection.createStatement()) { + statement.execute( + String.format( + "CREATE OR REPLACE TABLE %s (" + + "ID int, " + + "C1 varchar(255), " + + "C2 timestamp_ntz)", + targetTableName)); + + // Binding java.util.Date, Timestamp and java.sql.Date with TIMESTAMP + // datatype. No java.sql.Time binding is supported for TIMESTAMP. + // For java.sql.Time, the target data type must be TIME. 
+ Object[] testData = + new Object[] { + new Date(), + java.sql.Timestamp.valueOf("0001-01-01 08:00:00"), + java.sql.Date.valueOf("2001-01-02") + }; + + for (int i = 0; i < 2; ++i) { + boolean useLocalTimezone = false; + TimeZone originalTimeZone; + TimeZone targetTimeZone; + + if (i == 0) { + useLocalTimezone = true; + originalTimeZone = TimeZone.getDefault(); + targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); + } else { + originalTimeZone = TimeZone.getTimeZone("UTC"); + targetTimeZone = TimeZone.getTimeZone("UTC"); + } + + // input timestamp associated with the target timezone, America/Los_Angeles + for (Object testTs : testData) { + _testLoadTimestamp( + targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, false); + } } } } @@ -97,26 +99,27 @@ private void _testLoadTimestamp( assertThat("Loader detected errors", listener.getErrorCount(), equalTo(0)); - ResultSet rs = + try (ResultSet rs = testConnection .createStatement() - .executeQuery(String.format("SELECT * FROM \"%s\"", targetTableName)); + .executeQuery(String.format("SELECT * FROM \"%s\"", targetTableName))) { - rs.next(); - Timestamp ts = rs.getTimestamp("C2"); + assertTrue(rs.next()); + Timestamp ts = rs.getTimestamp("C2"); - // format the input TS with the target timezone - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); - sdf.setTimeZone(targetTimeZone); - String currentTsStr = sdf.format(testTs); + // format the input TS with the target timezone + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); + sdf.setTimeZone(targetTimeZone); + String currentTsStr = sdf.format(testTs); - // format the retrieved TS with the original timezone - sdf.setTimeZone(originalTimeZone); - String retrievedTsStr = sdf.format(new Date(ts.getTime())); + // format the retrieved TS with the original timezone + sdf.setTimeZone(originalTimeZone); + String retrievedTsStr = sdf.format(new Date(ts.getTime())); - // They must be identical. 
- assertThat( - "Input and retrieved timestamp are different", retrievedTsStr, equalTo(currentTsStr)); + // They must be identical. + assertThat( + "Input and retrieved timestamp are different", retrievedTsStr, equalTo(currentTsStr)); + } } @Test @@ -124,45 +127,46 @@ public void testLoadTimestampV1() throws Exception { final String targetTableName = "LOADER_TEST_TIMESTAMP_V1"; // create table including TIMESTAMP_NTZ - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE %s (" - + "ID int, " - + "C1 varchar(255), " - + "C2 timestamp_ntz)", - targetTableName)); - - // Binding java.sql.Time with TIMESTAMP is supported only if - // mapTimeToTimestamp flag is enabled. This is required to keep the - // old behavior of Informatica V1 connector. - Object[] testData = - new Object[] { - // full timestamp in Time object. Interestingly all values are - // preserved. - new java.sql.Time(1502931205000L), java.sql.Time.valueOf("12:34:56") // a basic test case - }; - - for (int i = 0; i < 2; ++i) { - boolean useLocalTimezone; - TimeZone originalTimeZone; - TimeZone targetTimeZone; - - if (i == 0) { - useLocalTimezone = true; - originalTimeZone = TimeZone.getDefault(); - targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); - } else { - useLocalTimezone = false; - originalTimeZone = TimeZone.getTimeZone("UTC"); - targetTimeZone = TimeZone.getTimeZone("UTC"); - } - - // input timestamp associated with the target timezone, America/Los_Angeles - for (Object testTs : testData) { - _testLoadTimestamp( - targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, true); + try (Statement statement = testConnection.createStatement()) { + statement.execute( + String.format( + "CREATE OR REPLACE TABLE %s (" + + "ID int, " + + "C1 varchar(255), " + + "C2 timestamp_ntz)", + targetTableName)); + + // Binding java.sql.Time with TIMESTAMP is supported only if + // mapTimeToTimestamp flag is enabled. 
This is required to keep the + // old behavior of Informatica V1 connector. + Object[] testData = + new Object[] { + // full timestamp in Time object. Interestingly all values are + // preserved. + new java.sql.Time(1502931205000L), + java.sql.Time.valueOf("12:34:56") // a basic test case + }; + + for (int i = 0; i < 2; ++i) { + boolean useLocalTimezone; + TimeZone originalTimeZone; + TimeZone targetTimeZone; + + if (i == 0) { + useLocalTimezone = true; + originalTimeZone = TimeZone.getDefault(); + targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); + } else { + useLocalTimezone = false; + originalTimeZone = TimeZone.getTimeZone("UTC"); + targetTimeZone = TimeZone.getTimeZone("UTC"); + } + + // input timestamp associated with the target timezone, America/Los_Angeles + for (Object testTs : testData) { + _testLoadTimestamp( + targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, true); + } } } } diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java index faa809af4..232da8451 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java @@ -1,5 +1,6 @@ package net.snowflake.client.log; +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -12,15 +13,19 @@ import java.nio.file.attribute.PosixFilePermission; import java.sql.Connection; import java.sql.SQLException; +import java.sql.Statement; import java.util.HashSet; import java.util.Properties; import java.util.logging.Level; import net.snowflake.client.AbstractDriverIT; +import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import org.apache.commons.io.FileUtils; import org.junit.Test; public class 
JDK14LoggerWithClientLatestIT extends AbstractDriverIT { + String homePath = systemGetProperty("user.home"); + @Test public void testJDK14LoggingWithClientConfig() { Path configFilePath = Paths.get("config.json"); @@ -29,14 +34,16 @@ public void testJDK14LoggingWithClientConfig() { Files.write(configFilePath, configJson.getBytes()); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); - Connection connection = getConnection(properties); - connection.createStatement().executeQuery("select 1"); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { + statement.executeQuery("select 1"); - File file = new File("logs/jdbc/"); - assertTrue(file.exists()); + File file = new File("logs/jdbc/"); + assertTrue(file.exists()); - Files.deleteIfExists(configFilePath); - FileUtils.deleteDirectory(new File("logs")); + Files.deleteIfExists(configFilePath); + FileUtils.deleteDirectory(new File("logs")); + } } catch (IOException e) { fail("testJDK14LoggingWithClientConfig failed"); } catch (SQLException e) { @@ -49,8 +56,9 @@ public void testJDK14LoggingWithClientConfigInvalidConfigFilePath() throws SQLEx Path configFilePath = Paths.get("invalid.json"); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); - Connection connection = getConnection(properties); - connection.createStatement().executeQuery("select 1"); + try (Connection connection = getConnection(properties)) { + connection.createStatement().executeQuery("select 1"); + } } @Test @@ -89,4 +97,49 @@ public void testJDK14LoggerWithQuotesInMessage() { logger.debug("Returning column: 12: a: Group b) Hi {Hello 'World' War} cant wait"); JDK14Logger.setLevel(Level.OFF); } + + @Test + public void testJDK14LoggingWithMissingLogPathClientConfig() throws Exception { + Path configFilePath = Paths.get("config.json"); + String configJson = 
"{\"common\":{\"log_level\":\"debug\"}}"; + + Path homeLogPath = Paths.get(homePath, "jdbc"); + Files.write(configFilePath, configJson.getBytes()); + Properties properties = new Properties(); + properties.put("client_config_file", configFilePath.toString()); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { + try { + statement.executeQuery("select 1"); + + File file = new File(homeLogPath.toString()); + assertTrue(file.exists()); + + } finally { + Files.deleteIfExists(configFilePath); + FileUtils.deleteDirectory(new File(homeLogPath.toString())); + } + } + } + + @Test + public void testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig() throws Exception { + System.clearProperty("user.home"); + + Path configFilePath = Paths.get("config.json"); + String configJson = "{\"common\":{\"log_level\":\"debug\"}}"; + Files.write(configFilePath, configJson.getBytes()); + Properties properties = new Properties(); + properties.put("client_config_file", configFilePath.toString()); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { + + fail("testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig failed"); + } catch (SnowflakeSQLLoggedException e) { + // Succeed + } finally { + System.setProperty("user.home", homePath); + Files.deleteIfExists(configFilePath); + } + } } diff --git a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java index 897102e1a..eadd984cc 100644 --- a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java +++ b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java @@ -12,6 +12,7 @@ import java.sql.Connection; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -42,20 +43,21 @@ public void 
testPooledConnection() throws SQLException { TestingConnectionListener listener = new TestingConnectionListener(); pooledConnection.addConnectionEventListener(listener); - Connection connection = pooledConnection.getConnection(); - connection.createStatement().execute("select 1"); + try (Connection connection = pooledConnection.getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("select 1"); - try { - // should fire connection error events - connection.setCatalog("nonexistent_database"); - fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(2043)); - } + try { + // should fire connection error events + connection.setCatalog("nonexistent_database"); + fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(2043)); + } - // should not close underlying physical connection - // and fire connection closed events - connection.close(); + // should not close underlying physical connection + // and fire connection closed events + } List connectionClosedEvents = listener.getConnectionClosedEvents(); List connectionErrorEvents = listener.getConnectionErrorEvents(); @@ -105,9 +107,9 @@ public void testPooledConnectionUsernamePassword() throws SQLException { TestingConnectionListener listener = new TestingConnectionListener(); pooledConnection.addConnectionEventListener(listener); - Connection connection = pooledConnection.getConnection(); - connection.createStatement().execute("select 1"); - connection.close(); + try (Connection connection = pooledConnection.getConnection()) { + connection.createStatement().execute("select 1"); + } pooledConnection.close(); } diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java index ac50f7608..ce93928ac 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java +++ 
b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java @@ -49,5 +49,6 @@ public void testLogicalConnectionAlreadyClosed() throws SQLException { expectConnectionAlreadyClosedException(() -> logicalConnection.setSchema("fakedb")); expectConnectionAlreadyClosedException( () -> logicalConnection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED)); + expectConnectionAlreadyClosedException(() -> logicalConnection.createArrayOf("faketype", null)); } } diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java index 251d0006d..97d50970c 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java @@ -70,8 +70,6 @@ public void testLogicalConnectionFeatureNotSupported() throws SQLException { () -> logicalConnection.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); expectFeatureNotSupportedException( () -> logicalConnection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); - expectFeatureNotSupportedException( - () -> logicalConnection.createArrayOf("fakeType", new Object[] {})); expectFeatureNotSupportedException( () -> logicalConnection.createStruct("fakeType", new Object[] {})); expectFeatureNotSupportedException( diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java index 627f1db31..d25cdb485 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java @@ -6,8 +6,14 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static 
org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.sql.CallableStatement; import java.sql.Clob; @@ -161,7 +167,7 @@ public void testTransactionStatement() throws SQLException { PooledConnection pooledConnection = poolDataSource.getPooledConnection(); try (Connection logicalConnection = pooledConnection.getConnection()) { logicalConnection.setAutoCommit(false); - assert (!logicalConnection.getAutoCommit()); + assertFalse(logicalConnection.getAutoCommit()); logicalConnection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); assertEquals(2, logicalConnection.getTransactionIsolation()); @@ -370,6 +376,77 @@ public void testDatabaseMetaData() throws SQLException { } } + @Test + public void testLogicalConnectionWhenPhysicalConnectionThrowsErrors() throws SQLException { + Connection connection = mock(Connection.class); + SnowflakePooledConnection snowflakePooledConnection = mock(SnowflakePooledConnection.class); + when(snowflakePooledConnection.getPhysicalConnection()).thenReturn(connection); + SQLException sqlException = new SQLException("mocking error"); + when(connection.createStatement()).thenThrow(sqlException); + when(connection.createStatement(1, 2, 3)).thenThrow(sqlException); + + when(connection.prepareStatement("mocksql")).thenThrow(sqlException); + when(connection.prepareCall("mocksql")).thenThrow(sqlException); + when(connection.prepareCall("mocksql", 1, 2, 3)).thenThrow(sqlException); + when(connection.nativeSQL("mocksql")).thenThrow(sqlException); + when(connection.getAutoCommit()).thenThrow(sqlException); + when(connection.getMetaData()).thenThrow(sqlException); + when(connection.isReadOnly()).thenThrow(sqlException); + 
when(connection.getCatalog()).thenThrow(sqlException); + when(connection.getTransactionIsolation()).thenThrow(sqlException); + when(connection.getWarnings()).thenThrow(sqlException); + when(connection.prepareCall("mocksql", 1, 2)).thenThrow(sqlException); + when(connection.getTypeMap()).thenThrow(sqlException); + when(connection.getHoldability()).thenThrow(sqlException); + when(connection.createClob()).thenThrow(sqlException); + when(connection.getClientInfo("mocksql")).thenThrow(sqlException); + when(connection.getClientInfo()).thenThrow(sqlException); + when(connection.createArrayOf("mock", null)).thenThrow(sqlException); + when(connection.getSchema()).thenThrow(sqlException); + when(connection.getNetworkTimeout()).thenThrow(sqlException); + when(connection.isWrapperFor(Connection.class)).thenThrow(sqlException); + + doThrow(sqlException).when(connection).setAutoCommit(false); + doThrow(sqlException).when(connection).commit(); + doThrow(sqlException).when(connection).rollback(); + doThrow(sqlException).when(connection).setReadOnly(false); + doThrow(sqlException).when(connection).clearWarnings(); + doThrow(sqlException).when(connection).setSchema(null); + doThrow(sqlException).when(connection).abort(null); + doThrow(sqlException).when(connection).setNetworkTimeout(null, 1); + + LogicalConnection logicalConnection = new LogicalConnection(snowflakePooledConnection); + + assertThrows(SQLException.class, logicalConnection::createStatement); + assertThrows(SQLException.class, () -> logicalConnection.createStatement(1, 2, 3)); + assertThrows(SQLException.class, () -> logicalConnection.nativeSQL("mocksql")); + assertThrows(SQLException.class, logicalConnection::getAutoCommit); + assertThrows(SQLException.class, logicalConnection::getMetaData); + assertThrows(SQLException.class, logicalConnection::isReadOnly); + assertThrows(SQLException.class, logicalConnection::getCatalog); + assertThrows(SQLException.class, logicalConnection::getTransactionIsolation); + 
assertThrows(SQLException.class, logicalConnection::getWarnings); + assertThrows(SQLException.class, () -> logicalConnection.prepareCall("mocksql")); + assertThrows(SQLException.class, logicalConnection::getTypeMap); + assertThrows(SQLException.class, logicalConnection::getHoldability); + assertThrows(SQLException.class, logicalConnection::createClob); + assertThrows(SQLException.class, () -> logicalConnection.getClientInfo("mocksql")); + assertThrows(SQLException.class, logicalConnection::getClientInfo); + assertThrows(SQLException.class, () -> logicalConnection.createArrayOf("mock", null)); + assertThrows(SQLException.class, logicalConnection::getSchema); + assertThrows(SQLException.class, logicalConnection::getNetworkTimeout); + assertThrows(SQLException.class, () -> logicalConnection.isWrapperFor(Connection.class)); + assertThrows(SQLException.class, () -> logicalConnection.setAutoCommit(false)); + assertThrows(SQLException.class, logicalConnection::rollback); + assertThrows(SQLException.class, () -> logicalConnection.setReadOnly(false)); + assertThrows(SQLException.class, logicalConnection::clearWarnings); + assertThrows(SQLException.class, () -> logicalConnection.setSchema(null)); + assertThrows(SQLException.class, () -> logicalConnection.abort(null)); + assertThrows(SQLException.class, () -> logicalConnection.setNetworkTimeout(null, 1)); + + verify(snowflakePooledConnection, times(26)).fireConnectionErrorEvent(sqlException); + } + private SnowflakeConnectionPoolDataSource setProperties( SnowflakeConnectionPoolDataSource poolDataSource) { poolDataSource.setUrl(properties.get("uri")); diff --git a/src/test/java/net/snowflake/client/util/StopwatchTest.java b/src/test/java/net/snowflake/client/util/StopwatchTest.java new file mode 100644 index 000000000..9e44ce18a --- /dev/null +++ b/src/test/java/net/snowflake/client/util/StopwatchTest.java @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.Test; + +public class StopwatchTest { + Stopwatch stopwatch = new Stopwatch(); + + @Before + public void before() { + stopwatch = new Stopwatch(); + } + + @Test + public void testGetMillisWhenStopped() throws InterruptedException { + stopwatch.start(); + TimeUnit.MILLISECONDS.sleep(20); + stopwatch.stop(); + + assertThat( + stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(500L))); + } + + @Test + public void testGetMillisWithoutStopping() throws InterruptedException { + stopwatch.start(); + TimeUnit.MILLISECONDS.sleep(20); + assertThat( + stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(500L))); + } + + @Test + public void testShouldBeStarted() { + stopwatch.start(); + assertTrue(stopwatch.isStarted()); + } + + @Test + public void testShouldBeStopped() { + assertFalse(stopwatch.isStarted()); + } + + @Test + public void testThrowsExceptionWhenStartedTwice() { + stopwatch.start(); + + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.start()); + + assertTrue(e.getMessage().contains("Stopwatch is already running")); + } + + @Test + public void testThrowsExceptionWhenStoppedTwice() { + stopwatch.start(); + stopwatch.stop(); + + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.stop()); + + assertTrue(e.getMessage().contains("Stopwatch is already stopped")); + } + + @Test + public void testThrowsExceptionWhenStoppedWithoutStarting() { + Exception e = assertThrows(IllegalStateException.class, () -> 
stopwatch.stop()); + + assertTrue(e.getMessage().contains("Stopwatch has not been started")); + } + + @Test + public void testThrowsExceptionWhenElapsedMillisWithoutStarting() { + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.elapsedMillis()); + + assertTrue(e.getMessage().contains("Stopwatch has not been started")); + } + + @Test + public void testShouldReset() { + stopwatch.start(); + assertTrue(stopwatch.isStarted()); + stopwatch.reset(); + assertFalse(stopwatch.isStarted()); + } + + @Test + public void testShouldRestart() { + stopwatch.start(); + assertTrue(stopwatch.isStarted()); + stopwatch.stop(); + assertFalse(stopwatch.isStarted()); + stopwatch.restart(); + assertTrue(stopwatch.isStarted()); + } +} diff --git a/src/test/resources/allowlist.json b/src/test/resources/allowlist.json new file mode 100644 index 000000000..12ae61965 --- /dev/null +++ b/src/test/resources/allowlist.json @@ -0,0 +1,18 @@ +[ + {"host":"account_name.snowflakecomputing.com","port":443,"type":"SNOWFLAKE_DEPLOYMENT"}, + {"host":"org-account_name.snowflakecomputing.com","port":443,"type":"SNOWFLAKE_DEPLOYMENT_REGIONLESS"}, + {"host":"stage-bucket.s3.amazonaws.com","port":443,"type":"STAGE"}, + {"host":"stage-bucket.s3.us-west-2.amazonaws.com","port":443,"type":"STAGE"}, + {"host":"stage-bucket.s3-us-west-2.amazonaws.com","port":443,"type":"STAGE"}, + {"host":"snowsql_repo.snowflakecomputing.com","port":443,"type":"SNOWSQL_REPO"}, + {"host":"out_of_band_telemetry.snowflakecomputing.com","port":443,"type":"OUT_OF_BAND_TELEMETRY"}, + {"host":"ocsp_cache.snowflakecomputing.com","port":80,"type":"OCSP_CACHE"}, + {"host":"duo_security.duosecurity.com","port":443,"type":"DUO_SECURITY"}, + {"host":"ocsp.rootg2.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"o.ss2.us","port":80,"type":"OCSP_RESPONDER"}, + {"host":"ocsp.sca1b.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"ocsp.r2m01.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + 
{"host":"ocsp.rootca1.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"snowsight_deployment.snowflake.com","port":443,"type":"SNOWSIGHT_DEPLOYMENT"}, + {"host":"snowsight_deployment_2.snowflake.com","port":443,"type":"SNOWSIGHT_DEPLOYMENT"} +] diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 239e31e34..715a42878 100644 --- a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -58,7 +58,7 @@ 2.4.9 1.15.3 2.2.0 - 4.1.100.Final + 4.1.111.Final 9.37.3 UTF-8 UTF-8 @@ -140,6 +140,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.dataformat + jackson-dataformat-toml + com.google.api gax