From 76a0d3f173bef225024e20bda18aca36920f3e1a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 25 Apr 2024 10:50:15 +0200 Subject: [PATCH 01/54] SNOW-1161547: Set max retries for get query metadata (#1732) --- src/main/java/net/snowflake/client/core/SFSession.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index d7ee69a07..5f653019d 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -213,7 +213,7 @@ private JsonNode getQueryMetadata(String queryID) throws SQLException { loginTimeout, authTimeout, (int) httpClientSocketTimeout.toMillis(), - 0, + maxHttpRetries, getHttpClientKey()); jsonNode = OBJECT_MAPPER.readTree(response); } catch (Exception e) { From 804ef6701438864fecc3a2ebeb46c2600c863d9c Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Fri, 26 Apr 2024 01:08:35 +0900 Subject: [PATCH 02/54] SNOW-1213117: Wrap connection, statement and result set in try with resources(3/4) (#1723) --- .../jdbc/PutFileWithSpaceIncludedIT.java | 81 +- .../client/jdbc/PutUnescapeBackslashIT.java | 55 +- .../snowflake/client/jdbc/ResultSet0IT.java | 105 +- .../client/jdbc/ResultSetAlreadyClosedIT.java | 13 +- .../ResultSetArrowForce0MultiTimeZone.java | 10 +- ...ResultSetArrowForceLTZMultiTimeZoneIT.java | 155 +- .../ResultSetArrowForceTZMultiTimeZoneIT.java | 96 +- .../client/jdbc/ResultSetAsyncIT.java | 504 ++-- .../client/jdbc/ResultSetAsyncLatestIT.java | 48 +- .../jdbc/ResultSetFeatureNotSupportedIT.java | 9 +- .../snowflake/client/jdbc/ResultSetIT.java | 1514 ++++++------ .../client/jdbc/ResultSetJsonVsArrowIT.java | 2123 +++++++++-------- .../jdbc/ResultSetJsonVsArrowMultiTZIT.java | 129 +- .../client/jdbc/ResultSetLatestIT.java | 1242 +++++----- 
.../client/jdbc/ResultSetMultiTimeZoneIT.java | 685 +++--- .../jdbc/ResultSetMultiTimeZoneLatestIT.java | 475 ++-- 16 files changed, 3758 insertions(+), 3486 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java index e421aebff..5cd03355c 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java @@ -11,6 +11,7 @@ import java.io.FileOutputStream; import java.sql.Connection; import java.sql.ResultSet; +import java.sql.Statement; import net.snowflake.client.TestUtil; import net.snowflake.client.category.TestCategoryOthers; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -49,52 +50,52 @@ public void putFileWithSpaceIncluded() throws Exception { TarArchiveEntry tarEntry; while ((tarEntry = tis.getNextTarEntry()) != null) { File outputFile = new File(dataFolder, tarEntry.getName()); - FileOutputStream fos = new FileOutputStream(outputFile); - IOUtils.copy(tis, fos); - fos.close(); + try (FileOutputStream fos = new FileOutputStream(outputFile)) { + IOUtils.copy(tis, fos); + } } - try (Connection con = getConnection()) { - con.createStatement() - .execute( - "create or replace stage snow13400 url='s3://" - + SF_AWS_USER_BUCKET - + "/snow13400'" - + "credentials=(AWS_KEY_ID='" - + AWS_KEY_ID - + "' AWS_SECRET_KEY='" - + AWS_SECRET_KEY - + "')"); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute( + "create or replace stage snow13400 url='s3://" + + SF_AWS_USER_BUCKET + + "/snow13400'" + + "credentials=(AWS_KEY_ID='" + + AWS_KEY_ID + + "' AWS_SECRET_KEY='" + + AWS_SECRET_KEY + + "')"); - { - ResultSet resultSet = - con.createStatement() - .executeQuery( - "put file://" - + dataFolder.getCanonicalPath() - + "/* @snow13400 auto_compress=false"); - int cnt = 0; - while 
(resultSet.next()) { - cnt++; + try (ResultSet resultSet = + statement.executeQuery( + "put file://" + + dataFolder.getCanonicalPath() + + "/* @snow13400 auto_compress=false")) { + int cnt = 0; + while (resultSet.next()) { + cnt++; + } + assertEquals(cnt, 1); } - assertEquals(cnt, 1); - } - con.createStatement().execute("create or replace table snow13400(a string)"); - con.createStatement().execute("copy into snow13400 from @snow13400"); - { - ResultSet resultSet = con.createStatement().executeQuery("select * from snow13400"); - int cnt = 0; - String output = null; - while (resultSet.next()) { - output = resultSet.getString(1); - cnt++; + statement.execute("create or replace table snow13400(a string)"); + statement.execute("copy into snow13400 from @snow13400"); + try (ResultSet resultSet = con.createStatement().executeQuery("select * from snow13400")) { + int cnt = 0; + String output = null; + while (resultSet.next()) { + output = resultSet.getString(1); + cnt++; + } + assertEquals(cnt, 1); + assertEquals(output, "hello"); } - assertEquals(cnt, 1); - assertEquals(output, "hello"); + } finally { + statement.execute("rm @snow13400"); + statement.execute("drop stage if exists snow13400"); + statement.execute("drop table if exists snow13400"); } - con.createStatement().execute("rm @snow13400"); - con.createStatement().execute("drop stage if exists snow13400"); - con.createStatement().execute("drop table if exists snow13400"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java index e27bf02a2..f9579636d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java @@ -38,10 +38,6 @@ public void testPutFileUnescapeBackslashes() throws Exception { String remoteSubDir = "testPut"; String testDataFileName = "testdata.txt"; - Connection connection = null; - Statement statement = 
null; - ResultSet resultSet = null; - Writer writer = null; Path topDataDir = null; try { topDataDir = Files.createTempDirectory("testPutFileUnescapeBackslashes"); @@ -53,39 +49,36 @@ public void testPutFileUnescapeBackslashes() throws Exception { // create a test data File dataFile = new File(subDir.toFile(), testDataFileName); - writer = + try (Writer writer = new BufferedWriter( - new OutputStreamWriter(new FileOutputStream(dataFile.getCanonicalPath()), "UTF-8")); - writer.write("1,test1"); - writer.close(); - + new OutputStreamWriter(new FileOutputStream(dataFile.getCanonicalPath()), "UTF-8"))) { + writer.write("1,test1"); + } // run PUT command - connection = getConnection(); - statement = connection.createStatement(); - String sql = - String.format("PUT 'file://%s' @~/%s/", dataFile.getCanonicalPath(), remoteSubDir); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + String sql = + String.format("PUT 'file://%s' @~/%s/", dataFile.getCanonicalPath(), remoteSubDir); - // Escape backslashes. This must be done by the application. - sql = sql.replaceAll("\\\\", "\\\\\\\\"); - statement.execute(sql); + // Escape backslashes. This must be done by the application. 
+ sql = sql.replaceAll("\\\\", "\\\\\\\\"); + statement.execute(sql); - resultSet = - connection.createStatement().executeQuery(String.format("LS @~/%s/", remoteSubDir)); - while (resultSet.next()) { - assertThat( - "File name doesn't match", - resultSet.getString(1), - startsWith(String.format("%s/%s", remoteSubDir, testDataFileName))); + try (ResultSet resultSet = + connection.createStatement().executeQuery(String.format("LS @~/%s/", remoteSubDir))) { + while (resultSet.next()) { + assertThat( + "File name doesn't match", + resultSet.getString(1), + startsWith(String.format("%s/%s", remoteSubDir, testDataFileName))); + } + } + } finally { + statement.execute(String.format("RM @~/%s", remoteSubDir)); + } } - } finally { - if (connection != null) { - connection.createStatement().execute(String.format("RM @~/%s", remoteSubDir)); - } - closeSQLObjects(resultSet, statement, connection); - if (writer != null) { - writer.close(); - } FileUtils.deleteDirectory(topDataDir.toFile()); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java index 2ce20192a..b6832632b 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java @@ -12,7 +12,6 @@ import java.sql.Statement; import java.util.Properties; import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.After; import org.junit.Before; import org.junit.experimental.categories.Category; @@ -23,79 +22,69 @@ public class ResultSet0IT extends BaseJDBCTest { public Connection init(int injectSocketTimeout) throws SQLException { Connection connection = BaseJDBCTest.getConnection(injectSocketTimeout); - - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + 
"TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + } return connection; } public Connection init() throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } public Connection init(Properties paramProperties) throws SQLException { Connection conn = BaseJDBCTest.getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Before public void setUp() throws SQLException { - Connection con = init(); - - // TEST_RS - con.createStatement().execute("create or replace table test_rs (colA string)"); - con.createStatement().execute("insert into test_rs values('rowOne')"); - con.createStatement().execute("insert into test_rs values('rowTwo')"); - 
con.createStatement().execute("insert into test_rs values('rowThree')"); - - // ORDERS_JDBC - Statement statement = con.createStatement(); - statement.execute( - "create or replace table orders_jdbc" - + "(C1 STRING NOT NULL COMMENT 'JDBC', " - + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " - + "C7 STRING, C8 STRING, C9 STRING) " - + "stage_file_format = (field_delimiter='|' " - + "error_on_column_count_mismatch=false)"); - // put files - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); - - int numRows = statement.executeUpdate("copy into orders_jdbc"); - - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); - - con.close(); - } - - @After - public void tearDown() throws SQLException { - Connection con = init(); - con.createStatement().execute("drop table if exists orders_jdbc"); - con.createStatement().execute("drop table if exists test_rs"); - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement()) { + + // TEST_RS + statement.execute("create or replace table test_rs (colA string)"); + statement.execute("insert into test_rs values('rowOne')"); + statement.execute("insert into test_rs values('rowTwo')"); + statement.execute("insert into test_rs values('rowThree')"); + + // ORDERS_JDBC + statement.execute( + "create or replace table orders_jdbc" + + "(C1 STRING NOT NULL COMMENT 'JDBC', " + + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " + + "C7 STRING, C8 STRING, C9 STRING) " + + "stage_file_format = (field_delimiter='|' " + + "error_on_column_count_mismatch=false)"); + // put files + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + assertTrue( + "Failed to 
put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + + int numRows = statement.executeUpdate("copy into orders_jdbc"); + + assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + } } ResultSet numberCrossTesting() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java index 40e536ebe..292d71949 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java @@ -21,9 +21,9 @@ public class ResultSetAlreadyClosedIT extends BaseJDBCTest { @Test public void testQueryResultSetAlreadyClosed() throws Throwable { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery("select 1"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select 1")) { checkAlreadyClosed(resultSet); } } @@ -45,9 +45,10 @@ public void testMetadataResultSetAlreadyClosed() throws Throwable { @Test public void testEmptyResultSetAlreadyClosed() throws Throwable { - ResultSet resultSet = new SnowflakeResultSetV1.EmptyResultSet(); - checkAlreadyClosed(resultSet); - checkAlreadyClosedEmpty(resultSet); + try (ResultSet resultSet = new SnowflakeResultSetV1.EmptyResultSet()) { + checkAlreadyClosed(resultSet); + checkAlreadyClosedEmpty(resultSet); + } } private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java index 67a78d3ce..c6edc67fb 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java +++ 
b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java @@ -57,12 +57,12 @@ Connection init(String table, String column, String values) throws SQLException + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - } - con.createStatement() - .execute("alter session set jdbc_query_result_format" + " = '" + queryResultFormat + "'"); - con.createStatement().execute("create or replace table " + table + " " + column); - con.createStatement().execute("insert into " + table + " values " + values); + statement.execute( + "alter session set jdbc_query_result_format" + " = '" + queryResultFormat + "'"); + statement.execute("create or replace table " + table + " " + column); + statement.execute("insert into " + table + " values " + values); + } return con; } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java index 56e389fc5..f998fb5d4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java @@ -5,6 +5,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -75,11 +76,11 @@ private void testTimestampLTZWithScale(int scale) throws SQLException { ResultSet rs = con.createStatement().executeQuery("select * from " + table); int i = 0; while (i < cases.length) { - rs.next(); + assertTrue(rs.next()); assertEquals(times[i++], rs.getTimestamp(1).getTime()); assertEquals(0, rs.getTimestamp(1).getNanos()); } - rs.next(); + assertTrue(rs.next()); assertNull(rs.getString(1)); finish(table, con); } @@ -98,52 +99,56 @@ public 
void testTimestampLTZOutputFormat() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'),('") + "')"; - Connection con = init(table, column, values); - - Statement statement = con.createStatement(); - - // use initialized ltz output format - ResultSet rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String weekday = rs.getString(1).split(",")[0]; - assertEquals(3, weekday.length()); - } - - // change ltz output format - statement.execute( - "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS TZH:TZM'"); - rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String year = rs.getString(1).split("-")[0]; - assertEquals(4, year.length()); - } - - // unset ltz output format, then it should use timestamp_output_format - statement.execute("alter session unset TIMESTAMP_LTZ_OUTPUT_FORMAT"); - rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String weekday = rs.getString(1).split(",")[0]; - assertEquals(3, weekday.length()); - } - - // set ltz output format back to init value - statement.execute( - "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - rs = statement.executeQuery("select * from " + table); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - String weekday = rs.getString(1).split(",")[0]; - assertEquals(3, weekday.length()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try { + // use initialized ltz output format + try (ResultSet rs = statement.executeQuery("select * from " + table)) 
{ + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String weekday = rs.getString(1).split(",")[0]; + assertEquals(3, weekday.length()); + } + } + // change ltz output format + statement.execute( + "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS TZH:TZM'"); + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String year = rs.getString(1).split("-")[0]; + assertEquals(4, year.length()); + } + } + + // unset ltz output format, then it should use timestamp_output_format + statement.execute("alter session unset TIMESTAMP_LTZ_OUTPUT_FORMAT"); + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String weekday = rs.getString(1).split(",")[0]; + assertEquals(3, weekday.length()); + } + } + // set ltz output format back to init value + statement.execute( + "alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + String weekday = rs.getString(1).split(",")[0]; + assertEquals(3, weekday.length()); + } + } + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); + } } - - finish(table, con); } @Test @@ -178,20 +183,26 @@ public void testTimestampLTZWithNulls() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'), (null),('") + "')"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 
0; - while (i < 2 * cases.length - 1) { - rs.next(); - if (i % 2 != 0) { - assertNull(rs.getTimestamp(1)); - } else { - assertEquals(times[i / 2], rs.getTimestamp(1).getTime()); - assertEquals(0, rs.getTimestamp(1).getNanos()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < 2 * cases.length - 1) { + assertTrue(rs.next()); + if (i % 2 != 0) { + assertNull(rs.getTimestamp(1)); + } else { + assertEquals(times[i / 2], rs.getTimestamp(1).getTime()); + assertEquals(0, rs.getTimestamp(1).getNanos()); + } + i++; + } + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - i++; } - finish(table, con); } @Test @@ -218,16 +229,22 @@ public void testTimestampLTZWithNanos() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(times[i], rs.getTimestamp(1).getTime()); - assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(times[i], rs.getTimestamp(1).getTime()); + assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); + } } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java 
b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java index 156bf10bd..e073bfccf 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java @@ -5,10 +5,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Statement; import java.util.Collection; import net.snowflake.client.category.TestCategoryArrow; import org.apache.commons.lang3.StringUtils; @@ -65,17 +67,23 @@ private void testTimestampTZWithScale(int scale) throws SQLException { String column = "(a timestamp_tz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(times[i++], rs.getTimestamp(1).getTime()); - assertEquals(0, rs.getTimestamp(1).getNanos()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(times[i++], rs.getTimestamp(1).getTime()); + assertEquals(0, rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); + } } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } @Test @@ -111,22 +119,28 @@ public void testTimestampTZWithNanos() throws SQLException { String column = "(a timestamp_tz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - Connection con = 
init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { - // TODO: Is this a JDBC bug which happens in both arrow and json cases? - assertEquals("0001-01-01 00:00:01.790870987", rs.getTimestamp(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { + // TODO: Is this a JDBC bug which happens in both arrow and json cases? + assertEquals("0001-01-01 00:00:01.790870987", rs.getTimestamp(1).toString()); + } + + assertEquals(times[i], rs.getTimestamp(1).getTime()); + assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - - assertEquals(times[i], rs.getTimestamp(1).getTime()); - assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } @Test @@ -164,21 +178,27 @@ public void testTimestampTZWithMicros() throws SQLException { String column = "(a timestamp_tz(6))"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { - // TODO: Is this a JDBC bug which happens in both arrow and json cases? 
- assertEquals("0001-01-01 00:00:01.79087", rs.getTimestamp(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + if (i == cases.length - 1 && tz.equalsIgnoreCase("utc")) { + // TODO: Is this a JDBC bug which happens in both arrow and json cases? + assertEquals("0001-01-01 00:00:01.79087", rs.getTimestamp(1).toString()); + } + + assertEquals(times[i], rs.getTimestamp(1).getTime()); + assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - - assertEquals(times[i], rs.getTimestamp(1).getTime()); - assertEquals(nanos[i++], rs.getTimestamp(1).getNanos()); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java index 6e881615c..1351ea4f1 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java @@ -38,177 +38,193 @@ public class ResultSetAsyncIT extends BaseJDBCTest { @Test public void testAsyncResultSetFunctionsWithNewSession() throws SQLException { - Connection connection = getConnection(); final Map params = getConnectionParameters(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - String createTableSql = "select * from test_rsmd"; - ResultSet rs = statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql); - String queryID = 
rs.unwrap(SnowflakeResultSet.class).getQueryID(); - statement.execute("drop table if exists test_rsmd"); - rs.close(); - // close statement and connection - statement.close(); - connection.close(); - connection = getConnection(); - // open a new connection and create a result set - ResultSet resultSet = connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // getCatalogName(), getSchemaName(), and getTableName() are empty - // when session is re-opened - assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getTableName(1)); - assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); - assertEquals("COLA", resultSetMetaData.getColumnName(1)); - assertEquals(3, resultSetMetaData.getColumnType(1)); - assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); - assertEquals(20, resultSetMetaData.getPrecision(1)); - assertEquals(5, resultSetMetaData.getScale(1)); - assertFalse(resultSetMetaData.isAutoIncrement(1)); - assertFalse(resultSetMetaData.isCaseSensitive(1)); - assertFalse(resultSetMetaData.isCurrency(1)); - assertFalse(resultSetMetaData.isDefinitelyWritable(1)); - assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); - assertTrue(resultSetMetaData.isReadOnly(1)); - assertTrue(resultSetMetaData.isSearchable(1)); - assertTrue(resultSetMetaData.isSigned(1)); + String queryID = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd 
values(1.00, 'str'),(2.00, 'str2')"); + String createTableSql = "select * from test_rsmd"; + try (ResultSet rs = + statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql)) { + queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + } + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } + try (Connection connection = getConnection(); + // open a new connection and create a result set + ResultSet resultSet = + connection.unwrap(SnowflakeConnection.class).createResultSet(queryID)) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // getCatalogName(), getSchemaName(), and getTableName() are empty + // when session is re-opened + assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getTableName(1)); + assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); + assertEquals("COLA", resultSetMetaData.getColumnName(1)); + assertEquals(3, resultSetMetaData.getColumnType(1)); + assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); + assertEquals(20, resultSetMetaData.getPrecision(1)); + assertEquals(5, resultSetMetaData.getScale(1)); + assertFalse(resultSetMetaData.isAutoIncrement(1)); + assertFalse(resultSetMetaData.isCaseSensitive(1)); + assertFalse(resultSetMetaData.isCurrency(1)); + assertFalse(resultSetMetaData.isDefinitelyWritable(1)); + assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); + assertTrue(resultSetMetaData.isReadOnly(1)); + assertTrue(resultSetMetaData.isSearchable(1)); + assertTrue(resultSetMetaData.isSigned(1)); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - List 
colNames = secretMetaData.getColumnNames(); - assertEquals("COLA", colNames.get(0)); - assertEquals("COLB", colNames.get(1)); - assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); - assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); - TestUtil.assertValidQueryId(secretMetaData.getQueryID()); - assertEquals( - secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); - resultSet.close(); - statement.close(); - connection.close(); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + List colNames = secretMetaData.getColumnNames(); + assertEquals("COLA", colNames.get(0)); + assertEquals("COLB", colNames.get(1)); + assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); + assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); + TestUtil.assertValidQueryId(secretMetaData.getQueryID()); + assertEquals( + secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); + } } @Test public void testResultSetMetadata() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getSchemaName(1).toUpperCase()); - assertEquals("", resultSetMetaData.getTableName(1)); - assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals("COLA", 
resultSetMetaData.getColumnLabel(1)); - assertEquals("COLA", resultSetMetaData.getColumnName(1)); - assertEquals(3, resultSetMetaData.getColumnType(1)); - assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); - assertEquals(20, resultSetMetaData.getPrecision(1)); - assertEquals(5, resultSetMetaData.getScale(1)); - assertFalse(resultSetMetaData.isAutoIncrement(1)); - assertFalse(resultSetMetaData.isCaseSensitive(1)); - assertFalse(resultSetMetaData.isCurrency(1)); - assertFalse(resultSetMetaData.isDefinitelyWritable(1)); - assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); - assertTrue(resultSetMetaData.isReadOnly(1)); - assertTrue(resultSetMetaData.isSearchable(1)); - assertTrue(resultSetMetaData.isSigned(1)); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - List colNames = secretMetaData.getColumnNames(); - assertEquals("COLA", colNames.get(0)); - assertEquals("COLB", colNames.get(1)); - assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); - assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); - TestUtil.assertValidQueryId(secretMetaData.getQueryID()); - assertEquals( - secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); - - statement.execute("drop table if exists test_rsmd"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + try (ResultSet resultSet = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from test_rsmd")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + assertEquals("", resultSetMetaData.getCatalogName(1).toUpperCase()); + assertEquals("", 
resultSetMetaData.getSchemaName(1).toUpperCase()); + assertEquals("", resultSetMetaData.getTableName(1)); + assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); + assertEquals("COLA", resultSetMetaData.getColumnName(1)); + assertEquals(3, resultSetMetaData.getColumnType(1)); + assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); + assertEquals(20, resultSetMetaData.getPrecision(1)); + assertEquals(5, resultSetMetaData.getScale(1)); + assertFalse(resultSetMetaData.isAutoIncrement(1)); + assertFalse(resultSetMetaData.isCaseSensitive(1)); + assertFalse(resultSetMetaData.isCurrency(1)); + assertFalse(resultSetMetaData.isDefinitelyWritable(1)); + assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); + assertTrue(resultSetMetaData.isReadOnly(1)); + assertTrue(resultSetMetaData.isSearchable(1)); + assertTrue(resultSetMetaData.isSigned(1)); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + List colNames = secretMetaData.getColumnNames(); + assertEquals("COLA", colNames.get(0)); + assertEquals("COLB", colNames.get(1)); + assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); + assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); + TestUtil.assertValidQueryId(secretMetaData.getQueryID()); + assertEquals( + secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); + } + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } } @Test public void testOrderAndClosureFunctions() throws SQLException { // Set up environment - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB 
string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); + String queryID = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + try { + ResultSet resultSet = + statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); - // test isFirst, isBeforeFirst - assertTrue("should be before the first", resultSet.isBeforeFirst()); - assertFalse("should not be the first", resultSet.isFirst()); - resultSet.next(); - assertFalse("should not be before the first", resultSet.isBeforeFirst()); - assertTrue("should be the first", resultSet.isFirst()); + // test isFirst, isBeforeFirst + assertTrue("should be before the first", resultSet.isBeforeFirst()); + assertFalse("should not be the first", resultSet.isFirst()); + resultSet.next(); + assertFalse("should not be before the first", resultSet.isBeforeFirst()); + assertTrue("should be the first", resultSet.isFirst()); - // test isClosed functions - String queryID = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); - assertFalse(resultSet.isClosed()); - // close resultSet and test again - resultSet.close(); - assertTrue(resultSet.isClosed()); - // close connection and open a new one - statement.execute("drop table if exists test_rsmd"); - statement.close(); - connection.close(); - connection = getConnection(); - resultSet = connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); - // test out isClosed, isLast, and isAfterLast - assertFalse(resultSet.isClosed()); - resultSet.next(); - resultSet.next(); - // cursor should be on last row - assertTrue(resultSet.isLast()); - resultSet.next(); - // cursor is after 
last row - assertTrue(resultSet.isAfterLast()); - resultSet.close(); - // resultSet should be closed - assertTrue(resultSet.isClosed()); - statement.close(); - connection.close(); + // test isClosed functions + queryID = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); + assertFalse(resultSet.isClosed()); + // close resultSet and test again + resultSet.close(); + assertTrue(resultSet.isClosed()); + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } + try (Connection connection = getConnection()) { + ResultSet resultSet = connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); + // test out isClosed, isLast, and isAfterLast + assertFalse(resultSet.isClosed()); + resultSet.next(); + resultSet.next(); + // cursor should be on last row + assertTrue(resultSet.isLast()); + resultSet.next(); + // cursor is after last row + assertTrue(resultSet.isAfterLast()); + resultSet.close(); + // resultSet should be closed + assertTrue(resultSet.isClosed()); + } } @Test public void testWasNull() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - Clob emptyClob = connection.createClob(); - emptyClob.setString(1, ""); - statement.execute( - "create or replace table test_null(colA number, colB string, colNull string, emptyClob string)"); - PreparedStatement prepst = - connection.prepareStatement("insert into test_null values (?, ?, ?, ?)"); - prepst.setNull(1, Types.INTEGER); - prepst.setString(2, "hello"); - prepst.setString(3, null); - prepst.setClob(4, emptyClob); - prepst.execute(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + Clob emptyClob = connection.createClob(); + emptyClob.setString(1, ""); + statement.execute( + "create or replace table test_null(colA number, colB string, colNull string, emptyClob string)"); + try (PreparedStatement prepst = + connection.prepareStatement("insert into 
test_null values (?, ?, ?, ?)")) { + prepst.setNull(1, Types.INTEGER); + prepst.setString(2, "hello"); + prepst.setString(3, null); + prepst.setClob(4, emptyClob); + prepst.execute(); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_null"); - resultSet.next(); - resultSet.getInt(1); - assertTrue(resultSet.wasNull()); // integer value is null - resultSet.getString(2); - assertFalse(resultSet.wasNull()); // string value is not null - assertNull(resultSet.getClob(3)); - assertNull(resultSet.getClob("COLNULL")); - assertEquals("", resultSet.getClob("EMPTYCLOB").toString()); + try (ResultSet resultSet = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from test_null")) { + resultSet.next(); + resultSet.getInt(1); + assertTrue(resultSet.wasNull()); // integer value is null + resultSet.getString(2); + assertFalse(resultSet.wasNull()); // string value is not null + assertNull(resultSet.getClob(3)); + assertNull(resultSet.getClob("COLNULL")); + assertEquals("", resultSet.getClob("EMPTYCLOB").toString()); + } + } + } finally { + statement.execute("drop table if exists test_null"); + } + } } @Test @@ -228,88 +244,92 @@ public void testGetMethods() throws Throwable { Time time = new Time(500); Timestamp ts = new Timestamp(333); - Connection connection = getConnection(); - Clob clob = connection.createClob(); - clob.setString(1, "hello world"); - Statement statement = connection.createStatement(); - // TODO structuredType - add to test when WRITE is ready - SNOW-1157904 - statement.execute( - "create or replace table test_get(colA integer, colB number, colC number, colD string, colE double, colF float, colG boolean, colH text, colI binary(3), colJ number(38,9), colK int, colL date, colM time, colN timestamp_ltz)"); + try (Connection connection = getConnection()) { + Clob clob = connection.createClob(); + clob.setString(1, "hello world"); + try (Statement statement = 
connection.createStatement()) { + try { + // TODO structuredType - add to test when WRITE is ready - SNOW-1157904 + statement.execute( + "create or replace table test_get(colA integer, colB number, colC number, colD string, colE double, colF float, colG boolean, colH text, colI binary(3), colJ number(38,9), colK int, colL date, colM time, colN timestamp_ltz)"); - PreparedStatement prepStatement = connection.prepareStatement(prepInsertString); - prepStatement.setInt(1, bigInt); - prepStatement.setLong(2, bigLong); - prepStatement.setLong(3, bigShort); - prepStatement.setString(4, str); - prepStatement.setDouble(5, bigDouble); - prepStatement.setFloat(6, bigFloat); - prepStatement.setBoolean(7, true); - prepStatement.setClob(8, clob); - prepStatement.setBytes(9, bytes); - prepStatement.setBigDecimal(10, bigDecimal); - prepStatement.setByte(11, oneByte); - prepStatement.setDate(12, date); - prepStatement.setTime(13, time); - prepStatement.setTimestamp(14, ts); - prepStatement.execute(); + try (PreparedStatement prepStatement = connection.prepareStatement(prepInsertString)) { + prepStatement.setInt(1, bigInt); + prepStatement.setLong(2, bigLong); + prepStatement.setLong(3, bigShort); + prepStatement.setString(4, str); + prepStatement.setDouble(5, bigDouble); + prepStatement.setFloat(6, bigFloat); + prepStatement.setBoolean(7, true); + prepStatement.setClob(8, clob); + prepStatement.setBytes(9, bytes); + prepStatement.setBigDecimal(10, bigDecimal); + prepStatement.setByte(11, oneByte); + prepStatement.setDate(12, date); + prepStatement.setTime(13, time); + prepStatement.setTimestamp(14, ts); + prepStatement.execute(); - ResultSet resultSet = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_get"); - resultSet.next(); - assertEquals(bigInt, resultSet.getInt(1)); - assertEquals(bigInt, resultSet.getInt("COLA")); - assertEquals(bigLong, resultSet.getLong(2)); - assertEquals(bigLong, resultSet.getLong("COLB")); - assertEquals(bigShort, 
resultSet.getShort(3)); - assertEquals(bigShort, resultSet.getShort("COLC")); - assertEquals(str, resultSet.getString(4)); - assertEquals(str, resultSet.getString("COLD")); - Reader reader = resultSet.getCharacterStream("COLD"); - char[] sample = new char[str.length()]; + try (ResultSet resultSet = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from test_get")) { + resultSet.next(); + assertEquals(bigInt, resultSet.getInt(1)); + assertEquals(bigInt, resultSet.getInt("COLA")); + assertEquals(bigLong, resultSet.getLong(2)); + assertEquals(bigLong, resultSet.getLong("COLB")); + assertEquals(bigShort, resultSet.getShort(3)); + assertEquals(bigShort, resultSet.getShort("COLC")); + assertEquals(str, resultSet.getString(4)); + assertEquals(str, resultSet.getString("COLD")); + Reader reader = resultSet.getCharacterStream("COLD"); + char[] sample = new char[str.length()]; - assertEquals(str.length(), reader.read(sample)); - assertEquals(str.charAt(0), sample[0]); - assertEquals(str, new String(sample)); + assertEquals(str.length(), reader.read(sample)); + assertEquals(str.charAt(0), sample[0]); + assertEquals(str, new String(sample)); - assertEquals(bigDouble, resultSet.getDouble(5), 0); - assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); - assertEquals(bigFloat, resultSet.getFloat(6), 0); - assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); - assertTrue(resultSet.getBoolean(7)); - assertTrue(resultSet.getBoolean("COLG")); - assertEquals("hello world", resultSet.getClob("COLH").toString()); + assertEquals(bigDouble, resultSet.getDouble(5), 0); + assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); + assertEquals(bigFloat, resultSet.getFloat(6), 0); + assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); + assertTrue(resultSet.getBoolean(7)); + assertTrue(resultSet.getBoolean("COLG")); + assertEquals("hello world", resultSet.getClob("COLH").toString()); - // TODO: figure out why getBytes returns an offset. 
- // assertEquals(bytes, resultSet.getBytes(9)); - // assertEquals(bytes, resultSet.getBytes("COLI")); + // TODO: figure out why getBytes returns an offset. + // assertEquals(bytes, resultSet.getBytes(9)); + // assertEquals(bytes, resultSet.getBytes("COLI")); - DecimalFormat df = new DecimalFormat("#.00"); - assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal(10))); - assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal("COLJ"))); + DecimalFormat df = new DecimalFormat("#.00"); + assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal(10))); + assertEquals(df.format(bigDecimal), df.format(resultSet.getBigDecimal("COLJ"))); - assertEquals(oneByte, resultSet.getByte(11)); - assertEquals(oneByte, resultSet.getByte("COLK")); + assertEquals(oneByte, resultSet.getByte(11)); + assertEquals(oneByte, resultSet.getByte("COLK")); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - assertEquals(sdf.format(date), sdf.format(resultSet.getDate(12))); - assertEquals(sdf.format(date), sdf.format(resultSet.getDate("COLL"))); - assertEquals(time, resultSet.getTime(13)); - assertEquals(time, resultSet.getTime("COLM")); - assertEquals(ts, resultSet.getTimestamp(14)); - assertEquals(ts, resultSet.getTimestamp("COLN")); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); + assertEquals(sdf.format(date), sdf.format(resultSet.getDate(12))); + assertEquals(sdf.format(date), sdf.format(resultSet.getDate("COLL"))); + assertEquals(time, resultSet.getTime(13)); + assertEquals(time, resultSet.getTime("COLM")); + assertEquals(ts, resultSet.getTimestamp(14)); + assertEquals(ts, resultSet.getTimestamp("COLN")); - // test getObject - assertEquals(str, resultSet.getObject(4).toString()); - assertEquals(str, resultSet.getObject("COLD").toString()); + // test getObject + assertEquals(str, resultSet.getObject(4).toString()); + assertEquals(str, resultSet.getObject("COLD").toString()); - // test getStatement method - 
assertEquals(statement, resultSet.getStatement()); - - prepStatement.close(); - statement.execute("drop table if exists table_get"); - statement.close(); - resultSet.close(); - connection.close(); + // test getStatement method + assertEquals(statement, resultSet.getStatement()); + } + } + } finally { + statement.execute("drop table if exists table_get"); + } + } + } } /** @@ -323,23 +343,23 @@ public void testGetMethods() throws Throwable { */ @Test public void testEmptyResultSet() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet rs = - statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from empty_table"); - // if user never calls getMetadata() or next(), empty result set is used to get results. - // empty ResultSet returns all nulls, 0s, and empty values. - assertFalse(rs.isClosed()); - assertEquals(0, rs.getInt(1)); - try { - rs.getInt("col1"); - fail("Fetching from a column name that does not exist should return a SQLException"); - } catch (SQLException e) { - // findColumn fails with empty metadata with exception "Column not found". - assertEquals(SqlState.UNDEFINED_COLUMN, e.getSQLState()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet rs = + statement + .unwrap(SnowflakeStatement.class) + .executeAsyncQuery("select * from empty_table")) { + // if user never calls getMetadata() or next(), empty result set is used to get results. + // empty ResultSet returns all nulls, 0s, and empty values. + assertFalse(rs.isClosed()); + assertEquals(0, rs.getInt(1)); + try { + rs.getInt("col1"); + fail("Fetching from a column name that does not exist should return a SQLException"); + } catch (SQLException e) { + // findColumn fails with empty metadata with exception "Column not found". 
+ assertEquals(SqlState.UNDEFINED_COLUMN, e.getSQLState()); + } } - rs.close(); // close empty result set - assertTrue(rs.isClosed()); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java index e5dc110ce..dd534d469 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java @@ -21,30 +21,32 @@ public class ResultSetAsyncLatestIT extends BaseJDBCTest { @Test public void testAsyncResultSet() throws SQLException { String queryID; - Connection connection = getConnection(); - try (Statement statement = connection.createStatement()) { - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - String createTableSql = "select * from test_rsmd"; - ResultSet rs = statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql); - queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - statement.execute("drop table if exists test_rsmd"); - rs.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + String createTableSql = "select * from test_rsmd"; + try (ResultSet rs = + statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(createTableSql)) { + queryID = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + } + } finally { + statement.execute("drop table if exists test_rsmd"); + } } // Close and reopen connection - connection.close(); - connection = getConnection(); - // open a new connection and create a result set - ResultSet resultSet = 
connection.unwrap(SnowflakeConnection.class).createResultSet(queryID); - // Process result set - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - assertEquals( - secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); - // Close statement and resultset - resultSet.getStatement().close(); - resultSet.close(); - connection.close(); + + try (Connection connection = getConnection(); + // open a new connection and create a result set + ResultSet resultSet = + connection.unwrap(SnowflakeConnection.class).createResultSet(queryID)) { + // Process result set + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + assertEquals( + secretMetaData.getQueryID(), resultSet.unwrap(SnowflakeResultSet.class).getQueryID()); + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java index 2535a6579..e71f69d1a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java @@ -18,11 +18,10 @@ public class ResultSetFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testQueryResultSetNotSupportedException() throws Throwable { - try (Connection connection = getConnection()) { - try (Statement statement = connection.createStatement()) { - ResultSet resultSet = statement.executeQuery("select 1"); - checkFeatureNotSupportedException(resultSet); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select 1")) { + checkFeatureNotSupportedException(resultSet); } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java index e521078c6..bce1d97cc 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java @@ -57,35 +57,37 @@ public ResultSetIT() { @Test public void testFindColumn() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(selectAllSQL); - assertEquals(1, resultSet.findColumn("COLA")); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertEquals(1, resultSet.findColumn("COLA")); + } } @Test public void testGetColumnClassNameForBinary() throws Throwable { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table bintable (b binary)"); - statement.execute("insert into bintable values ('00f1f2')"); - ResultSet resultSet = statement.executeQuery("select * from bintable"); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertEquals(SnowflakeType.BINARY_CLASS_NAME, metaData.getColumnClassName(1)); - assertTrue(resultSet.next()); - Class klass = Class.forName(SnowflakeType.BINARY_CLASS_NAME); - Object ret0 = resultSet.getObject(1); - assertEquals(ret0.getClass(), klass); - byte[] ret = (byte[]) ret0; - assertEquals(3, ret.length); - assertEquals(ret[0], (byte) 0); - assertEquals(ret[1], (byte) -15); - assertEquals(ret[2], (byte) -14); - statement.execute("drop table if exists bintable"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table bintable (b binary)"); + statement.execute("insert into bintable values 
('00f1f2')"); + try (ResultSet resultSet = statement.executeQuery("select * from bintable")) { + ResultSetMetaData metaData = resultSet.getMetaData(); + assertEquals(SnowflakeType.BINARY_CLASS_NAME, metaData.getColumnClassName(1)); + assertTrue(resultSet.next()); + Class klass = Class.forName(SnowflakeType.BINARY_CLASS_NAME); + Object ret0 = resultSet.getObject(1); + assertEquals(ret0.getClass(), klass); + byte[] ret = (byte[]) ret0; + assertEquals(3, ret.length); + assertEquals(ret[0], (byte) 0); + assertEquals(ret[1], (byte) -15); + assertEquals(ret[2], (byte) -14); + } + } finally { + statement.execute("drop table if exists bintable"); + } + } } @Test @@ -98,379 +100,388 @@ public void testGetMethod() throws Throwable { double bigDouble = Double.MAX_VALUE; float bigFloat = Float.MAX_VALUE; - Connection connection = init(); - Clob clob = connection.createClob(); - clob.setString(1, "hello world"); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table test_get(colA integer, colB number, colC number, " - + "colD string, colE double, colF float, colG boolean, colH text)"); - - PreparedStatement prepStatement = connection.prepareStatement(prepInsertString); - prepStatement.setInt(1, bigInt); - prepStatement.setLong(2, bigLong); - prepStatement.setLong(3, bigShort); - prepStatement.setString(4, str); - prepStatement.setDouble(5, bigDouble); - prepStatement.setFloat(6, bigFloat); - prepStatement.setBoolean(7, true); - prepStatement.setClob(8, clob); - prepStatement.execute(); - - statement.execute("select * from test_get"); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - assertEquals(bigInt, resultSet.getInt(1)); - assertEquals(bigInt, resultSet.getInt("COLA")); - assertEquals(bigLong, resultSet.getLong(2)); - assertEquals(bigLong, resultSet.getLong("COLB")); - assertEquals(bigShort, resultSet.getShort(3)); - assertEquals(bigShort, resultSet.getShort("COLC")); - assertEquals(str, 
resultSet.getString(4)); - assertEquals(str, resultSet.getString("COLD")); - Reader reader = resultSet.getCharacterStream("COLD"); - char[] sample = new char[str.length()]; - - assertEquals(str.length(), reader.read(sample)); - assertEquals(str.charAt(0), sample[0]); - assertEquals(str, new String(sample)); - - // assertEquals(bigDouble, resultSet.getDouble(5), 0); - // assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); - assertEquals(bigFloat, resultSet.getFloat(6), 0); - assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); - assertTrue(resultSet.getBoolean(7)); - assertTrue(resultSet.getBoolean("COLG")); - assertEquals("hello world", resultSet.getClob("COLH").toString()); - - // test getStatement method - assertEquals(statement, resultSet.getStatement()); - - prepStatement.close(); - statement.execute("drop table if exists table_get"); - statement.close(); - resultSet.close(); - connection.close(); + try (Connection connection = init()) { + Clob clob = connection.createClob(); + clob.setString(1, "hello world"); + try (Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_get(colA integer, colB number, colC number, " + + "colD string, colE double, colF float, colG boolean, colH text)"); + + try (PreparedStatement prepStatement = connection.prepareStatement(prepInsertString)) { + prepStatement.setInt(1, bigInt); + prepStatement.setLong(2, bigLong); + prepStatement.setLong(3, bigShort); + prepStatement.setString(4, str); + prepStatement.setDouble(5, bigDouble); + prepStatement.setFloat(6, bigFloat); + prepStatement.setBoolean(7, true); + prepStatement.setClob(8, clob); + prepStatement.execute(); + + statement.execute("select * from test_get"); + try (ResultSet resultSet = statement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(bigInt, resultSet.getInt(1)); + assertEquals(bigInt, resultSet.getInt("COLA")); + assertEquals(bigLong, resultSet.getLong(2)); + assertEquals(bigLong, 
resultSet.getLong("COLB")); + assertEquals(bigShort, resultSet.getShort(3)); + assertEquals(bigShort, resultSet.getShort("COLC")); + assertEquals(str, resultSet.getString(4)); + assertEquals(str, resultSet.getString("COLD")); + Reader reader = resultSet.getCharacterStream("COLD"); + char[] sample = new char[str.length()]; + + assertEquals(str.length(), reader.read(sample)); + assertEquals(str.charAt(0), sample[0]); + assertEquals(str, new String(sample)); + + // assertEquals(bigDouble, resultSet.getDouble(5), 0); + // assertEquals(bigDouble, resultSet.getDouble("COLE"), 0); + assertEquals(bigFloat, resultSet.getFloat(6), 0); + assertEquals(bigFloat, resultSet.getFloat("COLF"), 0); + assertTrue(resultSet.getBoolean(7)); + assertTrue(resultSet.getBoolean("COLG")); + assertEquals("hello world", resultSet.getClob("COLH").toString()); + + // test getStatement method + assertEquals(statement, resultSet.getStatement()); + } + } + } finally { + statement.execute("drop table if exists table_get"); + } + } + } } @Test public void testGetObjectOnDatabaseMetadataResultSet() throws SQLException { - Connection connection = init(); - DatabaseMetaData databaseMetaData = connection.getMetaData(); - ResultSet resultSet = databaseMetaData.getTypeInfo(); - resultSet.next(); - // SNOW-21375 "NULLABLE" Column is a SMALLINT TYPE - assertEquals(DatabaseMetaData.typeNullable, resultSet.getObject("NULLABLE")); - resultSet.close(); - connection.close(); + try (Connection connection = init()) { + DatabaseMetaData databaseMetaData = connection.getMetaData(); + try (ResultSet resultSet = databaseMetaData.getTypeInfo()) { + assertTrue(resultSet.next()); + // SNOW-21375 "NULLABLE" Column is a SMALLINT TYPE + assertEquals(DatabaseMetaData.typeNullable, resultSet.getObject("NULLABLE")); + } + } } @Test public void testGetShort() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i 
= 1; i < 13; i++) { - assertEquals(0, resultSet.getShort(i)); - } - - resultSet.next(); - assertEquals(2, resultSet.getShort(1)); - assertEquals(5, resultSet.getShort(2)); - assertEquals(3, resultSet.getShort(3)); - assertEquals(1, resultSet.getShort(4)); - assertEquals(1, resultSet.getShort(5)); - assertEquals(1, resultSet.getShort(6)); - assertEquals(9126, resultSet.getShort(7)); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getShort(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getShort(i)); } - } - resultSet.next(); - // certain column types can only have certain values when called by getShort() or else a - // SQLexception is thrown. - // These column types are varchar, char, and float. - for (int i = 5; i < 7; i++) { - try { - resultSet.getShort(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getShort(1)); + assertEquals(5, resultSet.getShort(2)); + assertEquals(3, resultSet.getShort(3)); + assertEquals(1, resultSet.getShort(4)); + assertEquals(1, resultSet.getShort(5)); + assertEquals(1, resultSet.getShort(6)); + assertEquals(9126, resultSet.getShort(7)); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getShort(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getShort() or else a + // SQLexception is thrown. + // These column types are varchar, char, and float. 
+ + for (int i = 5; i < 7; i++) { + try { + resultSet.getShort(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetInt() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getInt(i)); - } - - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - assertEquals(5, resultSet.getInt(2)); - assertEquals(3, resultSet.getInt(3)); - assertEquals(1, resultSet.getInt(4)); - assertEquals(1, resultSet.getInt(5)); - assertEquals(1, resultSet.getInt(6)); - assertEquals(9126, resultSet.getInt(7)); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getInt(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getInt(i)); } - } - resultSet.next(); - // certain column types can only have certain values when called by getInt() or else a - // SQLException is thrown. - // These column types are varchar, char, and float. 
- for (int i = 5; i < 7; i++) { - try { - resultSet.getInt(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertEquals(5, resultSet.getInt(2)); + assertEquals(3, resultSet.getInt(3)); + assertEquals(1, resultSet.getInt(4)); + assertEquals(1, resultSet.getInt(5)); + assertEquals(1, resultSet.getInt(6)); + assertEquals(9126, resultSet.getInt(7)); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getInt(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getInt() or else a + // SQLException is thrown. + // These column types are varchar, char, and float. + for (int i = 5; i < 7; i++) { + try { + resultSet.getInt(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetLong() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getLong(i)); - } - - resultSet.next(); - assertEquals(2, resultSet.getLong(1)); - assertEquals(5, resultSet.getLong(2)); - assertEquals(3, resultSet.getLong(3)); - assertEquals(1, resultSet.getLong(4)); - assertEquals(1, resultSet.getLong(5)); - assertEquals(1, resultSet.getLong(6)); - assertEquals(9126, resultSet.getLong(7)); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getLong(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, 
resultSet.getLong(i)); } - } - resultSet.next(); - // certain column types can only have certain values when called by getLong() or else a - // SQLexception is thrown. - // These column types are varchar, char, and float. - for (int i = 5; i < 7; i++) { - try { - resultSet.getLong(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getLong(1)); + assertEquals(5, resultSet.getLong(2)); + assertEquals(3, resultSet.getLong(3)); + assertEquals(1, resultSet.getLong(4)); + assertEquals(1, resultSet.getLong(5)); + assertEquals(1, resultSet.getLong(6)); + assertEquals(9126, resultSet.getLong(7)); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getLong(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getLong() or else a + // SQLexception is thrown. + // These column types are varchar, char, and float. 
+ for (int i = 5; i < 7; i++) { + try { + resultSet.getLong(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetFloat() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getFloat(i), .1); - } - - resultSet.next(); - assertEquals(2, resultSet.getFloat(1), .1); - assertEquals(5, resultSet.getFloat(2), .1); - assertEquals(3.5, resultSet.getFloat(3), .1); - assertEquals(1, resultSet.getFloat(4), .1); - assertEquals(1, resultSet.getFloat(5), .1); - assertEquals(1, resultSet.getFloat(6), .1); - assertEquals(9126, resultSet.getFloat(7), .1); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getFloat(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getFloat(i), .1); } - } - resultSet.next(); - // certain column types can only have certain values when called by getFloat() or else a - // SQLexception is thrown. - // These column types are varchar and char. 
- for (int i = 5; i < 7; i++) { - try { - resultSet.getFloat(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getFloat(1), .1); + assertEquals(5, resultSet.getFloat(2), .1); + assertEquals(3.5, resultSet.getFloat(3), .1); + assertEquals(1, resultSet.getFloat(4), .1); + assertEquals(1, resultSet.getFloat(5), .1); + assertEquals(1, resultSet.getFloat(6), .1); + assertEquals(9126, resultSet.getFloat(7), .1); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getFloat(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getFloat() or else a + // SQLexception is thrown. + // These column types are varchar and char. + for (int i = 5; i < 7; i++) { + try { + resultSet.getFloat(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetDouble() throws SQLException { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertEquals(0, resultSet.getDouble(i), .1); - } - - resultSet.next(); - assertEquals(2, resultSet.getDouble(1), .1); - assertEquals(5, resultSet.getDouble(2), .1); - assertEquals(3.5, resultSet.getDouble(3), .1); - assertEquals(1, resultSet.getDouble(4), .1); - assertEquals(1, resultSet.getDouble(5), .1); - assertEquals(1, resultSet.getDouble(6), .1); - assertEquals(9126, resultSet.getDouble(7), .1); - - for (int i = 8; i < 13; i++) { - try { - resultSet.getDouble(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for 
null values for every type of value + for (int i = 1; i < 13; i++) { + assertEquals(0, resultSet.getDouble(i), .1); } - } - resultSet.next(); - // certain column types can only have certain values when called by getDouble() or else a - // SQLexception is thrown. - // These column types are varchar and char. - for (int i = 5; i < 7; i++) { - try { - resultSet.getDouble(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getDouble(1), .1); + assertEquals(5, resultSet.getDouble(2), .1); + assertEquals(3.5, resultSet.getDouble(3), .1); + assertEquals(1, resultSet.getDouble(4), .1); + assertEquals(1, resultSet.getDouble(5), .1); + assertEquals(1, resultSet.getDouble(6), .1); + assertEquals(9126, resultSet.getDouble(7), .1); + + for (int i = 8; i < 13; i++) { + try { + resultSet.getDouble(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + // certain column types can only have certain values when called by getDouble() or else a + // SQLexception is thrown. + // These column types are varchar and char. 
+ for (int i = 5; i < 7; i++) { + try { + resultSet.getDouble(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetBigDecimal() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_get(colA number(38,9))"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_get values(?)"); - BigDecimal bigDecimal1 = new BigDecimal("10000000000"); - preparedStatement.setBigDecimal(1, bigDecimal1); - preparedStatement.executeUpdate(); - - BigDecimal bigDecimal2 = new BigDecimal("100000000.123456789"); - preparedStatement.setBigDecimal(1, bigDecimal2); - preparedStatement.execute(); - - statement.execute("select * from test_get order by 1"); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - assertEquals(bigDecimal2, resultSet.getBigDecimal(1)); - assertEquals(bigDecimal2, resultSet.getBigDecimal("COLA")); - - preparedStatement.close(); - statement.execute("drop table if exists test_get"); - statement.close(); - resultSet.close(); - connection.close(); - - resultSet = numberCrossTesting(); - resultSet.next(); - for (int i = 1; i < 13; i++) { - assertNull(resultSet.getBigDecimal(i)); - } - resultSet.next(); - assertEquals(new BigDecimal(2), resultSet.getBigDecimal(1)); - assertEquals(new BigDecimal(5), resultSet.getBigDecimal(2)); - assertEquals(new BigDecimal(3.5), resultSet.getBigDecimal(3)); - assertEquals(new BigDecimal(1), resultSet.getBigDecimal(4)); - assertEquals(new BigDecimal(1), resultSet.getBigDecimal(5)); - assertEquals(new BigDecimal(1), resultSet.getBigDecimal(6)); - assertEquals(new BigDecimal(9126), resultSet.getBigDecimal(7)); - for (int i = 8; i < 13; i++) { - try { - resultSet.getBigDecimal(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (Connection 
connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_get(colA number(38,9))"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_get values(?)")) { + BigDecimal bigDecimal1 = new BigDecimal("10000000000"); + preparedStatement.setBigDecimal(1, bigDecimal1); + preparedStatement.executeUpdate(); + + BigDecimal bigDecimal2 = new BigDecimal("100000000.123456789"); + preparedStatement.setBigDecimal(1, bigDecimal2); + preparedStatement.execute(); + + statement.execute("select * from test_get order by 1"); + try (ResultSet resultSet = statement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(bigDecimal2, resultSet.getBigDecimal(1)); + assertEquals(bigDecimal2, resultSet.getBigDecimal("COLA")); + } } + statement.execute("drop table if exists test_get"); } - resultSet.next(); - for (int i = 5; i < 7; i++) { - try { - resultSet.getBigDecimal(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + for (int i = 1; i < 13; i++) { + assertNull(resultSet.getBigDecimal(i)); + } + assertTrue(resultSet.next()); + assertEquals(new BigDecimal(2), resultSet.getBigDecimal(1)); + assertEquals(new BigDecimal(5), resultSet.getBigDecimal(2)); + assertEquals(new BigDecimal(3.5), resultSet.getBigDecimal(3)); + assertEquals(new BigDecimal(1), resultSet.getBigDecimal(4)); + assertEquals(new BigDecimal(1), resultSet.getBigDecimal(5)); + assertEquals(new BigDecimal(1), resultSet.getBigDecimal(6)); + assertEquals(new BigDecimal(9126), resultSet.getBigDecimal(7)); + for (int i = 8; i < 13; i++) { + try { + resultSet.getBigDecimal(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + assertTrue(resultSet.next()); + for (int i = 5; i < 7; i++) { + try { + 
resultSet.getBigDecimal(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } } } @Test public void testGetBigDecimalNegative() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_dec(colA time)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_dec values(?)"); - java.sql.Time time = new java.sql.Time(System.currentTimeMillis()); - preparedStatement.setTime(1, time); - preparedStatement.executeUpdate(); - - statement.execute("select * from test_dec order by 1"); - ResultSet resultSet = statement.getResultSet(); - resultSet.next(); - try { - resultSet.getBigDecimal(2, 38); - fail(); - } catch (SQLException ex) { - assertEquals(200032, ex.getErrorCode()); - } - statement.execute("drop table if exists test_dec"); - statement.close(); - resultSet.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_dec(colA time)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_dec values(?)")) { + java.sql.Time time = new java.sql.Time(System.currentTimeMillis()); + preparedStatement.setTime(1, time); + preparedStatement.executeUpdate(); + + statement.execute("select * from test_dec order by 1"); + try (ResultSet resultSet = statement.getResultSet(); ) { + assertTrue(resultSet.next()); + try { + resultSet.getBigDecimal(2, 38); + fail(); + } catch (SQLException ex) { + assertEquals(200032, ex.getErrorCode()); + } + } + } + } finally { + statement.execute("drop table if exists test_dec"); + } + } } @Test public void testCursorPosition() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute(selectAllSQL); - ResultSet resultSet = 
statement.getResultSet(); - resultSet.next(); - assertTrue(resultSet.isFirst()); - assertEquals(1, resultSet.getRow()); - resultSet.next(); - assertFalse(resultSet.isFirst()); - assertEquals(2, resultSet.getRow()); - assertFalse(resultSet.isLast()); - resultSet.next(); - assertEquals(3, resultSet.getRow()); - assertTrue(resultSet.isLast()); - resultSet.next(); - assertTrue(resultSet.isAfterLast()); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute(selectAllSQL); + try (ResultSet resultSet = statement.getResultSet()) { + assertTrue(resultSet.next()); + assertTrue(resultSet.isFirst()); + assertEquals(1, resultSet.getRow()); + assertTrue(resultSet.next()); + assertFalse(resultSet.isFirst()); + assertEquals(2, resultSet.getRow()); + assertFalse(resultSet.isLast()); + assertTrue(resultSet.next()); + assertEquals(3, resultSet.getRow()); + assertTrue(resultSet.isLast()); + assertFalse(resultSet.next()); + assertTrue(resultSet.isAfterLast()); + } + } } /** @@ -481,22 +492,27 @@ public void testCursorPosition() throws SQLException { @Test public void testGetBytes() throws SQLException { Properties props = new Properties(); - Connection connection = init(props); - ingestBinaryTestData(connection); - - // Get results in hex format (default). 
- ResultSet resultSet = connection.createStatement().executeQuery("select * from bin"); - resultSet.next(); - assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); - assertEquals("", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); - assertEquals("ABCD12", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); - assertEquals("00FF4201", resultSet.getString(1)); - connection.createStatement().execute("drop table if exists bin"); - connection.close(); + try (Connection connection = init(props); + Statement statement = connection.createStatement()) { + try { + ingestBinaryTestData(connection); + + // Get results in hex format (default). + try (ResultSet resultSet = statement.executeQuery("select * from bin")) { + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); + assertEquals("", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); + assertEquals("ABCD12", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); + assertEquals("00FF4201", resultSet.getString(1)); + } + } finally { + statement.execute("drop table if exists bin"); + } + } } /** @@ -506,13 +522,16 @@ public void testGetBytes() throws SQLException { * @throws SQLException arises if any exception occurs */ private void ingestBinaryTestData(Connection connection) throws SQLException { - connection.createStatement().execute("create or replace table bin (b Binary)"); - PreparedStatement prepStatement = - connection.prepareStatement("insert into bin values (?), (?), (?)"); - prepStatement.setBytes(1, byteArrayTestCase1); - prepStatement.setBytes(2, byteArrayTestCase2); - prepStatement.setBytes(3, byteArrayTestCase3); - prepStatement.execute(); + try (Statement statement = connection.createStatement()) { + 
statement.execute("create or replace table bin (b Binary)"); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into bin values (?), (?), (?)")) { + prepStatement.setBytes(1, byteArrayTestCase1); + prepStatement.setBytes(2, byteArrayTestCase2); + prepStatement.setBytes(3, byteArrayTestCase3); + prepStatement.execute(); + } + } } /** @@ -524,266 +543,280 @@ private void ingestBinaryTestData(Connection connection) throws SQLException { public void testGetBytesInBase64() throws Exception { Properties props = new Properties(); props.setProperty("binary_output_format", "BAse64"); - Connection connection = init(props); - ingestBinaryTestData(connection); - - ResultSet resultSet = connection.createStatement().executeQuery("select * from bin"); - resultSet.next(); - assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); - assertEquals("", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); - assertEquals("q80S", resultSet.getString(1)); - resultSet.next(); - assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); - assertEquals("AP9CAQ==", resultSet.getString(1)); - - connection.createStatement().execute("drop table if exists bin"); - connection.close(); + try (Connection connection = init(props); + Statement statement = connection.createStatement()) { + try { + ingestBinaryTestData(connection); + + try (ResultSet resultSet = statement.executeQuery("select * from bin")) { + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase1, resultSet.getBytes(1)); + assertEquals("", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase2, resultSet.getBytes(1)); + assertEquals("q80S", resultSet.getString(1)); + assertTrue(resultSet.next()); + assertArrayEquals(byteArrayTestCase3, resultSet.getBytes(1)); + assertEquals("AP9CAQ==", resultSet.getString(1)); + } + } finally { + statement.execute("drop table if exists bin"); + } + } } // 
SNOW-31647 @Test public void testColumnMetaWithZeroPrecision() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table testColDecimal(cola number(38, 0), " + "colb number(17, 5))"); - - ResultSet resultSet = statement.executeQuery("select * from testColDecimal"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - - assertThat(resultSetMetaData.getColumnType(1), is(Types.BIGINT)); - assertThat(resultSetMetaData.getColumnType(2), is(Types.DECIMAL)); - assertThat(resultSetMetaData.isSigned(1), is(true)); - assertThat(resultSetMetaData.isSigned(2), is(true)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table testColDecimal(cola number(38, 0), " + "colb number(17, 5))"); - statement.execute("drop table if exists testColDecimal"); + try (ResultSet resultSet = statement.executeQuery("select * from testColDecimal")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - connection.close(); + assertThat(resultSetMetaData.getColumnType(1), is(Types.BIGINT)); + assertThat(resultSetMetaData.getColumnType(2), is(Types.DECIMAL)); + assertThat(resultSetMetaData.isSigned(1), is(true)); + assertThat(resultSetMetaData.isSigned(2), is(true)); + } + } finally { + statement.execute("drop table if exists testColDecimal"); + } + } } @Test public void testGetObjectOnFixedView() throws Exception { - Connection connection = init(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table testFixedView" - + "(C1 STRING NOT NULL COMMENT 'JDBC', " - + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " - + "C7 STRING, C8 STRING, C9 STRING) " - + "stage_file_format = (field_delimiter='|' " - + "error_on_column_count_mismatch=false)"); - - // put files - assertTrue( - "Failed to put a file", + try (Connection 
connection = init(); + Statement statement = connection.createStatement()) { + try { statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView")); - - ResultSet resultSet = - statement.executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%testFixedView"); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - while (resultSet.next()) { - for (int i = 0; i < resultSetMetaData.getColumnCount(); i++) { - assertNotNull(resultSet.getObject(i + 1)); + "create or replace table testFixedView" + + "(C1 STRING NOT NULL COMMENT 'JDBC', " + + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " + + "C7 STRING, C8 STRING, C9 STRING) " + + "stage_file_format = (field_delimiter='|' " + + "error_on_column_count_mismatch=false)"); + + // put files + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView")); + + try (ResultSet resultSet = + statement.executeQuery( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%testFixedView")) { + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + while (resultSet.next()) { + for (int i = 0; i < resultSetMetaData.getColumnCount(); i++) { + assertNotNull(resultSet.getObject(i + 1)); + } + } + } + } finally { + statement.execute("drop table if exists testFixedView"); } } - - resultSet.close(); - statement.execute("drop table if exists testFixedView"); - statement.close(); - connection.close(); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetColumnDisplaySizeAndPrecision() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - ResultSet resultSet = statement.executeQuery("select cast(1 as char)"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); - 
assertEquals(1, resultSetMetaData.getPrecision(1)); - - resultSet = statement.executeQuery("select cast(1 as number(38, 0))"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(39, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(38, resultSetMetaData.getPrecision(1)); + ResultSetMetaData resultSetMetaData = null; + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + + try (ResultSet resultSet = statement.executeQuery("select cast(1 as char)")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(1, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select cast(1 as decimal(25, 15))"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(27, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(25, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as number(38, 0))")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(39, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(38, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select cast(1 as string)"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(1, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as decimal(25, 15))")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(27, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(25, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select cast(1 as string(30))"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(1, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as string)")) { + 
resultSetMetaData = resultSet.getMetaData(); + assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(1, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select to_date('2016-12-13', 'YYYY-MM-DD')"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(10, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(10, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = statement.executeQuery("select cast(1 as string(30))")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(1, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(1, resultSetMetaData.getPrecision(1)); + } - resultSet = statement.executeQuery("select to_time('12:34:56', 'HH24:MI:SS')"); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(8, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals(8, resultSetMetaData.getPrecision(1)); + try (ResultSet resultSet = + statement.executeQuery("select to_date('2016-12-13', 'YYYY-MM-DD')")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(10, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(10, resultSetMetaData.getPrecision(1)); + } - statement.close(); - connection.close(); + try (ResultSet resultSet = + statement.executeQuery("select to_time('12:34:56', 'HH24:MI:SS')")) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(8, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals(8, resultSetMetaData.getPrecision(1)); + } + } } @Test public void testGetBoolean() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table testBoolean(cola boolean)"); - statement.execute("insert into testBoolean values(false)"); - ResultSet resultSet = statement.executeQuery("select * from testBoolean"); - resultSet.next(); - assertFalse(resultSet.getBoolean(1)); - - statement.execute("insert into testBoolean values(true)"); - resultSet = 
statement.executeQuery("select * from testBoolean"); - resultSet.next(); - assertFalse(resultSet.getBoolean(1)); - resultSet.next(); - assertTrue(resultSet.getBoolean(1)); - statement.execute("drop table if exists testBoolean"); - - statement.execute( - "create or replace table test_types(c1 number, c2 integer, c3 varchar, c4 char, " - + "c5 boolean, c6 float, c7 binary, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, " - + "c12 timestamp_tz)"); - statement.execute( - "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " - + "null, null)"); - statement.execute( - "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(1, 1, '1'," - + "'1', true, 1.0, '48454C4C4F', '1994-12-27', " - + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); - statement.execute("insert into test_types (c1, c2, c3, c4) values(2, 3, '4', '5')"); - resultSet = statement.executeQuery("select * from test_types"); - - resultSet.next(); - // assert that getBoolean returns false for null values - for (int i = 1; i < 13; i++) { - assertFalse(resultSet.getBoolean(i)); - } - // do the other columns that are out of order - // go to next row of result set column - resultSet.next(); - // assert that getBoolean returns true for values that equal 1 - assertTrue(resultSet.getBoolean(1)); - assertTrue(resultSet.getBoolean(2)); - assertTrue(resultSet.getBoolean(3)); - assertTrue(resultSet.getBoolean(4)); - assertTrue(resultSet.getBoolean(5)); - for (int i = 6; i < 13; i++) { - try { - resultSet.getBoolean(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table testBoolean(cola boolean)"); + statement.execute("insert into testBoolean values(false)"); + try (ResultSet resultSet = statement.executeQuery("select * from 
testBoolean")) { + assertTrue(resultSet.next()); + assertFalse(resultSet.getBoolean(1)); } - } - - resultSet.next(); - for (int i = 1; i < 5; i++) { - try { - resultSet.getBoolean(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + statement.execute("insert into testBoolean values(true)"); + try (ResultSet resultSet = statement.executeQuery("select * from testBoolean")) { + assertTrue(resultSet.next()); + assertFalse(resultSet.getBoolean(1)); + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(1)); + } + statement.execute("drop table if exists testBoolean"); + + statement.execute( + "create or replace table test_types(c1 number, c2 integer, c3 varchar, c4 char, " + + "c5 boolean, c6 float, c7 binary, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, " + + "c12 timestamp_tz)"); + statement.execute( + "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " + + "null, null)"); + statement.execute( + "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(1, 1, '1'," + + "'1', true, 1.0, '48454C4C4F', '1994-12-27', " + + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); + statement.execute("insert into test_types (c1, c2, c3, c4) values(2, 3, '4', '5')"); + try (ResultSet resultSet = statement.executeQuery("select * from test_types")) { + + assertTrue(resultSet.next()); + // assert that getBoolean returns false for null values + for (int i = 1; i < 13; i++) { + assertFalse(resultSet.getBoolean(i)); + } + // do the other columns that are out of order + // go to next row of result set column + assertTrue(resultSet.next()); + // assert that getBoolean returns true for values that equal 1 + assertTrue(resultSet.getBoolean(1)); + assertTrue(resultSet.getBoolean(2)); + assertTrue(resultSet.getBoolean(3)); + assertTrue(resultSet.getBoolean(4)); + assertTrue(resultSet.getBoolean(5)); + for (int i = 6; i 
< 13; i++) { + try { + resultSet.getBoolean(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } + + assertTrue(resultSet.next()); + for (int i = 1; i < 5; i++) { + try { + resultSet.getBoolean(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } + } } } - - statement.close(); - connection.close(); } @Test public void testGetClob() throws Throwable { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table testClob(cola text)"); - statement.execute("insert into testClob values('hello world')"); - statement.execute("insert into testClob values('hello world1')"); - statement.execute("insert into testClob values('hello world2')"); - statement.execute("insert into testClob values('hello world3')"); - ResultSet resultSet = statement.executeQuery("select * from testClob"); - resultSet.next(); - // test reading Clob - char[] chars = new char[100]; - Reader reader = resultSet.getClob(1).getCharacterStream(); - int charRead; - charRead = reader.read(chars, 0, chars.length); - assertEquals(charRead, 11); - assertEquals("hello world", resultSet.getClob(1).toString()); - - // test reading truncated clob - resultSet.next(); - Clob clob = resultSet.getClob(1); - assertEquals(clob.length(), 12); - clob.truncate(5); - reader = clob.getCharacterStream(); - - charRead = reader.read(chars, 0, chars.length); - assertEquals(charRead, 5); - - // read from input stream - resultSet.next(); - final InputStream input = resultSet.getClob(1).getAsciiStream(); - - Reader in = new InputStreamReader(input, StandardCharsets.UTF_8); - charRead = in.read(chars, 0, chars.length); - assertEquals(charRead, 12); - - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table testClob(cola text)"); 
+ statement.execute("insert into testClob values('hello world')"); + statement.execute("insert into testClob values('hello world1')"); + statement.execute("insert into testClob values('hello world2')"); + statement.execute("insert into testClob values('hello world3')"); + try (ResultSet resultSet = statement.executeQuery("select * from testClob")) { + assertTrue(resultSet.next()); + // test reading Clob + char[] chars = new char[100]; + Reader reader = resultSet.getClob(1).getCharacterStream(); + int charRead; + charRead = reader.read(chars, 0, chars.length); + assertEquals(charRead, 11); + assertEquals("hello world", resultSet.getClob(1).toString()); + + // test reading truncated clob + assertTrue(resultSet.next()); + Clob clob = resultSet.getClob(1); + assertEquals(clob.length(), 12); + clob.truncate(5); + reader = clob.getCharacterStream(); + + charRead = reader.read(chars, 0, chars.length); + assertEquals(charRead, 5); + + // read from input stream + assertTrue(resultSet.next()); + final InputStream input = resultSet.getClob(1).getAsciiStream(); + + Reader in = new InputStreamReader(input, StandardCharsets.UTF_8); + charRead = in.read(chars, 0, chars.length); + assertEquals(charRead, 12); + } + } } @Test public void testFetchOnClosedResultSet() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(selectAllSQL); - assertFalse(resultSet.isClosed()); - resultSet.close(); - assertTrue(resultSet.isClosed()); - assertFalse(resultSet.next()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + ResultSet resultSet = statement.executeQuery(selectAllSQL); + assertFalse(resultSet.isClosed()); + resultSet.close(); + assertTrue(resultSet.isClosed()); + assertFalse(resultSet.next()); + } } @Test public void testReleaseDownloaderCurrentMemoryUsage() throws SQLException { - Connection connection = init(); - Statement statement = 
connection.createStatement(); - final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); + try (Connection connection = init()) { + final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); + + try (Statement statement = connection.createStatement()) { - statement.executeQuery( - "select current_date(), true,2345234, 2343.0, 'testrgint\\n\\t' from table(generator(rowcount=>1000000))"); + statement.executeQuery( + "select current_date(), true,2345234, 2343.0, 'testrgint\\n\\t' from table(generator(rowcount=>1000000))"); - assertThat( - "hold memory usage for the resultSet before close", - SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); - statement.close(); - assertThat( - "closing statement didn't release memory allocated for result", - SnowflakeChunkDownloader.getCurrentMemoryUsage(), - equalTo(initialMemoryUsage)); - connection.close(); + assertThat( + "hold memory usage for the resultSet before close", + SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); + } + assertThat( + "closing statement didn't release memory allocated for result", + SnowflakeChunkDownloader.getCurrentMemoryUsage(), + equalTo(initialMemoryUsage)); + } } @Test @@ -800,200 +833,209 @@ public void testResultColumnSearchCaseSensitive() throws Exception { private void subTestResultColumnSearchCaseSensitive(String parameterName) throws Exception { Properties prop = new Properties(); prop.put("tracing", "FINEST"); - Connection connection = init(prop); - Statement statement = connection.createStatement(); - - ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL"); - - resultSet.next(); - assertEquals("1", resultSet.getString("TESTCOL")); - assertEquals("1", resultSet.getString("TESTCOL")); - try { - resultSet.getString("testcol"); - fail(); - } catch (SQLException e) { - assertEquals("Column not found: testcol", e.getMessage()); + try (Connection connection = init(prop); + Statement 
statement = connection.createStatement()) { + + try (ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL")) { + + assertTrue(resultSet.next()); + assertEquals("1", resultSet.getString("TESTCOL")); + assertEquals("1", resultSet.getString("TESTCOL")); + try { + resultSet.getString("testcol"); + fail(); + } catch (SQLException e) { + assertEquals("Column not found: testcol", e.getMessage()); + } + } + // try to do case-insensitive search + statement.executeQuery(String.format("alter session set %s=true", parameterName)); + + try (ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL")) { + assertTrue(resultSet.next()); + + // get twice so that the code path can hit the place where + // we use cached key pair (columnName, index) + assertEquals("1", resultSet.getString("TESTCOL")); + assertEquals("1", resultSet.getString("TESTCOL")); + assertEquals("1", resultSet.getString("testcol")); + assertEquals("1", resultSet.getString("testcol")); + } } - - // try to do case-insensitive search - statement.executeQuery(String.format("alter session set %s=true", parameterName)); - - resultSet = statement.executeQuery("select 1 AS TESTCOL"); - resultSet.next(); - - // get twice so that the code path can hit the place where - // we use cached key pair (columnName, index) - assertEquals("1", resultSet.getString("TESTCOL")); - assertEquals("1", resultSet.getString("TESTCOL")); - assertEquals("1", resultSet.getString("testcol")); - assertEquals("1", resultSet.getString("testcol")); } @Test public void testInvalidColumnIndex() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(selectAllSQL); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectAllSQL)) { - resultSet.next(); - try { - resultSet.getString(0); - fail(); - } catch (SQLException e) { - 
assertEquals(200032, e.getErrorCode()); - } - try { - resultSet.getString(2); - fail(); - } catch (SQLException e) { - assertEquals(200032, e.getErrorCode()); + assertTrue(resultSet.next()); + try { + resultSet.getString(0); + fail(); + } catch (SQLException e) { + assertEquals(200032, e.getErrorCode()); + } + try { + resultSet.getString(2); + fail(); + } catch (SQLException e) { + assertEquals(200032, e.getErrorCode()); + } } - resultSet.close(); - statement.close(); - connection.close(); } /** SNOW-28882: wasNull was not set properly */ @Test public void testWasNull() throws Exception { - Connection con = init(); - ResultSet ret = - con.createStatement() - .executeQuery( - "select cast(1/nullif(0,0) as double)," - + "cast(1/nullif(0,0) as int), 100, " - + "cast(1/nullif(0,0) as number(8,2))"); - ret.next(); - assertThat("Double value cannot be null", ret.getDouble(1), equalTo(0.0)); - assertThat("wasNull should be true", ret.wasNull()); - assertThat("Integer value cannot be null", ret.getInt(2), equalTo(0)); - assertThat("wasNull should be true", ret.wasNull()); - assertThat("Non null column", ret.getInt(3), equalTo(100)); - assertThat("wasNull should be false", !ret.wasNull()); - assertThat("BigDecimal value must be null", ret.getBigDecimal(4), nullValue()); - assertThat("wasNull should be true", ret.wasNull()); + try (Connection con = init(); + ResultSet ret = + con.createStatement() + .executeQuery( + "select cast(1/nullif(0,0) as double)," + + "cast(1/nullif(0,0) as int), 100, " + + "cast(1/nullif(0,0) as number(8,2))")) { + assertTrue(ret.next()); + assertThat("Double value cannot be null", ret.getDouble(1), equalTo(0.0)); + assertThat("wasNull should be true", ret.wasNull()); + assertThat("Integer value cannot be null", ret.getInt(2), equalTo(0)); + assertThat("wasNull should be true", ret.wasNull()); + assertThat("Non null column", ret.getInt(3), equalTo(100)); + assertThat("wasNull should be false", !ret.wasNull()); + assertThat("BigDecimal value must be 
null", ret.getBigDecimal(4), nullValue()); + assertThat("wasNull should be true", ret.wasNull()); + } } /** SNOW-28390 */ @Test public void testParseInfAndNaNNumber() throws Exception { - Connection con = init(); - ResultSet ret = - con.createStatement().executeQuery("select to_double('inf'), to_double('-inf')"); - ret.next(); - assertThat("Positive Infinite Number", ret.getDouble(1), equalTo(Double.POSITIVE_INFINITY)); - assertThat("Negative Infinite Number", ret.getDouble(2), equalTo(Double.NEGATIVE_INFINITY)); - assertThat("Positive Infinite Number", ret.getFloat(1), equalTo(Float.POSITIVE_INFINITY)); - assertThat("Negative Infinite Number", ret.getFloat(2), equalTo(Float.NEGATIVE_INFINITY)); - - ret = con.createStatement().executeQuery("select to_double('nan')"); - ret.next(); - assertThat("Parse NaN", ret.getDouble(1), equalTo(Double.NaN)); - assertThat("Parse NaN", ret.getFloat(1), equalTo(Float.NaN)); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try (ResultSet ret = statement.executeQuery("select to_double('inf'), to_double('-inf')")) { + assertTrue(ret.next()); + assertThat("Positive Infinite Number", ret.getDouble(1), equalTo(Double.POSITIVE_INFINITY)); + assertThat("Negative Infinite Number", ret.getDouble(2), equalTo(Double.NEGATIVE_INFINITY)); + assertThat("Positive Infinite Number", ret.getFloat(1), equalTo(Float.POSITIVE_INFINITY)); + assertThat("Negative Infinite Number", ret.getFloat(2), equalTo(Float.NEGATIVE_INFINITY)); + } + try (ResultSet ret = statement.executeQuery("select to_double('nan')")) { + assertTrue(ret.next()); + assertThat("Parse NaN", ret.getDouble(1), equalTo(Double.NaN)); + assertThat("Parse NaN", ret.getFloat(1), equalTo(Float.NaN)); + } + } } /** SNOW-33227 */ @Test public void testTreatDecimalAsInt() throws Exception { - Connection con = init(); - ResultSet ret = con.createStatement().executeQuery("select 1"); - - ResultSetMetaData metaData = ret.getMetaData(); - 
assertThat(metaData.getColumnType(1), equalTo(Types.BIGINT)); - - con.createStatement().execute("alter session set jdbc_treat_decimal_as_int = false"); + ResultSetMetaData metaData; + try (Connection con = init(); + Statement statement = con.createStatement()) { + try (ResultSet ret = statement.executeQuery("select 1")) { - ret = con.createStatement().executeQuery("select 1"); - metaData = ret.getMetaData(); - assertThat(metaData.getColumnType(1), equalTo(Types.DECIMAL)); + metaData = ret.getMetaData(); + assertThat(metaData.getColumnType(1), equalTo(Types.BIGINT)); + } + statement.execute("alter session set jdbc_treat_decimal_as_int = false"); - con.close(); + try (ResultSet ret = statement.executeQuery("select 1")) { + metaData = ret.getMetaData(); + assertThat(metaData.getColumnType(1), equalTo(Types.DECIMAL)); + } + } } @Test public void testIsLast() throws Exception { - Connection con = init(); - ResultSet ret = con.createStatement().executeQuery("select * from orders_jdbc"); - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); - - ret.next(); - - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); - - int cnt = 0; - while (ret.next()) { - cnt++; - if (cnt == 72) { - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try (ResultSet ret = statement.executeQuery("select * from orders_jdbc")) { + assertTrue("should be before the first", ret.isBeforeFirst()); + assertFalse("should not be the first", ret.isFirst()); + + assertTrue(ret.next()); + + assertFalse("should not be before the first", ret.isBeforeFirst()); + assertTrue("should be the first", ret.isFirst()); + + int cnt = 0; + while (ret.next()) { + cnt++; + if (cnt == 72) { + assertTrue("should be the last", ret.isLast()); + 
assertFalse("should not be after the last", ret.isAfterLast()); + } + } + assertEquals(72, cnt); + + assertFalse(ret.next()); + + assertFalse("should not be the last", ret.isLast()); + assertTrue("should be afterthe last", ret.isAfterLast()); } - } - assertEquals(72, cnt); + // PUT one file + try (ResultSet ret = + statement.executeQuery( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~")) { - ret.next(); + assertTrue("should be before the first", ret.isBeforeFirst()); + assertFalse("should not be the first", ret.isFirst()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be afterthe last", ret.isAfterLast()); + assertTrue(ret.next()); - // PUT one file - ret = - con.createStatement() - .executeQuery("PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~"); + assertFalse("should not be before the first", ret.isBeforeFirst()); + assertTrue("should be the first", ret.isFirst()); - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); - - ret.next(); - - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); - - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + assertTrue("should be the last", ret.isLast()); + assertFalse("should not be after the last", ret.isAfterLast()); - ret.next(); + assertFalse(ret.next()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be after the last", ret.isAfterLast()); + assertFalse("should not be the last", ret.isLast()); + assertTrue("should be after the last", ret.isAfterLast()); + } + } } @Test public void testUpdateCountOnCopyCmd() throws Exception { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute("create or replace table testcopy(cola string)"); - - // stage table has no file. Should return 0. 
- int rowCount = statement.executeUpdate("copy into testcopy"); - assertThat(rowCount, is(0)); - - // copy one file into table stage - statement.execute("copy into @%testcopy from (select 'test_string')"); - rowCount = statement.executeUpdate("copy into testcopy"); - assertThat(rowCount, is(1)); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table testcopy(cola string)"); - // cleanup - statement.execute("drop table if exists testcopy"); + // stage table has no file. Should return 0. + int rowCount = statement.executeUpdate("copy into testcopy"); + assertThat(rowCount, is(0)); - con.close(); + // copy one file into table stage + statement.execute("copy into @%testcopy from (select 'test_string')"); + rowCount = statement.executeUpdate("copy into testcopy"); + assertThat(rowCount, is(1)); + } finally { + // cleanup + statement.execute("drop table if exists testcopy"); + } + } } @Test public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { - try (Connection con = init()) { - con.createStatement().execute("create or replace table testnullts(c1 timestamp, c2 time)"); + try (Connection con = init(); + Statement statement = con.createStatement()) { try { - con.createStatement().execute("insert into testnullts(c1, c2) values(null, null)"); - ResultSet rs = con.createStatement().executeQuery("select * from testnullts"); - assertTrue("should return result", rs.next()); - assertNull("return value must be null", rs.getTime(1)); - assertNull("return value must be null", rs.getTimestamp(2)); - rs.close(); + statement.execute("create or replace table testnullts(c1 timestamp, c2 time)"); + statement.execute("insert into testnullts(c1, c2) values(null, null)"); + try (ResultSet rs = statement.executeQuery("select * from testnullts")) { + assertTrue("should return result", rs.next()); + assertNull("return value must be null", rs.getTime(1)); + assertNull("return value must be 
null", rs.getTimestamp(2)); + } } finally { - con.createStatement().execute("drop table if exists testnullts"); + statement.execute("drop table if exists testnullts"); } } } @@ -1001,17 +1043,17 @@ public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { @Test public void testNextNegative() throws SQLException { try (Connection con = init()) { - ResultSet rs = con.createStatement().executeQuery("select 1"); - rs.next(); - System.setProperty("snowflake.enable_incident_test2", "true"); - try { - rs.next(); - fail(); - } catch (SQLException ex) { - assertEquals(200014, ex.getErrorCode()); + try (ResultSet rs = con.createStatement().executeQuery("select 1")) { + assertTrue(rs.next()); + System.setProperty("snowflake.enable_incident_test2", "true"); + try { + assertTrue(rs.next()); + fail(); + } catch (SQLException ex) { + assertEquals(200014, ex.getErrorCode()); + } + System.setProperty("snowflake.enable_incident_test2", "false"); } - System.setProperty("snowflake.enable_incident_test2", "false"); - rs.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java index 5a13d368e..65cc27242 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java @@ -55,124 +55,147 @@ public ResultSetJsonVsArrowIT(String queryResultFormat) { public Connection init() throws SQLException { Connection conn = getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testGSResult() throws SQLException { - Connection con = init(); - Statement statement 
= con.createStatement(); - ResultSet rs = - statement.executeQuery( - "select 1, 128, 65500, 10000000000000, " - + "1000000000000000000000000000000000000, NULL, " - + "current_timestamp, current_timestamp(0), current_timestamp(5)," - + "current_date, current_time, current_time(0), current_time(5);"); - rs.next(); - assertEquals((byte) 1, rs.getByte(1)); - assertEquals((short) 128, rs.getShort(2)); - assertEquals(65500, rs.getInt(3)); - assertEquals(10000000000000l, rs.getLong(4)); - assertEquals(new BigDecimal("1000000000000000000000000000000000000"), rs.getBigDecimal(5)); - assertNull(rs.getString(6)); - assertNotNull(rs.getTimestamp(7)); - assertNotNull(rs.getTimestamp(8)); - assertNotNull(rs.getTimestamp(9)); - - assertNotNull(rs.getDate(10)); - assertNotNull(rs.getTime(11)); - assertNotNull(rs.getTime(12)); - assertNotNull(rs.getTime(13)); + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = + statement.executeQuery( + "select 1, 128, 65500, 10000000000000, " + + "1000000000000000000000000000000000000, NULL, " + + "current_timestamp, current_timestamp(0), current_timestamp(5)," + + "current_date, current_time, current_time(0), current_time(5);")) { + assertTrue(rs.next()); + assertEquals((byte) 1, rs.getByte(1)); + assertEquals((short) 128, rs.getShort(2)); + assertEquals(65500, rs.getInt(3)); + assertEquals(10000000000000l, rs.getLong(4)); + assertEquals(new BigDecimal("1000000000000000000000000000000000000"), rs.getBigDecimal(5)); + assertNull(rs.getString(6)); + assertNotNull(rs.getTimestamp(7)); + assertNotNull(rs.getTimestamp(8)); + assertNotNull(rs.getTimestamp(9)); + + assertNotNull(rs.getDate(10)); + assertNotNull(rs.getTime(11)); + assertNotNull(rs.getTime(12)); + assertNotNull(rs.getTime(13)); + } } @Test public void testGSResultReal() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - statement.execute("create or replace table t (a real)"); - 
statement.execute("insert into t values (123.456)"); - ResultSet rs = statement.executeQuery("select * from t;"); - rs.next(); - assertEquals(123.456, rs.getFloat(1), 0.001); - finish("t", con); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table t (a real)"); + statement.execute("insert into t values (123.456)"); + try (ResultSet rs = statement.executeQuery("select * from t;")) { + assertTrue(rs.next()); + assertEquals(123.456, rs.getFloat(1), 0.001); + } + } finally { + statement.execute("drop table if exists t"); + } + } } @Test public void testGSResultScan() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - statement.execute("create or replace table t (a text)"); - statement.execute("insert into t values ('test')"); - ResultSet rs = statement.executeQuery("select count(*) from t;"); - rs.next(); - assertEquals(1, rs.getInt(1)); - String queryId = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - rs = con.createStatement().executeQuery("select * from table(result_scan('" + queryId + "'))"); - rs.next(); - assertEquals(1, rs.getInt(1)); - finish("t", con); + String queryId = null; + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table t (a text)"); + statement.execute("insert into t values ('test')"); + try (ResultSet rs = statement.executeQuery("select count(*) from t;")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + queryId = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + } + try (ResultSet rs = + statement.executeQuery("select * from table(result_scan('" + queryId + "'))")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + } finally { + statement.execute("drop table if exists t"); + } + } } @Test public void testGSResultForEmptyAndSmallTable() throws SQLException { - Connection con = init(); - Statement statement = 
con.createStatement(); - statement.execute("create or replace table t (a int)"); - ResultSet rs = statement.executeQuery("select * from t;"); - assertFalse(rs.next()); - statement.execute("insert into t values (1)"); - rs = statement.executeQuery("select * from t;"); - rs.next(); - assertEquals(1, rs.getInt(1)); - finish("t", con); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table t (a int)"); + try (ResultSet rs = statement.executeQuery("select * from t;")) { + assertFalse(rs.next()); + } + statement.execute("insert into t values (1)"); + try (ResultSet rs = statement.executeQuery("select * from t;")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + } finally { + statement.execute("drop table if exists t"); + } + } } @Test public void testSNOW89737() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute( - "create or replace table test_types(c1 number, c2 integer, c3 float, c4 varchar, c5 char, c6 " - + "binary, c7 boolean, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, c12 timestamp_tz, c13 " - + "variant, c14 object, c15 array)"); - statement.execute( - "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " - + "null, null, null, null, null)"); - statement.execute( - "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(5, 5, 5.0," - + "'hello', 'h', '48454C4C4F', true, '1994-12-27', " - + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); - statement.execute( - "insert into test_types(c13) select parse_json(' { \"key1\\x00\":\"value1\" } ')"); - statement.execute( - "insert into test_types(c14) select parse_json(' { \"key1\\x00\":\"value1\" } ')"); - statement.execute( - "insert into test_types(c15) select parse_json('{\"fruits\" : [\"apples\", \"pears\", " - + "\"oranges\"]}')"); - 
ResultSet resultSet = statement.executeQuery("select * from test_types"); - // test first row of result set against all "get" methods - assertTrue(resultSet.next()); - // test getString method against all other data types - assertEquals(null, resultSet.getString(1)); - assertEquals(null, resultSet.getString(2)); - assertEquals(null, resultSet.getString(3)); - assertEquals(null, resultSet.getString(4)); - assertEquals(null, resultSet.getString(5)); - assertEquals(null, resultSet.getString(6)); - assertEquals(null, resultSet.getString(7)); - assertEquals(null, resultSet.getString(8)); - assertEquals(null, resultSet.getString(9)); - assertEquals(null, resultSet.getString(10)); - assertEquals(null, resultSet.getString(11)); - assertEquals(null, resultSet.getString(12)); - assertEquals(null, resultSet.getString(13)); - assertEquals(null, resultSet.getString(14)); - assertEquals(null, resultSet.getString(15)); - finish("test_types", con); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute( + "create or replace table test_types(c1 number, c2 integer, c3 float, c4 varchar, c5 char, c6 " + + "binary, c7 boolean, c8 date, c9 datetime, c10 time, c11 timestamp_ltz, c12 timestamp_tz, c13 " + + "variant, c14 object, c15 array)"); + statement.execute( + "insert into test_types values (null, null, null, null, null, null, null, null, null, null, " + + "null, null, null, null, null)"); + statement.execute( + "insert into test_types (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) values(5, 5, 5.0," + + "'hello', 'h', '48454C4C4F', true, '1994-12-27', " + + "'1994-12-27 05:05:05', '05:05:05', '1994-12-27 05:05:05 +00:05', '1994-12-27 05:05:05')"); + statement.execute( + "insert into test_types(c13) select parse_json(' { \"key1\\x00\":\"value1\" } ')"); + statement.execute( + "insert into test_types(c14) select parse_json(' { \"key1\\x00\":\"value1\" } ')"); + statement.execute( + "insert into test_types(c15) select 
parse_json('{\"fruits\" : [\"apples\", \"pears\", " + + "\"oranges\"]}')"); + ResultSet resultSet = statement.executeQuery("select * from test_types"); + // test first row of result set against all "get" methods + assertTrue(resultSet.next()); + // test getString method against all other data types + assertEquals(null, resultSet.getString(1)); + assertEquals(null, resultSet.getString(2)); + assertEquals(null, resultSet.getString(3)); + assertEquals(null, resultSet.getString(4)); + assertEquals(null, resultSet.getString(5)); + assertEquals(null, resultSet.getString(6)); + assertEquals(null, resultSet.getString(7)); + assertEquals(null, resultSet.getString(8)); + assertEquals(null, resultSet.getString(9)); + assertEquals(null, resultSet.getString(10)); + assertEquals(null, resultSet.getString(11)); + assertEquals(null, resultSet.getString(12)); + assertEquals(null, resultSet.getString(13)); + assertEquals(null, resultSet.getString(14)); + assertEquals(null, resultSet.getString(15)); + } finally { + statement.execute("drop table if exists t"); + } + } } /** @@ -182,10 +205,10 @@ public void testSNOW89737() throws SQLException { */ @Test public void testSemiStructuredData() throws SQLException { - Connection con = init(); - ResultSet rs = - con.createStatement() - .executeQuery( + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = + statement.executeQuery( "select array_construct(10, 20, 30), " + "array_construct(null, 'hello', 3::double, 4, 5), " + "array_construct(), " @@ -193,59 +216,59 @@ public void testSemiStructuredData() throws SQLException { + "object_construct('Key_One', parse_json('NULL'), 'Key_Two', null, 'Key_Three', 'null')," + "to_variant(3.2)," + "parse_json('{ \"a\": null}')," - + " 100::variant;"); - while (rs.next()) { - assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); - assertEquals( - "[\n" - + " undefined,\n" - + " \"hello\",\n" - + " 3.000000000000000e+00,\n" - + " 4,\n" - + " 
5\n" - + "]", - rs.getString(2)); - assertEquals("{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\"\n" + "}", rs.getString(4)); - assertEquals( - "{\n" + " \"Key_One\": null,\n" + " \"Key_Three\": \"null\"\n" + "}", rs.getString(5)); - assertEquals("{\n" + " \"a\": null\n" + "}", rs.getString(7)); - assertEquals("[]", rs.getString(3)); - assertEquals("3.2", rs.getString(6)); - assertEquals("100", rs.getString(8)); + + " 100::variant;")) { + while (rs.next()) { + assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); + assertEquals( + "[\n" + + " undefined,\n" + + " \"hello\",\n" + + " 3.000000000000000e+00,\n" + + " 4,\n" + + " 5\n" + + "]", + rs.getString(2)); + assertEquals("{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\"\n" + "}", rs.getString(4)); + assertEquals( + "{\n" + " \"Key_One\": null,\n" + " \"Key_Three\": \"null\"\n" + "}", + rs.getString(5)); + assertEquals("{\n" + " \"a\": null\n" + "}", rs.getString(7)); + assertEquals("[]", rs.getString(3)); + assertEquals("3.2", rs.getString(6)); + assertEquals("100", rs.getString(8)); + } } - con.close(); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStructuredTypes() throws SQLException { - Connection con = init(); - - Statement stmt = con.createStatement(); - stmt.execute("alter session set feature_structured_types = 'ENABLED';"); - - stmt.close(); - - ResultSet rs = - con.createStatement() - .executeQuery( - "select array_construct(10, 20, 30)::array(int), " - + "object_construct_keep_null('a', 1, 'b', 'BBBB', 'c', null)::object(a int, b varchar, c int), " - + "object_construct_keep_null('k1', 'v1', 'k2', null)::map(varchar, varchar);"); - while (rs.next()) { - assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); - assertEquals( - "{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\",\n" + " \"c\": null\n" + "}", - rs.getString(2)); - assertEquals("{\n" + " \"k1\": \"v1\",\n" + " \"k2\": null\n" + "}", rs.getString(3)); + try 
(Connection con = init(); + Statement stmt = con.createStatement()) { + stmt.execute("alter session set feature_structured_types = 'ENABLED';"); + + try (ResultSet rs = + stmt.executeQuery( + "select array_construct(10, 20, 30)::array(int), " + + "object_construct_keep_null('a', 1, 'b', 'BBBB', 'c', null)::object(a int, b varchar, c int), " + + "object_construct_keep_null('k1', 'v1', 'k2', null)::map(varchar, varchar);")) { + while (rs.next()) { + assertEquals("[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", rs.getString(1)); + assertEquals( + "{\n" + " \"a\": 1,\n" + " \"b\": \"BBBB\",\n" + " \"c\": null\n" + "}", + rs.getString(2)); + assertEquals("{\n" + " \"k1\": \"v1\",\n" + " \"k2\": null\n" + "}", rs.getString(3)); + } + } } - con.close(); } private Connection init(String table, String column, String values) throws SQLException { Connection con = init(); - con.createStatement().execute("create or replace table " + table + " " + column); - con.createStatement().execute("insert into " + table + " values " + values); + try (Statement statement = con.createStatement()) { + statement.execute("create or replace table " + table + " " + column); + statement.execute("insert into " + table + " values " + values); + } return con; } @@ -253,11 +276,6 @@ private boolean isJSON() { return queryResultFormat.equalsIgnoreCase("json"); } - private void finish(String table, Connection con) throws SQLException { - con.createStatement().execute("drop table " + table); - con.close(); - } - /** * compare behaviors (json vs arrow) * @@ -275,43 +293,47 @@ public void testTinyInt() throws SQLException { String table = "test_arrow_tiny_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, 
columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(cases[i], rs.getInt(1)); - assertEquals((short) cases[i], rs.getShort(1)); - assertEquals((long) cases[i], rs.getLong(1)); - assertEquals((Integer.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - assertEquals(cases[i], rs.getByte(1)); - - byte[] bytes = new byte[1]; - bytes[0] = (byte) cases[i]; - assertArrayEquals(bytes, rs.getBytes(1)); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(cases[i], rs.getInt(1)); + assertEquals((short) cases[i], rs.getShort(1)); + assertEquals((long) cases[i], rs.getLong(1)); + assertEquals((Integer.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + assertEquals(cases[i], rs.getByte(1)); + + byte[] bytes = new byte[1]; + bytes[0] = (byte) cases[i]; + assertArrayEquals(bytes, rs.getBytes(1)); + } + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + 
assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); + } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -333,80 +355,84 @@ public void testScaledTinyInt() throws SQLException { String table = "test_arrow_tiny_int"; String column = "(a number(3,2))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_tiny_int"); - double delta = 0.001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_tiny_int")) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se 
= (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals((String.format("%.2f", cases[i])), rs.getString(1)); - assertEquals(cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - if (isJSON()) { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - // Note: not caught by SQLException! 
- assertTrue(e.toString().contains("NumberFormatException")); + assertEquals((String.format("%.2f", cases[i])), rs.getString(1)); + assertEquals(cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + if (isJSON()) { + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + // Note: not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } + } else { + assertEquals(((byte) (cases[i] * 100)), rs.getByte(1)); + } + + if (!isJSON()) { + byte[] bytes = new byte[1]; + bytes[0] = rs.getByte(1); + assertArrayEquals(bytes, rs.getBytes(1)); + } } - } else { - assertEquals(((byte) (cases[i] * 100)), rs.getByte(1)); - } - if (!isJSON()) { - byte[] bytes = new byte[1]; - bytes[0] = rs.getByte(1); - assertArrayEquals(bytes, rs.getBytes(1)); + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -426,65 +452,70 @@ public void 
testSmallInt() throws SQLException { String table = "test_arrow_small_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(cases[i], rs.getInt(1)); - assertEquals(cases[i], rs.getShort(1)); - assertEquals((long) cases[i], rs.getLong(1)); - assertEquals((Integer.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - if (cases[i] <= 127 && cases[i] >= -128) { - assertEquals(cases[i], rs.getByte(1)); - } else { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(cases[i], rs.getInt(1)); + assertEquals(cases[i], rs.getShort(1)); + assertEquals((long) cases[i], rs.getLong(1)); + assertEquals((Integer.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + if (cases[i] <= 127 && cases[i] >= -128) { + assertEquals(cases[i], rs.getByte(1)); 
+ } else { + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + } + ByteBuffer bb = ByteBuffer.allocate(2); + bb.putShort(cases[i]); if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); + byte[] res = rs.getBytes(1); + for (int j = res.length - 1; j >= 0; j--) { + assertEquals(bb.array()[2 - res.length + j], res[j]); + } } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertArrayEquals(bb.array(), rs.getBytes(1)); } } - } - ByteBuffer bb = ByteBuffer.allocate(2); - bb.putShort(cases[i]); - if (isJSON()) { - byte[] res = rs.getBytes(1); - for (int j = res.length - 1; j >= 0; j--) { - assertEquals(bb.array()[2 - res.length + j], res[j]); - } - } else { - assertArrayEquals(bb.array(), rs.getBytes(1)); + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - 
assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -507,87 +538,93 @@ public void testScaledSmallInt() throws SQLException { String table = "test_arrow_small_int"; String column = "(a number(5,3))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_small_int"); - double delta = 0.0001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_small_int")) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.0001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + 
for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals((String.format("%.3f", cases[i])), rs.getString(1)); - assertEquals(cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! 
- assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - ByteBuffer byteBuffer = ByteBuffer.allocate(2); - byteBuffer.putShort(shortCompact[i]); - assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals((String.format("%.3f", cases[i])), rs.getString(1)); + assertEquals(cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + ByteBuffer byteBuffer = ByteBuffer.allocate(2); + byteBuffer.putShort(shortCompact[i]); + assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -610,78 +647,84 @@ public void testInt() throws SQLException { String table = "test_arrow_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, column, 
values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - assertEquals(cases[i], rs.getInt(1)); - if (cases[i] >= Short.MIN_VALUE && cases[i] <= Short.MAX_VALUE) { - assertEquals((short) cases[i], rs.getShort(1)); - } else { - try { - assertEquals((short) cases[i], rs.getShort(1)); - fail(); - } catch (Exception e) { - { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + assertEquals(cases[i], rs.getInt(1)); + if (cases[i] >= Short.MIN_VALUE && cases[i] <= Short.MAX_VALUE) { + assertEquals((short) cases[i], rs.getShort(1)); + } else { + try { + assertEquals((short) cases[i], rs.getShort(1)); + fail(); + } catch (Exception e) { + { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } - } - } - assertEquals((long) cases[i], rs.getLong(1)); - assertEquals((Integer.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - if (cases[i] <= 
127 && cases[i] >= -128) { - assertEquals(cases[i], rs.getByte(1)); - } else { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { + assertEquals((long) cases[i], rs.getLong(1)); + assertEquals((Integer.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Integer.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + if (cases[i] <= 127 && cases[i] >= -128) { + assertEquals(cases[i], rs.getByte(1)); + } else { + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + } + ByteBuffer bb = ByteBuffer.allocate(4); + bb.putInt(cases[i]); if (isJSON()) { - // Note: not caught by SQLException! 
- assertTrue(e.toString().contains("NumberFormatException")); + byte[] res = rs.getBytes(1); + for (int j = res.length - 1; j >= 0; j--) { + assertEquals(bb.array()[4 - res.length + j], res[j]); + } } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertArrayEquals(bb.array(), rs.getBytes(1)); } } - } - ByteBuffer bb = ByteBuffer.allocate(4); - bb.putInt(cases[i]); - if (isJSON()) { - byte[] res = rs.getBytes(1); - for (int j = res.length - 1; j >= 0; j--) { - assertEquals(bb.array()[4 - res.length + j], res[j]); - } - } else { - assertArrayEquals(bb.array(), rs.getBytes(1)); + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertNull(rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertNull(rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -712,87 +755,93 @@ public void testScaledInt() throws SQLException { String column = String.format("(a number(10,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = 
con.createStatement().executeQuery("select * from test_arrow_int"); - double delta = 0.0000000001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_int")) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.0000000001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), 
se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - ByteBuffer byteBuffer = ByteBuffer.allocate(4); - byteBuffer.putInt(intCompacts[i]); - assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + ByteBuffer byteBuffer = ByteBuffer.allocate(4); + byteBuffer.putInt(intCompacts[i]); + assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -831,88 +880,95 @@ public void testBigInt() throws SQLException { String table = "test_arrow_big_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - Connection con = init(table, 
column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - - if (cases[i] >= Integer.MIN_VALUE && cases[i] <= Integer.MAX_VALUE) { - assertEquals(cases[i], rs.getInt(1)); - } else { - try { - assertEquals(cases[i], rs.getInt(1)); - fail(); - } catch (Exception e) { - { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.BIGINT, columnType); + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + + if (cases[i] >= Integer.MIN_VALUE && cases[i] <= Integer.MAX_VALUE) { + assertEquals(cases[i], rs.getInt(1)); + } else { + try { + assertEquals(cases[i], rs.getInt(1)); + fail(); + } catch (Exception e) { + { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } - } - } - if (cases[i] >= Short.MIN_VALUE && cases[i] <= Short.MAX_VALUE) { - assertEquals((short) cases[i], rs.getShort(1)); - } else { - try { - assertEquals((short) cases[i], rs.getShort(1)); - fail(); - } catch (Exception e) { - { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + if (cases[i] >= Short.MIN_VALUE && cases[i] <= 
Short.MAX_VALUE) { + assertEquals((short) cases[i], rs.getShort(1)); + } else { + try { + assertEquals((short) cases[i], rs.getShort(1)); + fail(); + } catch (Exception e) { + { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } - } - } - assertEquals(cases[i], rs.getLong(1)); - assertEquals((Long.toString(cases[i])), rs.getString(1)); - assertEquals((float) cases[i], rs.getFloat(1), delta); - double val = cases[i]; - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(Long.toString(cases[i])), rs.getBigDecimal(1)); - assertEquals(rs.getLong(1), rs.getObject(1)); - if (cases[i] <= 127 && cases[i] >= -128) { - assertEquals(cases[i], rs.getByte(1)); - } else { - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); + assertEquals(cases[i], rs.getLong(1)); + assertEquals((Long.toString(cases[i])), rs.getString(1)); + assertEquals((float) cases[i], rs.getFloat(1), delta); + double val = cases[i]; + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(Long.toString(cases[i])), rs.getBigDecimal(1)); + assertEquals(rs.getLong(1), rs.getObject(1)); + if (cases[i] <= 127 && cases[i] >= -128) { + assertEquals(cases[i], rs.getByte(1)); } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + } + ByteBuffer bb = ByteBuffer.allocate(8); + bb.putLong(cases[i]); + byte[] res = rs.getBytes(1); + for (int j = res.length - 1; j >= 0; j--) { + assertEquals(bb.array()[8 - res.length + j], res[j]); } } - } - ByteBuffer bb = ByteBuffer.allocate(8); - bb.putLong(cases[i]); - byte[] res = rs.getBytes(1); - for (int j = res.length - 1; j >= 0; j--) { - assertEquals(bb.array()[8 - res.length + j], res[j]); + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -945,88 +1001,93 @@ public void testScaledBigInt() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = 
con.createStatement().executeQuery("select * from " + table); - - double delta = 0.0000000000000000001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); - try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.0000000000000000001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), 
se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - ByteBuffer byteBuffer = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); - byteBuffer.putLong(longCompacts[i]); - assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + ByteBuffer byteBuffer = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + byteBuffer.putLong(longCompacts[i]); + assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1059,79 +1120,83 @@ public void testDecimalNoScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = 
init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - - double delta = 0.1; - int columnType = rs.getMetaData().getColumnType(1); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + double delta = 0.1; + int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.BIGINT, columnType); + assertEquals(Types.BIGINT, columnType); - for (int i = 0; i < cases.length; i++) { - rs.next(); - try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + 
assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } - assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1164,86 +1229,91 @@ public void testDecimalWithLargeScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - - double delta = 0.00000000000000000000000000000000000001; - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < 
cases.length; i++) { - rs.next(); - try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.00000000000000000000000000000000000001; + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + 
assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: not caught by SQLException! 
+ assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1277,87 +1347,92 @@ public void testDecimal() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - Connection con = init(table, column, values); - - ResultSet rs = con.createStatement().executeQuery("select * from " + 
table); - - double delta = 0.00000000000000000000000000000000000001; - ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); - int columnType = rs.getMetaData().getColumnType(1); - assertEquals(Types.DECIMAL, columnType); - - for (int i = 0; i < cases.length; i++) { - rs.next(); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { try { - rs.getInt(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getShort(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - try { - rs.getLong(1); - fail(); - } catch (Exception e) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } + double delta = 0.00000000000000000000000000000000000001; + ByteBuffer byteBuf = ByteBuffer.allocate(BigIntVector.TYPE_WIDTH); + int columnType = rs.getMetaData().getColumnType(1); + assertEquals(Types.DECIMAL, columnType); + + for (int i = 0; i < cases.length; i++) { + assertTrue(rs.next()); + try { + rs.getInt(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getShort(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) 
ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + try { + rs.getLong(1); + fail(); + } catch (Exception e) { + SQLException se = (SQLException) e; + assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } - assertEquals(cases[i].toPlainString(), rs.getString(1)); - assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); - double val = Double.parseDouble(cases[i].toString()); - assertEquals(val, rs.getDouble(1), delta); - assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); - assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - try { - rs.getByte(1); - fail(); - } catch (Exception e) { - if (isJSON()) { - // Note: not caught by SQLException! - assertTrue(e.toString().contains("NumberFormatException")); - } else { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); - } - } - try { - assertArrayEquals(byteBuf.putLong(0, longCompacts[i]).array(), rs.getBytes(1)); - } catch (Exception e) { - if (isJSON()) { - SQLException se = (SQLException) e; - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + assertEquals(cases[i].toPlainString(), rs.getString(1)); + assertEquals(Float.parseFloat(cases[i].toString()), rs.getFloat(1), delta); + double val = Double.parseDouble(cases[i].toString()); + assertEquals(val, rs.getDouble(1), delta); + assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); + assertEquals(rs.getBigDecimal(1), rs.getObject(1)); + try { + rs.getByte(1); + fail(); + } catch (Exception e) { + if (isJSON()) { + // Note: 
not caught by SQLException! + assertTrue(e.toString().contains("NumberFormatException")); + } else { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } + try { + assertArrayEquals(byteBuf.putLong(0, longCompacts[i]).array(), rs.getBytes(1)); + } catch (Exception e) { + if (isJSON()) { + SQLException se = (SQLException) e; + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), se.getSQLState()); + } + } } + + // null value + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + assertEquals((short) 0, rs.getShort(1)); + assertEquals((long) 0, rs.getLong(1)); + assertNull(rs.getString(1)); + assertEquals((float) 0, rs.getFloat(1), delta); + double val = 0; + assertEquals(val, rs.getDouble(1), delta); + assertNull(rs.getBigDecimal(1)); + assertEquals(null, rs.getObject(1)); + assertEquals(0, rs.getByte(1)); + assertNull(rs.getBytes(1)); + assertTrue(rs.wasNull()); + } finally { + statement.execute("drop table if exists " + table); } } - - // null value - rs.next(); - assertEquals(0, rs.getInt(1)); - assertEquals((short) 0, rs.getShort(1)); - assertEquals((long) 0, rs.getLong(1)); - assertNull(rs.getString(1)); - assertEquals((float) 0, rs.getFloat(1), delta); - double val = 0; - assertEquals(val, rs.getDouble(1), delta); - assertNull(rs.getBigDecimal(1)); - assertEquals(null, rs.getObject(1)); - assertEquals(0, rs.getByte(1)); - assertNull(rs.getBytes(1)); - assertTrue(rs.wasNull()); - finish(table, con); } /** @@ -1395,20 +1470,25 @@ public void testDoublePrecision() throws SQLException { String column = "(a double)"; String values = "(" + StringUtils.join(cases, "),(") + ")"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + 
table); - int i = 0; - if (isJSON()) { - while (rs.next()) { - assertEquals(json_results[i++], Double.toString(rs.getDouble(1))); - } - } else { - // Arrow results has no precision loss - while (rs.next()) { - assertEquals(cases[i++], Double.toString(rs.getDouble(1))); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + try { + int i = 0; + if (isJSON()) { + while (rs.next()) { + assertEquals(json_results[i++], Double.toString(rs.getDouble(1))); + } + } else { + // Arrow results has no precision loss + while (rs.next()) { + assertEquals(cases[i++], Double.toString(rs.getDouble(1))); + } + } + } finally { + statement.execute("drop table if exists " + table); } } - finish(table, con); } @Test @@ -1416,19 +1496,20 @@ public void testBoolean() throws SQLException { String table = "test_arrow_boolean"; String column = "(a boolean)"; String values = "(true),(null),(false)"; - Connection conn = init(table, column, values); - Statement statement = conn.createStatement(); - ResultSet rs = statement.executeQuery("select * from " + table); - assertTrue(rs.next()); - assertTrue(rs.getBoolean(1)); - assertEquals("TRUE", rs.getString(1)); - assertTrue(rs.next()); - assertFalse(rs.getBoolean(1)); - assertTrue(rs.next()); - assertFalse(rs.getBoolean(1)); - assertEquals("FALSE", rs.getString(1)); - assertFalse(rs.next()); - finish(table, conn); + try (Connection conn = init(table, column, values); + Statement statement = conn.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + assertTrue(rs.next()); + assertTrue(rs.getBoolean(1)); + assertEquals("TRUE", rs.getString(1)); + assertTrue(rs.next()); + assertFalse(rs.getBoolean(1)); + assertTrue(rs.next()); + assertFalse(rs.getBoolean(1)); + assertEquals("FALSE", rs.getString(1)); + assertFalse(rs.next()); + statement.execute("drop table if exists " + table); + } } @Test @@ -1436,19 
+1517,24 @@ public void testClientSideSorting() throws SQLException { String table = "test_arrow_sort_on"; String column = "( a int, b double, c string)"; String values = "(1,2.0,'test'),(0,2.0, 'test'),(1,2.0,'abc')"; - Connection conn = init(table, column, values); - Statement statement = conn.createStatement(); - // turn on sorting mode - statement.execute("set-sf-property sort on"); - - ResultSet rs = statement.executeQuery("select * from " + table); - rs.next(); - assertEquals("0", rs.getString(1)); - rs.next(); - assertEquals("1", rs.getString(1)); - rs.next(); - assertEquals("test", rs.getString(3)); - finish(table, conn); + try (Connection conn = init(table, column, values); + Statement statement = conn.createStatement()) { + try { + // turn on sorting mode + statement.execute("set-sf-property sort on"); + + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + assertTrue(rs.next()); + assertEquals("0", rs.getString(1)); + assertTrue(rs.next()); + assertEquals("1", rs.getString(1)); + assertTrue(rs.next()); + assertEquals("test", rs.getString(3)); + } + } finally { + statement.execute("drop table if exists " + table); + } + } } @Test @@ -1471,58 +1557,69 @@ public void testClientSideSortingOnBatchedChunk() throws SQLException { "insert into T values (3);", }; - try (Connection conn = init()) { - Statement stat = conn.createStatement(); - for (String q : queries) { - stat.execute(q); - } + try (Connection conn = init(); + Statement stat = conn.createStatement()) { + try { + for (String q : queries) { + stat.execute(q); + } - ResultSet rs = stat.executeQuery("select * from S"); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(3, rs.getInt(1)); - assertFalse(rs.next()); - stat.execute("drop stream S"); - stat.execute("drop table T"); + try (ResultSet rs = stat.executeQuery("select * from S")) { + assertTrue(rs.next()); + 
assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(3, rs.getInt(1)); + assertFalse(rs.next()); + } + } finally { + stat.execute("drop stream S"); + stat.execute("drop table T"); + } } } @Test public void testTimestampNTZAreAllNulls() throws SQLException { - try (Connection con = init()) { - Statement statement = con.createStatement(); - statement.executeQuery( - "create or replace table test_null_ts_ntz (a timestampntz(9)) as select null from table(generator" - + "(rowcount => 1000000)) v " - + "order by 1;"); - ResultSet rs = statement.executeQuery("select * from test_null_ts_ntz"); - while (rs.next()) { - rs.getObject(1); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.executeQuery( + "create or replace table test_null_ts_ntz (a timestampntz(9)) as select null from table(generator" + + "(rowcount => 1000000)) v " + + "order by 1;"); + try (ResultSet rs = statement.executeQuery("select * from test_null_ts_ntz")) { + while (rs.next()) { + rs.getObject(1); + } + } + } finally { + statement.executeQuery("drop table if exists test_null_ts_ntz"); } - statement.executeQuery("drop table if exists test_null_ts_ntz"); - statement.close(); } } @Test public void TestArrowStringRoundTrip() throws SQLException { String big_number = "11111111112222222222333333333344444444"; - try (Connection con = init()) { - Statement st = con.createStatement(); - for (int i = 0; i < 38; i++) { - StringBuilder to_insert = new StringBuilder(big_number); - if (i != 0) { - int insert_to = 38 - i; - to_insert.insert(insert_to, "."); + try (Connection con = init(); + Statement st = con.createStatement()) { + try { + for (int i = 0; i < 38; i++) { + StringBuilder to_insert = new StringBuilder(big_number); + if (i != 0) { + int insert_to = 38 - i; + to_insert.insert(insert_to, "."); + } + st.execute("create or replace table test_arrow_string (a NUMBER(38, " + i + ") )"); 
+ st.execute("insert into test_arrow_string values (" + to_insert + ")"); + try (ResultSet rs = st.executeQuery("select * from test_arrow_string")) { + assertTrue(rs.next()); + assertEquals(to_insert.toString(), rs.getString(1)); + } } - st.execute("create or replace table test_arrow_string (a NUMBER(38, " + i + ") )"); - st.execute("insert into test_arrow_string values (" + to_insert + ")"); - ResultSet rs = st.executeQuery("select * from test_arrow_string"); - assertTrue(rs.next()); - assertEquals(to_insert.toString(), rs.getString(1)); + } finally { st.execute("drop table if exists test_arrow_string"); } } @@ -1531,14 +1628,18 @@ public void TestArrowStringRoundTrip() throws SQLException { @Test public void TestArrowFloatRoundTrip() throws SQLException { float[] cases = {Float.MAX_VALUE, Float.MIN_VALUE}; - try (Connection con = init()) { - Statement st = con.createStatement(); - for (float f : cases) { - st.executeQuery("create or replace table test_arrow_float (a FLOAT)"); - st.executeQuery("insert into test_arrow_float values (" + f + ")"); - ResultSet rs = st.executeQuery("select * from test_arrow_float"); - assertTrue(rs.next()); - assertEquals(f, rs.getFloat(1), Float.MIN_VALUE); + try (Connection con = init(); + Statement st = con.createStatement()) { + try { + for (float f : cases) { + st.executeQuery("create or replace table test_arrow_float (a FLOAT)"); + st.executeQuery("insert into test_arrow_float values (" + f + ")"); + try (ResultSet rs = st.executeQuery("select * from test_arrow_float")) { + assertTrue(rs.next()); + assertEquals(f, rs.getFloat(1), Float.MIN_VALUE); + } + } + } finally { st.executeQuery("drop table if exists test_arrow_float"); } } @@ -1549,8 +1650,8 @@ public void TestArrowFloatRoundTrip() throws SQLException { public void TestTimestampNTZWithDLS() throws SQLException { TimeZone origTz = TimeZone.getDefault(); String[] timeZones = new String[] {"America/New_York", "America/Los_Angeles"}; - try (Connection con = init()) { - 
Statement st = con.createStatement(); + try (Connection con = init(); + Statement st = con.createStatement()) { for (String timeZone : timeZones) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); st.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); @@ -1627,21 +1728,22 @@ public void TestTimestampNTZWithDLS() throws SQLException { + "')"); } - ResultSet resultSet = st.executeQuery("select col1, col2, col3 from src_ts"); - int j = 0; - while (resultSet.next()) { - Object data1 = resultSet.getObject(1); - assertEquals(testTimestampNTZValues.get(j), data1.toString()); - - Object data2 = resultSet.getObject(2); - assertEquals(testTimestampLTZValues.get(j)[1], data2.toString()); - - Object data3 = resultSet.getObject(3); - assertThat(data3, instanceOf(Timestamp.class)); - assertEquals( - parseTimestampTZ(testTimestampTZValues.get(j)).toEpochSecond(), - ((Timestamp) data3).getTime() / 1000); - j++; + try (ResultSet resultSet = st.executeQuery("select col1, col2, col3 from src_ts")) { + int j = 0; + while (resultSet.next()) { + Object data1 = resultSet.getObject(1); + assertEquals(testTimestampNTZValues.get(j), data1.toString()); + + Object data2 = resultSet.getObject(2); + assertEquals(testTimestampLTZValues.get(j)[1], data2.toString()); + + Object data3 = resultSet.getObject(3); + assertThat(data3, instanceOf(Timestamp.class)); + assertEquals( + parseTimestampTZ(testTimestampTZValues.get(j)).toEpochSecond(), + ((Timestamp) data3).getTime() / 1000); + j++; + } } } } finally { @@ -1654,24 +1756,25 @@ public void TestTimestampNTZBinding() throws SQLException { TimeZone origTz = TimeZone.getDefault(); try (Connection con = init()) { TimeZone.setDefault(TimeZone.getTimeZone("PST")); - Statement st = con.createStatement(); - st.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); - st.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); - st.execute("create or replace table src_ts(col1 TIMESTAMP_NTZ)"); - PreparedStatement 
prepst = con.prepareStatement("insert into src_ts values(?)"); - Timestamp tz = Timestamp.valueOf("2018-03-11 01:10:34.0"); - prepst.setTimestamp(1, tz); - prepst.execute(); - - ResultSet resultSet = st.executeQuery("SELECT COL1 FROM SRC_TS"); - Object data; - int i = 1; - while (resultSet.next()) { - data = resultSet.getObject(i); - System.out.println(data.toString()); + try (Statement st = con.createStatement()) { + st.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); + st.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); + st.execute("create or replace table src_ts(col1 TIMESTAMP_NTZ)"); + try (PreparedStatement prepst = con.prepareStatement("insert into src_ts values(?)")) { + Timestamp tz = Timestamp.valueOf("2018-03-11 01:10:34.0"); + prepst.setTimestamp(1, tz); + prepst.execute(); + } + try (ResultSet resultSet = st.executeQuery("SELECT COL1 FROM SRC_TS")) { + Object data; + int i = 1; + while (resultSet.next()) { + data = resultSet.getObject(i); + System.out.println(data.toString()); + } + } } - } finally { - TimeZone.setDefault(origTz); } + TimeZone.setDefault(origTz); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java index c45e7d9c2..6add203f5 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java @@ -5,6 +5,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -44,16 +45,16 @@ public static Collection data() { public static Connection getConnection(int injectSocketTimeout) throws SQLException { Connection connection = BaseJDBCTest.getConnection(injectSocketTimeout); - Statement statement = connection.createStatement(); - statement.execute( - 
"alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + } return connection; } @@ -65,19 +66,14 @@ public ResultSetJsonVsArrowMultiTZIT(String queryResultFormat, String timeZone) private Connection init(String table, String column, String values) throws SQLException { Connection con = getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); - con.createStatement() - .execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - con.createStatement().execute("create or replace table " + table + " " + column); - con.createStatement().execute("insert into " + table + " values " + values); + try (Statement statement = con.createStatement()) { + statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + statement.execute("create or replace table " + table + " " + column); + statement.execute("insert into " + table + " values " + values); + } return con; } - private void finish(String table, Connection con) throws SQLException { - con.createStatement().execute("drop table " + table); - con.close(); - System.clearProperty("user.timezone"); - } - @Test public void testTime() throws SQLException { String[] 
times = { @@ -116,21 +112,25 @@ public void testDate() throws Exception { String column = "(a date)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - if (i == cases.length - 2) { - assertEquals("0001-01-01", rs.getDate(1).toString()); - } else { - assertEquals(cases[i], rs.getDate(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + if (i == cases.length - 2) { + assertEquals("0001-01-01", rs.getDate(1).toString()); + } else { + assertEquals(cases[i], rs.getDate(1).toString()); + } + i++; + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); } - i++; + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } public void testTimeWithScale(String[] times, int scale) throws SQLException { @@ -138,15 +138,17 @@ public void testTimeWithScale(String[] times, int scale) throws SQLException { String column = "(a time(" + scale + "))"; String values = "('" + StringUtils.join(times, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - for (int i = 0; i < times.length; i++) { - rs.next(); - // Java Time class does not have nanoseconds - assertEquals("00:01:23", rs.getString(1)); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select * from " + table)) { + for (int i = 0; i < times.length; i++) { + assertTrue(rs.next()); + // Java Time class does not have nanoseconds + 
assertEquals("00:01:23", rs.getString(1)); + } + assertTrue(rs.next()); + assertNull(rs.getTime(1)); } - rs.next(); - assertNull(rs.getTime(1)); } @Test @@ -184,16 +186,20 @@ public void testTimestampNTZWithScale(int scale) throws SQLException { String column = "(a timestamp_ntz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(results[i++], rs.getString(1)); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(results[i++], rs.getString(1)); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } + statement.execute("drop table " + table); + System.clearProperty("user.timezone"); } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } @Test @@ -214,15 +220,20 @@ public void testTimestampNTZWithNanos() throws SQLException { String column = "(a timestamp_ntz)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); - ResultSet rs = con.createStatement().executeQuery("select * from " + table); - int i = 0; - while (i < cases.length) { - rs.next(); - assertEquals(cases[i++], rs.getTimestamp(1).toString()); + try (Connection con = init(table, column, values); + Statement statement = con.createStatement()) { + try (ResultSet rs = statement.executeQuery("select * from " + table)) { + int i = 0; + while (i < cases.length) { + assertTrue(rs.next()); + assertEquals(cases[i++], rs.getTimestamp(1).toString()); + } + assertTrue(rs.next()); + assertNull(rs.getString(1)); + } finally { + statement.execute("drop table " + table); + 
System.clearProperty("user.timezone"); + } } - rs.next(); - assertNull(rs.getString(1)); - finish(table, con); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index 51fd295e9..add205145 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -93,34 +93,34 @@ public ResultSetLatestIT() { */ @Test public void testMemoryClearingAfterInterrupt() throws Throwable { - ResultSet resultSet = null; - final Connection connection = getConnection(); - final Statement statement = connection.createStatement(); - final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); - try { - // Inject an InterruptedException into the SnowflakeChunkDownloader.terminate() function - SnowflakeChunkDownloader.setInjectedDownloaderException(new InterruptedException()); - // 10000 rows should be enough to force result into multiple chunks - resultSet = - statement.executeQuery( - "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))"); - assertThat( - "hold memory usage for the resultSet before close", - SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); - // Result closure should catch InterruptedException and throw a SQLException after its caught - resultSet.close(); - fail("Exception should have been thrown"); - } catch (SQLException ex) { - assertEquals((int) ErrorCode.INTERRUPTED.getMessageCode(), ex.getErrorCode()); - // Assert all memory was released - assertThat( - "closing statement didn't release memory allocated for result", - SnowflakeChunkDownloader.getCurrentMemoryUsage(), - equalTo(initialMemoryUsage)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); + try { + // Inject an 
InterruptedException into the SnowflakeChunkDownloader.terminate() function + SnowflakeChunkDownloader.setInjectedDownloaderException(new InterruptedException()); + // 10000 rows should be enough to force result into multiple chunks + try (ResultSet resultSet = + statement.executeQuery( + "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))")) { + assertThat( + "hold memory usage for the resultSet before close", + SnowflakeChunkDownloader.getCurrentMemoryUsage() - initialMemoryUsage >= 0); + // Result closure should catch InterruptedException and throw a SQLException after its + // caught + } + fail("Exception should have been thrown"); + } catch (SQLException ex) { + assertEquals((int) ErrorCode.INTERRUPTED.getMessageCode(), ex.getErrorCode()); + // Assert all memory was released + assertThat( + "closing statement didn't release memory allocated for result", + SnowflakeChunkDownloader.getCurrentMemoryUsage(), + equalTo(initialMemoryUsage)); + } + // Unset the exception injection so statement and connection can close without exceptions + SnowflakeChunkDownloader.setInjectedDownloaderException(null); } - // Unset the exception injection so statement and connection can close without exceptions - SnowflakeChunkDownloader.setInjectedDownloaderException(null); - closeSQLObjects(resultSet, statement, connection); } /** @@ -132,34 +132,36 @@ public void testMemoryClearingAfterInterrupt() throws Throwable { public void testChunkDownloaderNoHang() throws SQLException { int stmtCount = 30; int rowCount = 170000; - Connection connection = getConnection(); - List rsList = new ArrayList<>(); - // Set memory limit to low number - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(2000000); - // open multiple statements concurrently to overwhelm current memory allocation - for (int i = 0; i < stmtCount; ++i) { - Statement stmt = connection.createStatement(); - ResultSet resultSet = - stmt.executeQuery( - 
"select randstr(100, random()) from table(generator(rowcount => " + rowCount + "))"); - rsList.add(resultSet); - } - // Assert that all resultSets exist and can successfully download the needed chunks without - // hanging - for (int i = 0; i < stmtCount; i++) { - rsList.get(i).next(); - assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", rsList.get(i).getString(1))); - rsList.get(i).close(); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement()) { + List rsList = new ArrayList<>(); + // Set memory limit to low number + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(2000000); + // open multiple statements concurrently to overwhelm current memory allocation + for (int i = 0; i < stmtCount; ++i) { + ResultSet resultSet = + stmt.executeQuery( + "select randstr(100, random()) from table(generator(rowcount => " + + rowCount + + "))"); + rsList.add(resultSet); + } + // Assert that all resultSets exist and can successfully download the needed chunks without + // hanging + for (int i = 0; i < stmtCount; i++) { + rsList.get(i).next(); + assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", rsList.get(i).getString(1))); + rsList.get(i).close(); + } + // set memory limit back to default invalid value so it does not get used + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); } - // set memory limit back to default invalid value so it does not get used - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); - connection.close(); } /** This tests that the SnowflakeChunkDownloader doesn't hang when memory limits are low. 
*/ @@ -167,39 +169,42 @@ public void testChunkDownloaderNoHang() throws SQLException { public void testChunkDownloaderSetRetry() throws SQLException { int stmtCount = 3; int rowCount = 170000; - Connection connection = getConnection(); - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(1 * 1024 * 1024); - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 1); - // Set memory limit to low number - // open multiple statements concurrently to overwhelm current memory allocation - for (int i = 0; i < stmtCount; ++i) { - Statement stmt = connection.createStatement(); - ResultSet resultSet = - stmt.executeQuery( - "select randstr(100, random()) from table(generator(rowcount => " + rowCount + "))"); - // consume half of the results and go to the next statement - for (int j = 0; j < rowCount / 2; j++) { - resultSet.next(); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement()) { + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(1 * 1024 * 1024); + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 1); + // Set memory limit to low number + // open multiple statements concurrently to overwhelm current memory allocation + for (int i = 0; i < stmtCount; ++i) { + try (ResultSet resultSet = + stmt.executeQuery( + "select randstr(100, random()) from table(generator(rowcount => " + + rowCount + + "))")) { + // consume half of the results and go to the next statement + for (int j = 0; j < rowCount / 2; j++) { + resultSet.next(); + } + assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", resultSet.getString(1))); + } } - assertTrue(Pattern.matches("[a-zA-Z0-9]{100}", resultSet.getString(1))); + // reset retry to MAX_NUM_OF_RETRY, which is 10 + connection + 
.unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 10); + // set memory limit back to default invalid value so it does not get used + connection + .unwrap(SnowflakeConnectionV1.class) + .getSFBaseSession() + .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); } - // reset retry to MAX_NUM_OF_RETRY, which is 10 - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setOtherParameter(SessionUtil.JDBC_CHUNK_DOWNLOADER_MAX_RETRY, 10); - // set memory limit back to default invalid value so it does not get used - connection - .unwrap(SnowflakeConnectionV1.class) - .getSFBaseSession() - .setMemoryLimitForTesting(SFBaseSession.MEMORY_LIMIT_UNSET); - connection.close(); } /** @@ -212,57 +217,59 @@ public void testChunkDownloaderSetRetry() throws SQLException { @Test public void testMetadataAPIMetricCollection() throws SQLException, ExecutionException, InterruptedException { - Connection con = init(); - Telemetry telemetry = - con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); - DatabaseMetaData metadata = con.getMetaData(); - // Call one of the DatabaseMetadata API functions but for simplicity, ensure returned ResultSet - // is empty - metadata.getColumns("fakecatalog", "fakeschema", null, null); - LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); - // No result set has been downloaded from server so no chunk downloader metrics have been - // collected - // Logs should contain 1 item: the data about the getColumns() parameters - assertEquals(logs.size(), 1); - // Assert the log is of type client_metadata_api_metrics - assertEquals( - logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), - TelemetryField.METADATA_METRICS.toString()); - // Assert function name and params match and that query id exists - assertEquals(logs.get(0).getMessage().get("function_name").textValue(), "getColumns"); - 
TestUtil.assertValidQueryId(logs.get(0).getMessage().get("query_id").textValue()); - JsonNode parameterValues = logs.get(0).getMessage().get("function_parameters"); - assertEquals(parameterValues.get("catalog").textValue(), "fakecatalog"); - assertEquals(parameterValues.get("schema").textValue(), "fakeschema"); - assertNull(parameterValues.get("general_name_pattern").textValue()); - assertNull(parameterValues.get("specific_name_pattern").textValue()); - - // send data to clear log for next test - telemetry.sendBatchAsync().get(); - assertEquals(0, ((TelemetryClient) telemetry).logBuffer().size()); - - String catalog = con.getCatalog(); - String schema = con.getSchema(); - metadata.getColumns(catalog, schema, null, null); - logs = ((TelemetryClient) telemetry).logBuffer(); - assertEquals(logs.size(), 2); - // first item in log buffer is metrics on time to consume first result set chunk - assertEquals( - logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), - TelemetryField.TIME_CONSUME_FIRST_RESULT.toString()); - // second item in log buffer is metrics on getProcedureColumns() parameters - // Assert the log is of type client_metadata_api_metrics - assertEquals( - logs.get(1).getMessage().get(TelemetryUtil.TYPE).textValue(), - TelemetryField.METADATA_METRICS.toString()); - // Assert function name and params match and that query id exists - assertEquals(logs.get(1).getMessage().get("function_name").textValue(), "getColumns"); - TestUtil.assertValidQueryId(logs.get(1).getMessage().get("query_id").textValue()); - parameterValues = logs.get(1).getMessage().get("function_parameters"); - assertEquals(parameterValues.get("catalog").textValue(), catalog); - assertEquals(parameterValues.get("schema").textValue(), schema); - assertNull(parameterValues.get("general_name_pattern").textValue()); - assertNull(parameterValues.get("specific_name_pattern").textValue()); + try (Connection con = init()) { + Telemetry telemetry = + 
con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); + DatabaseMetaData metadata = con.getMetaData(); + // Call one of the DatabaseMetadata API functions but for simplicity, ensure returned + // ResultSet + // is empty + metadata.getColumns("fakecatalog", "fakeschema", null, null); + LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); + // No result set has been downloaded from server so no chunk downloader metrics have been + // collected + // Logs should contain 1 item: the data about the getColumns() parameters + assertEquals(logs.size(), 1); + // Assert the log is of type client_metadata_api_metrics + assertEquals( + logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), + TelemetryField.METADATA_METRICS.toString()); + // Assert function name and params match and that query id exists + assertEquals(logs.get(0).getMessage().get("function_name").textValue(), "getColumns"); + TestUtil.assertValidQueryId(logs.get(0).getMessage().get("query_id").textValue()); + JsonNode parameterValues = logs.get(0).getMessage().get("function_parameters"); + assertEquals(parameterValues.get("catalog").textValue(), "fakecatalog"); + assertEquals(parameterValues.get("schema").textValue(), "fakeschema"); + assertNull(parameterValues.get("general_name_pattern").textValue()); + assertNull(parameterValues.get("specific_name_pattern").textValue()); + + // send data to clear log for next test + telemetry.sendBatchAsync().get(); + assertEquals(0, ((TelemetryClient) telemetry).logBuffer().size()); + + String catalog = con.getCatalog(); + String schema = con.getSchema(); + metadata.getColumns(catalog, schema, null, null); + logs = ((TelemetryClient) telemetry).logBuffer(); + assertEquals(logs.size(), 2); + // first item in log buffer is metrics on time to consume first result set chunk + assertEquals( + logs.get(0).getMessage().get(TelemetryUtil.TYPE).textValue(), + TelemetryField.TIME_CONSUME_FIRST_RESULT.toString()); + // second item in log buffer is 
metrics on getProcedureColumns() parameters + // Assert the log is of type client_metadata_api_metrics + assertEquals( + logs.get(1).getMessage().get(TelemetryUtil.TYPE).textValue(), + TelemetryField.METADATA_METRICS.toString()); + // Assert function name and params match and that query id exists + assertEquals(logs.get(1).getMessage().get("function_name").textValue(), "getColumns"); + TestUtil.assertValidQueryId(logs.get(1).getMessage().get("query_id").textValue()); + parameterValues = logs.get(1).getMessage().get("function_parameters"); + assertEquals(parameterValues.get("catalog").textValue(), catalog); + assertEquals(parameterValues.get("schema").textValue(), schema); + assertNull(parameterValues.get("general_name_pattern").textValue()); + assertNull(parameterValues.get("specific_name_pattern").textValue()); + } } /** @@ -273,15 +280,15 @@ public void testMetadataAPIMetricCollection() */ @Test public void testGetCharacterStreamNull() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table JDBC_NULL_CHARSTREAM (col1 varchar(16))"); - statement.execute("insert into JDBC_NULL_CHARSTREAM values(NULL)"); - ResultSet rs = statement.executeQuery("select * from JDBC_NULL_CHARSTREAM"); - rs.next(); - assertNull(rs.getCharacterStream(1)); - rs.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table JDBC_NULL_CHARSTREAM (col1 varchar(16))"); + statement.execute("insert into JDBC_NULL_CHARSTREAM values(NULL)"); + try (ResultSet rs = statement.executeQuery("select * from JDBC_NULL_CHARSTREAM")) { + rs.next(); + assertNull(rs.getCharacterStream(1)); + } + } } /** @@ -291,46 +298,50 @@ public void testGetCharacterStreamNull() throws SQLException { */ @Test public void testMultipleChunks() throws Exception { - Connection con = init(); - Statement statement = 
con.createStatement(); - - // 10000 rows should be enough to force result into multiple chunks - ResultSet resultSet = - statement.executeQuery( - "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))"); - int cnt = 0; - while (resultSet.next()) { - ++cnt; - } - assertTrue(cnt >= 0); - Telemetry telemetry = - con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); - LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); - - // there should be a log for each of the following fields - TelemetryField[] expectedFields = { - TelemetryField.TIME_CONSUME_FIRST_RESULT, TelemetryField.TIME_CONSUME_LAST_RESULT, - TelemetryField.TIME_WAITING_FOR_CHUNKS, TelemetryField.TIME_DOWNLOADING_CHUNKS, - TelemetryField.TIME_PARSING_CHUNKS - }; - boolean[] succeeded = new boolean[expectedFields.length]; - - for (int i = 0; i < expectedFields.length; i++) { - succeeded[i] = false; - for (TelemetryData log : logs) { - if (log.getMessage().get(TelemetryUtil.TYPE).textValue().equals(expectedFields[i].field)) { - succeeded[i] = true; - break; + try (Connection con = init(); + Statement statement = con.createStatement(); + + // 10000 rows should be enough to force result into multiple chunks + ResultSet resultSet = + statement.executeQuery( + "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))")) { + int cnt = 0; + while (resultSet.next()) { + ++cnt; + } + assertTrue(cnt >= 0); + Telemetry telemetry = + con.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); + LinkedList logs = ((TelemetryClient) telemetry).logBuffer(); + + // there should be a log for each of the following fields + TelemetryField[] expectedFields = { + TelemetryField.TIME_CONSUME_FIRST_RESULT, TelemetryField.TIME_CONSUME_LAST_RESULT, + TelemetryField.TIME_WAITING_FOR_CHUNKS, TelemetryField.TIME_DOWNLOADING_CHUNKS, + TelemetryField.TIME_PARSING_CHUNKS + }; + boolean[] succeeded = new boolean[expectedFields.length]; + + for 
(int i = 0; i < expectedFields.length; i++) { + succeeded[i] = false; + for (TelemetryData log : logs) { + if (log.getMessage() + .get(TelemetryUtil.TYPE) + .textValue() + .equals(expectedFields[i].field)) { + succeeded[i] = true; + break; + } } } - } - for (int i = 0; i < expectedFields.length; i++) { - assertThat( - String.format("%s field not found in telemetry logs\n", expectedFields[i].field), - succeeded[i]); + for (int i = 0; i < expectedFields.length; i++) { + assertThat( + String.format("%s field not found in telemetry logs\n", expectedFields[i].field), + succeeded[i]); + } + telemetry.sendBatchAsync(); } - telemetry.sendBatchAsync(); } /** @@ -340,48 +351,49 @@ public void testMultipleChunks() throws Exception { */ @Test public void testResultSetMetadata() throws SQLException { - Connection connection = init(); final Map params = getConnectionParameters(); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); - statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); - ResultSet resultSet = statement.executeQuery("select * from test_rsmd"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals( - params.get("database").toUpperCase(), resultSetMetaData.getCatalogName(1).toUpperCase()); - assertEquals( - params.get("schema").toUpperCase(), resultSetMetaData.getSchemaName(1).toUpperCase()); - assertEquals("TEST_RSMD", resultSetMetaData.getTableName(1)); - assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); - assertEquals("COLA", resultSetMetaData.getColumnLabel(1)); - assertEquals("COLA", resultSetMetaData.getColumnName(1)); - assertEquals(3, resultSetMetaData.getColumnType(1)); - assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); - assertEquals(20, 
resultSetMetaData.getPrecision(1)); - assertEquals(5, resultSetMetaData.getScale(1)); - assertFalse(resultSetMetaData.isAutoIncrement(1)); - assertFalse(resultSetMetaData.isCaseSensitive(1)); - assertFalse(resultSetMetaData.isCurrency(1)); - assertFalse(resultSetMetaData.isDefinitelyWritable(1)); - assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); - assertTrue(resultSetMetaData.isReadOnly(1)); - assertTrue(resultSetMetaData.isSearchable(1)); - assertTrue(resultSetMetaData.isSigned(1)); - SnowflakeResultSetMetaData secretMetaData = - resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); - List colNames = secretMetaData.getColumnNames(); - assertEquals("COLA", colNames.get(0)); - assertEquals("COLB", colNames.get(1)); - assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); - assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); - TestUtil.assertValidQueryId(secretMetaData.getQueryID()); - - statement.execute("drop table if exists test_rsmd"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); + statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); + ResultSet resultSet = statement.executeQuery("select * from test_rsmd"); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + assertEquals( + params.get("database").toUpperCase(), + resultSetMetaData.getCatalogName(1).toUpperCase()); + assertEquals( + params.get("schema").toUpperCase(), resultSetMetaData.getSchemaName(1).toUpperCase()); + assertEquals("TEST_RSMD", resultSetMetaData.getTableName(1)); + assertEquals(String.class.getName(), resultSetMetaData.getColumnClassName(2)); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(22, resultSetMetaData.getColumnDisplaySize(1)); + assertEquals("COLA", 
resultSetMetaData.getColumnLabel(1)); + assertEquals("COLA", resultSetMetaData.getColumnName(1)); + assertEquals(3, resultSetMetaData.getColumnType(1)); + assertEquals("NUMBER", resultSetMetaData.getColumnTypeName(1)); + assertEquals(20, resultSetMetaData.getPrecision(1)); + assertEquals(5, resultSetMetaData.getScale(1)); + assertFalse(resultSetMetaData.isAutoIncrement(1)); + assertFalse(resultSetMetaData.isCaseSensitive(1)); + assertFalse(resultSetMetaData.isCurrency(1)); + assertFalse(resultSetMetaData.isDefinitelyWritable(1)); + assertEquals(ResultSetMetaData.columnNullable, resultSetMetaData.isNullable(1)); + assertTrue(resultSetMetaData.isReadOnly(1)); + assertTrue(resultSetMetaData.isSearchable(1)); + assertTrue(resultSetMetaData.isSigned(1)); + SnowflakeResultSetMetaData secretMetaData = + resultSetMetaData.unwrap(SnowflakeResultSetMetaData.class); + List colNames = secretMetaData.getColumnNames(); + assertEquals("COLA", colNames.get(0)); + assertEquals("COLB", colNames.get(1)); + assertEquals(Types.DECIMAL, secretMetaData.getInternalColumnType(1)); + assertEquals(Types.VARCHAR, secretMetaData.getInternalColumnType(2)); + TestUtil.assertValidQueryId(secretMetaData.getQueryID()); + } finally { + statement.execute("drop table if exists test_rsmd"); + } + } } /** @@ -391,110 +403,107 @@ public void testResultSetMetadata() throws SQLException { */ @Test public void testEmptyResultSet() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - // the only function that returns ResultSetV1.emptyResultSet() - ResultSet rs = statement.getGeneratedKeys(); - assertFalse(rs.next()); - assertFalse(rs.isClosed()); - assertEquals(0, rs.getInt(1)); - assertEquals(0, rs.getInt("col1")); - assertEquals(0L, rs.getLong(2)); - assertEquals(0L, rs.getLong("col2")); - assertEquals(0, rs.getShort(3)); - assertEquals(0, rs.getShort("col3")); - assertEquals("", rs.getString(4)); - assertEquals("", rs.getString("col4")); - assertEquals(0, 
rs.getDouble(5), 0); - assertEquals(0, rs.getDouble("col5"), 0); - assertEquals(0, rs.getFloat(6), 0); - assertEquals(0, rs.getFloat("col6"), 0); - assertEquals(false, rs.getBoolean(7)); - assertEquals(false, rs.getBoolean("col7")); - assertEquals((byte) 0, rs.getByte(8)); - assertEquals((byte) 0, rs.getByte("col8")); - assertEquals(null, rs.getBinaryStream(9)); - assertEquals(null, rs.getBinaryStream("col9")); - assertEquals(null, rs.getDate(10)); - assertEquals(null, rs.getDate(10, new FakeCalendar())); - assertEquals(null, rs.getDate("col10")); - assertEquals(null, rs.getDate("col10", new FakeCalendar())); - assertEquals(null, rs.getTime(11)); - assertEquals(null, rs.getTime(11, new FakeCalendar())); - assertEquals(null, rs.getTime("col11")); - assertEquals(null, rs.getTime("col11", new FakeCalendar())); - assertEquals(null, rs.getTimestamp(12)); - assertEquals(null, rs.getTimestamp(12, new FakeCalendar())); - assertEquals(null, rs.getTimestamp("col12")); - assertEquals(null, rs.getTimestamp("col12", new FakeCalendar())); - assertEquals(null, rs.getDate(13)); - assertEquals(null, rs.getDate("col13")); - assertEquals(null, rs.getAsciiStream(14)); - assertEquals(null, rs.getAsciiStream("col14")); - assertArrayEquals(new byte[0], rs.getBytes(15)); - assertArrayEquals(new byte[0], rs.getBytes("col15")); - assertNull(rs.getBigDecimal(16)); - assertNull(rs.getBigDecimal(16, 38)); - assertNull(rs.getBigDecimal("col16")); - assertNull(rs.getBigDecimal("col16", 38)); - assertNull(rs.getRef(17)); - assertNull(rs.getRef("col17")); - assertNull(rs.getArray(18)); - assertNull(rs.getArray("col18")); - assertNull(rs.getBlob(19)); - assertNull(rs.getBlob("col19")); - assertNull(rs.getClob(20)); - assertNull(rs.getClob("col20")); - assertEquals(0, rs.findColumn("col1")); - assertNull(rs.getUnicodeStream(21)); - assertNull(rs.getUnicodeStream("col21")); - assertNull(rs.getURL(22)); - assertNull(rs.getURL("col22")); - assertNull(rs.getObject(23)); - 
assertNull(rs.getObject("col24")); - assertNull(rs.getObject(23, SnowflakeResultSetV1.class)); - assertNull(rs.getObject("col23", SnowflakeResultSetV1.class)); - assertNull(rs.getNString(25)); - assertNull(rs.getNString("col25")); - assertNull(rs.getNClob(26)); - assertNull(rs.getNClob("col26")); - assertNull(rs.getNCharacterStream(27)); - assertNull(rs.getNCharacterStream("col27")); - assertNull(rs.getCharacterStream(28)); - assertNull(rs.getCharacterStream("col28")); - assertNull(rs.getSQLXML(29)); - assertNull(rs.getSQLXML("col29")); - assertNull(rs.getStatement()); - assertNull(rs.getWarnings()); - assertNull(rs.getCursorName()); - assertNull(rs.getMetaData()); - assertNull(rs.getRowId(1)); - assertNull(rs.getRowId("col1")); - assertEquals(0, rs.getRow()); - assertEquals(0, rs.getFetchDirection()); - assertEquals(0, rs.getFetchSize()); - assertEquals(0, rs.getType()); - assertEquals(0, rs.getConcurrency()); - assertEquals(0, rs.getHoldability()); - assertNull(rs.unwrap(SnowflakeResultSetV1.class)); - assertFalse(rs.isWrapperFor(SnowflakeResultSetV1.class)); - assertFalse(rs.wasNull()); - assertFalse(rs.isFirst()); - assertFalse(rs.isBeforeFirst()); - assertFalse(rs.isLast()); - assertFalse(rs.isAfterLast()); - assertFalse(rs.first()); - assertFalse(rs.last()); - assertFalse(rs.previous()); - assertFalse(rs.rowUpdated()); - assertFalse(rs.rowInserted()); - assertFalse(rs.rowDeleted()); - assertFalse(rs.absolute(1)); - assertFalse(rs.relative(1)); - rs.close(); - assertTrue(rs.isClosed()); - statement.close(); - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement(); + // the only function that returns ResultSetV1.emptyResultSet() + ResultSet rs = statement.getGeneratedKeys()) { + assertFalse(rs.next()); + assertFalse(rs.isClosed()); + assertEquals(0, rs.getInt(1)); + assertEquals(0, rs.getInt("col1")); + assertEquals(0L, rs.getLong(2)); + assertEquals(0L, rs.getLong("col2")); + assertEquals(0, rs.getShort(3)); + 
assertEquals(0, rs.getShort("col3")); + assertEquals("", rs.getString(4)); + assertEquals("", rs.getString("col4")); + assertEquals(0, rs.getDouble(5), 0); + assertEquals(0, rs.getDouble("col5"), 0); + assertEquals(0, rs.getFloat(6), 0); + assertEquals(0, rs.getFloat("col6"), 0); + assertEquals(false, rs.getBoolean(7)); + assertEquals(false, rs.getBoolean("col7")); + assertEquals((byte) 0, rs.getByte(8)); + assertEquals((byte) 0, rs.getByte("col8")); + assertEquals(null, rs.getBinaryStream(9)); + assertEquals(null, rs.getBinaryStream("col9")); + assertEquals(null, rs.getDate(10)); + assertEquals(null, rs.getDate(10, new FakeCalendar())); + assertEquals(null, rs.getDate("col10")); + assertEquals(null, rs.getDate("col10", new FakeCalendar())); + assertEquals(null, rs.getTime(11)); + assertEquals(null, rs.getTime(11, new FakeCalendar())); + assertEquals(null, rs.getTime("col11")); + assertEquals(null, rs.getTime("col11", new FakeCalendar())); + assertEquals(null, rs.getTimestamp(12)); + assertEquals(null, rs.getTimestamp(12, new FakeCalendar())); + assertEquals(null, rs.getTimestamp("col12")); + assertEquals(null, rs.getTimestamp("col12", new FakeCalendar())); + assertEquals(null, rs.getDate(13)); + assertEquals(null, rs.getDate("col13")); + assertEquals(null, rs.getAsciiStream(14)); + assertEquals(null, rs.getAsciiStream("col14")); + assertArrayEquals(new byte[0], rs.getBytes(15)); + assertArrayEquals(new byte[0], rs.getBytes("col15")); + assertNull(rs.getBigDecimal(16)); + assertNull(rs.getBigDecimal(16, 38)); + assertNull(rs.getBigDecimal("col16")); + assertNull(rs.getBigDecimal("col16", 38)); + assertNull(rs.getRef(17)); + assertNull(rs.getRef("col17")); + assertNull(rs.getArray(18)); + assertNull(rs.getArray("col18")); + assertNull(rs.getBlob(19)); + assertNull(rs.getBlob("col19")); + assertNull(rs.getClob(20)); + assertNull(rs.getClob("col20")); + assertEquals(0, rs.findColumn("col1")); + assertNull(rs.getUnicodeStream(21)); + 
assertNull(rs.getUnicodeStream("col21")); + assertNull(rs.getURL(22)); + assertNull(rs.getURL("col22")); + assertNull(rs.getObject(23)); + assertNull(rs.getObject("col24")); + assertNull(rs.getObject(23, SnowflakeResultSetV1.class)); + assertNull(rs.getObject("col23", SnowflakeResultSetV1.class)); + assertNull(rs.getNString(25)); + assertNull(rs.getNString("col25")); + assertNull(rs.getNClob(26)); + assertNull(rs.getNClob("col26")); + assertNull(rs.getNCharacterStream(27)); + assertNull(rs.getNCharacterStream("col27")); + assertNull(rs.getCharacterStream(28)); + assertNull(rs.getCharacterStream("col28")); + assertNull(rs.getSQLXML(29)); + assertNull(rs.getSQLXML("col29")); + assertNull(rs.getStatement()); + assertNull(rs.getWarnings()); + assertNull(rs.getCursorName()); + assertNull(rs.getMetaData()); + assertNull(rs.getRowId(1)); + assertNull(rs.getRowId("col1")); + assertEquals(0, rs.getRow()); + assertEquals(0, rs.getFetchDirection()); + assertEquals(0, rs.getFetchSize()); + assertEquals(0, rs.getType()); + assertEquals(0, rs.getConcurrency()); + assertEquals(0, rs.getHoldability()); + assertNull(rs.unwrap(SnowflakeResultSetV1.class)); + assertFalse(rs.isWrapperFor(SnowflakeResultSetV1.class)); + assertFalse(rs.wasNull()); + assertFalse(rs.isFirst()); + assertFalse(rs.isBeforeFirst()); + assertFalse(rs.isLast()); + assertFalse(rs.isAfterLast()); + assertFalse(rs.first()); + assertFalse(rs.last()); + assertFalse(rs.previous()); + assertFalse(rs.rowUpdated()); + assertFalse(rs.rowInserted()); + assertFalse(rs.rowDeleted()); + assertFalse(rs.absolute(1)); + assertFalse(rs.relative(1)); + } } /** @@ -504,67 +513,69 @@ public void testEmptyResultSet() throws SQLException { */ @Test public void testBytesCrossTypeTests() throws Exception { - ResultSet resultSet = numberCrossTesting(); - resultSet.next(); - // assert that 0 is returned for null values for every type of value - for (int i = 1; i < 13; i++) { - assertArrayEquals(null, resultSet.getBytes(i)); - } - 
resultSet.next(); - assertArrayEquals(intToByteArray(2), resultSet.getBytes(1)); - assertArrayEquals(intToByteArray(5), resultSet.getBytes(2)); - assertArrayEquals(floatToByteArray(3.5f), resultSet.getBytes(3)); - assertArrayEquals(new byte[] {1}, resultSet.getBytes(4)); - assertArrayEquals(new byte[] {(byte) '1'}, resultSet.getBytes(5)); - assertArrayEquals("1".getBytes(), resultSet.getBytes(6)); - - for (int i = 7; i < 12; i++) { - try { - resultSet.getBytes(i); - fail("Failing on " + i); - } catch (SQLException ex) { - assertEquals(200038, ex.getErrorCode()); + try (ResultSet resultSet = numberCrossTesting()) { + assertTrue(resultSet.next()); + // assert that 0 is returned for null values for every type of value + for (int i = 1; i < 13; i++) { + assertArrayEquals(null, resultSet.getBytes(i)); + } + assertTrue(resultSet.next()); + assertArrayEquals(intToByteArray(2), resultSet.getBytes(1)); + assertArrayEquals(intToByteArray(5), resultSet.getBytes(2)); + assertArrayEquals(floatToByteArray(3.5f), resultSet.getBytes(3)); + assertArrayEquals(new byte[] {1}, resultSet.getBytes(4)); + assertArrayEquals(new byte[] {(byte) '1'}, resultSet.getBytes(5)); + assertArrayEquals("1".getBytes(), resultSet.getBytes(6)); + + for (int i = 7; i < 12; i++) { + try { + resultSet.getBytes(i); + fail("Failing on " + i); + } catch (SQLException ex) { + assertEquals(200038, ex.getErrorCode()); + } } - } - byte[] decoded = SFBinary.fromHex("48454C4C4F").getBytes(); + byte[] decoded = SFBinary.fromHex("48454C4C4F").getBytes(); - assertArrayEquals(decoded, resultSet.getBytes(12)); + assertArrayEquals(decoded, resultSet.getBytes(12)); + } } // SNOW-204185 // 30s for timeout. This test usually finishes in around 10s. 
@Test(timeout = 30000) public void testResultChunkDownloaderException() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { - // The generated resultSet must be big enough for triggering result chunk downloader - String query = - "select current_date(), true,2345234, 2343.0, 'testrgint\\n" - + "\\t' from table(generator(rowcount=>10000))"; + // The generated resultSet must be big enough for triggering result chunk downloader + String query = + "select current_date(), true,2345234, 2343.0, 'testrgint\\n" + + "\\t' from table(generator(rowcount=>10000))"; - ResultSet resultSet = statement.executeQuery(query); - resultSet.next(); // should finish successfully + try (ResultSet resultSet = statement.executeQuery(query)) { + assertTrue(resultSet.next()); // should finish successfully + } - try { - SnowflakeChunkDownloader.setInjectedDownloaderException( - new OutOfMemoryError("Fake OOM error for testing")); - resultSet = statement.executeQuery(query); try { - // Normally this step won't cause too long. Because we will get exception once trying to get - // result from the first chunk downloader - while (resultSet.next()) {} - fail("Should not reach here. Last next() command is supposed to throw an exception"); - } catch (SnowflakeSQLException ex) { - // pass, do nothing + SnowflakeChunkDownloader.setInjectedDownloaderException( + new OutOfMemoryError("Fake OOM error for testing")); + try (ResultSet resultSet = statement.executeQuery(query)) { + try { + // Normally this step won't cause too long. Because we will get exception once trying to + // get + // result from the first chunk downloader + while (resultSet.next()) {} + fail("Should not reach here. 
Last next() command is supposed to throw an exception"); + } catch (SnowflakeSQLException ex) { + // pass, do nothing + } + } + } finally { + SnowflakeChunkDownloader.setInjectedDownloaderException(null); } - } finally { - SnowflakeChunkDownloader.setInjectedDownloaderException(null); } - - statement.close(); - connection.close(); } /** @@ -574,21 +585,21 @@ public void testResultChunkDownloaderException() throws SQLException { */ @Test public void testGetObjectWithBigInt() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='json'"); - // test with greatest possible number and greatest negative possible number - String[] extremeNumbers = { - "99999999999999999999999999999999999999", "-99999999999999999999999999999999999999" - }; - for (int i = 0; i < extremeNumbers.length; i++) { - ResultSet resultSet = statement.executeQuery("select " + extremeNumbers[i]); - resultSet.next(); - assertEquals(Types.BIGINT, resultSet.getMetaData().getColumnType(1)); - assertEquals(new BigDecimal(extremeNumbers[i]), resultSet.getObject(1)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set jdbc_query_result_format ='json'"); + // test with greatest possible number and greatest negative possible number + String[] extremeNumbers = { + "99999999999999999999999999999999999999", "-99999999999999999999999999999999999999" + }; + for (int i = 0; i < extremeNumbers.length; i++) { + try (ResultSet resultSet = statement.executeQuery("select " + extremeNumbers[i])) { + assertTrue(resultSet.next()); + assertEquals(Types.BIGINT, resultSet.getMetaData().getColumnType(1)); + assertEquals(new BigDecimal(extremeNumbers[i]), resultSet.getObject(1)); + } + } } - statement.close(); - connection.close(); } private byte[] intToByteArray(int i) { @@ -607,50 +618,57 @@ private byte[] 
floatToByteArray(float i) { */ @Test public void testGetBigDecimalWithScale() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_get(colA number(38,9))"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_get values(?)"); - preparedStatement.setBigDecimal(1, null); - preparedStatement.addBatch(); - BigDecimal bigDecimal = new BigDecimal("100000000.123456789"); - preparedStatement.setBigDecimal(1, bigDecimal); - preparedStatement.addBatch(); - preparedStatement.executeBatch(); - - ResultSet resultSet = statement.executeQuery("select * from test_get"); - resultSet.next(); - assertEquals(null, resultSet.getBigDecimal(1, 5)); - assertEquals(null, resultSet.getBigDecimal("COLA", 5)); - resultSet.next(); - assertEquals(bigDecimal.setScale(5, RoundingMode.HALF_UP), resultSet.getBigDecimal(1, 5)); - assertEquals(bigDecimal.setScale(5, RoundingMode.HALF_UP), resultSet.getBigDecimal("COLA", 5)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_get(colA number(38,9))"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_get values(?)")) { + preparedStatement.setBigDecimal(1, null); + preparedStatement.addBatch(); + BigDecimal bigDecimal = new BigDecimal("100000000.123456789"); + preparedStatement.setBigDecimal(1, bigDecimal); + preparedStatement.addBatch(); + preparedStatement.executeBatch(); + + try (ResultSet resultSet = statement.executeQuery("select * from test_get")) { + assertTrue(resultSet.next()); + assertEquals(null, resultSet.getBigDecimal(1, 5)); + assertEquals(null, resultSet.getBigDecimal("COLA", 5)); + assertTrue(resultSet.next()); + assertEquals(bigDecimal.setScale(5, RoundingMode.HALF_UP), resultSet.getBigDecimal(1, 5)); + assertEquals( + bigDecimal.setScale(5, 
RoundingMode.HALF_UP), resultSet.getBigDecimal("COLA", 5)); + } + } + } } @Test public void testGetDataTypeWithTimestampTz() throws Exception { try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); - statement.executeQuery("create or replace table ts_test(ts timestamp_tz)"); - ResultSet resultSet = statement.executeQuery("select * from ts_test"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP_WITH_TIMEZONE - assertEquals(resultSetMetaData.getColumnType(1), 2014); - // Assert that TIMESTAMP_TZ column returns Timestamp class name - assertEquals(resultSetMetaData.getColumnClassName(1), Timestamp.class.getName()); - + ResultSetMetaData resultSetMetaData = null; + try (Statement statement = connection.createStatement()) { + statement.executeQuery("create or replace table ts_test(ts timestamp_tz)"); + try (ResultSet resultSet = statement.executeQuery("select * from ts_test")) { + resultSetMetaData = resultSet.getMetaData(); + // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP_WITH_TIMEZONE + assertEquals(resultSetMetaData.getColumnType(1), 2014); + // Assert that TIMESTAMP_TZ column returns Timestamp class name + assertEquals(resultSetMetaData.getColumnClassName(1), Timestamp.class.getName()); + } + } SFBaseSession baseSession = connection.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); Field field = SFBaseSession.class.getDeclaredField("enableReturnTimestampWithTimeZone"); field.setAccessible(true); field.set(baseSession, false); - statement = connection.createStatement(); - resultSet = statement.executeQuery("select * from ts_test"); - resultSetMetaData = resultSet.getMetaData(); - // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP when - // enableReturnTimestampWithTimeZone is false. 
- assertEquals(resultSetMetaData.getColumnType(1), Types.TIMESTAMP); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select * from ts_test")) { + resultSetMetaData = resultSet.getMetaData(); + // Assert that TIMESTAMP_TZ type matches java.sql.TIMESTAMP when + // enableReturnTimestampWithTimeZone is false. + assertEquals(resultSetMetaData.getColumnType(1), Types.TIMESTAMP); + } } } @@ -662,29 +680,32 @@ public void testGetDataTypeWithTimestampTz() throws Exception { */ @Test public void testGetEmptyOrNullClob() throws SQLException { - Connection connection = init(); - Clob clob = connection.createClob(); - clob.setString(1, "hello world"); - Clob emptyClob = connection.createClob(); - emptyClob.setString(1, ""); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table test_get_clob(colA varchar, colNull varchar, colEmpty text)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_get_clob values(?, ?, ?)"); - preparedStatement.setClob(1, clob); - preparedStatement.setString(2, null); - preparedStatement.setClob(3, emptyClob); - preparedStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select * from test_get_clob"); - resultSet.next(); - assertEquals("hello world", resultSet.getClob(1).toString()); - assertEquals("hello world", resultSet.getClob("COLA").toString()); - assertNull(resultSet.getClob(2)); - assertNull(resultSet.getClob("COLNULL")); - assertEquals("", resultSet.getClob(3).toString()); - assertEquals("", resultSet.getClob("COLEMPTY").toString()); + try (Connection connection = init()) { + Clob clob = connection.createClob(); + clob.setString(1, "hello world"); + Clob emptyClob = connection.createClob(); + emptyClob.setString(1, ""); + try (Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table test_get_clob(colA varchar, colNull varchar, colEmpty 
text)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_get_clob values(?, ?, ?)")) { + preparedStatement.setClob(1, clob); + preparedStatement.setString(2, null); + preparedStatement.setClob(3, emptyClob); + preparedStatement.execute(); + } + try (ResultSet resultSet = statement.executeQuery("select * from test_get_clob")) { + assertTrue(resultSet.next()); + assertEquals("hello world", resultSet.getClob(1).toString()); + assertEquals("hello world", resultSet.getClob("COLA").toString()); + assertNull(resultSet.getClob(2)); + assertNull(resultSet.getClob("COLNULL")); + assertEquals("", resultSet.getClob(3).toString()); + assertEquals("", resultSet.getClob("COLEMPTY").toString()); + } + } + } } /** @@ -695,27 +716,32 @@ public void testGetEmptyOrNullClob() throws SQLException { */ @Test public void testSetNullClob() throws SQLException { - Connection connection = init(); - Clob clob = null; - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_set_clob(colNull varchar)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_set_clob values(?)"); - preparedStatement.setClob(1, clob); - preparedStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select * from test_set_clob"); - resultSet.next(); - assertNull(resultSet.getClob(1)); - assertNull(resultSet.getClob("COLNULL")); + try (Connection connection = init()) { + Clob clob = null; + try (Statement statement = connection.createStatement()) { + statement.execute("create or replace table test_set_clob(colNull varchar)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_set_clob values(?)")) { + preparedStatement.setClob(1, clob); + preparedStatement.execute(); + } + + try (ResultSet resultSet = statement.executeQuery("select * from test_set_clob")) { + assertTrue(resultSet.next()); + assertNull(resultSet.getClob(1)); + 
assertNull(resultSet.getClob("COLNULL")); + } + } + } } @Test public void testCallStatementType() throws SQLException { Properties props = new Properties(); props.put("USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS", "true"); - try (Connection connection = getConnection(props)) { - try (Statement statement = connection.createStatement()) { + try (Connection connection = getConnection(props); + Statement statement = connection.createStatement()) { + try { String sp = "CREATE OR REPLACE PROCEDURE \"SP_ZSDLEADTIME_ARCHIVE_DAILY\"()\n" + "RETURNS VARCHAR(16777216)\n" @@ -761,15 +787,15 @@ public void testCallStatementType() throws SQLException { statement.execute("create or replace table MYTABLE1 (ID int, NAME string)"); statement.execute(sp); - CallableStatement cs = connection.prepareCall("CALL SP_ZSDLEADTIME_ARCHIVE_DAILY()"); - cs.execute(); - ResultSetMetaData resultSetMetaData = cs.getMetaData(); - assertEquals("SP_ZSDLEADTIME_ARCHIVE_DAILY", resultSetMetaData.getColumnName(1)); - assertEquals("VARCHAR", resultSetMetaData.getColumnTypeName(1)); - assertEquals(0, resultSetMetaData.getScale(1)); - assertEquals(16777216, resultSetMetaData.getPrecision(1)); - - cs.close(); + try (CallableStatement cs = connection.prepareCall("CALL SP_ZSDLEADTIME_ARCHIVE_DAILY()")) { + cs.execute(); + ResultSetMetaData resultSetMetaData = cs.getMetaData(); + assertEquals("SP_ZSDLEADTIME_ARCHIVE_DAILY", resultSetMetaData.getColumnName(1)); + assertEquals("VARCHAR", resultSetMetaData.getColumnTypeName(1)); + assertEquals(0, resultSetMetaData.getScale(1)); + assertEquals(16777216, resultSetMetaData.getPrecision(1)); + } + } finally { statement.execute("drop procedure if exists SP_ZSDLEADTIME_ARCHIVE_DAILY()"); statement.execute("drop table if exists MYTABLE1"); statement.execute("drop table if exists MYCSVTABLE"); @@ -783,89 +809,92 @@ public void testCallStatementType() throws SQLException { */ @Test public void testNewFeaturesNotSupported() throws SQLException { - Connection con = 
init(); - ResultSet rs = con.createStatement().executeQuery("select 1"); - try { - rs.unwrap(SnowflakeResultSet.class).getQueryErrorMessage(); - } catch (SQLFeatureNotSupportedException ex) { - // catch SQLFeatureNotSupportedException - assertEquals("This function is only supported for asynchronous queries.", ex.getMessage()); + try (Connection con = init(); + ResultSet rs = con.createStatement().executeQuery("select 1")) { + try { + rs.unwrap(SnowflakeResultSet.class).getQueryErrorMessage(); + } catch (SQLFeatureNotSupportedException ex) { + // catch SQLFeatureNotSupportedException + assertEquals("This function is only supported for asynchronous queries.", ex.getMessage()); + } } - rs.close(); - con.close(); } @Test public void testGetObjectJsonResult() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='json'"); - statement.execute("create or replace table testObj (colA double, colB boolean)"); - - PreparedStatement preparedStatement = - connection.prepareStatement("insert into testObj values(?, ?)"); - preparedStatement.setDouble(1, 22.2); - preparedStatement.setBoolean(2, true); - preparedStatement.executeQuery(); - - ResultSet resultSet = statement.executeQuery("select * from testObj"); - resultSet.next(); - assertEquals(22.2, resultSet.getObject(1)); - assertEquals(true, resultSet.getObject(2)); - - statement.execute("drop table if exists testObj"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set jdbc_query_result_format ='json'"); + statement.execute("create or replace table testObj (colA double, colB boolean)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testObj values(?, ?)")) { + preparedStatement.setDouble(1, 22.2); + preparedStatement.setBoolean(2, 
true); + preparedStatement.executeQuery(); + } + try (ResultSet resultSet = statement.executeQuery("select * from testObj")) { + assertTrue(resultSet.next()); + assertEquals(22.2, resultSet.getObject(1)); + assertEquals(true, resultSet.getObject(2)); + } + } finally { + statement.execute("drop table if exists testObj"); + } + } } @Test public void testMetadataIsCaseSensitive() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - String sampleCreateTableWithAllColTypes = - "CREATE or replace TABLE case_sensitive (" - + " boolean_col BOOLEAN," - + " date_col DATE," - + " time_col TIME," - + " timestamp_col TIMESTAMP," - + " timestamp_ltz_col TIMESTAMP_LTZ," - + " timestamp_ntz_col TIMESTAMP_NTZ," - + " number_col NUMBER," - + " float_col FLOAT," - + " double_col DOUBLE," - + " binary_col BINARY," - + " geography_col GEOGRAPHY," - + " variant_col VARIANT," - + " object_col1 OBJECT," - + " array_col1 ARRAY," - + " text_col1 TEXT," - + " varchar_col VARCHAR(16777216)," - + " char_col CHAR(16777216)" - + ");"; - - statement.execute(sampleCreateTableWithAllColTypes); - ResultSet rs = statement.executeQuery("select * from case_sensitive"); - ResultSetMetaData metaData = rs.getMetaData(); - - assertFalse(metaData.isCaseSensitive(1)); // BOOLEAN - assertFalse(metaData.isCaseSensitive(2)); // DATE - assertFalse(metaData.isCaseSensitive(3)); // TIME - assertFalse(metaData.isCaseSensitive(4)); // TIMESTAMP - assertFalse(metaData.isCaseSensitive(5)); // TIMESTAMP_LTZ - assertFalse(metaData.isCaseSensitive(6)); // TIMESTAMP_NTZ - assertFalse(metaData.isCaseSensitive(7)); // NUMBER - assertFalse(metaData.isCaseSensitive(8)); // FLOAT - assertFalse(metaData.isCaseSensitive(9)); // DOUBLE - assertFalse(metaData.isCaseSensitive(10)); // BINARY - - assertTrue(metaData.isCaseSensitive(11)); // GEOGRAPHY - assertTrue(metaData.isCaseSensitive(12)); // VARIANT - assertTrue(metaData.isCaseSensitive(13)); // OBJECT - 
assertTrue(metaData.isCaseSensitive(14)); // ARRAY - assertTrue(metaData.isCaseSensitive(15)); // TEXT - assertTrue(metaData.isCaseSensitive(16)); // VARCHAR - assertTrue(metaData.isCaseSensitive(17)); // CHAR + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + + String sampleCreateTableWithAllColTypes = + "CREATE or replace TABLE case_sensitive (" + + " boolean_col BOOLEAN," + + " date_col DATE," + + " time_col TIME," + + " timestamp_col TIMESTAMP," + + " timestamp_ltz_col TIMESTAMP_LTZ," + + " timestamp_ntz_col TIMESTAMP_NTZ," + + " number_col NUMBER," + + " float_col FLOAT," + + " double_col DOUBLE," + + " binary_col BINARY," + + " geography_col GEOGRAPHY," + + " variant_col VARIANT," + + " object_col1 OBJECT," + + " array_col1 ARRAY," + + " text_col1 TEXT," + + " varchar_col VARCHAR(16777216)," + + " char_col CHAR(16777216)" + + ");"; + + statement.execute(sampleCreateTableWithAllColTypes); + try (ResultSet rs = statement.executeQuery("select * from case_sensitive")) { + ResultSetMetaData metaData = rs.getMetaData(); + + assertFalse(metaData.isCaseSensitive(1)); // BOOLEAN + assertFalse(metaData.isCaseSensitive(2)); // DATE + assertFalse(metaData.isCaseSensitive(3)); // TIME + assertFalse(metaData.isCaseSensitive(4)); // TIMESTAMP + assertFalse(metaData.isCaseSensitive(5)); // TIMESTAMP_LTZ + assertFalse(metaData.isCaseSensitive(6)); // TIMESTAMP_NTZ + assertFalse(metaData.isCaseSensitive(7)); // NUMBER + assertFalse(metaData.isCaseSensitive(8)); // FLOAT + assertFalse(metaData.isCaseSensitive(9)); // DOUBLE + assertFalse(metaData.isCaseSensitive(10)); // BINARY + + assertTrue(metaData.isCaseSensitive(11)); // GEOGRAPHY + assertTrue(metaData.isCaseSensitive(12)); // VARIANT + assertTrue(metaData.isCaseSensitive(13)); // OBJECT + assertTrue(metaData.isCaseSensitive(14)); // ARRAY + assertTrue(metaData.isCaseSensitive(15)); // TEXT + assertTrue(metaData.isCaseSensitive(16)); // VARCHAR + 
assertTrue(metaData.isCaseSensitive(17)); // CHAR + } + } } @Test @@ -873,21 +902,23 @@ public void testMetadataIsCaseSensitive() throws SQLException { public void testAutoIncrementJsonResult() throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("ENABLE_FIX_759900", true); - Connection connection = init(paramProperties); - Statement statement = connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='json'"); + try (Connection connection = init(paramProperties); + Statement statement = connection.createStatement()) { + statement.execute("alter session set jdbc_query_result_format ='json'"); - statement.execute( - "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); - statement.execute("insert into auto_inc(name) values('test1')"); + statement.execute( + "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); + statement.execute("insert into auto_inc(name) values('test1')"); - ResultSet resultSet = statement.executeQuery("select * from auto_inc"); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from auto_inc")) { + assertTrue(resultSet.next()); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertTrue(metaData.isAutoIncrement(1)); - assertFalse(metaData.isAutoIncrement(2)); - assertTrue(metaData.isAutoIncrement(3)); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertTrue(metaData.isAutoIncrement(1)); + assertFalse(metaData.isAutoIncrement(2)); + assertTrue(metaData.isAutoIncrement(3)); + } + } } @Test @@ -895,71 +926,68 @@ public void testAutoIncrementJsonResult() throws SQLException { public void testAutoIncrementArrowResult() throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("ENABLE_FIX_759900", true); - Connection connection = init(paramProperties); - Statement statement = 
connection.createStatement(); - statement.execute("alter session set jdbc_query_result_format ='arrow'"); + try (Connection connection = init(paramProperties); + Statement statement = connection.createStatement()) { + statement.execute("alter session set jdbc_query_result_format ='arrow'"); - statement.execute( - "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); - statement.execute("insert into auto_inc(name) values('test1')"); + statement.execute( + "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); + statement.execute("insert into auto_inc(name) values('test1')"); - ResultSet resultSet = statement.executeQuery("select * from auto_inc"); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from auto_inc")) { + assertTrue(resultSet.next()); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertTrue(metaData.isAutoIncrement(1)); - assertFalse(metaData.isAutoIncrement(2)); - assertTrue(metaData.isAutoIncrement(3)); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertTrue(metaData.isAutoIncrement(1)); + assertFalse(metaData.isAutoIncrement(2)); + assertTrue(metaData.isAutoIncrement(3)); + } + } } @Test public void testGranularTimeFunctionsInSessionTimezone() throws SQLException { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("create or replace table testGranularTime(t time)"); - statement.execute("insert into testGranularTime values ('10:10:10')"); - ResultSet resultSet = statement.executeQuery("select * from testGranularTime"); - resultSet.next(); - assertEquals(Time.valueOf("10:10:10"), resultSet.getTime(1)); - assertEquals(10, resultSet.getTime(1).getHours()); - assertEquals(10, resultSet.getTime(1).getMinutes()); - assertEquals(10, resultSet.getTime(1).getSeconds()); - 
resultSet.close(); - } finally { - statement.execute("drop table if exists testGranularTime"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table testGranularTime(t time)"); + statement.execute("insert into testGranularTime values ('10:10:10')"); + try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { + assertTrue(resultSet.next()); + assertEquals(Time.valueOf("10:10:10"), resultSet.getTime(1)); + assertEquals(10, resultSet.getTime(1).getHours()); + assertEquals(10, resultSet.getTime(1).getMinutes()); + assertEquals(10, resultSet.getTime(1).getSeconds()); + } + } finally { + statement.execute("drop table if exists testGranularTime"); + } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGranularTimeFunctionsInUTC() throws SQLException { - Connection connection = null; - Statement statement = null; - TimeZone origTz = TimeZone.getDefault(); - TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); - try { - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); - statement.execute("create or replace table testGranularTime(t time)"); - statement.execute("insert into testGranularTime values ('10:10:10')"); - ResultSet resultSet = statement.executeQuery("select * from testGranularTime"); - resultSet.next(); - assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); - assertEquals(02, resultSet.getTime(1).getHours()); - assertEquals(10, resultSet.getTime(1).getMinutes()); - assertEquals(10, resultSet.getTime(1).getSeconds()); - resultSet.close(); - } finally { - TimeZone.setDefault(origTz); - statement.execute("drop table if exists testGranularTime"); - statement.close(); - connection.close(); + try (Connection connection = 
getConnection()) { + TimeZone origTz = TimeZone.getDefault(); + try (Statement statement = connection.createStatement()) { + try { + TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); + statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); + statement.execute("create or replace table testGranularTime(t time)"); + statement.execute("insert into testGranularTime values ('10:10:10')"); + try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { + assertTrue(resultSet.next()); + assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); + assertEquals(02, resultSet.getTime(1).getHours()); + assertEquals(10, resultSet.getTime(1).getMinutes()); + assertEquals(10, resultSet.getTime(1).getSeconds()); + } + } finally { + TimeZone.setDefault(origTz); + statement.execute("drop table if exists testGranularTime"); + } + } } } @@ -1143,7 +1171,7 @@ private void assertResultValueAndType( Statement statement, Object expected, String columnName, Class type) throws SQLException { try (ResultSet resultSetString = statement.executeQuery(String.format("select %s from test_all_types", columnName))) { - resultSetString.next(); + assertTrue(resultSetString.next()); assertEquals(expected, resultSetString.getObject(1, type)); } } @@ -1152,7 +1180,7 @@ private void assertResultValueAsString( Statement statement, Object expected, String columnName, Class type) throws SQLException { try (ResultSet resultSetString = statement.executeQuery(String.format("select %s from test_all_types", columnName))) { - resultSetString.next(); + assertTrue(resultSetString.next()); assertEquals(expected.toString(), resultSetString.getObject(1, type).toString()); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java index 1ed7d09ea..c0a494613 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java @@ -64,125 +64,128 @@ public ResultSetMultiTimeZoneIT(String queryResultFormat, String timeZone) { public Connection init() throws SQLException { Connection connection = BaseJDBCTest.getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); - connection - .createStatement() - .execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return connection; } public Connection init(Properties paramProperties) throws SQLException { Connection conn = getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Before public void setUp() throws SQLException { - 
Connection con = init(); - - // TEST_RS - con.createStatement().execute("create or replace table test_rs (colA string)"); - con.createStatement().execute("insert into test_rs values('rowOne')"); - con.createStatement().execute("insert into test_rs values('rowTwo')"); - con.createStatement().execute("insert into test_rs values('rowThree')"); - - // ORDERS_JDBC - Statement statement = con.createStatement(); - statement.execute( - "create or replace table orders_jdbc" - + "(C1 STRING NOT NULL COMMENT 'JDBC', " - + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " - + "C7 STRING, C8 STRING, C9 STRING) " - + "stage_file_format = (field_delimiter='|' " - + "error_on_column_count_mismatch=false)"); - // put files - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); - - int numRows = statement.executeUpdate("copy into orders_jdbc"); - - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); - - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement()) { + + // TEST_RS + statement.execute("create or replace table test_rs (colA string)"); + statement.execute("insert into test_rs values('rowOne')"); + statement.execute("insert into test_rs values('rowTwo')"); + statement.execute("insert into test_rs values('rowThree')"); + + // ORDERS_JDBC + statement.execute( + "create or replace table orders_jdbc" + + "(C1 STRING NOT NULL COMMENT 'JDBC', " + + "C2 STRING, C3 STRING, C4 STRING, C5 STRING, C6 STRING, " + + "C7 STRING, C8 STRING, C9 STRING) " + + "stage_file_format = (field_delimiter='|' " + + "error_on_column_count_mismatch=false)"); + // put files + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " 
@%orders_jdbc")); + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + + int numRows = statement.executeUpdate("copy into orders_jdbc"); + + assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + } } @After public void tearDown() throws SQLException { System.clearProperty("user.timezone"); - Connection con = init(); - con.createStatement().execute("drop table if exists orders_jdbc"); - con.createStatement().execute("drop table if exists test_rs"); - con.close(); + try (Connection con = init(); + Statement statement = con.createStatement()) { + statement.execute("drop table if exists orders_jdbc"); + statement.execute("drop table if exists test_rs"); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetDateAndTime() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table dateTime(colA Date, colB Timestamp, colC Time)"); - - java.util.Date today = new java.util.Date(); - Date date = buildDate(2016, 3, 20); - Timestamp ts = new Timestamp(today.getTime()); - Time tm = new Time(12345678); // 03:25:45.678 - final String insertTime = "insert into datetime values(?, ?, ?)"; - PreparedStatement prepStatement = connection.prepareStatement(insertTime); - prepStatement.setDate(1, date); - prepStatement.setTimestamp(2, ts); - prepStatement.setTime(3, tm); - - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select * from datetime"); - resultSet.next(); - assertEquals(date, resultSet.getDate(1)); - assertEquals(date, resultSet.getDate("COLA")); - assertEquals(ts, resultSet.getTimestamp(2)); - assertEquals(ts, resultSet.getTimestamp("COLB")); - assertEquals(tm, resultSet.getTime(3)); - assertEquals(tm, resultSet.getTime("COLC")); - - statement.execute( - "create or replace table 
datetime(colA timestamp_ltz, colB timestamp_ntz, colC timestamp_tz)"); - statement.execute( - "insert into dateTime values ('2019-01-01 17:17:17', '2019-01-01 17:17:17', '2019-01-01 " - + "17:17:17')"); - prepStatement = - connection.prepareStatement( - "insert into datetime values(?, '2019-01-01 17:17:17', '2019-01-01 17:17:17')"); - Timestamp dateTime = new Timestamp(date.getTime()); - prepStatement.setTimestamp(1, dateTime); - prepStatement.execute(); - resultSet = statement.executeQuery("select * from datetime"); - resultSet.next(); - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - formatter.setTimeZone(TimeZone.getDefault()); - String d = formatter.format(resultSet.getDate("COLA")); - assertEquals("2019-01-02 01:17:17", d); - resultSet.next(); - assertEquals(date, resultSet.getDate(1)); - assertEquals(date, resultSet.getDate("COLA")); - statement.execute("drop table if exists datetime"); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table dateTime(colA Date, colB Timestamp, colC Time)"); + + java.util.Date today = new java.util.Date(); + Date date = buildDate(2016, 3, 20); + Timestamp ts = new Timestamp(today.getTime()); + Time tm = new Time(12345678); // 03:25:45.678 + final String insertTime = "insert into datetime values(?, ?, ?)"; + try (PreparedStatement prepStatement = connection.prepareStatement(insertTime)) { + prepStatement.setDate(1, date); + prepStatement.setTimestamp(2, ts); + prepStatement.setTime(3, tm); + + prepStatement.execute(); + + ResultSet resultSet = statement.executeQuery("select * from datetime"); + assertTrue(resultSet.next()); + assertEquals(date, resultSet.getDate(1)); + assertEquals(date, resultSet.getDate("COLA")); + assertEquals(ts, resultSet.getTimestamp(2)); + assertEquals(ts, resultSet.getTimestamp("COLB")); + assertEquals(tm, resultSet.getTime(3)); + assertEquals(tm, 
resultSet.getTime("COLC")); + } + statement.execute( + "create or replace table datetime(colA timestamp_ltz, colB timestamp_ntz, colC timestamp_tz)"); + statement.execute( + "insert into dateTime values ('2019-01-01 17:17:17', '2019-01-01 17:17:17', '2019-01-01 " + + "17:17:17')"); + try (PreparedStatement prepStatement = + connection.prepareStatement( + "insert into datetime values(?, '2019-01-01 17:17:17', '2019-01-01 17:17:17')")) { + Timestamp dateTime = new Timestamp(date.getTime()); + prepStatement.setTimestamp(1, dateTime); + prepStatement.execute(); + try (ResultSet resultSet = statement.executeQuery("select * from datetime")) { + assertTrue(resultSet.next()); + SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + formatter.setTimeZone(TimeZone.getDefault()); + String d = formatter.format(resultSet.getDate("COLA")); + assertEquals("2019-01-02 01:17:17", d); + assertTrue(resultSet.next()); + assertEquals(date, resultSet.getDate(1)); + assertEquals(date, resultSet.getDate("COLA")); + } + } + } finally { + statement.execute("drop table if exists datetime"); + } + } } // SNOW-25029: The driver should reduce Time milliseconds mod 24h. 
@@ -190,246 +193,275 @@ public void testGetDateAndTime() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testTimeRange() throws SQLException { final String insertTime = "insert into timeTest values (?), (?), (?), (?)"; - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table timeTest (c1 time)"); - - long ms1 = -2202968667333L; // 1900-03-11 09:15:33.667 - long ms2 = -1; // 1969-12-31 23:59:99.999 - long ms3 = 86400 * 1000; // 1970-01-02 00:00:00 - long ms4 = 1451680250123L; // 2016-01-01 12:30:50.123 - - Time tm1 = new Time(ms1); - Time tm2 = new Time(ms2); - Time tm3 = new Time(ms3); - Time tm4 = new Time(ms4); - - PreparedStatement prepStatement = connection.prepareStatement(insertTime); - prepStatement.setTime(1, tm1); - prepStatement.setTime(2, tm2); - prepStatement.setTime(3, tm3); - prepStatement.setTime(4, tm4); - - prepStatement.execute(); - - // Note that the resulting Time objects are NOT equal because they have - // their milliseconds in the range 0 to 86,399,999, i.e. inside Jan 1, 1970. - // PreparedStatement accepts Time objects outside this range, but it reduces - // modulo 24 hours to discard the date information before sending to GS. 
- - final long M = 86400 * 1000; - ResultSet resultSet = statement.executeQuery("select * from timeTest"); - resultSet.next(); - assertNotEquals(tm1, resultSet.getTime(1)); - assertEquals(new Time((ms1 % M + M) % M), resultSet.getTime(1)); - resultSet.next(); - assertNotEquals(tm2, resultSet.getTime(1)); - assertEquals(new Time((ms2 % M + M) % M), resultSet.getTime(1)); - resultSet.next(); - assertNotEquals(tm3, resultSet.getTime(1)); - assertEquals(new Time((ms3 % M + M) % M), resultSet.getTime(1)); - resultSet.next(); - assertNotEquals(tm4, resultSet.getTime(1)); - assertEquals(new Time((ms4 % M + M) % M), resultSet.getTime(1)); - statement.execute("drop table if exists timeTest"); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table timeTest (c1 time)"); + + long ms1 = -2202968667333L; // 1900-03-11 09:15:33.667 + long ms2 = -1; // 1969-12-31 23:59:99.999 + long ms3 = 86400 * 1000; // 1970-01-02 00:00:00 + long ms4 = 1451680250123L; // 2016-01-01 12:30:50.123 + + Time tm1 = new Time(ms1); + Time tm2 = new Time(ms2); + Time tm3 = new Time(ms3); + Time tm4 = new Time(ms4); + + try (PreparedStatement prepStatement = connection.prepareStatement(insertTime)) { + prepStatement.setTime(1, tm1); + prepStatement.setTime(2, tm2); + prepStatement.setTime(3, tm3); + prepStatement.setTime(4, tm4); + + prepStatement.execute(); + } + + // Note that the resulting Time objects are NOT equal because they have + // their milliseconds in the range 0 to 86,399,999, i.e. inside Jan 1, 1970. + // PreparedStatement accepts Time objects outside this range, but it reduces + // modulo 24 hours to discard the date information before sending to GS. 
+ + final long M = 86400 * 1000; + try (ResultSet resultSet = statement.executeQuery("select * from timeTest")) { + assertTrue(resultSet.next()); + assertNotEquals(tm1, resultSet.getTime(1)); + assertEquals(new Time((ms1 % M + M) % M), resultSet.getTime(1)); + assertTrue(resultSet.next()); + assertNotEquals(tm2, resultSet.getTime(1)); + assertEquals(new Time((ms2 % M + M) % M), resultSet.getTime(1)); + assertTrue(resultSet.next()); + assertNotEquals(tm3, resultSet.getTime(1)); + assertEquals(new Time((ms3 % M + M) % M), resultSet.getTime(1)); + assertTrue(resultSet.next()); + assertNotEquals(tm4, resultSet.getTime(1)); + assertEquals(new Time((ms4 % M + M) % M), resultSet.getTime(1)); + } + } finally { + statement.execute("drop table if exists timeTest"); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCurrentTime() throws SQLException { final String insertTime = "insert into datetime values (?, ?, ?)"; - Connection connection = init(); - - assertFalse(connection.createStatement().execute("alter session set TIMEZONE='UTC'")); - - Statement statement = connection.createStatement(); - statement.execute("create or replace table datetime (d date, ts timestamp, tm time)"); - PreparedStatement prepStatement = connection.prepareStatement(insertTime); - - long currentMillis = System.currentTimeMillis(); - Date currentDate = new Date(currentMillis); - Timestamp currentTS = new Timestamp(currentMillis); - Time currentTime = new Time(currentMillis); - - prepStatement.setDate(1, currentDate); - prepStatement.setTimestamp(2, currentTS); - prepStatement.setTime(3, currentTime); - - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select ts::date = d from datetime"); - resultSet.next(); - assertTrue(resultSet.getBoolean(1)); - resultSet = statement.executeQuery("select ts::time = tm from datetime"); - resultSet.next(); - assertTrue(resultSet.getBoolean(1)); - - statement.execute("drop 
table if exists datetime"); - connection.close(); + try (Connection connection = init()) { + + assertFalse(connection.createStatement().execute("alter session set TIMEZONE='UTC'")); + + try (Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table datetime (d date, ts timestamp, tm time)"); + try (PreparedStatement prepStatement = connection.prepareStatement(insertTime)) { + + long currentMillis = System.currentTimeMillis(); + Date currentDate = new Date(currentMillis); + Timestamp currentTS = new Timestamp(currentMillis); + Time currentTime = new Time(currentMillis); + + prepStatement.setDate(1, currentDate); + prepStatement.setTimestamp(2, currentTS); + prepStatement.setTime(3, currentTime); + + prepStatement.execute(); + + try (ResultSet resultSet = + statement.executeQuery("select ts::date = d from datetime")) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(1)); + } + try (ResultSet resultSet = + statement.executeQuery("select ts::time = tm from datetime")) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(1)); + } + } + } finally { + statement.execute("drop table if exists datetime"); + } + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testBindTimestampTZ() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table testBindTimestampTZ(" + "cola int, colb timestamp_tz)"); - - long millSeconds = System.currentTimeMillis(); - Timestamp ts = new Timestamp(millSeconds); - PreparedStatement prepStatement = - connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)"); - prepStatement.setInt(1, 123); - prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("UTC"))); - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select cola, colb from 
testBindTimestampTz"); - resultSet.next(); - assertThat("integer", resultSet.getInt(1), equalTo(123)); - assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); - - statement.execute("drop table if exists testBindTimestampTZ"); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table testBindTimestampTZ(" + "cola int, colb timestamp_tz)"); + + long millSeconds = System.currentTimeMillis(); + Timestamp ts = new Timestamp(millSeconds); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)")) { + prepStatement.setInt(1, 123); + prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("UTC"))); + prepStatement.execute(); + } + + try (ResultSet resultSet = + statement.executeQuery("select cola, colb from testBindTimestampTz")) { + assertTrue(resultSet.next()); + assertThat("integer", resultSet.getInt(1), equalTo(123)); + assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); + } + } finally { + statement.execute("drop table if exists testBindTimestampTZ"); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetOldDate() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table testOldDate(d date)"); - statement.execute( - "insert into testOldDate values ('0001-01-01'), " - + "(to_date('1000-01-01')), ('1300-01-01'), ('1400-02-02'), " - + "('1500-01-01'), ('1600-02-03')"); - - ResultSet resultSet = statement.executeQuery("select * from testOldDate order by d"); - resultSet.next(); - assertEquals("0001-01-01", resultSet.getString(1)); - assertEquals(Date.valueOf("0001-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1000-01-01", resultSet.getString(1)); - 
assertEquals(Date.valueOf("1000-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1300-01-01", resultSet.getString(1)); - assertEquals(Date.valueOf("1300-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1400-02-02", resultSet.getString(1)); - assertEquals(Date.valueOf("1400-02-02"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1500-01-01", resultSet.getString(1)); - assertEquals(Date.valueOf("1500-01-01"), resultSet.getDate(1)); - resultSet.next(); - assertEquals("1600-02-03", resultSet.getString(1)); - assertEquals(Date.valueOf("1600-02-03"), resultSet.getDate(1)); - - resultSet.close(); - statement.execute("drop table if exists testOldDate"); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table testOldDate(d date)"); + statement.execute( + "insert into testOldDate values ('0001-01-01'), " + + "(to_date('1000-01-01')), ('1300-01-01'), ('1400-02-02'), " + + "('1500-01-01'), ('1600-02-03')"); + + try (ResultSet resultSet = statement.executeQuery("select * from testOldDate order by d")) { + assertTrue(resultSet.next()); + assertEquals("0001-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("0001-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1000-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("1000-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1300-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("1300-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1400-02-02", resultSet.getString(1)); + assertEquals(Date.valueOf("1400-02-02"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + assertEquals("1500-01-01", resultSet.getString(1)); + assertEquals(Date.valueOf("1500-01-01"), resultSet.getDate(1)); + assertTrue(resultSet.next()); + 
assertEquals("1600-02-03", resultSet.getString(1)); + assertEquals(Date.valueOf("1600-02-03"), resultSet.getDate(1)); + } + } finally { + statement.execute("drop table if exists testOldDate"); + } + } } @Test public void testGetStringForDates() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - String expectedDate1 = "2020-08-01"; - String expectedDate2 = "1920-11-11"; - ResultSet rs = statement.executeQuery("SELECT '" + expectedDate1 + "'::DATE as D1"); - rs.next(); - assertEquals(expectedDate1, rs.getString(1)); - rs = statement.executeQuery("SELECT '" + expectedDate2 + "'::DATE as D1"); - rs.next(); - assertEquals(expectedDate2, rs.getString(1)); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + String expectedDate1 = "2020-08-01"; + String expectedDate2 = "1920-11-11"; + try (ResultSet rs = statement.executeQuery("SELECT '" + expectedDate1 + "'::DATE as D1")) { + rs.next(); + assertEquals(expectedDate1, rs.getString(1)); + } + try (ResultSet rs = statement.executeQuery("SELECT '" + expectedDate2 + "'::DATE as D1")) { + rs.next(); + assertEquals(expectedDate2, rs.getString(1)); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDateTimeRelatedTypeConversion() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table testDateTime" - + "(colDate DATE, colTS timestamp_ltz, colTime TIME, colString string)"); - PreparedStatement preparedStatement = - connection.prepareStatement("insert into testDateTime values(?, ?, ?, ?)"); - - Timestamp ts = buildTimestamp(2016, 3, 20, 3, 25, 45, 67800000); - Date date = buildDate(2016, 3, 20); - Time time = new Time(12345678); // 03:25:45.678 - - preparedStatement.setDate(1, date); - preparedStatement.setTimestamp(2, ts); - preparedStatement.setTime(3, time); - 
preparedStatement.setString(4, "aaa"); - - preparedStatement.execute(); - ResultSet resultSet = statement.executeQuery("select * from testDateTime"); - resultSet.next(); - - // ResultSet.getDate() - assertEquals(date, resultSet.getDate("COLDATE")); - try { - resultSet.getDate("COLTIME"); - fail(); - } catch (SnowflakeSQLException e) { - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); - } - - // ResultSet.getTimestamp() - assertEquals(new Timestamp(date.getTime()), resultSet.getTimestamp("COLDATE")); - assertEquals(ts, resultSet.getTimestamp("COLTS")); - assertEquals(new Timestamp(time.getTime()), resultSet.getTimestamp("COLTIME")); - try { - resultSet.getTimestamp("COLSTRING"); - fail(); - } catch (SnowflakeSQLException e) { - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); - } - - // ResultSet.getTime() - try { - resultSet.getTime("COLDATE"); - fail(); - } catch (SnowflakeSQLException e) { - assertEquals((int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); - assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table testDateTime" + + "(colDate DATE, colTS timestamp_ltz, colTime TIME, colString string)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testDateTime values(?, ?, ?, ?)")) { + Timestamp ts = buildTimestamp(2016, 3, 20, 3, 25, 45, 67800000); + Date date = buildDate(2016, 3, 20); + Time time = new Time(12345678); // 03:25:45.678 + + preparedStatement.setDate(1, date); + preparedStatement.setTimestamp(2, ts); + preparedStatement.setTime(3, time); + preparedStatement.setString(4, "aaa"); + + 
preparedStatement.execute(); + try (ResultSet resultSet = statement.executeQuery("select * from testDateTime")) { + assertTrue(resultSet.next()); + + // ResultSet.getDate() + assertEquals(date, resultSet.getDate("COLDATE")); + try { + resultSet.getDate("COLTIME"); + fail(); + } catch (SnowflakeSQLException e) { + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + } + + // ResultSet.getTimestamp() + assertEquals(new Timestamp(date.getTime()), resultSet.getTimestamp("COLDATE")); + assertEquals(ts, resultSet.getTimestamp("COLTS")); + assertEquals(new Timestamp(time.getTime()), resultSet.getTimestamp("COLTIME")); + try { + resultSet.getTimestamp("COLSTRING"); + fail(); + } catch (SnowflakeSQLException e) { + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + } + + // ResultSet.getTime() + try { + resultSet.getTime("COLDATE"); + fail(); + } catch (SnowflakeSQLException e) { + assertEquals( + (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), e.getErrorCode()); + assertEquals(ErrorCode.INVALID_VALUE_CONVERT.getSqlState(), e.getSQLState()); + } + assertEquals(time, resultSet.getTime("COLTIME")); + assertEquals(new Time(ts.getTime()), resultSet.getTime("COLTS")); + } + } + } finally { + statement.execute("drop table if exists testDateTime"); + } } - assertEquals(time, resultSet.getTime("COLTIME")); - assertEquals(new Time(ts.getTime()), resultSet.getTime("COLTS")); - - statement.execute("drop table if exists testDateTime"); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetOldTimestamp() throws SQLException { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute("create or replace table testOldTs(cola timestamp_ntz)"); - 
statement.execute( - "insert into testOldTs values ('1582-06-22 17:00:00'), " + "('1000-01-01 17:00:00')"); - - ResultSet resultSet = statement.executeQuery("select * from testOldTs"); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table testOldTs(cola timestamp_ntz)"); + statement.execute( + "insert into testOldTs values ('1582-06-22 17:00:00'), " + "('1000-01-01 17:00:00')"); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from testOldTs")) { - assertThat(resultSet.getTimestamp(1).toString(), equalTo("1582-06-22 17:00:00.0")); - assertThat(resultSet.getString(1), equalTo("Fri, 22 Jun 1582 17:00:00 Z")); + assertTrue(resultSet.next()); - resultSet.next(); - assertThat(resultSet.getTimestamp(1).toString(), equalTo("1000-01-01 17:00:00.0")); - assertThat(resultSet.getString(1), equalTo("Mon, 01 Jan 1000 17:00:00 Z")); + assertThat(resultSet.getTimestamp(1).toString(), equalTo("1582-06-22 17:00:00.0")); + assertThat(resultSet.getString(1), equalTo("Fri, 22 Jun 1582 17:00:00 Z")); - statement.execute("drop table if exists testOldTs"); - statement.close(); - con.close(); + assertTrue(resultSet.next()); + assertThat(resultSet.getTimestamp(1).toString(), equalTo("1000-01-01 17:00:00.0")); + assertThat(resultSet.getString(1), equalTo("Mon, 01 Jan 1000 17:00:00 Z")); + } + } finally { + statement.execute("drop table if exists testOldTs"); + } + } } @Test @@ -437,31 +469,26 @@ public void testGetOldTimestamp() throws SQLException { public void testPrepareOldTimestamp() throws SQLException { TimeZone origTz = TimeZone.getDefault(); TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - try { - Connection con = init(); - Statement statement = con.createStatement(); - - statement.execute("create or replace table testPrepOldTs(cola timestamp_ntz, colb date)"); - statement.execute("alter session set client_timestamp_type_mapping=timestamp_ntz"); - PreparedStatement ps 
= con.prepareStatement("insert into testPrepOldTs values (?, ?)"); - - ps.setTimestamp(1, Timestamp.valueOf("0001-01-01 08:00:00")); - ps.setDate(2, Date.valueOf("0001-01-01")); - ps.executeUpdate(); - - ResultSet resultSet = statement.executeQuery("select * from testPrepOldTs"); - - resultSet.next(); - assertThat(resultSet.getTimestamp(1).toString(), equalTo("0001-01-01 08:00:00.0")); - assertThat(resultSet.getDate(2).toString(), equalTo("0001-01-01")); - - statement.execute("drop table if exists testPrepOldTs"); - - statement.close(); - - con.close(); - } finally { - TimeZone.setDefault(origTz); + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace table testPrepOldTs(cola timestamp_ntz, colb date)"); + statement.execute("alter session set client_timestamp_type_mapping=timestamp_ntz"); + PreparedStatement ps = con.prepareStatement("insert into testPrepOldTs values (?, ?)"); + + ps.setTimestamp(1, Timestamp.valueOf("0001-01-01 08:00:00")); + ps.setDate(2, Date.valueOf("0001-01-01")); + ps.executeUpdate(); + + ResultSet resultSet = statement.executeQuery("select * from testPrepOldTs"); + + assertTrue(resultSet.next()); + assertThat(resultSet.getTimestamp(1).toString(), equalTo("0001-01-01 08:00:00.0")); + assertThat(resultSet.getDate(2).toString(), equalTo("0001-01-01")); + } finally { + statement.execute("drop table if exists testPrepOldTs"); + TimeZone.setDefault(origTz); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java index 3720b9ae5..06a253b95 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java @@ -1,6 +1,7 @@ package net.snowflake.client.jdbc; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import 
java.sql.Connection; import java.sql.Date; @@ -55,19 +56,17 @@ public ResultSetMultiTimeZoneLatestIT(String queryResultFormat, String timeZone) public Connection init() throws SQLException { Connection connection = BaseJDBCTest.getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); - connection - .createStatement() - .execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return connection; } @@ -79,27 +78,29 @@ public Connection init() throws SQLException { */ @Test public void testTimesWithGetTimestamp() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - String timeStringValue = "10:30:50.123456789"; - String timestampStringValue = "1970-01-01 " + timeStringValue; - int length = timestampStringValue.length(); - statement.execute( - "create or replace table SRC_DATE_TIME (C2_TIME_3 TIME(3), C3_TIME_5 TIME(5), C4_TIME" - + " TIME(9))"); - statement.execute( - "insert into 
SRC_DATE_TIME values ('" - + timeStringValue - + "','" - + timeStringValue - + "','" - + timeStringValue - + "')"); - ResultSet rs = statement.executeQuery("select * from SRC_DATE_TIME"); - rs.next(); - assertEquals(timestampStringValue.substring(0, length - 6), rs.getTimestamp(1).toString()); - assertEquals(timestampStringValue.substring(0, length - 4), rs.getTimestamp(2).toString()); - assertEquals(timestampStringValue, rs.getTimestamp(3).toString()); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + String timeStringValue = "10:30:50.123456789"; + String timestampStringValue = "1970-01-01 " + timeStringValue; + int length = timestampStringValue.length(); + statement.execute( + "create or replace table SRC_DATE_TIME (C2_TIME_3 TIME(3), C3_TIME_5 TIME(5), C4_TIME" + + " TIME(9))"); + statement.execute( + "insert into SRC_DATE_TIME values ('" + + timeStringValue + + "','" + + timeStringValue + + "','" + + timeStringValue + + "')"); + try (ResultSet rs = statement.executeQuery("select * from SRC_DATE_TIME")) { + assertTrue(rs.next()); + assertEquals(timestampStringValue.substring(0, length - 6), rs.getTimestamp(1).toString()); + assertEquals(timestampStringValue.substring(0, length - 4), rs.getTimestamp(2).toString()); + assertEquals(timestampStringValue, rs.getTimestamp(3).toString()); + } + } } /** @@ -112,17 +113,16 @@ public void testTimesWithGetTimestamp() throws SQLException { */ @Test public void testTimestampNTZWithDaylightSavings() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set TIMESTAMP_TYPE_MAPPING='TIMESTAMP_NTZ'," + "TIMEZONE='Europe/London'"); - ResultSet rs = statement.executeQuery("select TIMESTAMP '2011-09-04 00:00:00'"); - rs.next(); - Timestamp expected = Timestamp.valueOf("2011-09-04 00:00:00"); - assertEquals(expected, rs.getTimestamp(1)); - rs.close(); - statement.close(); - connection.close(); 
+ try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter session set TIMESTAMP_TYPE_MAPPING='TIMESTAMP_NTZ'," + "TIMEZONE='Europe/London'"); + try (ResultSet rs = statement.executeQuery("select TIMESTAMP '2011-09-04 00:00:00'")) { + assertTrue(rs.next()); + Timestamp expected = Timestamp.valueOf("2011-09-04 00:00:00"); + assertEquals(expected, rs.getTimestamp(1)); + } + } } /** @@ -132,57 +132,63 @@ public void testTimestampNTZWithDaylightSavings() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDateAndTimestampWithTimezone() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); - ResultSet rs = - statement.executeQuery( - "SELECT DATE '1970-01-02 00:00:00' as datefield, " - + "TIMESTAMP '1970-01-02 00:00:00' as timestampfield"); - rs.next(); - - // Set a timezone for results to be returned in and set a format for date and timestamp objects - Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - sdf.setTimeZone(cal.getTimeZone()); - - // Date object and calendar object should return the same timezone offset with calendar - Date dateWithZone = rs.getDate(1, cal); - Timestamp timestampWithZone = rs.getTimestamp(2, cal); - assertEquals(sdf.format(dateWithZone), sdf.format(timestampWithZone)); - - // When fetching Date object with getTimestamp versus Timestamp object with getTimestamp, - // results should match - assertEquals(rs.getTimestamp(1, cal), rs.getTimestamp(2, cal)); + Calendar cal = null; + SimpleDateFormat sdf = null; - // When fetching Timestamp object with getDate versus Date object with getDate, results should - // match - assertEquals(rs.getDate(1, cal), rs.getDate(2, cal)); - - // getDate() 
without Calendar offset called on Date type should return the same date with no - // timezone offset - assertEquals("1970-01-02 00:00:00", sdf.format(rs.getDate(1))); - // getDate() without Calendar offset called on Timestamp type returns date with timezone offset - assertEquals("1970-01-02 08:00:00", sdf.format(rs.getDate(2))); - - // getTimestamp() without Calendar offset called on Timestamp type should return the timezone - // offset - assertEquals("1970-01-02 08:00:00", sdf.format(rs.getTimestamp(2))); - // getTimestamp() without Calendar offset called on Date type should not return the timezone - // offset - assertEquals("1970-01-02 00:00:00", sdf.format(rs.getTimestamp(1))); - - // test that session parameter functions as expected. When false, getDate() has same behavior - // with or without Calendar input - statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=false"); - rs = statement.executeQuery("SELECT DATE '1945-05-10 00:00:00' as datefield"); - rs.next(); - assertEquals(rs.getDate(1, cal), rs.getDate(1)); - assertEquals("1945-05-10 00:00:00", sdf.format(rs.getDate(1, cal))); - - rs.close(); - statement.close(); - connection.close(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); + try (ResultSet rs = + statement.executeQuery( + "SELECT DATE '1970-01-02 00:00:00' as datefield, " + + "TIMESTAMP '1970-01-02 00:00:00' as timestampfield")) { + assertTrue(rs.next()); + + // Set a timezone for results to be returned in and set a format for date and timestamp + // objects + cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + sdf.setTimeZone(cal.getTimeZone()); + + // Date object and calendar object should return the same timezone offset with calendar + Date dateWithZone = rs.getDate(1, cal); + Timestamp timestampWithZone = rs.getTimestamp(2, cal); + 
assertEquals(sdf.format(dateWithZone), sdf.format(timestampWithZone)); + + // When fetching Date object with getTimestamp versus Timestamp object with getTimestamp, + // results should match + assertEquals(rs.getTimestamp(1, cal), rs.getTimestamp(2, cal)); + + // When fetching Timestamp object with getDate versus Date object with getDate, results + // should + // match + assertEquals(rs.getDate(1, cal), rs.getDate(2, cal)); + + // getDate() without Calendar offset called on Date type should return the same date with no + // timezone offset + assertEquals("1970-01-02 00:00:00", sdf.format(rs.getDate(1))); + // getDate() without Calendar offset called on Timestamp type returns date with timezone + // offset + assertEquals("1970-01-02 08:00:00", sdf.format(rs.getDate(2))); + + // getTimestamp() without Calendar offset called on Timestamp type should return the + // timezone + // offset + assertEquals("1970-01-02 08:00:00", sdf.format(rs.getTimestamp(2))); + // getTimestamp() without Calendar offset called on Date type should not return the timezone + // offset + assertEquals("1970-01-02 00:00:00", sdf.format(rs.getTimestamp(1))); + } + // test that session parameter functions as expected. 
When false, getDate() has same behavior + // with or without Calendar input + statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=false"); + try (ResultSet rs = + statement.executeQuery("SELECT DATE '1945-05-10 00:00:00' as datefield")) { + assertTrue(rs.next()); + assertEquals(rs.getDate(1, cal), rs.getDate(1)); + assertEquals("1945-05-10 00:00:00", sdf.format(rs.getDate(1, cal))); + } + } } /** @@ -226,139 +232,152 @@ public void testUseSessionTimeZoneOverrides() throws SQLException { * @throws SQLException */ private void testUseSessionTimeZoneHelper(boolean useDefaultParamSettings) throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - // create table with all timestamp types, time, and date - statement.execute( - "create or replace table datetimetypes(colA timestamp_ltz, colB timestamp_ntz, colC" - + " timestamp_tz, colD time, colE date)"); - // Enable session parameter JDBC_USE_SESSION_TIMEZONE - statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=true"); - if (!useDefaultParamSettings) { - // these are 3 other session params that also alter the session display behavior - statement.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); - statement.execute("alter session set CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ=false"); - statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + // create table with all timestamp types, time, and date + statement.execute( + "create or replace table datetimetypes(colA timestamp_ltz, colB timestamp_ntz, colC" + + " timestamp_tz, colD time, colE date)"); + // Enable session parameter JDBC_USE_SESSION_TIMEZONE + statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=true"); + if (!useDefaultParamSettings) { + // these are 3 other session params that also alter the session display behavior + 
statement.execute("alter session set JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true"); + statement.execute("alter session set CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ=false"); + statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); + } + + String expectedTimestamp = "2019-01-01 17:17:17.6"; + String expectedTime = "17:17:17"; + String expectedDate = "2019-01-01"; + String expectedTimestamp2 = "1943-12-31 01:01:33.0"; + String expectedTime2 = "01:01:33"; + String expectedDate2 = "1943-12-31"; + try (PreparedStatement prepSt = + connection.prepareStatement("insert into datetimetypes values(?, ?, ?, ?, ?)")) { + prepSt.setString(1, expectedTimestamp); + prepSt.setString(2, expectedTimestamp); + prepSt.setString(3, expectedTimestamp); + prepSt.setString(4, expectedTime); + prepSt.setString(5, expectedDate); + prepSt.execute(); + prepSt.setString(1, expectedTimestamp2); + prepSt.setString(2, expectedTimestamp2); + prepSt.setString(3, expectedTimestamp2); + prepSt.setString(4, expectedTime2); + prepSt.setString(5, expectedDate2); + prepSt.execute(); + } + // Results differ depending on whether flag JDBC_USE_SESSION_TIMEZONE=true. If true, the + // returned ResultSet value should match the value inserted into the table with no offset + // (with + // exceptions for getTimestamp() on date and time objects). + try (ResultSet rs = statement.executeQuery("select * from datetimetypes")) { + assertTrue(rs.next()); + // Assert date has no offset. When flag is false, timestamp_ltz and timestamp_ntz will + // show + // offset. + assertEquals(expectedDate, rs.getDate("COLA").toString()); + // always true since timezone_ntz doesn't add time offset + assertEquals(expectedDate, rs.getDate("COLB").toString()); + assertEquals(expectedDate, rs.getDate("COLC").toString()); + // cannot getDate() for Time column (ColD) + // always true since Date objects don't have timezone offsets + assertEquals(expectedDate, rs.getDate("COLE").toString()); + + // Assert timestamp has no offset. 
When flag is false, timestamp_ltz and timestamp_ntz + // will + // show + // offset. + assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); + // always true since timezone_ntz doesn't add time offset + assertEquals(expectedTimestamp, rs.getTimestamp("COLB").toString()); + assertEquals(expectedTimestamp, rs.getTimestamp("COLC").toString()); + // Getting timestamp from Time column will default to epoch start date so date portion is + // different than input date of the timestamp + assertEquals("1970-01-01 17:17:17.0", rs.getTimestamp("COLD").toString()); + // Getting timestamp from Date column will default to wallclock time of 0 so time portion + // is + // different than input time of the timestamp + assertEquals("2019-01-01 00:00:00.0", rs.getTimestamp("COLE").toString()); + + // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will + // show + // offset. + assertEquals(expectedTime, rs.getTime("COLA").toString()); + assertEquals(expectedTime, rs.getTime("COLB").toString()); + assertEquals(expectedTime, rs.getTime("COLC").toString()); + assertEquals(expectedTime, rs.getTime("COLD").toString()); + // Cannot getTime() for Date column (colE) + + assertTrue(rs.next()); + // Assert date has no offset. Offset will never be seen regardless of flag because + // 01:01:33 + // is + // too early for any timezone to round it to the next day. + assertEquals(expectedDate2, rs.getDate("COLA").toString()); + assertEquals(expectedDate2, rs.getDate("COLB").toString()); + assertEquals(expectedDate2, rs.getDate("COLC").toString()); + // cannot getDate() for Time column (ColD) + assertEquals(expectedDate2, rs.getDate("COLE").toString()); + + // Assert timestamp has no offset. When flag is false, timestamp_ltz and timestamp_ntz + // will + // show + // offset. 
+ assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); + assertEquals(expectedTimestamp2, rs.getTimestamp("COLB").toString()); + assertEquals(expectedTimestamp2, rs.getTimestamp("COLC").toString()); + // Getting timestamp from Time column will default to epoch start date + assertEquals("1970-01-01 01:01:33.0", rs.getTimestamp("COLD").toString()); + // Getting timestamp from Date column will default to wallclock time of 0 + assertEquals("1943-12-31 00:00:00.0", rs.getTimestamp("COLE").toString()); + + // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will + // show + // offset. + assertEquals(expectedTime2, rs.getTime("COLA").toString()); + assertEquals(expectedTime2, rs.getTime("COLB").toString()); + assertEquals(expectedTime2, rs.getTime("COLC").toString()); + assertEquals(expectedTime2, rs.getTime("COLD").toString()); + // Cannot getTime() for Date column (colE) + } + // Test special case for timestamp_tz (offset added) + // create table with of type timestamp_tz + statement.execute("create or replace table tabletz (colA timestamp_tz)"); + try (PreparedStatement prepSt = + connection.prepareStatement("insert into tabletz values(?), (?)")) { + // insert 2 timestamp values, but add an offset of a few hours on the end of each value + prepSt.setString( + 1, expectedTimestamp + " +0500"); // inserted value is 2019-01-01 17:17:17.6 +0500 + prepSt.setString( + 2, expectedTimestamp2 + " -0200"); // inserted value is 1943-12-31 01:01:33.0 -0200 + prepSt.execute(); + + try (ResultSet rs = statement.executeQuery("select * from tabletz")) { + assertTrue(rs.next()); + // Assert timestamp is displayed with no offset when flag is true. 
Timestamp should look + // identical to inserted value + assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); + // Time value looks identical to the time portion of inserted timestamp_tz value + assertEquals(expectedTime, rs.getTime("COLA").toString()); + // Date value looks identical to the date portion of inserted timestamp_tz value + assertEquals(expectedDate, rs.getDate("COLA").toString()); + assertTrue(rs.next()); + // Test that the same results occur for 2nd timestamp_tz value + assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); + assertEquals(expectedTime2, rs.getTime("COLA").toString()); + assertEquals(expectedDate2, rs.getDate("COLA").toString()); + } + } + } finally { + // clean up + statement.execute("alter session unset JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC"); + statement.execute("alter session unset CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ"); + statement.execute("alter session unset JDBC_FORMAT_DATE_WITH_TIMEZONE"); + statement.execute("alter session unset JDBC_USE_SESSION_TIMEZONE"); + } } - - String expectedTimestamp = "2019-01-01 17:17:17.6"; - String expectedTime = "17:17:17"; - String expectedDate = "2019-01-01"; - String expectedTimestamp2 = "1943-12-31 01:01:33.0"; - String expectedTime2 = "01:01:33"; - String expectedDate2 = "1943-12-31"; - PreparedStatement prepSt = - connection.prepareStatement("insert into datetimetypes values(?, ?, ?, ?, ?)"); - prepSt.setString(1, expectedTimestamp); - prepSt.setString(2, expectedTimestamp); - prepSt.setString(3, expectedTimestamp); - prepSt.setString(4, expectedTime); - prepSt.setString(5, expectedDate); - prepSt.execute(); - prepSt.setString(1, expectedTimestamp2); - prepSt.setString(2, expectedTimestamp2); - prepSt.setString(3, expectedTimestamp2); - prepSt.setString(4, expectedTime2); - prepSt.setString(5, expectedDate2); - prepSt.execute(); - - // Results differ depending on whether flag JDBC_USE_SESSION_TIMEZONE=true. 
If true, the - // returned ResultSet value should match the value inserted into the table with no offset (with - // exceptions for getTimestamp() on date and time objects). - ResultSet rs = statement.executeQuery("select * from datetimetypes"); - rs.next(); - // Assert date has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. - assertEquals(expectedDate, rs.getDate("COLA").toString()); - // always true since timezone_ntz doesn't add time offset - assertEquals(expectedDate, rs.getDate("COLB").toString()); - assertEquals(expectedDate, rs.getDate("COLC").toString()); - // cannot getDate() for Time column (ColD) - // always true since Date objects don't have timezone offsets - assertEquals(expectedDate, rs.getDate("COLE").toString()); - - // Assert timestamp has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. - assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); - // always true since timezone_ntz doesn't add time offset - assertEquals(expectedTimestamp, rs.getTimestamp("COLB").toString()); - assertEquals(expectedTimestamp, rs.getTimestamp("COLC").toString()); - // Getting timestamp from Time column will default to epoch start date so date portion is - // different than input date of the timestamp - assertEquals("1970-01-01 17:17:17.0", rs.getTimestamp("COLD").toString()); - // Getting timestamp from Date column will default to wallclock time of 0 so time portion is - // different than input time of the timestamp - assertEquals("2019-01-01 00:00:00.0", rs.getTimestamp("COLE").toString()); - - // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. 
- assertEquals(expectedTime, rs.getTime("COLA").toString()); - assertEquals(expectedTime, rs.getTime("COLB").toString()); - assertEquals(expectedTime, rs.getTime("COLC").toString()); - assertEquals(expectedTime, rs.getTime("COLD").toString()); - // Cannot getTime() for Date column (colE) - - rs.next(); - // Assert date has no offset. Offset will never be seen regardless of flag because 01:01:33 is - // too early for any timezone to round it to the next day. - assertEquals(expectedDate2, rs.getDate("COLA").toString()); - assertEquals(expectedDate2, rs.getDate("COLB").toString()); - assertEquals(expectedDate2, rs.getDate("COLC").toString()); - // cannot getDate() for Time column (ColD) - assertEquals(expectedDate2, rs.getDate("COLE").toString()); - - // Assert timestamp has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. - assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); - assertEquals(expectedTimestamp2, rs.getTimestamp("COLB").toString()); - assertEquals(expectedTimestamp2, rs.getTimestamp("COLC").toString()); - // Getting timestamp from Time column will default to epoch start date - assertEquals("1970-01-01 01:01:33.0", rs.getTimestamp("COLD").toString()); - // Getting timestamp from Date column will default to wallclock time of 0 - assertEquals("1943-12-31 00:00:00.0", rs.getTimestamp("COLE").toString()); - - // Assert time has no offset. When flag is false, timestamp_ltz and timestamp_ntz will show - // offset. 
- assertEquals(expectedTime2, rs.getTime("COLA").toString()); - assertEquals(expectedTime2, rs.getTime("COLB").toString()); - assertEquals(expectedTime2, rs.getTime("COLC").toString()); - assertEquals(expectedTime2, rs.getTime("COLD").toString()); - // Cannot getTime() for Date column (colE) - - // Test special case for timestamp_tz (offset added) - // create table with of type timestamp_tz - statement.execute("create or replace table tabletz (colA timestamp_tz)"); - prepSt = connection.prepareStatement("insert into tabletz values(?), (?)"); - // insert 2 timestamp values, but add an offset of a few hours on the end of each value - prepSt.setString( - 1, expectedTimestamp + " +0500"); // inserted value is 2019-01-01 17:17:17.6 +0500 - prepSt.setString( - 2, expectedTimestamp2 + " -0200"); // inserted value is 1943-12-31 01:01:33.0 -0200 - prepSt.execute(); - - rs = statement.executeQuery("select * from tabletz"); - rs.next(); - // Assert timestamp is displayed with no offset when flag is true. 
Timestamp should look - // identical to inserted value - assertEquals(expectedTimestamp, rs.getTimestamp("COLA").toString()); - // Time value looks identical to the time portion of inserted timestamp_tz value - assertEquals(expectedTime, rs.getTime("COLA").toString()); - // Date value looks identical to the date portion of inserted timestamp_tz value - assertEquals(expectedDate, rs.getDate("COLA").toString()); - rs.next(); - // Test that the same results occur for 2nd timestamp_tz value - assertEquals(expectedTimestamp2, rs.getTimestamp("COLA").toString()); - assertEquals(expectedTime2, rs.getTime("COLA").toString()); - assertEquals(expectedDate2, rs.getDate("COLA").toString()); - - // clean up - statement.execute("alter session unset JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC"); - statement.execute("alter session unset CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ"); - statement.execute("alter session unset JDBC_FORMAT_DATE_WITH_TIMEZONE"); - statement.execute("alter session unset JDBC_USE_SESSION_TIMEZONE"); - - rs.close(); - statement.close(); - connection.close(); } } From 8dcd217a044eeec30bd9ac9e3bf3486aa920944a Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Fri, 26 Apr 2024 19:16:05 +0200 Subject: [PATCH 03/54] SNOW-1157904 write and bindings structured types (#1727) * Structures type write by bindings --- .../client/core/FieldSchemaCreator.java | 96 +++++ .../snowflake/client/core/JsonSqlInput.java | 16 +- .../snowflake/client/core/JsonSqlOutput.java | 397 ++++++++++++++++++ .../client/core/ObjectMapperFactory.java | 2 + .../client/core/ParameterBindingDTO.java | 36 +- .../net/snowflake/client/core/ResultUtil.java | 2 +- .../client/core/SFJsonResultSet.java | 1 - .../net/snowflake/client/core/SfSqlArray.java | 21 + ...imestampUtil.java => SfTimestampUtil.java} | 16 +- .../client/core/json/Converters.java | 4 +- .../client/jdbc/BindingParameterMetadata.java | 168 ++++++++ .../snowflake/client/jdbc/FieldMetadata.java | 7 + .../client/jdbc/SnowflakeColumn.java | 74 
++++ .../client/jdbc/SnowflakeConnectionV1.java | 5 +- .../jdbc/SnowflakePreparedStatement.java | 10 + .../jdbc/SnowflakePreparedStatementV1.java | 85 +++- .../snowflake/client/jdbc/SnowflakeType.java | 3 + .../snowflake/client/jdbc/SnowflakeUtil.java | 11 +- .../client/util/ThrowingBiCallable.java | 9 + .../client/util/ThrowingTriCallable.java | 9 + .../core/SqlInputTimestampUtilTest.java | 2 +- ...ngAndInsertingStructuredTypesLatestIT.java | 363 ++++++++++++++++ .../jdbc/ConnectionFeatureNotSupportedIT.java | 2 - ...reparedStatementFeatureNotSupportedIT.java | 2 - .../client/jdbc/ResultSetFormatType.java | 12 + .../ResultSetStructuredTypesLatestIT.java | 97 +++-- .../sqldata/AllTypesClass.java | 64 ++- .../structuredtypes/sqldata/SimpleClass.java | 25 +- .../structuredtypes/sqldata/StringClass.java | 31 ++ ...ConnectionFeatureNotSupportedLatestIT.java | 2 - 30 files changed, 1492 insertions(+), 80 deletions(-) create mode 100644 src/main/java/net/snowflake/client/core/FieldSchemaCreator.java create mode 100644 src/main/java/net/snowflake/client/core/JsonSqlOutput.java rename src/main/java/net/snowflake/client/core/{SqlInputTimestampUtil.java => SfTimestampUtil.java} (76%) create mode 100644 src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java create mode 100644 src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java create mode 100644 src/main/java/net/snowflake/client/util/ThrowingBiCallable.java create mode 100644 src/main/java/net/snowflake/client/util/ThrowingTriCallable.java create mode 100644 src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java create mode 100644 src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java create mode 100644 src/test/java/net/snowflake/client/jdbc/structuredtypes/sqldata/StringClass.java diff --git a/src/main/java/net/snowflake/client/core/FieldSchemaCreator.java b/src/main/java/net/snowflake/client/core/FieldSchemaCreator.java new file mode 100644 index 
000000000..b61dbd1f8 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/FieldSchemaCreator.java @@ -0,0 +1,96 @@ +package net.snowflake.client.core; + +import java.sql.SQLException; +import java.sql.Types; +import java.util.Optional; +import net.snowflake.client.jdbc.BindingParameterMetadata; +import net.snowflake.client.jdbc.SnowflakeColumn; +import net.snowflake.client.jdbc.SnowflakeType; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class FieldSchemaCreator { + static final SFLogger logger = SFLoggerFactory.getLogger(FieldSchemaCreator.class); + public static final int MAX_TEXT_COLUMN_SIZE = 134217728; + public static final int MAX_BINARY_COLUMN_SIZE = 67108864; + + public static BindingParameterMetadata buildSchemaForText( + String fieldName, Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse("text")) + .withLength(maybeColumn.map(cl -> cl.length()).orElse(MAX_TEXT_COLUMN_SIZE)) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .build(); + } + + public static BindingParameterMetadata buildSchemaForBytesType( + String fieldName, Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse("binary")) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .withLength(maybeColumn.map(cl -> cl.precision()).orElse(MAX_TEXT_COLUMN_SIZE)) + .withByteLength(maybeColumn.map(cl -> cl.byteLength()).orElse(MAX_BINARY_COLUMN_SIZE)) + .build(); + } + + public static BindingParameterMetadata buildSchemaTypeAndNameOnly( + String fieldName, String type, Optional maybeColumn) { + return 
BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse(type)) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .build(); + } + + public static BindingParameterMetadata buildSchemaWithScaleAndPrecision( + String fieldName, + String type, + int scale, + int precision, + Optional maybeColumn) { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType(maybeColumn.map(cl -> cl.type()).filter(str -> !str.isEmpty()).orElse(type)) + .withScale(maybeColumn.map(cl -> cl.scale()).filter(i -> i > 0).orElse(scale)) + .withName(maybeColumn.map(cl -> cl.name()).filter(str -> !str.isEmpty()).orElse(fieldName)) + .withPrecision(maybeColumn.map(cl -> cl.precision()).filter(i -> i > 0).orElse(precision)) + .build(); + } + + public static BindingParameterMetadata buildBindingSchemaForType(int baseType) + throws SQLException { + return buildBindingSchemaForType(baseType, true); + } + + public static BindingParameterMetadata buildBindingSchemaForType(int baseType, boolean addName) + throws SQLException { + String name = addName ? 
SnowflakeType.javaTypeToSFType(baseType, null).name() : null; + switch (baseType) { + case Types.VARCHAR: + case Types.CHAR: + return FieldSchemaCreator.buildSchemaForText(name, Optional.empty()); + case Types.FLOAT: + case Types.DOUBLE: + case Types.DECIMAL: + return FieldSchemaCreator.buildSchemaWithScaleAndPrecision( + name, "real", 9, 38, Optional.empty()); + case Types.NUMERIC: + case Types.INTEGER: + case Types.SMALLINT: + case Types.TINYINT: + case Types.BIGINT: + return FieldSchemaCreator.buildSchemaWithScaleAndPrecision( + null, "fixed", 0, 38, Optional.empty()); + case Types.BOOLEAN: + return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "boolean", Optional.empty()); + case Types.DATE: + return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "date", Optional.empty()); + case Types.TIMESTAMP: + case Types.TIME: + return FieldSchemaCreator.buildSchemaWithScaleAndPrecision( + name, "timestamp", 9, 0, Optional.empty()); + default: + logger.error("Could not create schema for type : " + baseType); + throw new SQLException("Could not create schema for type : " + baseType); + } + } +} diff --git a/src/main/java/net/snowflake/client/core/JsonSqlInput.java b/src/main/java/net/snowflake/client/core/JsonSqlInput.java index 6b0e6e34e..d0aeb1a93 100644 --- a/src/main/java/net/snowflake/client/core/JsonSqlInput.java +++ b/src/main/java/net/snowflake/client/core/JsonSqlInput.java @@ -234,8 +234,12 @@ public List readList(Class type) throws SQLException { List result = new ArrayList(); if (ArrayNode.class.isAssignableFrom(value.getClass())) { for (JsonNode node : (ArrayNode) value) { - - result.add(convertObject(type, TimeZone.getDefault(), getValue(node), fieldMetadata)); + result.add( + convertObject( + type, + TimeZone.getDefault(), + getValue(node), + fieldMetadata.getFields().get(0))); } return result; } else { @@ -259,7 +263,11 @@ public T[] readArray(Class type) throws SQLException { int counter = 0; for (JsonNode node : valueNodes) { array[counter++] = - 
convertObject(type, TimeZone.getDefault(), getValue(node), fieldMetadata); + convertObject( + type, + TimeZone.getDefault(), + getValue(node), + fieldMetadata.getFields().get(0)); } return array; } else { @@ -306,7 +314,7 @@ private Timestamp convertTimestamp(TimeZone tz, Object value, FieldMetadata fiel int columnSubType = fieldMetadata.getType(); int scale = fieldMetadata.getScale(); Timestamp result = - SqlInputTimestampUtil.getTimestampFromType( + SfTimestampUtil.getTimestampFromType( columnSubType, (String) value, session, sessionTimeZone, tz); if (result != null) { return result; diff --git a/src/main/java/net/snowflake/client/core/JsonSqlOutput.java b/src/main/java/net/snowflake/client/core/JsonSqlOutput.java new file mode 100644 index 000000000..f3fb4c06c --- /dev/null +++ b/src/main/java/net/snowflake/client/core/JsonSqlOutput.java @@ -0,0 +1,397 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.core; + +import static net.snowflake.client.core.FieldSchemaCreator.buildSchemaTypeAndNameOnly; +import static net.snowflake.client.core.FieldSchemaCreator.buildSchemaWithScaleAndPrecision; + +import java.io.InputStream; +import java.io.Reader; +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLData; +import java.sql.SQLException; +import java.sql.SQLOutput; +import java.sql.SQLXML; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.TimeZone; +import java.util.stream.Collectors; +import net.minidev.json.JSONObject; +import net.snowflake.client.jdbc.BindingParameterMetadata; 
+import net.snowflake.client.jdbc.SnowflakeColumn; +import net.snowflake.client.jdbc.SnowflakeLoggedFeatureNotSupportedException; +import net.snowflake.client.jdbc.SnowflakeType; +import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.util.ThrowingTriCallable; +import net.snowflake.common.core.SFBinary; +import net.snowflake.common.core.SFTime; +import net.snowflake.common.core.SFTimestamp; +import net.snowflake.common.core.SnowflakeDateTimeFormat; + +@SnowflakeJdbcInternalApi +public class JsonSqlOutput implements SQLOutput { + static final SFLogger logger = SFLoggerFactory.getLogger(JsonSqlOutput.class); + private JSONObject json; + private SQLData original; + private SFBaseSession session; + private Iterator fields; + private BindingParameterMetadata schema; + private TimeZone sessionTimezone; + + public JsonSqlOutput(SQLData original, SFBaseSession sfBaseSession) { + this.original = original; + this.session = sfBaseSession; + this.sessionTimezone = getSessionTimezone(sfBaseSession); + fields = getClassFields(original).iterator(); + schema = new BindingParameterMetadata("object"); + schema.setFields(new ArrayList<>()); + json = new JSONObject(); + } + + private TimeZone getSessionTimezone(SFBaseSession sfBaseSession) { + String timeZoneName = + (String) ResultUtil.effectiveParamValue(sfBaseSession.getCommonParameters(), "TIMEZONE"); + return TimeZone.getTimeZone(timeZoneName); + } + + private static List getClassFields(SQLData original) { + return Arrays.stream(original.getClass().getDeclaredFields()) + .filter( + field -> + !Modifier.isStatic(field.getModifiers()) + && !Modifier.isTransient(field.getModifiers())) + .collect(Collectors.toList()); + } + + @Override + public void writeString(String value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + 
schema.getFields().add(FieldSchemaCreator.buildSchemaForText(fieldName, maybeColumn)); + })); + } + + @Override + public void writeBoolean(boolean value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "boolean", maybeColumn)); + })); + } + + @Override + public void writeByte(byte value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeShort(short value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeInt(int input) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, input); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeLong(long value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "fixed", 0, 38, maybeColumn)); + })); + } + + @Override + public void writeFloat(float value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "real", maybeColumn)); + })); + } + + @Override + public void writeDouble(double value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "real", maybeColumn)); + })); + } + + @Override + 
public void writeBigDecimal(BigDecimal value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, value); + schema + .getFields() + .add( + buildSchemaWithScaleAndPrecision( + fieldName, "fixed", value.scale(), 38, maybeColumn)); + })); + } + + @Override + public void writeBytes(byte[] value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put(fieldName, new SFBinary(value).toHex()); + schema + .getFields() + .add(FieldSchemaCreator.buildSchemaForBytesType(fieldName, maybeColumn)); + })); + } + + @Override + public void writeDate(Date value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + json.put( + fieldName, + ResultUtil.getDateAsString(value, getDateTimeFormat("DATE_OUTPUT_FORMAT"))); + schema.getFields().add(buildSchemaTypeAndNameOnly(fieldName, "date", maybeColumn)); + })); + } + + @Override + public void writeTime(Time x) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + long nanosSinceMidnight = SfTimestampUtil.getTimeInNanoseconds(x); + String result = + ResultUtil.getSFTimeAsString( + SFTime.fromNanoseconds(nanosSinceMidnight), + 9, + getDateTimeFormat("TIME_OUTPUT_FORMAT")); + + json.put(fieldName, result); + schema + .getFields() + .add(buildSchemaWithScaleAndPrecision(fieldName, "time", 9, 0, maybeColumn)); + })); + } + + @Override + public void writeTimestamp(Timestamp value) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + String timestampSessionType = + (String) + ResultUtil.effectiveParamValue( + session.getCommonParameters(), "CLIENT_TIMESTAMP_TYPE_MAPPING"); + SnowflakeType snowflakeType = + SnowflakeType.fromString( + maybeColumn + .map(cl -> cl.type()) + .filter(str -> !str.isEmpty()) + .orElse(timestampSessionType)); + int columnType = snowflakeTypeToJavaType(snowflakeType); + TimeZone timeZone = timeZoneDependOnType(snowflakeType, session, null); + String 
timestampAsString = + SnowflakeUtil.mapSFExceptionToSQLException( + () -> + ResultUtil.getSFTimestampAsString( + new SFTimestamp(value, timeZone), + columnType, + 9, + getDateTimeFormat("TIMESTAMP_NTZ_OUTPUT_FORMAT"), + getDateTimeFormat("TIMESTAMP_LTZ_OUTPUT_FORMAT"), + getDateTimeFormat("TIMESTAMP_TZ_OUTPUT_FORMAT"), + session)); + + json.put(fieldName, timestampAsString); + schema + .getFields() + .add( + buildSchemaWithScaleAndPrecision( + fieldName, snowflakeType.name(), 9, 0, maybeColumn)); + })); + } + + @Override + public void writeCharacterStream(Reader x) throws SQLException { + logger.debug(" Unsupported method writeCharacterStream(Reader x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeAsciiStream(InputStream x) throws SQLException { + logger.debug("Unsupported method writeAsciiStream(InputStream x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeBinaryStream(InputStream x) throws SQLException { + logger.debug("Unsupported method writeBinaryStream(InputStream x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeObject(SQLData sqlData) throws SQLException { + withNextValue( + ((json, fieldName, maybeColumn) -> { + JsonSqlOutput jsonSqlOutput = new JsonSqlOutput(sqlData, session); + sqlData.writeSQL(jsonSqlOutput); + json.put(fieldName, jsonSqlOutput.getJsonObject()); + BindingParameterMetadata structSchema = jsonSqlOutput.getSchema(); + structSchema.setName(fieldName); + schema.getFields().add(structSchema); + })); + } + + @Override + public void writeRef(Ref x) throws SQLException { + logger.debug("Unsupported method writeRef(Ref x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeBlob(Blob x) throws SQLException { + logger.debug("Unsupported method writeBlob(Blob x)", false); + throw new 
SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeClob(Clob x) throws SQLException { + logger.debug("Unsupported method writeClob(Clob x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeStruct(Struct x) throws SQLException { + logger.debug("Unsupported method writeStruct(Struct x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeArray(Array x) throws SQLException { + logger.debug("Unsupported method writeArray(Array x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeURL(URL x) throws SQLException { + logger.debug("Unsupported method writeURL(URL x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeNString(String x) throws SQLException { + logger.debug("Unsupported method writeNString(String x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeNClob(NClob x) throws SQLException { + logger.debug("Unsupported method writeNClob(NClob x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeRowId(RowId x) throws SQLException { + logger.debug("Unsupported method writeRowId(RowId x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + @Override + public void writeSQLXML(SQLXML x) throws SQLException { + logger.debug("Unsupported method writeSQLXML(SQLXML x)", false); + throw new SnowflakeLoggedFeatureNotSupportedException(session); + } + + public String getJsonString() { + return json.toJSONString(); + } + + public JSONObject getJsonObject() { + return json; + } + + private void withNextValue( + ThrowingTriCallable, SQLException> action) + throws SQLException { + Field field = fields.next(); + String fieldName = field.getName(); 
+ Optional maybeColumn = + Optional.ofNullable(field.getAnnotation(SnowflakeColumn.class)); + action.apply(json, fieldName, maybeColumn); + } + + private SnowflakeDateTimeFormat getDateTimeFormat(String format) { + String rawFormat = (String) session.getCommonParameters().get(format); + if (rawFormat == null || rawFormat.isEmpty()) { + rawFormat = (String) session.getCommonParameters().get("TIMESTAMP_OUTPUT_FORMAT"); + } + SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat); + return formatter; + } + + public BindingParameterMetadata getSchema() { + return schema; + } + + private TimeZone timeZoneDependOnType( + SnowflakeType snowflakeType, SFBaseSession session, TimeZone tz) { + if (snowflakeType == SnowflakeType.TIMESTAMP_NTZ) { + return null; + } else if (snowflakeType == SnowflakeType.TIMESTAMP_LTZ) { + return getSessionTimezone(session); + } else if (snowflakeType == SnowflakeType.TIMESTAMP_TZ) { + return Optional.ofNullable(tz).orElse(sessionTimezone); + } + return TimeZone.getDefault(); + } + + private int snowflakeTypeToJavaType(SnowflakeType snowflakeType) { + if (snowflakeType == SnowflakeType.TIMESTAMP_NTZ) { + return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ; + } else if (snowflakeType == SnowflakeType.TIMESTAMP_LTZ) { + return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ; + } + return SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ; + } +} diff --git a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java index c751d4d20..e7b1056ba 100644 --- a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java +++ b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java @@ -1,6 +1,7 @@ package net.snowflake.client.core; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; @@ -21,6 
+22,7 @@ public static ObjectMapper getObjectMapper() { ObjectMapper mapper = new ObjectMapper(); mapper.configure(MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS, false); mapper.configure(MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS, false); + mapper.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); // override the maxStringLength value in ObjectMapper int maxJsonStringLength = diff --git a/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java b/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java index 36b5727a9..98c6690dc 100644 --- a/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java +++ b/src/main/java/net/snowflake/client/core/ParameterBindingDTO.java @@ -1,20 +1,36 @@ /* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. */ package net.snowflake.client.core; +import net.snowflake.client.jdbc.BindingParameterMetadata; + /** This class represents a binding object passed to server side Created by hyu on 6/15/17. 
*/ public class ParameterBindingDTO { /** Type of binding */ private String type; + private String fmt; + private BindingParameterMetadata schema; + /** Value is a String object if it's a single bind, otherwise is an array of String */ private Object value; - public ParameterBindingDTO(String type, Object value) { + public ParameterBindingDTO( + String fmt, String type, Object value, BindingParameterMetadata schema) { + this.fmt = fmt; this.type = type; this.value = value; + this.schema = schema; + } + + public ParameterBindingDTO(String fmt, String type, Object value) { + this(fmt, type, value, null); + } + + public ParameterBindingDTO(String type, Object value) { + this(null, type, value, null); } public Object getValue() { @@ -32,4 +48,20 @@ public void setType(String type) { public void setValue(Object value) { this.value = value; } + + public String getFmt() { + return fmt; + } + + public void setFmt(String fmt) { + this.fmt = fmt; + } + + public BindingParameterMetadata getSchema() { + return schema; + } + + public void setSchema(BindingParameterMetadata schema) { + this.schema = schema; + } } diff --git a/src/main/java/net/snowflake/client/core/ResultUtil.java b/src/main/java/net/snowflake/client/core/ResultUtil.java index 8581df1fc..b3e8bf3bb 100644 --- a/src/main/java/net/snowflake/client/core/ResultUtil.java +++ b/src/main/java/net/snowflake/client/core/ResultUtil.java @@ -278,7 +278,7 @@ public static String getSFTimestampAsString( throws SFException { // Derive the timestamp formatter to use SnowflakeDateTimeFormat formatter; - if (columnType == Types.TIMESTAMP) { + if (columnType == Types.TIMESTAMP || columnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ) { formatter = timestampNTZFormatter; } else if (columnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { formatter = timestampLTZFormatter; diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 
9ad7cb983..a959215cd 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -95,7 +95,6 @@ public Object getObject(int columnIndex) throws SFException { } else { throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } - default: throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } diff --git a/src/main/java/net/snowflake/client/core/SfSqlArray.java b/src/main/java/net/snowflake/client/core/SfSqlArray.java index 83270796a..70682b4f4 100644 --- a/src/main/java/net/snowflake/client/core/SfSqlArray.java +++ b/src/main/java/net/snowflake/client/core/SfSqlArray.java @@ -1,11 +1,17 @@ package net.snowflake.client.core; +import static net.snowflake.client.core.FieldSchemaCreator.buildBindingSchemaForType; + +import com.fasterxml.jackson.core.JsonProcessingException; import java.sql.Array; import java.sql.JDBCType; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.util.Arrays; import java.util.Map; +import net.snowflake.client.jdbc.BindingParameterMetadata; +import net.snowflake.client.jdbc.SnowflakeUtil; @SnowflakeJdbcInternalApi public class SfSqlArray implements Array { @@ -74,4 +80,19 @@ public ResultSet getResultSet(long index, int count, Map> map) @Override public void free() throws SQLException {} + + public String getJsonString() throws SQLException { + try { + return SnowflakeUtil.mapJson(elements); + } catch (JsonProcessingException e) { + throw new SQLException("There is exception during array to json string.", e); + } + } + + public BindingParameterMetadata getSchema() throws SQLException { + return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType("array") + .withFields(Arrays.asList(buildBindingSchemaForType(getBaseType(), false))) + .build(); + } } diff --git 
a/src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java b/src/main/java/net/snowflake/client/core/SfTimestampUtil.java similarity index 76% rename from src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java rename to src/main/java/net/snowflake/client/core/SfTimestampUtil.java index b95c518c6..ed58f4481 100644 --- a/src/main/java/net/snowflake/client/core/SqlInputTimestampUtil.java +++ b/src/main/java/net/snowflake/client/core/SfTimestampUtil.java @@ -4,6 +4,7 @@ package net.snowflake.client.core; +import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.TimeZone; @@ -11,7 +12,9 @@ import net.snowflake.common.core.SnowflakeDateTimeFormat; @SnowflakeJdbcInternalApi -public class SqlInputTimestampUtil { +public class SfTimestampUtil { + + static final long MS_IN_DAY = 86400 * 1000; public static Timestamp getTimestampFromType( int columnSubType, @@ -25,7 +28,7 @@ public static Timestamp getTimestampFromType( } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ || columnSubType == Types.TIMESTAMP) { return getTimestampFromFormat( - "TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session, sessionTimeZone, tz); + "TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session, sessionTimeZone, TimeZone.getDefault()); } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) { return getTimestampFromFormat( "TIMESTAMP_TZ_OUTPUT_FORMAT", value, session, sessionTimeZone, tz); @@ -46,4 +49,13 @@ private static Timestamp getTimestampFromFormat( SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat); return formatter.parse(value, tz, 0, false).getTimestamp(); } + + public static long getTimeInNanoseconds(Time x) { + long msSinceEpoch = x.getTime(); + // Use % + % instead of just % to get the nonnegative remainder. + // TODO(mkember): Change to use Math.floorMod when Client is on Java 8. 
+ long msSinceMidnight = (msSinceEpoch % MS_IN_DAY + MS_IN_DAY) % MS_IN_DAY; + long nanosSinceMidnight = msSinceMidnight * 1000 * 1000; + return nanosSinceMidnight; + } } diff --git a/src/main/java/net/snowflake/client/core/json/Converters.java b/src/main/java/net/snowflake/client/core/json/Converters.java index 584e0d12e..afe663f90 100644 --- a/src/main/java/net/snowflake/client/core/json/Converters.java +++ b/src/main/java/net/snowflake/client/core/json/Converters.java @@ -13,8 +13,8 @@ import java.util.TimeZone; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SfTimestampUtil; import net.snowflake.client.core.SnowflakeJdbcInternalApi; -import net.snowflake.client.core.SqlInputTimestampUtil; import net.snowflake.client.core.arrow.StructuredTypeDateTimeConverter; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; @@ -224,7 +224,7 @@ public Converter timestampFromStringConverter( TimeZone sessionTimezone) { return value -> { Timestamp result = - SqlInputTimestampUtil.getTimestampFromType( + SfTimestampUtil.getTimestampFromType( columnSubType, (String) value, session, sessionTimezone, tz); if (result != null) { return result; diff --git a/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java b/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java new file mode 100644 index 000000000..db1c85e7b --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/BindingParameterMetadata.java @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved.
+ */ +package net.snowflake.client.jdbc; + +import com.fasterxml.jackson.annotation.JsonInclude; +import java.util.List; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public class BindingParameterMetadata { + private String type; + private String name; + private Integer length; + private Integer byteLength; + private Integer precision; + private Integer scale; + + private boolean nullable = true; + private List fields; + + public BindingParameterMetadata(String type) { + this.type = type; + } + + public BindingParameterMetadata(String type, String name) { + this.type = type; + this.name = name; + } + + public BindingParameterMetadata( + String type, + String name, + Integer length, + Integer byteLength, + Integer precision, + Integer scale, + Boolean nullable) { + this.type = type; + this.name = name; + this.length = length; + this.byteLength = byteLength; + this.precision = precision; + this.scale = scale; + this.nullable = nullable; + } + + public BindingParameterMetadata() {} + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getLength() { + return length; + } + + public void setLength(Integer length) { + this.length = length; + } + + public Integer getByteLength() { + return byteLength; + } + + public void setByteLength(Integer byteLength) { + this.byteLength = byteLength; + } + + public Integer getPrecision() { + return precision; + } + + public void setPrecision(Integer precision) { + this.precision = precision; + } + + public Integer getScale() { + return scale; + } + + public void setScale(Integer scale) { + this.scale = scale; + } + + public Boolean isNullable() { + return nullable; + } + + public void setNullable(Boolean nullable) { + this.nullable = nullable; + } + + public List getFields() { + return fields; + } + + public void setFields(List fields) { + 
this.fields = fields; + } + + public static class BindingParameterMetadataBuilder { + private BindingParameterMetadata bindingParameterMetadata; + + private BindingParameterMetadataBuilder() { + bindingParameterMetadata = new BindingParameterMetadata(); + } + + public BindingParameterMetadataBuilder withType(String type) { + bindingParameterMetadata.type = type; + return this; + } + + public BindingParameterMetadataBuilder withName(String name) { + bindingParameterMetadata.name = name; + return this; + } + + public BindingParameterMetadataBuilder withLength(Integer length) { + bindingParameterMetadata.length = length; + return this; + } + + public BindingParameterMetadataBuilder withByteLength(Integer byteLength) { + bindingParameterMetadata.byteLength = byteLength; + return this; + } + + public BindingParameterMetadataBuilder withPrecision(Integer precision) { + bindingParameterMetadata.precision = precision; + return this; + } + + public BindingParameterMetadataBuilder withScale(Integer scale) { + bindingParameterMetadata.scale = scale; + return this; + } + + public BindingParameterMetadataBuilder withNullable(Boolean nullable) { + bindingParameterMetadata.nullable = nullable; + return this; + } + + public BindingParameterMetadataBuilder withFields(List fields) { + bindingParameterMetadata.fields = fields; + return this; + } + + public static BindingParameterMetadataBuilder bindingParameterMetadata() { + return new BindingParameterMetadataBuilder(); + } + + public BindingParameterMetadata build() { + return bindingParameterMetadata; + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java index cf019d62e..d38011c0e 100644 --- a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java @@ -3,7 +3,9 @@ */ package net.snowflake.client.jdbc; +import java.util.ArrayList; import java.util.List; +import 
net.snowflake.client.core.SnowflakeJdbcInternalApi; public class FieldMetadata { @@ -43,6 +45,11 @@ public FieldMetadata( this.fields = fields; } + @SnowflakeJdbcInternalApi + public FieldMetadata() { + this.fields = new ArrayList<>(); + } + public String getName() { return name; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java new file mode 100644 index 000000000..10f06dafa --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumn.java @@ -0,0 +1,74 @@ +package net.snowflake.client.jdbc; + +import static java.lang.annotation.ElementType.FIELD; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target({FIELD}) +@Retention(RetentionPolicy.RUNTIME) +public @interface SnowflakeColumn { + + /** + * (Optional) The name for a column in database, + * + *

The default value is empty string. Provided name can override SqlData field name + */ + String name() default ""; + + /** + * (Optional) The snowflake type for a column + * + *

The default value is empty string Provided type can override default type + */ + String type() default ""; + + /** + * (Optional) The snowflake nullable flag for a column + * + *

The default value is true Provided value can override default nullable value + */ + boolean nullable() default true; + + /** + * (Optional) The length for a column of SQL type {@code varchar} or {@code binary}, or of similar + * database-native type. + * + *

Applies only to columns of exact varchar and binary type. + * + *

The default value {@code -1} indicates that a provider-determined length should be inferred. + */ + int length() default -1; + /** + * (Optional) The length for a column of SQL type {@code binary}, or of similar database-native + * type. + * + *

Applies only to columns of exact varchar and binary type. + * + *

The default value {@code -1} indicates that a provider-determined byteLength should be + * inferred. + */ + int byteLength() default -1; + + /** + * (Optional) The precision for a column of SQL type {@code decimal} or {@code numeric}, or of + * similar database-native type. + * + *

Applies only to columns of exact numeric type. + * + *

The default value {@code -1} indicates that a provider-determined precision should be + * inferred. + */ + int precision() default -1; + + /** + * (Optional) The scale for a column of SQL type {@code decimal} or {@code numeric}, or of similar + * database-native type. + * + *

Applies only to columns of exact numeric type. + * + *

The default value {@code -1} indicates that a provider-determined scale should be inferred. + */ + int scale() default -1; +} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java index 0da7d44c8..9cae2de4b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java @@ -18,6 +18,7 @@ import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverPropertyInfo; +import java.sql.JDBCType; import java.sql.NClob; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -43,6 +44,7 @@ import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; +import net.snowflake.client.core.SfSqlArray; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.log.SFLoggerUtil; @@ -683,8 +685,7 @@ public String getClientInfo(String name) throws SQLException { @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { logger.debug("Array createArrayOf(String typeName, Object[] " + "elements)", false); - - throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); + return new SfSqlArray(JDBCType.valueOf(typeName).getVendorTypeNumber(), elements); } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java index e4535ad32..ee3dc3ec8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatement.java @@ -3,6 +3,7 @@ import java.math.BigInteger; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.Map; public interface SnowflakePreparedStatement { /** @@ -26,4 +27,13 @@ 
public interface SnowflakePreparedStatement { * @throws SQLException */ void setBigInteger(int parameterIndex, BigInteger x) throws SQLException; + + /** + * Sets the designated parameter to the given Map instance. + * + * @param parameterIndex + * @param map + * @throws SQLException + */ + void setMap(int parameterIndex, Map map, int type) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java index 2c01f0d04..9754447de 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java @@ -4,6 +4,7 @@ package net.snowflake.client.jdbc; +import com.fasterxml.jackson.core.JsonProcessingException; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; @@ -20,6 +21,7 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.RowId; +import java.sql.SQLData; import java.sql.SQLException; import java.sql.SQLXML; import java.sql.Time; @@ -36,11 +38,15 @@ import java.util.Set; import java.util.TimeZone; import net.snowflake.client.core.ExecTimeTelemetryData; +import net.snowflake.client.core.FieldSchemaCreator; +import net.snowflake.client.core.JsonSqlOutput; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFPreparedStatementMetaData; +import net.snowflake.client.core.SfSqlArray; +import net.snowflake.client.core.SfTimestampUtil; import net.snowflake.client.core.StmtUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -324,6 +330,20 @@ public void setBytes(int parameterIndex, byte[] x) throws SQLException { parameterBindings.put(String.valueOf(parameterIndex), binding); } + 
private void setObjectInternal(int parameterIndex, SQLData sqlData) throws SQLException { + logger.debug("setObjectInternal(parameterIndex: {}, SqlData sqlData)", parameterIndex); + + JsonSqlOutput stream = new JsonSqlOutput(sqlData, connection.getSFBaseSession()); + sqlData.writeSQL(stream); + ParameterBindingDTO binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.STRUCT, connection.getSFBaseSession()), + stream.getJsonString(), + stream.getSchema()); + parameterBindings.put(String.valueOf(parameterIndex), binding); + } + @Override public void setDate(int parameterIndex, Date x) throws SQLException { logger.debug("setDate(parameterIndex: {}, Date x)", parameterIndex); @@ -351,12 +371,7 @@ public void setTime(int parameterIndex, Time x) throws SQLException { setNull(parameterIndex, Types.TIME); } else { // Convert to nanoseconds since midnight using the input time mod 24 hours. - final long MS_IN_DAY = 86400 * 1000; - long msSinceEpoch = x.getTime(); - // Use % + % instead of just % to get the nonnegative remainder. - // TODO(mkember): Change to use Math.floorMod when Client is on Java 8. 
- long msSinceMidnight = (msSinceEpoch % MS_IN_DAY + MS_IN_DAY) % MS_IN_DAY; - long nanosSinceMidnight = msSinceMidnight * 1000 * 1000; + long nanosSinceMidnight = SfTimestampUtil.getTimeInNanoseconds(x); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -479,6 +494,8 @@ public void setObject(int parameterIndex, Object x) throws SQLException { setBoolean(parameterIndex, (Boolean) x); } else if (x instanceof byte[]) { setBytes(parameterIndex, (byte[]) x); + } else if (x instanceof SQLData) { + setObjectInternal(parameterIndex, (SQLData) x); } else { throw new SnowflakeSQLLoggedException( connection.getSFBaseSession(), @@ -600,8 +617,60 @@ public void setClob(int parameterIndex, Clob x) throws SQLException { } @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); + public void setArray(int parameterIndex, Array array) throws SQLException { + if (array instanceof SfSqlArray) { + SfSqlArray sfArray = (SfSqlArray) array; + ParameterBindingDTO binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.ARRAY, connection.getSFBaseSession()), + sfArray.getJsonString(), + sfArray.getSchema()); + parameterBindings.put(String.valueOf(parameterIndex), binding); + } else { + SfSqlArray sfArray = new SfSqlArray(Types.INTEGER, array); + ParameterBindingDTO binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.ARRAY, connection.getSFBaseSession()), + sfArray.getJsonString(), + sfArray.getSchema()); + parameterBindings.put(String.valueOf(parameterIndex), binding); + } + } + + @Override + public void setMap(int parameterIndex, Map map, int type) throws SQLException { + BindingParameterMetadata valueTypeSchema; + if (Types.STRUCT == type) { + SQLData sqlData = (SQLData) map.values().stream().findFirst().orElse(null); + JsonSqlOutput stream = new JsonSqlOutput(sqlData, 
connection.getSFBaseSession()); + sqlData.writeSQL(stream); + valueTypeSchema = stream.getSchema(); + } else { + valueTypeSchema = FieldSchemaCreator.buildBindingSchemaForType(type, false); + } + + BindingParameterMetadata schema = + BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata() + .withType("map") + .withFields( + Arrays.asList( + FieldSchemaCreator.buildBindingSchemaForType(Types.VARCHAR, false), + valueTypeSchema)) + .build(); + ParameterBindingDTO binding = null; + try { + binding = + new ParameterBindingDTO( + "json", + SnowflakeUtil.javaTypeToSFTypeString(Types.STRUCT, connection.getSFBaseSession()), + SnowflakeUtil.mapJson(map), + schema); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + parameterBindings.put(String.valueOf(parameterIndex), binding); } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java index beccc79b2..ea958c551 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java @@ -447,6 +447,9 @@ public static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session case Types.STRUCT: return OBJECT; + case Types.ARRAY: + return ARRAY; + case Types.NULL: return ANY; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index fecba4bba..e156e206b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -6,7 +6,9 @@ import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.google.common.base.Strings; import 
java.io.BufferedReader; @@ -35,6 +37,7 @@ import java.util.concurrent.TimeUnit; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; +import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSessionProperty; @@ -55,6 +58,7 @@ public class SnowflakeUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeUtil.class); + private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); /** Additional data types not covered by standard JDBC */ public static final int EXTRA_TYPES_TIMESTAMP_LTZ = 50000; @@ -86,6 +90,10 @@ public class SnowflakeUtil { public static final String BYTE_STR = "byte"; public static final String BYTES_STR = "byte array"; + public static String mapJson(Object ob) throws JsonProcessingException { + return OBJECT_MAPPER.writeValueAsString(ob); + } + public static void checkErrorAndThrowExceptionIncludingReauth(JsonNode rootNode) throws SnowflakeSQLException { checkErrorAndThrowExceptionSub(rootNode, true); @@ -437,7 +445,8 @@ static String javaTypeToSFTypeString(int javaType, SFBaseSession session) return SnowflakeType.javaTypeToSFType(javaType, session).name(); } - static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session) + @SnowflakeJdbcInternalApi + public static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session) throws SnowflakeSQLException { return SnowflakeType.javaTypeToSFType(javaType, session); } diff --git a/src/main/java/net/snowflake/client/util/ThrowingBiCallable.java b/src/main/java/net/snowflake/client/util/ThrowingBiCallable.java new file mode 100644 index 000000000..e81020195 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/ThrowingBiCallable.java @@ -0,0 +1,9 @@ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + 
+@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface ThrowingBiCallable { + void apply(A a, B b) throws T; +} diff --git a/src/main/java/net/snowflake/client/util/ThrowingTriCallable.java b/src/main/java/net/snowflake/client/util/ThrowingTriCallable.java new file mode 100644 index 000000000..a21ce7a79 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/ThrowingTriCallable.java @@ -0,0 +1,9 @@ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface ThrowingTriCallable { + void apply(A a, B b, C c) throws T; +} diff --git a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java index 7d2b22d33..752229fc9 100644 --- a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java @@ -48,7 +48,7 @@ public void shouldGetTimestampForDifferentType() { } private Timestamp getFromType(int type, String value, TimeZone explicitTimezone) { - return SqlInputTimestampUtil.getTimestampFromType( + return SfTimestampUtil.getTimestampFromType( type, value, mockSession, TimeZone.getTimeZone("GMT"), explicitTimezone); } } diff --git a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java new file mode 100644 index 000000000..4a4d000e0 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java @@ -0,0 +1,363 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Connection; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.HashMap; +import java.util.Map; +import java.util.TimeZone; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; +import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(TestCategoryResultSet.class) +public class BindingAndInsertingStructuredTypesLatestIT extends BaseJDBCTest { + + public Connection init() throws SQLException { + Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); + stmt.execute("alter session set IGNORE_CLIENT_VESRION_IN_STRUCTURED_TYPES_RESPONSE = true"); + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_BINDS = enable"); + stmt.execute("alter session 
set ENABLE_OBJECT_TYPED_BINDS = true"); + stmt.execute("alter session set enable_structured_types_in_fdn_tables=true"); + stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + } + return conn; + } + + @Before + public void clean() { + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + } + + // TODO Structured types feature exists only on QA environments + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteObject() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SimpleClass sc = new SimpleClass("text1", 2); + SimpleClass sc2 = new SimpleClass("text2", 3); + try (Connection connection = init()) { + Statement statement = connection.createStatement(); + statement.execute( + "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); + try (SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_table select ?"); + SnowflakePreparedStatementV1 stmt3 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("SELECT ob FROM test_table where ob = ?"); ) { + + stmt.setObject(1, sc); + stmt.executeUpdate(); + + stmt.setObject(1, sc2); + stmt.executeUpdate(); + + stmt3.setObject(1, sc2); + + try (ResultSet resultSet = stmt3.executeQuery()) { + + resultSet.next(); + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("text2", object.getString()); + assertEquals(Integer.valueOf("3"), object.getIntValue()); + assertFalse(resultSet.next()); + } + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteNullObject() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + try (Connection connection = init(); + Statement statement = 
connection.createStatement(); + SnowflakePreparedStatementV1 stmtement2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_table select null"); + SnowflakePreparedStatementV1 statement3 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("SELECT * FROM test_table"); ) { + + statement.execute( + "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); + + stmtement2.executeUpdate(); + + try (ResultSet resultSet = statement3.executeQuery()) { + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(1)); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteObjectBindingNull() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("insert into test_table select ?"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("SELECT * FROM test_table"); ) { + statement.execute( + "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); + stmt.setObject(1, null); + stmt.executeUpdate(); + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertNull(object); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteObjectAllTypes() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC)); + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + 
connection.prepareStatement("insert into test_all_types_object select ?"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("select * from test_all_types_object where ob=?"); ) { + + statement.execute( + " CREATE OR REPLACE TABLE test_all_types_object (" + + " ob OBJECT(string VARCHAR, " + + " b TINYINT, " + + " s SMALLINT, " + + " i INTEGER, " + + " l BIGINT, " + + " f FLOAT, " + + " d DOUBLE, " + + " bd NUMBER(38,2), " + + " bool BOOLEAN, " + + " timestampLtz TIMESTAMP_LTZ, " + + " timestampNtz TIMESTAMP_NTZ, " + + " timestampTz TIMESTAMP_TZ, " + + " date DATE," + + " time TIME, " + + " binary BINARY, " + + " simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + + " ) )"); + + AllTypesClass allTypeInstance = + new AllTypesClass( + "string", + "1".getBytes(StandardCharsets.UTF_8)[0], + Short.valueOf("2"), + Integer.valueOf(3), + Long.valueOf(4), + 1.1f, + 2.24, + new BigDecimal("999999999999999999999999999999999999.55"), + Boolean.TRUE, + Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), + toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("UTC"))), + toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("Asia/Tokyo"))), + Date.valueOf("2023-12-24"), + Time.valueOf("12:34:56"), + new byte[] {'a', 'b', 'c'}, + new SimpleClass("testString", 2)); + stmt.setObject(1, allTypeInstance); + stmt.executeUpdate(); + statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + + stmt2.setObject(1, allTypeInstance); + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); + assertEquals("string", object.getString()); + assertEquals(49, (long) object.getB()); + assertEquals(2, (long) object.getS()); + assertEquals(3, (long) object.getI()); + assertEquals(4, (long) object.getL()); + assertEquals(1.1, (double) object.getF(), 0.01); + assertEquals(2.24, (double) object.getD(), 0.01); + 
assertEquals(new BigDecimal("999999999999999999999999999999999999.55"), object.getBd()); + assertEquals(Boolean.TRUE, object.getBool()); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), object.getTimestampLtz()); + assertEquals( + // toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, + // ZoneId.of("Europe/Warsaw"))), + Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 9, 44, 44)), object.getTimestampNtz()); + assertEquals( + toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("Asia/Tokyo"))), + object.getTimestampTz()); + assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), object.getDate()); + assertEquals(Time.valueOf(LocalTime.of(12, 34, 56)), object.getTime()); + assertArrayEquals(new byte[] {'a', 'b', 'c'}, object.getBinary()); + assertEquals("testString", object.getSimpleClass().getString()); + assertEquals(Integer.valueOf("2"), object.getSimpleClass().getIntValue()); + } + } + } + + public static Timestamp toTimestamp(ZonedDateTime dateTime) { + return new Timestamp(dateTime.toInstant().getEpochSecond() * 1000L); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteArray() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement( + "INSERT INTO array_of_integers (arrayInt) SELECT ?;"); ) { + + statement.execute(" CREATE OR REPLACE TABLE array_of_integers(arrayInt ARRAY(INTEGER))"); + + Array array = connection.createArrayOf("INTEGER", new Integer[] {1, 2, 3}); + stmt.setArray(1, array); + stmt.executeUpdate(); + + try (ResultSet resultSet = statement.executeQuery("SELECT * from array_of_integers"); ) { + resultSet.next(); + + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + 
assertEquals(Long.valueOf(1), resultArray[0]); + assertEquals(Long.valueOf(2), resultArray[1]); + assertEquals(Long.valueOf(3), resultArray[2]); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteArrayNoBinds() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement( + "insert into array_of_integers select ([1, 2, 3]::array(integer));"); ) { + + statement.execute(" CREATE OR REPLACE TABLE array_of_integers(arrayInt ARRAY(INTEGER))"); + + stmt.executeUpdate(); + + try (ResultSet resultSet = statement.executeQuery("SELECT * from array_of_integers"); ) { + resultSet.next(); + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf(1), resultArray[0]); + assertEquals(Long.valueOf(2), resultArray[1]); + assertEquals(Long.valueOf(3), resultArray[2]); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteMapOfSqlData() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("INSERT INTO map_of_objects (mapp) SELECT ?;"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("select * from map_of_objects where mapp=?"); ) { + + statement.execute( + " CREATE OR REPLACE TABLE map_of_objects(mapp MAP(VARCHAR, OBJECT(string VARCHAR, intValue INTEGER)))"); + + Map mapStruct = + Stream.of( + new Object[][] { + {"x", new SimpleClass("string1", 1)}, + {"y", new SimpleClass("string2", 2)}, + }) + .collect(Collectors.toMap(data -> (String) data[0], data -> 
(SimpleClass) data[1])); + + stmt.setMap(1, mapStruct, Types.STRUCT); + stmt.executeUpdate(); + + stmt2.setMap(1, mapStruct, Types.STRUCT); + + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testWriteMapOfInteger() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + SnowflakePreparedStatementV1 stmt = + (SnowflakePreparedStatementV1) + connection.prepareStatement("INSERT INTO map_of_objects (mapp) SELECT ?;"); + SnowflakePreparedStatementV1 stmt2 = + (SnowflakePreparedStatementV1) + connection.prepareStatement("select * from map_of_objects where mapp=?"); ) { + + statement.execute(" CREATE OR REPLACE TABLE map_of_objects(mapp MAP(VARCHAR, INTEGER))"); + + Map mapStruct = new HashMap<>(); + mapStruct.put("x", 1); + mapStruct.put("y", 2); + + stmt.setMap(1, mapStruct, Types.INTEGER); + stmt.executeUpdate(); + + stmt2.setMap(1, mapStruct, Types.INTEGER); + + try (ResultSet resultSet = stmt2.executeQuery()) { + resultSet.next(); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Integer.class); + } + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java index 7b6c758ce..ae6d1fac4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java @@ -41,8 +41,6 @@ public void testFeatureNotSupportedException() throws Throwable { expectFeatureNotSupportedException(connection::createBlob); expectFeatureNotSupportedException(connection::createNClob); expectFeatureNotSupportedException(connection::createSQLXML); - 
expectFeatureNotSupportedException( - () -> connection.createArrayOf("fakeType", new Object[] {})); expectFeatureNotSupportedException( () -> connection.createStruct("fakeType", new Object[] {})); } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java index 9311d1f96..e3ec67b83 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java @@ -16,8 +16,6 @@ public class PreparedStatementFeatureNotSupportedIT extends BaseJDBCTest { public void testFeatureNotSupportedException() throws Throwable { try (Connection connection = getConnection()) { PreparedStatement preparedStatement = connection.prepareStatement("select ?"); - expectFeatureNotSupportedException( - () -> preparedStatement.setArray(1, new BaseJDBCTest.FakeArray())); expectFeatureNotSupportedException( () -> preparedStatement.setAsciiStream(1, new BaseJDBCTest.FakeInputStream())); expectFeatureNotSupportedException( diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java new file mode 100644 index 000000000..a080ce9af --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFormatType.java @@ -0,0 +1,12 @@ +package net.snowflake.client.jdbc; + +public enum ResultSetFormatType { + JSON("JSON"), + ARROW_WITH_JSON_STRUCTURED_TYPES("ARROW"), + NATIVE_ARROW("ARROW"); + public final String sessionParameterTypeValue; + + ResultSetFormatType(String sessionParameterTypeValue) { + this.sessionParameterTypeValue = sessionParameterTypeValue; + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java index 
2eaaa5e39..442a940b9 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java @@ -29,12 +29,13 @@ import net.snowflake.client.category.TestCategoryResultSet; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.BaseJDBCTest; +import net.snowflake.client.jdbc.ResultSetFormatType; import net.snowflake.client.jdbc.SnowflakeBaseResultSet; import net.snowflake.client.jdbc.SnowflakeResultSetMetaData; import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.NestedStructSqlData; import net.snowflake.client.jdbc.structuredtypes.sqldata.NullableFieldsSqlData; -import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; +import net.snowflake.client.jdbc.structuredtypes.sqldata.StringClass; import org.junit.Assume; import org.junit.Before; import org.junit.Test; @@ -81,7 +82,7 @@ public Connection init() throws SQLException { @Before public void clean() throws Exception { - SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(StringClass.class); SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); } @@ -100,12 +101,12 @@ public void testMapStructToObjectWithReflection() throws SQLException { private void testMapJson(boolean registerFactory) throws SQLException { if (registerFactory) { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR)", (resultSet) -> { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); + StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); }); } @@ -123,7 +124,7 @@ public void testMapNullStruct() 
throws SQLException { withFirstRow( "select null::OBJECT(string VARCHAR)", (resultSet) -> { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); + StringClass object = resultSet.getObject(1, StringClass.class); assertNull(object); }); } @@ -155,7 +156,7 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { + "'date': '2023-12-24'::DATE, " + "'time': '12:34:56'::TIME, " + "'binary': TO_BINARY('616263', 'HEX'), " - + "'simpleClass': {'string': 'b'}" + + "'simpleClass': {'string': 'b', 'intValue': 2}" + "}::OBJECT(" + "string VARCHAR, " + "b TINYINT, " @@ -172,7 +173,7 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { + "date DATE, " + "time TIME, " + "binary BINARY, " - + "simpleClass OBJECT(string VARCHAR)" + + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + ")"); ) { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); @@ -205,6 +206,7 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { assertArrayEquals(new byte[] {'a', 'b', 'c'}, object.getBinary()); assertTrue(object.getBool()); assertEquals("b", object.getSimpleClass().getString()); + assertEquals(Integer.valueOf(2), object.getSimpleClass().getIntValue()); } } } @@ -212,12 +214,12 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsArrayOfSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", (resultSet) -> { - SimpleClass[] resultArray = - resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, SimpleClass.class); + StringClass[] resultArray = + 
resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, StringClass.class); assertEquals("one", resultArray[0].getString()); assertEquals("two", resultArray[1].getString()); assertEquals("three", resultArray[2].getString()); @@ -257,9 +259,9 @@ public void testReturnNullsForAllTpesInSqlData() throws SQLException { try (ResultSet resultSet = statement.executeQuery( "SELECT OBJECT_CONSTRUCT_KEEP_NULL('string', null, 'b', null, 's', null, 'i', null, 'l', null, 'f', null,'d', null, 'bd', null, 'bool', null," - + " 'timestamp_ltz', null, 'timestamp_ntz', null, 'timestamp_tz', null, 'date', null, 'time', null, 'binary', null, 'simpleClass', null)" + + " 'timestamp_ltz', null, 'timestamp_ntz', null, 'timestamp_tz', null, 'date', null, 'time', null, 'binary', null, 'StringClass', null)" + "::OBJECT(string VARCHAR, b TINYINT, s SMALLINT, i INTEGER, l BIGINT, f FLOAT, d DOUBLE, bd DOUBLE, bool BOOLEAN, timestamp_ltz TIMESTAMP_LTZ, " - + "timestamp_ntz TIMESTAMP_NTZ, timestamp_tz TIMESTAMP_TZ, date DATE, time TIME, binary BINARY, simpleClass OBJECT(string VARCHAR))"); ) { + + "timestamp_ntz TIMESTAMP_NTZ, timestamp_tz TIMESTAMP_TZ, date DATE, time TIME, binary BINARY, StringClass OBJECT(string VARCHAR))"); ) { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); assertNull(object.getString()); @@ -368,12 +370,12 @@ public void testReturnAsListOfDouble() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMap() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { - Map map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + Map map = + 
resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertEquals("one", map.get("x").getString()); assertEquals("two", map.get("y").getString()); assertEquals("three", map.get("z").getString()); @@ -383,12 +385,12 @@ public void testReturnAsMap() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMapWithNullableValues() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select {'x':{'string':'one'},'y':null,'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { - Map map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertEquals("one", map.get("x").getString()); assertNull(map.get("y")); assertEquals("three", map.get("z").getString()); @@ -398,7 +400,7 @@ public void testReturnAsMapWithNullableValues() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullAsObjectOfTypeMap() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -411,12 +413,12 @@ public void testReturnNullAsObjectOfTypeMap() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullAsMap() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", 
(resultSet) -> { - Map map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertNull(map); }); } @@ -521,12 +523,12 @@ public void testReturnAsMapOfBoolean() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsList() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", (resultSet) -> { - List map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, SimpleClass.class); + List map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, StringClass.class); assertEquals("one", map.get(0).getString()); assertEquals("two", map.get(1).getString()); }); @@ -539,7 +541,7 @@ public void testMapStructsFromChunks() throws SQLException { "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))", (resultSet) -> { while (resultSet.next()) { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); + StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); } }); @@ -736,17 +738,17 @@ public void testMapArrayOfArrays() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapNestedStructures() throws SQLException { withFirstRow( - "SELECT {'simpleClass': {'string': 'a'}, " - + "'simpleClasses': ARRAY_CONSTRUCT({'string': 'a'}, {'string': 'b'}), " - + "'arrayOfSimpleClasses': ARRAY_CONSTRUCT({'string': 'a'}, {'string': 'b'}), " - + "'mapOfSimpleClasses':{'x':{'string': 'c'}, 'y':{'string': 'd'}}," + "SELECT {'simpleClass': {'string': 'a', 'intValue': 2}, " + + "'simpleClasses': ARRAY_CONSTRUCT({'string': 
'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " + + "'arrayOfSimpleClasses': ARRAY_CONSTRUCT({'string': 'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " + + "'mapOfSimpleClasses':{'x':{'string': 'c', 'intValue': 2}, 'y':{'string': 'd', 'intValue': 2}}," + "'texts': ARRAY_CONSTRUCT('string', 'a'), " + "'arrayOfDates': ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD')), " + "'mapOfIntegers':{'x':3, 'y':4}}" - + "::OBJECT(simpleClass OBJECT(string VARCHAR), " - + "simpleClasses ARRAY(OBJECT(string VARCHAR))," - + "arrayOfSimpleClasses ARRAY(OBJECT(string VARCHAR))," - + "mapOfSimpleClasses MAP(VARCHAR, OBJECT(string VARCHAR))," + + "::OBJECT(simpleClass OBJECT(string VARCHAR, intValue INTEGER), " + + "simpleClasses ARRAY(OBJECT(string VARCHAR, intValue INTEGER))," + + "arrayOfSimpleClasses ARRAY(OBJECT(string VARCHAR, intValue INTEGER))," + + "mapOfSimpleClasses MAP(VARCHAR, OBJECT(string VARCHAR, intValue INTEGER))," + "texts ARRAY(VARCHAR)," + "arrayOfDates ARRAY(DATE)," + "mapOfIntegers MAP(VARCHAR, INTEGER))", @@ -755,15 +757,30 @@ public void testMapNestedStructures() throws SQLException { resultSet.getObject(1, NestedStructSqlData.class); ; assertEquals("a", nestedStructSqlData.getSimpleClass().getString()); + assertEquals(Integer.valueOf(2), nestedStructSqlData.getSimpleClass().getIntValue()); assertEquals("a", nestedStructSqlData.getSimpleClassses().get(0).getString()); + assertEquals( + Integer.valueOf(2), nestedStructSqlData.getSimpleClassses().get(0).getIntValue()); assertEquals("b", nestedStructSqlData.getSimpleClassses().get(1).getString()); + assertEquals( + Integer.valueOf(2), nestedStructSqlData.getSimpleClassses().get(1).getIntValue()); assertEquals("a", nestedStructSqlData.getArrayOfSimpleClasses()[0].getString()); + assertEquals( + Integer.valueOf(2), nestedStructSqlData.getArrayOfSimpleClasses()[0].getIntValue()); assertEquals("b", nestedStructSqlData.getArrayOfSimpleClasses()[1].getString()); + 
assertEquals( + Integer.valueOf(2), nestedStructSqlData.getArrayOfSimpleClasses()[1].getIntValue()); assertEquals("c", nestedStructSqlData.getMapOfSimpleClasses().get("x").getString()); + assertEquals( + Integer.valueOf(2), + nestedStructSqlData.getMapOfSimpleClasses().get("x").getIntValue()); assertEquals("d", nestedStructSqlData.getMapOfSimpleClasses().get("y").getString()); + assertEquals( + Integer.valueOf(2), + nestedStructSqlData.getMapOfSimpleClasses().get("y").getIntValue()); assertEquals("string", nestedStructSqlData.getTexts().get(0)); assertEquals("a", nestedStructSqlData.getTexts().get(1)); @@ -834,16 +851,4 @@ private void withFirstRow(String sqlText, ThrowingConsumer logicalConnection.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); expectFeatureNotSupportedException( () -> logicalConnection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); - expectFeatureNotSupportedException( - () -> logicalConnection.createArrayOf("fakeType", new Object[] {})); expectFeatureNotSupportedException( () -> logicalConnection.createStruct("fakeType", new Object[] {})); expectFeatureNotSupportedException( From 7cb73ff2ca00973550e4349453d6de4c0c193119 Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Sat, 27 Apr 2024 02:17:21 +0900 Subject: [PATCH 04/54] SNOW-1213117: Wrap connection, statement and result set in try with resources(2/4) (#1722) --- .../client/jdbc/DellBoomiCloudIT.java | 18 +- .../client/jdbc/FileUploaderLatestIT.java | 1177 ++++++++--------- .../snowflake/client/jdbc/GCPLargeResult.java | 19 +- .../client/jdbc/HeartbeatAsyncLatestIT.java | 34 +- .../snowflake/client/jdbc/HeartbeatIT.java | 68 +- .../client/jdbc/MultiStatementIT.java | 712 +++++----- .../client/jdbc/MultiStatementLatestIT.java | 530 ++++---- .../client/jdbc/OpenGroupCLIFuncIT.java | 4 +- .../client/jdbc/PreparedMultiStmtIT.java | 364 ++--- .../client/jdbc/PreparedStatement0IT.java | 6 +- .../client/jdbc/PreparedStatement1IT.java | 
279 ++-- .../jdbc/PreparedStatement1LatestIT.java | 249 ++-- .../client/jdbc/PreparedStatement2IT.java | 579 ++++---- .../jdbc/PreparedStatement2LatestIT.java | 379 +++--- ...reparedStatementFeatureNotSupportedIT.java | 4 +- .../PreparedStatementLargeUpdateLatestIT.java | 90 +- 16 files changed, 2224 insertions(+), 2288 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java index f61ff65e7..794af78df 100644 --- a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java @@ -26,17 +26,15 @@ public void setup() { @Test public void testSelectLargeResultSet() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("select seq4() from table" + "(generator" + "(rowcount=>10000))"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select seq4() from table" + "(generator" + "(rowcount=>10000))")) { - while (resultSet.next()) { - resultSet.getString(1); + while (resultSet.next()) { + resultSet.getString(1); + } } - - resultSet.close(); - statement.close(); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java index ac1ffe249..66b33b9ab 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java @@ -67,24 +67,24 @@ public class FileUploaderLatestIT extends FileUploaderPrepIT { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetS3StageDataWithS3Session() throws SQLException { - Connection con = getConnection("s3testaccount"); - 
SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - // Set UseRegionalS3EndpointsForPresignedURL to true in session - sfSession.setUseRegionalS3EndpointsForPresignedURL(true); - - // Get sample stage info with session - StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - // Assert that true value from session is reflected in StageInfo - Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); - - // Set UseRegionalS3EndpointsForPresignedURL to false in session - sfSession.setUseRegionalS3EndpointsForPresignedURL(false); - stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - // Assert that false value from session is reflected in StageInfo - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); - con.close(); + try (Connection con = getConnection("s3testaccount")) { + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Set UseRegionalS3EndpointsForPresignedURL to true in session + sfSession.setUseRegionalS3EndpointsForPresignedURL(true); + + // Get sample stage info with session + StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); + Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + // Assert that true value from session is reflected in StageInfo + Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); + + // Set UseRegionalS3EndpointsForPresignedURL to false in session + sfSession.setUseRegionalS3EndpointsForPresignedURL(false); + stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); + Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + // Assert that false value from session is reflected in StageInfo + Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); 
+ } } /** @@ -96,56 +96,56 @@ public void testGetS3StageDataWithS3Session() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetS3StageDataWithAzureSession() throws SQLException { - Connection con = getConnection("azureaccount"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - // Set UseRegionalS3EndpointsForPresignedURL to true in session. This is redundant since session - // is Azure - sfSession.setUseRegionalS3EndpointsForPresignedURL(true); - - // Get sample stage info with session - StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleAzureJsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); - // Assert that UseRegionalS3EndpointsForPresignedURL is false in StageInfo even if it was set to - // true. - // The value should always be false for non-S3 accounts - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); - con.close(); + try (Connection con = getConnection("azureaccount")) { + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Set UseRegionalS3EndpointsForPresignedURL to true in session. This is redundant since + // session + // is Azure + sfSession.setUseRegionalS3EndpointsForPresignedURL(true); + + // Get sample stage info with session + StageInfo stageInfo = + SnowflakeFileTransferAgent.getStageInfo(exampleAzureJsonNode, sfSession); + Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + // Assert that UseRegionalS3EndpointsForPresignedURL is false in StageInfo even if it was set + // to + // true. 
+ // The value should always be false for non-S3 accounts + Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetObjectMetadataWithGCS() throws Exception { - Connection connection = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; - statement.execute(putCommand); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeGCSClient client = - SnowflakeGCSClient.createSnowflakeGCSClient( - info, sfAgent.getEncryptionMaterial().get(0), sfSession); - - String location = info.getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + TEST_DATA_FILE + ".gz"; - StorageObjectMetadata metadata = client.getObjectMetadata(remoteStageLocation, path); - Assert.assertEquals("gzip", metadata.getContentEncoding()); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE " + 
OBJ_META_STAGE); + + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; + statement.execute(putCommand); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeGCSClient client = + SnowflakeGCSClient.createSnowflakeGCSClient( + info, sfAgent.getEncryptionMaterial().get(0), sfSession); + + String location = info.getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + TEST_DATA_FILE + ".gz"; + StorageObjectMetadata metadata = client.getObjectMetadata(remoteStageLocation, path); + Assert.assertEquals("gzip", metadata.getContentEncoding()); + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @@ -153,41 +153,38 @@ public void testGetObjectMetadataWithGCS() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { - Connection connection = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; - statement.execute(putCommand); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new 
SFStatement(sfSession)); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeGCSClient client = - SnowflakeGCSClient.createSnowflakeGCSClient( - info, sfAgent.getEncryptionMaterial().get(0), sfSession); - - String location = info.getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + "wrong_file.csv.gz"; - client.getObjectMetadata(remoteStageLocation, path); - fail("should raise exception"); - } catch (Exception ex) { - assertTrue( - "Wrong type of exception. Message: " + ex.getMessage(), - ex instanceof StorageProviderException); - assertTrue(ex.getMessage().matches(".*Blob.*not found in bucket.*")); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; + statement.execute(putCommand); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeGCSClient client = + SnowflakeGCSClient.createSnowflakeGCSClient( + info, sfAgent.getEncryptionMaterial().get(0), sfSession); + + String location = info.getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + "wrong_file.csv.gz"; + 
client.getObjectMetadata(remoteStageLocation, path); + fail("should raise exception"); + } catch (Exception ex) { + assertTrue( + "Wrong type of exception. Message: " + ex.getMessage(), + ex instanceof StorageProviderException); + assertTrue(ex.getMessage().matches(".*Blob.*not found in bucket.*")); + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @@ -195,116 +192,114 @@ public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetObjectMetadataStorageExceptionWithGCS() throws Exception { - Connection connection = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; - statement.execute(putCommand); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeGCSClient client = - SnowflakeGCSClient.createSnowflakeGCSClient( - info, sfAgent.getEncryptionMaterial().get(0), sfSession); - - String location = info.getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - client.getObjectMetadata(remoteStageLocation, ""); - fail("should raise exception"); - } catch (Exception ex) { - assertTrue( - "Wrong type of exception. 
Message: " + ex.getMessage(), - ex instanceof StorageProviderException); - assertTrue(ex.getMessage().matches(".*Permission.*denied.*")); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + String putCommand = "PUT file://" + sourceFilePath + " @" + OBJ_META_STAGE; + statement.execute(putCommand); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeGCSClient client = + SnowflakeGCSClient.createSnowflakeGCSClient( + info, sfAgent.getEncryptionMaterial().get(0), sfSession); + + String location = info.getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + client.getObjectMetadata(remoteStageLocation, ""); + fail("should raise exception"); + } catch (Exception ex) { + assertTrue( + "Wrong type of exception. 
Message: " + ex.getMessage(), + ex instanceof StorageProviderException); + assertTrue(ex.getMessage().matches(".*Permission.*denied.*")); + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @Test public void testGetFileTransferCommandType() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, sfAgent.getCommandType()); - statement.execute("drop stage if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, sfAgent.getCommandType()); + } finally { + statement.execute("drop stage if exists testStage"); + } + } } @Test public void testNullCommand() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - try { - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(null, sfSession, new SFStatement(sfSession)); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage() - .contains("JDBC driver internal error: Missing sql for statement execution")); + try (Connection 
con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(null, sfSession, new SFStatement(sfSession)); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue( + err.getMessage() + .contains("JDBC driver internal error: Missing sql for statement execution")); + } finally { + statement.execute("drop stage if exists testStage"); + } } - statement.execute("drop stage if exists testStage"); - con.close(); } @Test public void testCompressStreamWithGzipException() throws Exception { - Connection con = null; // inject the NoSuchAlgorithmException SnowflakeFileTransferAgent.setInjectedFileTransferException(new NoSuchAlgorithmException()); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - List metadataList = sfAgent.getFileTransferMetadatas(); - SnowflakeFileTransferMetadataV1 metadata = - (SnowflakeFileTransferMetadataV1) metadataList.get(0); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - InputStream inputStream = new FileInputStream(srcPath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(metadata) - 
.setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(PUT_COMMAND) - .build()); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage() - .contains("JDBC driver internal error: error encountered for compression")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + List metadataList = sfAgent.getFileTransferMetadatas(); + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + InputStream inputStream = new FileInputStream(srcPath); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(metadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(PUT_COMMAND) + .build()); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue( + err.getMessage() + .contains("JDBC driver internal error: error encountered for compression")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -312,46 +307,43 @@ public void testCompressStreamWithGzipException() throws Exception { @Test public void testCompressStreamWithGzipNoDigestException() throws Exception { - Connection con = null; // inject the IOException SnowflakeFileTransferAgent.setInjectedFileTransferException(new 
IOException()); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - List metadataList = sfAgent.getFileTransferMetadatas(); - SnowflakeFileTransferMetadataV1 metadata = - (SnowflakeFileTransferMetadataV1) metadataList.get(0); - metadata.setEncryptionMaterial(null, null, null); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - - InputStream inputStream = new FileInputStream(srcPath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(metadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(PUT_COMMAND) - .build()); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage() - .contains("JDBC driver internal error: error encountered for compression")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + List metadataList = sfAgent.getFileTransferMetadatas(); + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + metadata.setEncryptionMaterial(null, null, null); + + String 
srcPath = getFullPathFileInResource(TEST_DATA_FILE); + + InputStream inputStream = new FileInputStream(srcPath); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(metadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(PUT_COMMAND) + .build()); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue( + err.getMessage() + .contains("JDBC driver internal error: error encountered for compression")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -359,46 +351,43 @@ public void testCompressStreamWithGzipNoDigestException() throws Exception { @Test public void testUploadWithoutConnectionException() throws Exception { - Connection con = null; // inject the IOException SnowflakeFileTransferAgent.setInjectedFileTransferException( new Exception("Exception encountered during file upload: failed to push to remote store")); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - List metadataList = sfAgent.getFileTransferMetadatas(); - SnowflakeFileTransferMetadataV1 metadata = - (SnowflakeFileTransferMetadataV1) metadataList.get(0); - - String srcPath = 
getFullPathFileInResource(TEST_DATA_FILE); - - InputStream inputStream = new FileInputStream(srcPath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(metadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(PUT_COMMAND) - .build()); - } catch (Exception err) { - Assert.assertTrue( - err.getMessage() - .contains( - "Exception encountered during file upload: failed to push to remote store")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + List metadataList = sfAgent.getFileTransferMetadatas(); + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + + InputStream inputStream = new FileInputStream(srcPath); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(metadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(PUT_COMMAND) + .build()); + } catch (Exception err) { + Assert.assertTrue( + err.getMessage() + .contains( + "Exception encountered during file upload: failed to push to remote store")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -406,73 +395,64 @@ public void testUploadWithoutConnectionException() throws Exception { @Test public void testInitFileMetadataFileNotFound() throws Exception { - Connection con = null; - try { - con = 
getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals(200008, err.getErrorCode()); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals(200008, err.getErrorCode()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testInitFileMetadataFileIsDirectory() throws Exception { - Connection con = null; - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String srcPath = - getFullPathFileInResource(""); // will pull the resources directory without a file - String command = "put file://" + srcPath + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals(200009, err.getErrorCode()); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement 
statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + String srcPath = + getFullPathFileInResource(""); // will pull the resources directory without a file + String command = "put file://" + srcPath + " @testStage"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals(200009, err.getErrorCode()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testCompareAndSkipFilesException() throws Exception { - Connection con = null; // inject the NoSuchAlgorithmException SnowflakeFileTransferAgent.setInjectedFileTransferException(new NoSuchAlgorithmException()); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Error reading:")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; + 
SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + Assert.assertTrue(err.getMessage().contains("Error reading:")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -480,24 +460,22 @@ public void testCompareAndSkipFilesException() throws Exception { @Test public void testParseCommandException() throws SQLException { - Connection con = null; // inject the SnowflakeSQLException SnowflakeFileTransferAgent.setInjectedFileTransferException( new SnowflakeSQLException("invalid data")); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Failed to parse the locations")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + } catch (SnowflakeSQLException err) { + Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + 
Assert.assertTrue(err.getMessage().contains("Failed to parse the locations")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -505,63 +483,58 @@ public void testParseCommandException() throws SQLException { @Test public void testPopulateStatusRowsWithSortOn() throws Exception { - Connection con = null; - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - statement.execute("set-sf-property sort on"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // upload files orders_101.csv and orders_100.csv - String command = "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent1.execute(); // upload files - - // check that source files were sorted - assertEquals(2, sfAgent1.statusRows.size()); - assertEquals("orders_100.csv", sfAgent1.getNextRow().get(0).toString()); - - String getCommand = "GET @testStage file:///tmp"; - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); - sfAgent2.execute(); - // check that files are sorted on download - assertEquals(2, sfAgent2.statusRows.size()); - assertEquals("orders_100.csv.gz", sfAgent2.getNextRow().get(0).toString()); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + statement.execute("set-sf-property sort on"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // upload files orders_101.csv and orders_100.csv + String command = + "PUT 
file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent1.execute(); // upload files + + // check that source files were sorted + assertEquals(2, sfAgent1.statusRows.size()); + assertEquals("orders_100.csv", sfAgent1.getNextRow().get(0).toString()); + + String getCommand = "GET @testStage file:///tmp"; + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); + sfAgent2.execute(); + // check that files are sorted on download + assertEquals(2, sfAgent2.statusRows.size()); + assertEquals("orders_100.csv.gz", sfAgent2.getNextRow().get(0).toString()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testListObjectsStorageException() throws Exception { - Connection con = null; // inject the StorageProviderException SnowflakeFileTransferAgent.setInjectedFileTransferException( new StorageProviderException(new Exception("could not list objects"))); - try { - con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("create or replace stage testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - - sfAgent.execute(); - } catch (SnowflakeSQLException err) { - Assert.assertEquals(200016, err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Encountered exception during listObjects")); - } finally { - if (con != null) { - con.createStatement().execute("DROP STAGE if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("create or 
replace stage testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + + sfAgent.execute(); + } catch (SnowflakeSQLException err) { + Assert.assertEquals(200016, err.getErrorCode()); + Assert.assertTrue(err.getMessage().contains("Encountered exception during listObjects")); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -572,75 +545,73 @@ public void testUploadStreamInterruptedException() throws IOException, SQLExcept final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; // inject the InterruptedException SnowflakeFileTransferAgent.setInjectedFileTransferException(new InterruptedException()); - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); - - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); - - } catch (SnowflakeSQLLoggedException err) { - Assert.assertEquals(200003, err.getErrorCode()); - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); + + // upload the data to user stage under testUploadStream with name hello.txt + 
connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); + + } catch (SnowflakeSQLLoggedException err) { + Assert.assertEquals(200003, err.getErrorCode()); + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); } @Test public void testFileTransferStageInfo() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); - - StageInfo stageInfo = sfAgent.getStageInfo(); - assertEquals(sfAgent.getStageCredentials(), stageInfo.getCredentials()); - assertEquals(sfAgent.getStageLocation(), stageInfo.getLocation()); - - statement.execute("drop stage if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); + + StageInfo stageInfo = sfAgent.getStageInfo(); + assertEquals(sfAgent.getStageCredentials(), stageInfo.getCredentials()); + assertEquals(sfAgent.getStageLocation(), stageInfo.getLocation()); + } finally { + statement.execute("drop stage if exists testStage"); + } + } } @Test public void testFileTransferMappingFromSourceFile() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - SFSession 
sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - String command = "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent1.execute(); - - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent( - "GET @testStage file:///tmp/", sfSession, new SFStatement(sfSession)); - - assertEquals(2, sfAgent2.getSrcToMaterialsMap().size()); - assertEquals(2, sfAgent2.getSrcToPresignedUrlMap().size()); - - statement.execute("drop stage if exists testStage"); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + String command = + "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent1.execute(); + + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent( + "GET @testStage file:///tmp/", sfSession, new SFStatement(sfSession)); + + assertEquals(2, sfAgent2.getSrcToMaterialsMap().size()); + assertEquals(2, sfAgent2.getSrcToPresignedUrlMap().size()); + } finally { + statement.execute("drop stage if exists testStage"); + } + } } @Test @@ -648,25 +619,20 @@ public void testUploadFileCallableFileNotFound() throws Exception { // inject the FileNotFoundException SnowflakeFileTransferAgent.setInjectedFileTransferException( new FileNotFoundException("file does not exist")); - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - SFSession sfSession = 
connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); - sfAgent.execute(); - } catch (Exception err) { - assertEquals(err.getCause(), instanceOf(FileNotFoundException.class)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testStage"); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + String command = "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testStage"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); + sfAgent.execute(); + } catch (Exception err) { + assertEquals(err.getCause(), instanceOf(FileNotFoundException.class)); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); @@ -674,64 +640,59 @@ public void testUploadFileCallableFileNotFound() throws Exception { @Test public void testUploadFileStreamWithNoOverwrite() throws Exception { - Connection connection = null; - - try { - connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - - uploadFileToStageUsingStream(connection, false); - ResultSet resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String expectedValue = resultSet.getString("last_modified"); - - Thread.sleep(1000); // add 1 sec delay between uploads. 
- - uploadFileToStageUsingStream(connection, false); - resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String actualValue = resultSet.getString("last_modified"); - - assertTrue(expectedValue.equals(actualValue)); - } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testStage"); - connection.close(); + String expectedValue = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + + uploadFileToStageUsingStream(connection, false); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + expectedValue = resultSet.getString("last_modified"); + } + Thread.sleep(1000); // add 1 sec delay between uploads. + + uploadFileToStageUsingStream(connection, false); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + String actualValue = resultSet.getString("last_modified"); + assertEquals(expectedValue, actualValue); + } + } catch (Exception e) { + Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @Test public void testUploadFileStreamWithOverwrite() throws Exception { - Connection connection = null; - - try { - connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("CREATE OR REPLACE STAGE testStage"); - - uploadFileToStageUsingStream(connection, true); - ResultSet resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String expectedValue = resultSet.getString("last_modified"); - - Thread.sleep(1000); // add 1 sec delay between uploads. 
- - uploadFileToStageUsingStream(connection, true); - resultSet = statement.executeQuery("LIST @testStage"); - resultSet.next(); - String actualValue = resultSet.getString("last_modified"); - - assertFalse(expectedValue.equals(actualValue)); - } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testStage"); - connection.close(); + String expectedValue = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE STAGE testStage"); + + uploadFileToStageUsingStream(connection, true); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + expectedValue = resultSet.getString("last_modified"); + } + Thread.sleep(1000); // add 1 sec delay between uploads. + + uploadFileToStageUsingStream(connection, true); + try (ResultSet resultSet = statement.executeQuery("LIST @testStage")) { + assertTrue(resultSet.next()); + String actualValue = resultSet.getString("last_modified"); + + assertFalse(expectedValue.equals(actualValue)); + } + } catch (Exception e) { + Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + } finally { + statement.execute("DROP STAGE if exists testStage"); } } } @@ -739,71 +700,67 @@ public void testUploadFileStreamWithOverwrite() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetS3StorageObjectMetadata() throws Throwable { - Connection connection = null; - try { - connection = getConnection("s3testaccount"); - Statement statement = connection.createStatement(); - + try (Connection connection = getConnection("s3testaccount"); + Statement statement = connection.createStatement()) { // create a stage to put the file in - statement.execute("CREATE OR REPLACE 
STAGE " + OBJ_META_STAGE); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // Test put file with internal compression - String putCommand = "put file:///dummy/path/file1.gz @" + OBJ_META_STAGE; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - List metadata = sfAgent.getFileTransferMetadatas(); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadata) { - InputStream inputStream = new FileInputStream(srcPath); - - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand) - .build()); - - SnowflakeStorageClient client = - StorageClientFactory.getFactory() - .createClient( - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), - 1, - null, - /*session = */ null); - - String location = - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + "file1.gz"; - StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); - - ObjectMetadata s3Meta = new ObjectMetadata(); - s3Meta.setContentLength(meta.getContentLength()); - s3Meta.setContentEncoding(meta.getContentEncoding()); - s3Meta.setUserMetadata(meta.getUserMetadata()); - - S3StorageObjectMetadata s3Metadata = new S3StorageObjectMetadata(s3Meta); - RemoteStoreFileEncryptionMaterial encMat = sfAgent.getEncryptionMaterial().get(0); - Map matDesc = - mapper.readValue(s3Metadata.getUserMetadata().get("x-amz-matdesc"), Map.class); - - assertEquals(encMat.getQueryId(), 
matDesc.get("queryId")); - assertEquals(encMat.getSmkId().toString(), matDesc.get("smkId")); - assertEquals(1360, s3Metadata.getContentLength()); - assertEquals("gzip", s3Metadata.getContentEncoding()); - } - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + OBJ_META_STAGE); - connection.close(); + try { + statement.execute("CREATE OR REPLACE STAGE " + OBJ_META_STAGE); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + OBJ_META_STAGE; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath); + + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand) + .build()); + + SnowflakeStorageClient client = + StorageClientFactory.getFactory() + .createClient( + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), + 1, + null, + /*session = */ null); + + String location = + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + "file1.gz"; + StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); + + ObjectMetadata s3Meta = new ObjectMetadata(); + s3Meta.setContentLength(meta.getContentLength()); + 
s3Meta.setContentEncoding(meta.getContentEncoding()); + s3Meta.setUserMetadata(meta.getUserMetadata()); + + S3StorageObjectMetadata s3Metadata = new S3StorageObjectMetadata(s3Meta); + RemoteStoreFileEncryptionMaterial encMat = sfAgent.getEncryptionMaterial().get(0); + Map matDesc = + mapper.readValue(s3Metadata.getUserMetadata().get("x-amz-matdesc"), Map.class); + + assertEquals(encMat.getQueryId(), matDesc.get("queryId")); + assertEquals(encMat.getSmkId().toString(), matDesc.get("smkId")); + assertEquals(1360, s3Metadata.getContentLength()); + assertEquals("gzip", s3Metadata.getContentEncoding()); + } + } finally { + statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } } } @@ -832,10 +789,6 @@ private void uploadFileToStageUsingStream(Connection connection, boolean overwri @Test public void testUploadFileWithTildeInFolderName() throws SQLException, IOException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - Writer writer = null; Path topDataDir = null; try { @@ -847,47 +800,40 @@ public void testUploadFileWithTildeInFolderName() throws SQLException, IOExcepti // create a test data File dataFile = new File(subDir.toFile(), "test.txt"); - writer = + try (Writer writer = new BufferedWriter( new OutputStreamWriter( Files.newOutputStream(Paths.get(dataFile.getCanonicalPath())), - StandardCharsets.UTF_8)); - writer.write("1,test1"); - writer.close(); - - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("create or replace stage testStage"); - String sql = String.format("PUT 'file://%s' @testStage", dataFile.getCanonicalPath()); - - // Escape backslashes. This must be done by the application. 
- sql = sql.replaceAll("\\\\", "\\\\\\\\"); - resultSet = statement.executeQuery(sql); - while (resultSet.next()) { - assertEquals("UPLOADED", resultSet.getString("status")); - } - } finally { - if (connection != null) { - connection.createStatement().execute("drop stage if exists testStage"); + StandardCharsets.UTF_8))) { + writer.write("1,test1"); } - closeSQLObjects(resultSet, statement, connection); - if (writer != null) { - writer.close(); + + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + String sql = String.format("PUT 'file://%s' @testStage", dataFile.getCanonicalPath()); + + // Escape backslashes. This must be done by the application. + sql = sql.replaceAll("\\\\", "\\\\\\\\"); + try (ResultSet resultSet = statement.executeQuery(sql)) { + while (resultSet.next()) { + assertEquals("UPLOADED", resultSet.getString("status")); + } + } + } finally { + statement.execute("drop stage if exists testStage"); + } } + } finally { FileUtils.deleteDirectory(topDataDir.toFile()); } } @Test public void testUploadWithTildeInPath() throws SQLException, IOException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - Writer writer = null; Path subDir = null; - try { - String homeDir = systemGetProperty("user.home"); // create sub directory where the name includes ~ @@ -895,30 +841,29 @@ public void testUploadWithTildeInPath() throws SQLException, IOException { // create a test data File dataFile = new File(subDir.toFile(), "test.txt"); - writer = + try (Writer writer = new BufferedWriter( new OutputStreamWriter( Files.newOutputStream(Paths.get(dataFile.getCanonicalPath())), - StandardCharsets.UTF_8)); - writer.write("1,test1"); - writer.close(); - - connection = getConnection(); - statement = connection.createStatement(); - statement.execute("create or replace stage testStage"); - - resultSet = 
statement.executeQuery("PUT 'file://~/snowflake/test.txt' @testStage"); - while (resultSet.next()) { - assertEquals("UPLOADED", resultSet.getString("status")); - } - } finally { - if (connection != null) { - connection.createStatement().execute("drop stage if exists testStage"); + StandardCharsets.UTF_8))) { + writer.write("1,test1"); } - closeSQLObjects(resultSet, statement, connection); - if (writer != null) { - writer.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace stage testStage"); + + try (ResultSet resultSet = + statement.executeQuery("PUT 'file://~/snowflake/test.txt' @testStage")) { + while (resultSet.next()) { + assertEquals("UPLOADED", resultSet.getString("status")); + } + } + } finally { + statement.execute("drop stage if exists testStage"); + } } + } finally { FileUtils.deleteDirectory(subDir.toFile()); } } diff --git a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java index 7cb79b404..b2c316d50 100644 --- a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java +++ b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java @@ -37,17 +37,18 @@ Connection init() throws SQLException { @Test public void testLargeResultSetGCP() throws Throwable { - try (Connection con = init()) { - PreparedStatement stmt = - con.prepareStatement( - "select seq8(), randstr(1000, random()) from table(generator(rowcount=>1000))"); + try (Connection con = init(); + PreparedStatement stmt = + con.prepareStatement( + "select seq8(), randstr(1000, random()) from table(generator(rowcount=>1000))")) { stmt.setMaxRows(999); - ResultSet rset = stmt.executeQuery(); - int cnt = 0; - while (rset.next()) { - ++cnt; + try (ResultSet rset = stmt.executeQuery()) { + int cnt = 0; + while (rset.next()) { + ++cnt; + } + assertEquals(cnt, 999); } - assertEquals(cnt, 999); } } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java index c59ddc987..e7217f695 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java @@ -39,21 +39,17 @@ public class HeartbeatAsyncLatestIT extends HeartbeatIT { @Override protected void submitQuery(boolean useKeepAliveSession, int queryIdx) throws SQLException, InterruptedException { - Connection connection = null; - ResultSet resultSet = null; - try { - Properties sessionParams = new Properties(); - sessionParams.put( - "CLIENT_SESSION_KEEP_ALIVE", - useKeepAliveSession ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); + Properties sessionParams = new Properties(); + sessionParams.put( + "CLIENT_SESSION_KEEP_ALIVE", + useKeepAliveSession ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); - connection = getConnection(sessionParams); - - Statement stmt = connection.createStatement(); - // Query will take 5 seconds to run, but ResultSet will be returned immediately - resultSet = - stmt.unwrap(SnowflakeStatement.class) - .executeAsyncQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 5))"); + try (Connection connection = getConnection(sessionParams); + Statement stmt = connection.createStatement(); + // Query will take 5 seconds to run, but ResultSet will be returned immediately + ResultSet resultSet = + stmt.unwrap(SnowflakeStatement.class) + .executeAsyncQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 5))")) { Thread.sleep(61000); // sleep 61 seconds to await original session expiration time QueryStatus qs = resultSet.unwrap(SnowflakeResultSet.class).getStatus(); // Ensure query succeeded. 
Avoid flaky test failure by waiting until query is complete to @@ -69,10 +65,6 @@ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) assertTrue(resultSet.next()); assertFalse(resultSet.next()); logger.fine("Query " + queryIdx + " passed "); - - } finally { - resultSet.close(); - connection.close(); } } @@ -92,16 +84,12 @@ public void testAsynchronousQueryFailure() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testIsValidWithInvalidSession() throws Exception { - Connection connection = null; - try { - connection = getConnection(); + try (Connection connection = getConnection()) { // assert that connection starts out valid assertTrue(connection.isValid(5)); Thread.sleep(61000); // sleep 61 seconds to await session expiration time // assert that connection is no longer valid after session has expired assertFalse(connection.isValid(5)); - } finally { - connection.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java index 16e8364d6..eb41ce76f 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java @@ -46,15 +46,14 @@ public class HeartbeatIT extends AbstractDriverIT { @BeforeClass public static void setUpClass() throws Exception { if (!RunningOnGithubAction.isRunningOnGithubAction()) { - Connection connection = getSnowflakeAdminConnection(); - connection - .createStatement() - .execute( - "alter system set" - + " master_token_validity=60" - + ",session_token_validity=20" - + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=1"); - connection.close(); + try (Connection connection = getSnowflakeAdminConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter system set" + + " master_token_validity=60" + + ",session_token_validity=20" + + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=1"); + } } } 
@@ -65,15 +64,14 @@ public static void setUpClass() throws Exception { @AfterClass public static void tearDownClass() throws Exception { if (!RunningOnGithubAction.isRunningOnGithubAction()) { - Connection connection = getSnowflakeAdminConnection(); - connection - .createStatement() - .execute( - "alter system set" - + " master_token_validity=default" - + ",session_token_validity=default" - + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=default"); - connection.close(); + try (Connection connection = getSnowflakeAdminConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter system set" + + " master_token_validity=default" + + ",session_token_validity=default" + + ",SESSION_RECORD_ACCESS_INTERVAL_SECS=default"); + } } } @@ -87,34 +85,28 @@ public static void tearDownClass() throws Exception { */ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) throws SQLException, InterruptedException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; ResultSetMetaData resultSetMetaData; - try { - Properties sessionParams = new Properties(); - sessionParams.put( - "CLIENT_SESSION_KEEP_ALIVE", - useKeepAliveSession ? Boolean.TRUE.toString() : Boolean.FALSE.toString()); + Properties sessionParams = new Properties(); + sessionParams.put( + "CLIENT_SESSION_KEEP_ALIVE", + useKeepAliveSession ? 
Boolean.TRUE.toString() : Boolean.FALSE.toString()); - connection = getConnection(sessionParams); - statement = connection.createStatement(); + try (Connection connection = getConnection(sessionParams); + Statement statement = connection.createStatement()) { Thread.sleep(61000); // sleep 61 seconds - resultSet = statement.executeQuery("SELECT 1"); - resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = statement.executeQuery("SELECT 1")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 row - assertTrue(resultSet.next()); + // assert we get 1 row + assertTrue(resultSet.next()); - logger.fine("Query " + queryIdx + " passed "); - statement.close(); - } finally { - closeSQLObjects(resultSet, statement, connection); + logger.fine("Query " + queryIdx + " passed "); + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java index e12c3c5fb..c090bab03 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java @@ -31,356 +31,339 @@ public class MultiStatementIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testMultiStmtExecuteUpdateFail() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "select 1;\n" - 
+ "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - try { - statement.executeUpdate(multiStmtQuery); - fail("executeUpdate should have failed because the first statement yields a result set"); - } catch (SQLException ex) { - assertThat( - ex.getErrorCode(), is(ErrorCode.UPDATE_FIRST_RESULT_NOT_UPDATE_COUNT.getMessageCode())); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "select 1;\n" + + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + try { + statement.executeUpdate(multiStmtQuery); + fail("executeUpdate should have failed because the first statement yields a result set"); + } catch (SQLException ex) { + assertThat( + ex.getErrorCode(), is(ErrorCode.UPDATE_FIRST_RESULT_NOT_UPDATE_COUNT.getMessageCode())); + } } - - statement.close(); - connection.close(); } @Test public void testMultiStmtExecuteQueryFail() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - try { - statement.executeQuery(multiStmtQuery); - fail("executeQuery should have failed because the first statement yields an update count"); - } catch (SQLException ex) { - assertThat( - ex.getErrorCode(), is(ErrorCode.QUERY_FIRST_RESULT_NOT_RESULT_SET.getMessageCode())); - } + try 
(Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; - statement.close(); - connection.close(); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + try { + statement.executeQuery(multiStmtQuery); + fail("executeQuery should have failed because the first statement yields an update count"); + } catch (SQLException ex) { + assertThat( + ex.getErrorCode(), is(ErrorCode.QUERY_FIRST_RESULT_NOT_RESULT_SET.getMessageCode())); + } + } } @Test public void testMultiStmtSetUnset() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - // setting session variable should propagate outside of query - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("set testvar = 1; select 1"); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select $testvar"); - rs.next(); - assertEquals(1, rs.getInt(1)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - // selecting unset variable should cause error - try { + // setting session variable should propagate outside of query statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("unset testvar; select $testvar"); - fail("Expected a failure"); - } catch (SQLException ex) { - assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); - } + statement.execute("set testvar = 1; select 1"); - // unsetting session variable should propagate outside of query - try { statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("select $testvar"); - 
fail("Expected a failure"); - } catch (SQLException ex) { - assertEquals(SqlState.NO_DATA, ex.getSQLState()); + try (ResultSet rs = statement.executeQuery("select $testvar")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + + // selecting unset variable should cause error + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("unset testvar; select $testvar"); + fail("Expected a failure"); + } catch (SQLException ex) { + assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); + } + + // unsetting session variable should propagate outside of query + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("select $testvar"); + fail("Expected a failure"); + } catch (SQLException ex) { + assertEquals(SqlState.NO_DATA, ex.getSQLState()); + } + } } - - statement.close(); - connection.close(); } @Test public void testMultiStmtParseError() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute("set testvar = 1"); - try { - // fails in the antlr parser - statement.execute("garbage text; set testvar = 2"); - fail("Expected a compiler error to be thrown"); - } catch (SQLException ex) { - assertEquals(SqlState.SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION, ex.getSQLState()); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - ResultSet rs = statement.executeQuery("select $testvar"); - rs.next(); - assertEquals(1, rs.getInt(1)); + statement.execute("set testvar = 1"); + try { + // fails in the antlr parser + statement.execute("garbage text; set testvar = 2"); + fail("Expected a compiler error to be thrown"); + } catch (SQLException ex) { + assertEquals(SqlState.SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION, ex.getSQLState()); + } - statement.close(); - connection.close(); + try (ResultSet rs = statement.executeQuery("select 
$testvar")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + } } @Test public void testMultiStmtExecError() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + // fails during execution (javascript invokes statement where it gets typechecked) + statement.execute( + "set testvar = 1; select nonexistent_column from nonexistent_table; set testvar = 2"); + fail("Expected an execution error to be thrown"); + } catch (SQLException ex) { + assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); + } - try { - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - // fails during execution (javascript invokes statement where it gets typechecked) - statement.execute( - "set testvar = 1; select nonexistent_column from nonexistent_table; set testvar = 2"); - fail("Expected an execution error to be thrown"); - } catch (SQLException ex) { - assertEquals(SqlState.PLSQL_ERROR, ex.getSQLState()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select $testvar")) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } } - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select $testvar"); - rs.next(); - assertEquals(1, rs.getInt(1)); - - statement.close(); - connection.close(); } @Test public void testMultiStmtTempTable() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - String entry = "success"; - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "create or replace temporary 
table test_multi (cola string); insert into test_multi values ('" - + entry - + "')"); - // temporary table should persist outside of the above statement - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = statement.executeQuery("select * from test_multi"); - rs.next(); - assertEquals(entry, rs.getString(1)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + String entry = "success"; + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "create or replace temporary table test_multi (cola string); insert into test_multi values ('" + + entry + + "')"); + // temporary table should persist outside of the above statement + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select * from test_multi")) { + assertTrue(rs.next()); + assertEquals(entry, rs.getString(1)); + } + } } @Test public void testMultiStmtUseStmt() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - SFSession session = - statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - String originalSchema = session.getSchema(); + SFSession session = + statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("use schema public; select 1"); - // current schema change should persist outside of the above statement + String originalSchema = session.getSchema(); - assertEquals("PUBLIC", session.getSchema()); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - ResultSet rs = 
statement.executeQuery("select current_schema()"); - rs.next(); - assertEquals("PUBLIC", rs.getString(1)); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute(String.format("use schema %s; select 1", originalSchema)); - // current schema change should persist outside of the above statement + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("use schema public; select 1"); + // current schema change should persist outside of the above statement - session = statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); - assertEquals(originalSchema, session.getSchema()); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - rs = statement.executeQuery("select current_schema()"); - rs.next(); - assertEquals(originalSchema, rs.getString(1)); + assertEquals("PUBLIC", session.getSchema()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select current_schema()")) { + assertTrue(rs.next()); + assertEquals("PUBLIC", rs.getString(1)); + } + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute(String.format("use schema %s; select 1", originalSchema)); + // current schema change should persist outside of the above statement - statement.close(); - connection.close(); + session = statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); + assertEquals(originalSchema, session.getSchema()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + try (ResultSet rs = statement.executeQuery("select current_schema()")) { + assertTrue(rs.next()); + assertEquals(originalSchema, rs.getString(1)); + } + } } @Test public void testMultiStmtAlterSessionParams() throws SQLException { - Connection connection = getConnection(); - Statement 
statement = connection.createStatement(); - - SFSession session = - statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // we need an arbitrary parameter which is updated by the client after each query for this test - String param = "AUTOCOMMIT"; - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("alter session set " + param + "=false; select 1"); - assertFalse(session.getAutoCommit()); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("alter session set " + param + "=true; select 1"); - assertTrue(session.getAutoCommit()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + SFSession session = + statement.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // we need an arbitrary parameter which is updated by the client after each query for this + // test + String param = "AUTOCOMMIT"; + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("alter session set " + param + "=false; select 1"); + assertFalse(session.getAutoCommit()); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("alter session set " + param + "=true; select 1"); + assertTrue(session.getAutoCommit()); + } } @Test public void testMultiStmtMultiLine() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - // these statements should not fail - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("select 1;\nselect 2"); - statement.execute("select \n 1; select 2"); - statement.execute("select \r\n 1; select 2"); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement 
statement = connection.createStatement()) { + // these statements should not fail + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("select 1;\nselect 2"); + statement.execute("select \n 1; select 2"); + statement.execute("select \r\n 1; select 2"); + } } @Test public void testMultiStmtQuotes() throws SQLException { // test various quotation usage and ensure they succeed - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "create or replace temporary table \"test_multi\" (cola string); select * from \"test_multi\""); - statement.execute( - "create or replace temporary table `test_multi` (cola string); select * from `test_multi`"); - statement.execute("select 'str'; select 'str2'"); - statement.execute("select '\\` backticks'; select '\\\\` more `backticks`'"); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "create or replace temporary table \"test_multi\" (cola string); select * from \"test_multi\""); + statement.execute( + "create or replace temporary table `test_multi` (cola string); select * from `test_multi`"); + statement.execute("select 'str'; select 'str2'"); + statement.execute("select '\\` backticks'; select '\\\\` more `backticks`'"); + } } @Test public void testMultiStmtCommitRollback() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("begin"); - statement.execute("insert into test_multi values ('abc')"); - // "commit" inside multistatement commits previous DML calls - 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); commit"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("rollback"); - ResultSet rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("begin"); - statement.execute("insert into test_multi values ('abc')"); - // "rollback" inside multistatement rolls back previous DML calls - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); rollback"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("begin; insert into test_multi values ('abc')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("insert into test_multi values ('def')"); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("begin; insert into test_multi values ('abc')"); - 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("insert into test_multi values ('def')"); - statement.execute("rollback"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("begin"); + statement.execute("insert into test_multi values ('abc')"); + // "commit" inside multistatement commits previous DML calls + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); commit"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("begin"); + statement.execute("insert into test_multi values ('abc')"); + // "rollback" inside multistatement rolls back previous DML calls + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); rollback"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + 
statement.execute("begin; insert into test_multi values ('abc')"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("insert into test_multi values ('def')"); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("begin; insert into test_multi values ('abc')"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("insert into test_multi values ('def')"); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + } } @Test public void testMultiStmtCommitRollbackNoAutocommit() throws SQLException { - Connection connection = getConnection(); - connection.setAutoCommit(false); - Statement statement = connection.createStatement(); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("insert into test_multi values ('abc')"); - // "commit" inside multistatement commits previous DML calls - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); commit"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("rollback"); - ResultSet rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - statement.execute("insert into test_multi values ('abc')"); - // "rollback" inside 
multistatement rolls back previous DML calls - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute("insert into test_multi values ('def'); rollback"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "insert into test_multi values ('abc'); insert into test_multi values ('def')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("commit"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - - statement.execute("create or replace table test_multi (cola string)"); - // open transaction inside multistatement continues after - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - statement.execute( - "insert into test_multi values ('abc'); insert into test_multi values ('def')"); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("rollback"); - rs = statement.executeQuery("select count(*) from test_multi"); - assertTrue(rs.next()); - assertEquals(0, rs.getInt(1)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + connection.setAutoCommit(false); + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("insert into test_multi values ('abc')"); + // "commit" inside multistatement commits previous DML calls + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); commit"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + + statement.execute("create or replace table test_multi (cola string)"); + statement.execute("insert into test_multi values ('abc')"); + // "rollback" inside multistatement rolls back previous DML calls + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute("insert into test_multi values ('def'); rollback"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "insert into test_multi values ('abc'); insert into test_multi values ('def')"); + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("commit"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + } + statement.execute("create or replace table test_multi (cola string)"); + // open transaction inside multistatement continues after + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + statement.execute( + "insert into test_multi values ('abc'); insert into test_multi values ('def')"); + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("rollback"); + try (ResultSet rs = statement.executeQuery("select count(*) from test_multi")) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } + } } @Test @@ -388,97 +371,102 @@ public void testMultiStmtLarge() throws SQLException { // this test verifies that multiple-statement support does not break // with many statements // it also ensures that results are returned in the correct order - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - StringBuilder multiStmtBuilder = new StringBuilder(); - String query = "SELECT %d;"; - for (int i = 0; i < 100; i++) { - multiStmtBuilder.append(String.format(query, i)); - } - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 100); - - assertTrue(statement.execute(multiStmtBuilder.toString())); - for (int i = 0; i < 100; i++) { - ResultSet rs = statement.getResultSet(); - assertNotNull(rs); - assertEquals(-1, statement.getUpdateCount()); - assertTrue(rs.next()); - assertEquals(i, rs.getInt(1)); - assertFalse(rs.next()); - - if (i != 99) { - assertTrue(statement.getMoreResults()); - } else { - assertFalse(statement.getMoreResults()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + StringBuilder multiStmtBuilder = new StringBuilder(); + String query = "SELECT %d;"; + for (int i = 0; i < 100; i++) { + multiStmtBuilder.append(String.format(query, i)); + } + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 100); + + assertTrue(statement.execute(multiStmtBuilder.toString())); + for (int i = 0; i < 100; i++) { + try (ResultSet rs = statement.getResultSet()) { + assertNotNull(rs); + assertEquals(-1, statement.getUpdateCount()); + assertTrue(rs.next()); + assertEquals(i, rs.getInt(1)); + assertFalse(rs.next()); + + if (i != 99) { + 
assertTrue(statement.getMoreResults()); + } else { + assertFalse(statement.getMoreResults()); + } + } } } - - statement.close(); - connection.close(); } @Test public void testMultiStmtCountNotMatch() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - try { - statement.execute("select 1; select 2; select 3"); - fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(8)); - } + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("select 1; select 2; select 3"); + fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(8)); + } - try { - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - statement.execute("select 1"); - fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(8)); - } + try { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + statement.execute("select 1"); + fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(8)); + } - // 0 means any number of statement can be executed - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 0); - statement.execute("select 1; select 2; select 3"); + // 0 means any number of statement can be executed + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 0); + statement.execute("select 1; select 2; select 3"); + } } @Test @ConditionalIgnore(condition = RunningOnGithubAction.class) public void testInvalidParameterCount() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - ResultSet rs = statement.executeQuery("select current_account_locator()"); - rs.next(); - String accountName = rs.getString(1); - - rs = statement.executeQuery("select current_user()"); - rs.next(); - String userName = rs.getString(1); - - String[] 
testSuites = new String[5]; - testSuites[0] = - String.format("alter account %s set " + "multi_statement_count = 20", accountName); - testSuites[1] = - String.format("alter account %s set " + "multi_statement_count = -1", accountName); - testSuites[2] = String.format("alter user %s set " + "multi_statement_count = 20", userName); - testSuites[3] = String.format("alter user %s set " + "multi_statement_count = -1", userName); - testSuites[4] = "alter session set " + "multi_statement_count = -1"; - - int[] expectedErrorCodes = new int[5]; - expectedErrorCodes[0] = 1008; - expectedErrorCodes[1] = 1008; - expectedErrorCodes[2] = 1006; - expectedErrorCodes[3] = 1006; - expectedErrorCodes[4] = 1008; - - statement.execute("use role accountadmin"); - - for (int i = 0; i < testSuites.length; i++) { - try { - statement.execute(testSuites[i]); - Assert.fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(expectedErrorCodes[i])); + String userName = null; + String accountName = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + try (ResultSet rs = statement.executeQuery("select current_account_locator()")) { + assertTrue(rs.next()); + accountName = rs.getString(1); + } + + try (ResultSet rs = statement.executeQuery("select current_user()")) { + assertTrue(rs.next()); + userName = rs.getString(1); + } + + String[] testSuites = new String[5]; + testSuites[0] = + String.format("alter account %s set " + "multi_statement_count = 20", accountName); + testSuites[1] = + String.format("alter account %s set " + "multi_statement_count = -1", accountName); + testSuites[2] = String.format("alter user %s set " + "multi_statement_count = 20", userName); + testSuites[3] = String.format("alter user %s set " + "multi_statement_count = -1", userName); + testSuites[4] = "alter session set " + "multi_statement_count = -1"; + + int[] expectedErrorCodes = new int[5]; + expectedErrorCodes[0] = 1008; + 
expectedErrorCodes[1] = 1008; + expectedErrorCodes[2] = 1006; + expectedErrorCodes[3] = 1006; + expectedErrorCodes[4] = 1008; + + statement.execute("use role accountadmin"); + + for (int i = 0; i < testSuites.length; i++) { + try { + statement.execute(testSuites[i]); + Assert.fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(expectedErrorCodes[i])); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java index c4e91e872..59f5ba795 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java @@ -29,302 +29,300 @@ public class MultiStatementLatestIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testMultiStmtExecute() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - boolean hasResultSet = statement.execute(multiStmtQuery); - // first statement - assertFalse(hasResultSet); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, 
statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + boolean hasResultSet = statement.execute(multiStmtQuery); + // first statement + assertFalse(hasResultSet); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } } @Test public void testMultiStmtTransaction() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table test_multi_txn(c1 number, c2 string)" + " as select 10, 'z'"); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - String multiStmtQuery = - 
"begin;\n" - + "delete from test_multi_txn;\n" - + "insert into test_multi_txn values (1, 'a'), (2, 'b');\n" - + "commit"; - - boolean hasResultSet = statement.execute(multiStmtQuery); - // first statement - assertFalse(hasResultSet); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(1, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // fourth statement - assertFalse(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("drop table if exists test_multi_txn"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_multi_txn(c1 number, c2 string)" + " as select 10, 'z'"); + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + String multiStmtQuery = + "begin;\n" + + "delete from test_multi_txn;\n" + + "insert into test_multi_txn values (1, 'a'), (2, 'b');\n" + + "commit"; + + boolean hasResultSet = statement.execute(multiStmtQuery); + // first statement + assertFalse(hasResultSet); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(1, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + 
assertEquals(2, statement.getUpdateCount()); + + // fourth statement + assertFalse(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + + } finally { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("drop table if exists test_multi_txn"); + } + } } @Test public void testMultiStmtExecuteUpdate() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); - int rowCount = statement.executeUpdate(multiStmtQuery); - // first statement - assertEquals(0, rowCount); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + 
statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 3); + int rowCount = statement.executeUpdate(multiStmtQuery); + // first statement + assertEquals(0, rowCount); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } } @Test public void testMultiStmtTransactionRollback() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table test_multi_txn_rb(c1 number, c2 string)" + " as select 10, 'z'"); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - String multiStmtQuery = - "begin;\n" - + "delete from test_multi_txn_rb;\n" - + "rollback;\n" - + "select count(*) from test_multi_txn_rb"; - - boolean hasResultSet = statement.execute(multiStmtQuery); - // first statement - assertFalse(hasResultSet); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(1, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // fourth statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - ResultSet rs = 
statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); - statement.execute("drop table if exists test_multi_txn_rb"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_multi_txn_rb(c1 number, c2 string)" + + " as select 10, 'z'"); + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + String multiStmtQuery = + "begin;\n" + + "delete from test_multi_txn_rb;\n" + + "rollback;\n" + + "select count(*) from test_multi_txn_rb"; + + boolean hasResultSet = statement.execute(multiStmtQuery); + // first statement + assertFalse(hasResultSet); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(1, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // fourth statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } finally { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 1); + statement.execute("drop table if exists test_multi_txn_rb"); + } + } } @Test public void testMultiStmtExecuteQuery() throws SQLException { - Connection connection = getConnection(); - 
Statement statement = connection.createStatement(); - String multiStmtQuery = - "select 1;\n" - + "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; - - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); - ResultSet rs = statement.executeQuery(multiStmtQuery); - // first statement - assertNotNull(rs); - assertNotNull(statement.getResultSet()); - assertEquals(-1, statement.getUpdateCount()); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertFalse(rs.next()); - - // second statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(0, statement.getUpdateCount()); - - // third statement - assertTrue(statement.getMoreResults()); - assertNull(statement.getResultSet()); - assertEquals(2, statement.getUpdateCount()); - - // fourth statement - assertTrue(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - assertTrue(rs.next()); - assertEquals(2, rs.getInt(1)); - assertFalse(rs.next()); - - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "select 1;\n" + + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; + + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 4); + try (ResultSet rs = statement.executeQuery(multiStmtQuery)) { + // first statement + assertNotNull(rs); + assertNotNull(statement.getResultSet()); + assertEquals(-1, statement.getUpdateCount()); + assertTrue(rs.next()); + assertEquals(1, 
rs.getInt(1)); + assertFalse(rs.next()); + + // second statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(0, statement.getUpdateCount()); + + // third statement + assertTrue(statement.getMoreResults()); + assertNull(statement.getResultSet()); + assertEquals(2, statement.getUpdateCount()); + + // fourth statement + assertTrue(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertFalse(rs.next()); + + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + } + } } @Test public void testMultiStmtUpdateCount() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); - boolean isResultSet = - statement.execute( - "CREATE OR REPLACE TEMPORARY TABLE TABLIST AS " - + "SELECT TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " - + "WHERE TABLE_NAME LIKE 'K%' " - + "ORDER BY TABLE_SCHEMA, TABLE_NAME; " - + "SELECT * FROM TABLIST " - + "JOIN INFORMATION_SCHEMA.COLUMNS " - + "ON COLUMNS.TABLE_SCHEMA = TABLIST.TABLE_SCHEMA " - + "AND COLUMNS.TABLE_NAME = TABLIST.TABLE_NAME;"); - assertEquals(isResultSet, false); - int statementUpdateCount = statement.getUpdateCount(); - assertEquals(statementUpdateCount, 0); - isResultSet = statement.getMoreResults(); - assertEquals(isResultSet, true); - statementUpdateCount = statement.getUpdateCount(); - assertEquals(statementUpdateCount, -1); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.unwrap(SnowflakeStatement.class).setParameter("MULTI_STATEMENT_COUNT", 2); + boolean isResultSet = + 
statement.execute( + "CREATE OR REPLACE TEMPORARY TABLE TABLIST AS " + + "SELECT TABLE_SCHEMA, TABLE_NAME FROM INFORMATION_SCHEMA.TABLES " + + "WHERE TABLE_NAME LIKE 'K%' " + + "ORDER BY TABLE_SCHEMA, TABLE_NAME; " + + "SELECT * FROM TABLIST " + + "JOIN INFORMATION_SCHEMA.COLUMNS " + + "ON COLUMNS.TABLE_SCHEMA = TABLIST.TABLE_SCHEMA " + + "AND COLUMNS.TABLE_NAME = TABLIST.TABLE_NAME;"); + assertEquals(isResultSet, false); + int statementUpdateCount = statement.getUpdateCount(); + assertEquals(statementUpdateCount, 0); + isResultSet = statement.getMoreResults(); + assertEquals(isResultSet, true); + statementUpdateCount = statement.getUpdateCount(); + assertEquals(statementUpdateCount, -1); + } } /** Test use of anonymous blocks (SNOW-758262) */ @Test public void testAnonymousBlocksUse() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table tab758262(c1 number)"); - // Test anonymous block with multistatement - int multistatementcount = 2; - statement - .unwrap(SnowflakeStatement.class) - .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); - String multiStmtQuery = - "begin\n" - + "insert into tab758262 values (1);\n" - + "return 'done';\n" - + "end;\n" - + "select * from tab758262;"; - - statement.execute(multiStmtQuery); - for (int i = 0; i < multistatementcount - 1; i++) { - assertTrue(statement.getMoreResults()); - } - ResultSet rs = statement.getResultSet(); - assertTrue(rs.next()); - assertEquals(1, rs.getInt(1)); - - // Test anonymous block in the middle of other queries in multistatement - multiStmtQuery = - "insert into tab758262 values (25), (26);\n" - + "begin\n" - + "insert into tab758262 values (27);\n" - + "return 'done';\n" - + "end;\n" - + "select * from tab758262;"; - multistatementcount = 3; - statement - .unwrap(SnowflakeStatement.class) - .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); - 
statement.execute(multiStmtQuery); - for (int i = 0; i < multistatementcount - 1; i++) { - assertTrue(statement.getMoreResults()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table tab758262(c1 number)"); + // Test anonymous block with multistatement + int multistatementcount = 2; + statement + .unwrap(SnowflakeStatement.class) + .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); + String multiStmtQuery = + "begin\n" + + "insert into tab758262 values (1);\n" + + "return 'done';\n" + + "end;\n" + + "select * from tab758262;"; + + statement.execute(multiStmtQuery); + for (int i = 0; i < multistatementcount - 1; i++) { + assertTrue(statement.getMoreResults()); + } + try (ResultSet rs = statement.getResultSet()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + } + + // Test anonymous block in the middle of other queries in multistatement + multiStmtQuery = + "insert into tab758262 values (25), (26);\n" + + "begin\n" + + "insert into tab758262 values (27);\n" + + "return 'done';\n" + + "end;\n" + + "select * from tab758262;"; + multistatementcount = 3; + statement + .unwrap(SnowflakeStatement.class) + .setParameter("MULTI_STATEMENT_COUNT", multistatementcount); + statement.execute(multiStmtQuery); + for (int i = 0; i < multistatementcount - 1; i++) { + assertTrue(statement.getMoreResults()); + } + try (ResultSet rs = statement.getResultSet()) { + assertEquals(4, getSizeOfResultSet(rs)); + } } - rs = statement.getResultSet(); - assertEquals(4, getSizeOfResultSet(rs)); - rs.close(); - statement.close(); - connection.close(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java index d34cc4bc9..d767456a2 100644 --- a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java @@ -142,8 
+142,8 @@ public void testSystemFunctions() throws SQLException { } static void testFunction(Connection connection, String sql, String expected) throws SQLException { - try (Statement statement = connection.createStatement()) { - ResultSet resultSet = statement.executeQuery(sql); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(sql)) { assertTrue(resultSet.next()); assertEquals(expected, resultSet.getString(1)); } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java index 112df2724..3d1997193 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java @@ -3,6 +3,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.PreparedStatement; @@ -21,204 +22,215 @@ public class PreparedMultiStmtIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @Test public void testExecuteUpdateCount() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - statement.execute("create or replace table test_multi_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement( - "insert into 
test_multi_bind(c1) values(?); insert into " - + "test_multi_bind values (?), (?)"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); - - preparedStatement.setInt(1, 20); - preparedStatement.setInt(2, 30); - preparedStatement.setInt(3, 40); - - // first statement - int rowCount = preparedStatement.executeUpdate(); - assertThat(rowCount, is(1)); - assertThat(preparedStatement.getResultSet(), is(nullValue())); - assertThat(preparedStatement.getUpdateCount(), is(1)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(false)); - assertThat(preparedStatement.getUpdateCount(), is(2)); - - ResultSet resultSet = statement.executeQuery("select c1 from test_multi_bind order by c1 asc"); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(30)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - - statement.execute("drop table if exists test_multi_bind"); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + statement.execute("create or replace table test_multi_bind(c1 number)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into test_multi_bind(c1) values(?); insert into " + + "test_multi_bind values (?), (?)")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); + + preparedStatement.setInt(1, 20); + preparedStatement.setInt(2, 30); + preparedStatement.setInt(3, 40); + + // first statement + int rowCount = preparedStatement.executeUpdate(); + assertThat(rowCount, is(1)); + assertThat(preparedStatement.getResultSet(), is(nullValue())); + assertThat(preparedStatement.getUpdateCount(), is(1)); + + // second statement + 
assertThat(preparedStatement.getMoreResults(), is(false)); + assertThat(preparedStatement.getUpdateCount(), is(2)); + + try (ResultSet resultSet = + statement.executeQuery("select c1 from test_multi_bind order by c1 asc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(30)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + } + } + } finally { + statement.execute("drop table if exists test_multi_bind"); + } + } } /** Less bindings than expected in statement */ @Test public void testExecuteLessBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - statement.execute("create or replace table test_multi_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement( - "insert into test_multi_bind(c1) values(?); insert into " - + "test_multi_bind values (?), (?)"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); - - preparedStatement.setInt(1, 20); - preparedStatement.setInt(2, 30); - - // first statement - try { - preparedStatement.executeUpdate(); - Assert.fail(); - } catch (SQLException e) { - // error code comes from xp, which is js execution failed. 
- assertThat(e.getErrorCode(), is(100132)); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + statement.execute("create or replace table test_multi_bind(c1 number)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into test_multi_bind(c1) values(?); insert into " + + "test_multi_bind values (?), (?)")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); + + preparedStatement.setInt(1, 20); + preparedStatement.setInt(2, 30); + + // first statement + try { + preparedStatement.executeUpdate(); + Assert.fail(); + } catch (SQLException e) { + // error code comes from xp, which is js execution failed. + assertThat(e.getErrorCode(), is(100132)); + } + } + } finally { + statement.execute("drop table if exists test_multi_bind"); + } } - - statement.execute("drop table if exists test_multi_bind"); - preparedStatement.close(); - connection.close(); } @Test public void testExecuteMoreBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - statement.execute("create or replace table test_multi_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement( - "insert into test_multi_bind(c1) values(?); insert into " - + "test_multi_bind values (?), (?)"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); - - preparedStatement.setInt(1, 20); - preparedStatement.setInt(2, 30); - preparedStatement.setInt(3, 40); - // 4th binding should be ignored - preparedStatement.setInt(4, 50); - - // first statement - int rowCount = preparedStatement.executeUpdate(); - assertThat(rowCount, is(1)); - 
assertThat(preparedStatement.getResultSet(), is(nullValue())); - assertThat(preparedStatement.getUpdateCount(), is(1)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(false)); - assertThat(preparedStatement.getUpdateCount(), is(2)); - - ResultSet resultSet = statement.executeQuery("select c1 from test_multi_bind order by c1 asc"); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(30)); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - - statement.execute("drop table if exists test_multi_bind"); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + statement.execute("create or replace table test_multi_bind(c1 number)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into test_multi_bind(c1) values(?); insert into " + + "test_multi_bind values (?), (?)")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(3)); + + preparedStatement.setInt(1, 20); + preparedStatement.setInt(2, 30); + preparedStatement.setInt(3, 40); + // 4th binding should be ignored + preparedStatement.setInt(4, 50); + + // first statement + int rowCount = preparedStatement.executeUpdate(); + assertThat(rowCount, is(1)); + assertThat(preparedStatement.getResultSet(), is(nullValue())); + assertThat(preparedStatement.getUpdateCount(), is(1)); + + // second statement + assertThat(preparedStatement.getMoreResults(), is(false)); + assertThat(preparedStatement.getUpdateCount(), is(2)); + + try (ResultSet resultSet = + statement.executeQuery("select c1 from test_multi_bind order by c1 asc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertTrue(resultSet.next()); + 
assertThat(resultSet.getInt(1), is(30)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + } + } + } finally { + statement.execute("drop table if exists test_multi_bind"); + } + } } @Test public void testExecuteQueryBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - - PreparedStatement preparedStatement = - connection.prepareStatement("select ?; select ?, ?; select ?, ?, ?"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(6)); - - preparedStatement.setInt(1, 10); - preparedStatement.setInt(2, 20); - preparedStatement.setInt(3, 30); - preparedStatement.setInt(4, 40); - preparedStatement.setInt(5, 50); - preparedStatement.setInt(6, 60); - - // first statement - ResultSet resultSet = preparedStatement.executeQuery(); - assertThat(resultSet.next(), is(true)); - assertThat(resultSet.getInt(1), is(10)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - assertThat(resultSet.getInt(2), is(30)); - - // third statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - assertThat(resultSet.getInt(2), is(50)); - assertThat(resultSet.getInt(3), is(60)); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("select ?; select ?, ?; select ?, ?, ?")) { + + 
assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(6)); + + preparedStatement.setInt(1, 10); + preparedStatement.setInt(2, 20); + preparedStatement.setInt(3, 30); + preparedStatement.setInt(4, 40); + preparedStatement.setInt(5, 50); + preparedStatement.setInt(6, 60); + + // first statement + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertThat(resultSet.next(), is(true)); + assertThat(resultSet.getInt(1), is(10)); + } + // second statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertThat(resultSet.getInt(2), is(30)); + } + + // third statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + assertThat(resultSet.getInt(2), is(50)); + assertThat(resultSet.getInt(3), is(60)); + } + } + } } @Test public void testExecuteQueryNoBindings() throws Exception { - SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); - Statement statement = connection.createStatement(); - statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); - - PreparedStatement preparedStatement = - connection.prepareStatement("select 10; select 20, 30; select 40, 50, 60"); - - assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(0)); - - // first statement - ResultSet resultSet = preparedStatement.executeQuery(); - assertThat(resultSet.next(), is(true)); - assertThat(resultSet.getInt(1), is(10)); - - // second statement - assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(20)); - assertThat(resultSet.getInt(2), is(30)); - - // third statement - 
assertThat(preparedStatement.getMoreResults(), is(true)); - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertThat(resultSet.getInt(1), is(40)); - assertThat(resultSet.getInt(2), is(50)); - assertThat(resultSet.getInt(3), is(60)); - - preparedStatement.close(); - connection.close(); + try (SnowflakeConnectionV1 connection = (SnowflakeConnectionV1) getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("select 10; select 20, 30; select 40, 50, 60")) { + + assertThat(preparedStatement.getParameterMetaData().getParameterCount(), is(0)); + + // first statement + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertThat(resultSet.next(), is(true)); + assertThat(resultSet.getInt(1), is(10)); + } + + // second statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(20)); + assertThat(resultSet.getInt(2), is(30)); + } + + // third statement + assertThat(preparedStatement.getMoreResults(), is(true)); + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt(1), is(40)); + assertThat(resultSet.getInt(2), is(50)); + assertThat(resultSet.getInt(3), is(60)); + } + } + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java index f3a2c942a..7c05163dc 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java @@ -15,9 +15,9 @@ abstract class PreparedStatement0IT extends BaseJDBCTest { Connection init() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement 
stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java index bd23d803d..56bef419f 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java @@ -85,7 +85,8 @@ public void testGetParameterMetaData() throws SQLException { /** Trigger default stage array binding threshold so that it can be run on travis */ @Test public void testInsertStageArrayBind() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { connection .createStatement() .execute("create or replace table testStageArrayBind(c1 integer)"); @@ -98,14 +99,12 @@ public void testInsertStageArrayBind() throws SQLException { } prepStatement.executeBatch(); - try (Statement statement = connection.createStatement()) { - try (ResultSet resultSet = - statement.executeQuery("select * from testStageArrayBind order by c1 asc")) { - int count = 0; - while (resultSet.next()) { - assertThat(resultSet.getInt(1), is(count)); - count++; - } + try (ResultSet resultSet = + statement.executeQuery("select * from testStageArrayBind order by c1 asc")) { + int count = 0; + while (resultSet.next()) { + assertThat(resultSet.getInt(1), is(count)); + count++; } } } @@ -227,16 +226,15 @@ public void testInsertBatchStageMultipleTimes() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchNull() throws SQLException { - try (Connection connection = init()) { + try (Connection 
connection = init(); + Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled for (int threshold : thresholds) { - connection.createStatement().execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table - connection - .createStatement() - .execute( - String.format( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); + statement.execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table + statement.execute( + String.format( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { prepStatement.setNull(1, Types.INTEGER); prepStatement.setNull(2, Types.DOUBLE); @@ -250,9 +248,8 @@ public void testStageBatchNull() throws SQLException { assertEquals(1, countResult[0]); } - try (ResultSet resultSet = - connection.createStatement().executeQuery("SELECT * FROM TEST_PREPST")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("SELECT * FROM TEST_PREPST")) { + assertTrue(resultSet.next()); String errorMessage = "Column should be null (" + (threshold > 0 ? 
"stage" : "non-stage") + ")"; resultSet.getInt(1); @@ -275,19 +272,18 @@ public void testStageBatchNull() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageString() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled String[] rows = { null, "", "\"", ",", "\n", "\r\n", "\"\"", "null", "\\\n", "\",", "\\\",\\\"" }; for (int threshold : thresholds) { - connection.createStatement().execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table - connection - .createStatement() - .execute( - String.format( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); + statement.execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table + statement.execute( + String.format( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { for (int i = 0; i < rows.length; i++) { bindOneParamSet(prepStatement, i, 0.0, 0.0f, rows[i], 0, (short) 0); @@ -296,13 +292,11 @@ public void testStageString() throws SQLException { prepStatement.executeBatch(); try (ResultSet resultSet = - connection - .createStatement() - .executeQuery("SELECT colC FROM TEST_PREPST ORDER BY id ASC")) { + statement.executeQuery("SELECT colC FROM TEST_PREPST ORDER BY id ASC")) { String errorMessage = "Strings should match (" + (threshold > 0 ? 
"stage" : "non-stage") + ")"; for (String row : rows) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(errorMessage, row, resultSet.getString(1)); } } @@ -314,16 +308,15 @@ public void testStageString() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testIncorrectTypes() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled for (int threshold : thresholds) { - connection.createStatement().execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table - connection - .createStatement() - .execute( - String.format( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); + statement.execute("DELETE FROM TEST_PREPST WHERE 1=1"); // clear table + statement.execute( + String.format( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = %d", threshold)); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { prepStatement.setString(1, "notAnInt"); // should cause error @@ -348,7 +341,8 @@ public void testIncorrectTypes() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchTimestamps() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { Timestamp tsEpoch = new Timestamp(0L); Timestamp tsEpochMinusOneSec = new Timestamp(-1000L); // negative epoch no fraction of seconds Timestamp tsPast = new Timestamp(-2208988800100L); // very large negative epoch @@ -363,22 +357,14 @@ public void testStageBatchTimestamps() throws SQLException { try { // Test that stage and non-stage bindings are consistent for each timestamp type for (String tsType : tsTypes) { - connection - .createStatement() - .execute("ALTER 
SESSION SET TIMESTAMP_TYPE_MAPPING = " + tsType); - connection - .createStatement() - .execute("ALTER SESSION SET CLIENT_TIMESTAMP_TYPE_MAPPING = " + tsType); - - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_ts (id INTEGER, tz TIMESTAMP)"); + statement.execute("ALTER SESSION SET TIMESTAMP_TYPE_MAPPING = " + tsType); + statement.execute("ALTER SESSION SET CLIENT_TIMESTAMP_TYPE_MAPPING = " + tsType); + + statement.execute("CREATE OR REPLACE TABLE test_prepst_ts (id INTEGER, tz TIMESTAMP)"); try (PreparedStatement prepStatement = connection.prepareStatement("INSERT INTO test_prepst_ts(id, tz) VALUES(?,?)")) { // First, run with non-stage binding - connection - .createStatement() - .executeQuery("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); + statement.executeQuery("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); for (int i = 0; i < timestamps.length; i++) { prepStatement.setInt(1, i); prepStatement.setTimestamp(2, timestamps[i]); @@ -390,22 +376,18 @@ public void testStageBatchTimestamps() throws SQLException { } Timestamp[] nonStageResult = new Timestamp[timestamps.length]; - ResultSet rsNonStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC"); - for (int i = 0; i < nonStageResult.length; i++) { - rsNonStage.next(); - nonStageResult[i] = rsNonStage.getTimestamp(2); + try (ResultSet rsNonStage = + statement.executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC")) { + for (int i = 0; i < nonStageResult.length; i++) { + assertTrue(rsNonStage.next()); + nonStageResult[i] = rsNonStage.getTimestamp(2); + } } - - connection.createStatement().execute("DELETE FROM test_prepst_ts WHERE 1=1"); + statement.execute("DELETE FROM test_prepst_ts WHERE 1=1"); // Now, run with stage binding - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage + statement.execute( + "ALTER SESSION SET 
CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage // bind for (int i = 0; i < timestamps.length; i++) { prepStatement.setInt(1, i); @@ -418,27 +400,26 @@ public void testStageBatchTimestamps() throws SQLException { } Timestamp[] stageResult = new Timestamp[timestamps.length]; - ResultSet rsStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC"); - for (int i = 0; i < stageResult.length; i++) { - rsStage.next(); - stageResult[i] = rsStage.getTimestamp(2); - } - - for (int i = 0; i < timestamps.length; i++) { - assertEquals( - "Stage binding timestamp should match non-stage binding timestamp (" - + tsType - + ")", - nonStageResult[i], - stageResult[i]); + try (ResultSet rsStage = + statement.executeQuery("SELECT * FROM test_prepst_ts ORDER BY id ASC")) { + for (int i = 0; i < stageResult.length; i++) { + assertTrue(rsStage.next()); + stageResult[i] = rsStage.getTimestamp(2); + } + + for (int i = 0; i < timestamps.length; i++) { + assertEquals( + "Stage binding timestamp should match non-stage binding timestamp (" + + tsType + + ")", + nonStageResult[i], + stageResult[i]); + } } } } } finally { - connection.createStatement().execute("DROP TABLE IF EXISTS test_prepst_ts"); + statement.execute("DROP TABLE IF EXISTS test_prepst_ts"); } } } @@ -446,7 +427,8 @@ public void testStageBatchTimestamps() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchTimes() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { Time tMidnight = new Time(0); Time tNeg = new Time(-1); Time tPos = new Time(1); @@ -455,16 +437,12 @@ public void testStageBatchTimes() throws SQLException { Time[] times = new Time[] {tMidnight, tNeg, tPos, tNow, tNoon, null}; int[] countResult; try { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE 
test_prepst_time (id INTEGER, tod TIME)"); + statement.execute("CREATE OR REPLACE TABLE test_prepst_time (id INTEGER, tod TIME)"); try (PreparedStatement prepStatement = connection.prepareStatement("INSERT INTO test_prepst_time(id, tod) VALUES(?,?)")) { // First, run with non-stage binding - connection - .createStatement() - .execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); for (int i = 0; i < times.length; i++) { prepStatement.setInt(1, i); prepStatement.setTime(2, times[i]); @@ -477,21 +455,17 @@ public void testStageBatchTimes() throws SQLException { Time[] nonStageResult = new Time[times.length]; ResultSet rsNonStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_time ORDER BY id ASC"); + statement.executeQuery("SELECT * FROM test_prepst_time ORDER BY id ASC"); for (int i = 0; i < nonStageResult.length; i++) { - rsNonStage.next(); + assertTrue(rsNonStage.next()); nonStageResult[i] = rsNonStage.getTime(2); } - connection.createStatement().execute("DELETE FROM test_prepst_time WHERE 1=1"); + statement.execute("DELETE FROM test_prepst_time WHERE 1=1"); // Now, run with stage binding - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage // bind for (int i = 0; i < times.length; i++) { prepStatement.setInt(1, i); @@ -504,24 +478,23 @@ public void testStageBatchTimes() throws SQLException { } Time[] stageResult = new Time[times.length]; - ResultSet rsStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_time ORDER BY id ASC"); - for (int i = 0; i < stageResult.length; i++) { - rsStage.next(); - stageResult[i] = rsStage.getTime(2); - } + try (ResultSet rsStage = + statement.executeQuery("SELECT * FROM test_prepst_time ORDER BY id 
ASC")) { + for (int i = 0; i < stageResult.length; i++) { + assertTrue(rsStage.next()); + stageResult[i] = rsStage.getTime(2); + } - for (int i = 0; i < times.length; i++) { - assertEquals( - "Stage binding time should match non-stage binding time", - nonStageResult[i], - stageResult[i]); + for (int i = 0; i < times.length; i++) { + assertEquals( + "Stage binding time should match non-stage binding time", + nonStageResult[i], + stageResult[i]); + } } } } finally { - connection.createStatement().execute("DROP TABLE IF EXISTS test_prepst_time"); + statement.execute("DROP TABLE IF EXISTS test_prepst_time"); } } } @@ -541,7 +514,7 @@ public void testClearParameters() throws SQLException { prepStatement.executeUpdate(); try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(3, resultSet.getInt(1)); assertFalse(resultSet.next()); } @@ -574,7 +547,7 @@ public void testClearBatch() throws SQLException { assertThat(batchSize, is(0)); try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(3, resultSet.getInt(1)); assertFalse(resultSet.next()); } @@ -584,15 +557,14 @@ public void testClearBatch() throws SQLException { @Test public void testInsertOneRow() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); assertEquals(1, prepStatement.executeUpdate()); } - try (ResultSet resultSet = 
connection.createStatement().executeQuery(selectAllSQL)) { + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(1, getSizeOfResultSet(resultSet)); } try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -606,10 +578,9 @@ public void testInsertOneRow() throws SQLException { @Test public void testUpdateOneRow() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -621,8 +592,8 @@ public void testUpdateOneRow() throws SQLException { prepStatement.setInt(1, 1); int count = prepStatement.executeUpdate(); assertEquals(1, count); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertTrue(resultSet.next()); assertEquals("newString", resultSet.getString(4)); } } @@ -631,9 +602,9 @@ public void testUpdateOneRow() throws SQLException { assertFalse(prepStatement.execute()); assertEquals(1, prepStatement.getUpdateCount()); assertEquals(1L, prepStatement.getLargeUpdateCount()); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertTrue(resultSet.next()); + assertTrue(resultSet.next()); assertEquals("newString", resultSet.getString(4)); } } @@ -642,10 +613,9 @@ public void testUpdateOneRow() throws SQLException { @Test public void testDeleteOneRow() 
throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -658,7 +628,7 @@ public void testDeleteOneRow() throws SQLException { prepStatement.setInt(1, 1); int count = prepStatement.executeUpdate(); assertEquals(1, count); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(1, getSizeOfResultSet(resultSet)); } // evaluate query ids @@ -672,7 +642,7 @@ public void testDeleteOneRow() throws SQLException { assertFalse(prepStatement.execute()); assertEquals(1, prepStatement.getUpdateCount()); assertEquals(1L, prepStatement.getLargeUpdateCount()); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(0, getSizeOfResultSet(resultSet)); // evaluate query ids assertTrue(prepStatement.isWrapperFor(SnowflakePreparedStatement.class)); @@ -736,9 +706,9 @@ public void testUpdateBatch() throws SQLException { assertEquals(0, prepStatement.getUpdateCount()); assertEquals(0L, prepStatement.getLargeUpdateCount()); try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getString(4), is("newString")); - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getString(4), is("newString")); } } @@ -748,9 +718,10 @@ public void testUpdateBatch() throws 
SQLException { @Test public void testBatchInsertWithCacheEnabled() throws SQLException { int[] countResult; - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // ensure enable the cache result use - connection.createStatement().execute(enableCacheReuse); + statement.execute(enableCacheReuse); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 1); @@ -770,14 +741,14 @@ public void testBatchInsertWithCacheEnabled() throws SQLException { assertEquals(1, countResult[0]); assertEquals(1, countResult[1]); - try (ResultSet resultSet = connection.createStatement().executeQuery(selectAllSQL)) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery(selectAllSQL)) { + assertTrue(resultSet.next()); assertEquals(1, resultSet.getInt(1)); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(2, resultSet.getInt(1)); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(3, resultSet.getInt(1)); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(4, resultSet.getInt(1)); assertFalse(resultSet.next()); } @@ -805,16 +776,18 @@ public void manualTestForPreparedStatementLogging() throws SQLException { props.put("user", params.get("user")); props.put("password", params.get("password")); props.put("tracing", "info"); - Connection con = DriverManager.getConnection(uri, props); - con.createStatement() - .executeUpdate("alter session set CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=true"); - con.createStatement().execute(createTableSQL); - PreparedStatement prepStatement = con.prepareStatement(insertSQL, Statement.NO_GENERATED_KEYS); - bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); - prepStatement.addBatch(); - prepStatement.executeBatch(); - con.createStatement() - .executeUpdate("alter session set 
CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=false"); - con.close(); + try (Connection con = DriverManager.getConnection(uri, props); + Statement statement = con.createStatement()) { + statement.executeUpdate("alter session set CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=true"); + statement.execute(createTableSQL); + try (PreparedStatement prepStatement = + con.prepareStatement(insertSQL, Statement.NO_GENERATED_KEYS)) { + bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); + prepStatement.addBatch(); + prepStatement.executeBatch(); + statement.executeUpdate( + "alter session set CLIENT_ENABLE_LOG_INFO_STATEMENT_PARAMETERS=false"); + } + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java index 52b4f4518..872c8aab6 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java @@ -42,9 +42,10 @@ public PreparedStatement1LatestIT() { @Test public void testPrepStWithCacheEnabled() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // ensure enable the cache result use - connection.createStatement().execute(enableCacheReuse); + statement.execute(enableCacheReuse); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); @@ -54,13 +55,12 @@ public void testPrepStWithCacheEnabled() throws SQLException { prepStatement.execute(); } - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from test_prepst")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from test_prepst")) { + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(1), 1); - 
resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(1), 1); - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(1), 100); } @@ -69,13 +69,13 @@ public void testPrepStWithCacheEnabled() throws SQLException { prepStatement.setInt(1, 1); prepStatement.setInt(2, 1); try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(2), 2); prepStatement.setInt(1, 1); prepStatement.setInt(2, 100); } try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(resultSet.getInt(2), 101); } } @@ -110,35 +110,37 @@ public void testPrepStWithCacheEnabled() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testInsertStageArrayBindWithTime() throws SQLException { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - statement.execute("alter session set CLIENT_STAGE_ARRAY_BINDING_THRESHOLD=2"); - statement.execute("create or replace table testStageBindTime (c1 time, c2 time)"); - PreparedStatement prepSt = - connection.prepareStatement("insert into testStageBindTime values (?, ?)"); - Time[][] timeValues = { - {new Time(0), new Time(1)}, - {new Time(1000), new Time(Integer.MAX_VALUE)}, - {new Time(123456), new Time(55555)}, - {Time.valueOf("01:02:00"), new Time(-100)}, - }; - for (Time[] value : timeValues) { - prepSt.setTime(1, value[0]); - prepSt.setTime(2, value[1]); - prepSt.addBatch(); - } - prepSt.executeBatch(); - // check results - ResultSet rs = statement.executeQuery("select * from testStageBindTime"); - for (Time[] timeValue : timeValues) { - rs.next(); - assertEquals(timeValue[0].toString(), rs.getTime(1).toString()); - assertEquals(timeValue[1].toString(), rs.getTime(2).toString()); + try (Connection connection = init(); + Statement statement = 
connection.createStatement()) { + try { + statement.execute("alter session set CLIENT_STAGE_ARRAY_BINDING_THRESHOLD=2"); + statement.execute("create or replace table testStageBindTime (c1 time, c2 time)"); + PreparedStatement prepSt = + connection.prepareStatement("insert into testStageBindTime values (?, ?)"); + Time[][] timeValues = { + {new Time(0), new Time(1)}, + {new Time(1000), new Time(Integer.MAX_VALUE)}, + {new Time(123456), new Time(55555)}, + {Time.valueOf("01:02:00"), new Time(-100)}, + }; + for (Time[] value : timeValues) { + prepSt.setTime(1, value[0]); + prepSt.setTime(2, value[1]); + prepSt.addBatch(); + } + prepSt.executeBatch(); + // check results + try (ResultSet rs = statement.executeQuery("select * from testStageBindTime")) { + for (Time[] timeValue : timeValues) { + assertTrue(rs.next()); + assertEquals(timeValue[0].toString(), rs.getTime(1).toString()); + assertEquals(timeValue[1].toString(), rs.getTime(2).toString()); + } + } + } finally { + statement.execute("drop table if exists testStageBindTime"); + statement.execute("alter session unset CLIENT_STAGE_ARRAY_BINDING_THRESHOLD"); } - rs.close(); - statement.execute("drop table if exists testStageBindTime"); - statement.execute("alter session unset CLIENT_STAGE_ARRAY_BINDING_THRESHOLD"); - statement.close(); } } @@ -155,48 +157,48 @@ public void testInsertStageArrayBindWithTime() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSetObjectForTimestampTypes() throws SQLException { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // set timestamp mapping to default value - statement.execute("ALTER SESSION UNSET CLIENT_TIMESTAMP_TYPE_MAPPING"); - statement.execute("create or replace table TS (ntz TIMESTAMP_NTZ, ltz TIMESTAMP_LTZ)"); - PreparedStatement prepst = 
connection.prepareStatement("insert into TS values (?, ?)"); - String date1 = "2014-01-01 16:00:00"; - String date2 = "1945-11-12 5:25:00"; - Timestamp[] testTzs = {Timestamp.valueOf(date1), Timestamp.valueOf(date2)}; - for (int i = 0; i < testTzs.length; i++) { - // Disable stage array binding and insert the timestamp values - statement.execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); // disable stage bind - prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); - prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); - prepst.addBatch(); - prepst.executeBatch(); - // Enable stage array binding and insert the same timestamp values as above - statement.execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind - prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); - prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); - prepst.addBatch(); - prepst.executeBatch(); - } - ResultSet rs = statement.executeQuery("select * from TS"); - // Get results for each timestamp value tested - for (int i = 0; i < testTzs.length; i++) { - // Assert that the first row of inserts with payload binding matches the second row of - // inserts that used stage array binding - rs.next(); - Timestamp expectedNTZTs = rs.getTimestamp(1); - Timestamp expectedLTZTs = rs.getTimestamp(2); - rs.next(); - assertEquals(expectedNTZTs, rs.getTimestamp(1)); - assertEquals(expectedLTZTs, rs.getTimestamp(2)); + try { + statement.execute("ALTER SESSION UNSET CLIENT_TIMESTAMP_TYPE_MAPPING"); + statement.execute("create or replace table TS (ntz TIMESTAMP_NTZ, ltz TIMESTAMP_LTZ)"); + PreparedStatement prepst = connection.prepareStatement("insert into TS values (?, ?)"); + String date1 = "2014-01-01 16:00:00"; + String date2 = "1945-11-12 5:25:00"; + Timestamp[] testTzs = {Timestamp.valueOf(date1), Timestamp.valueOf(date2)}; + for (int i = 0; i < testTzs.length; i++) { + 
// Disable stage array binding and insert the timestamp values + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); // disable stage bind + prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); + prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); + prepst.addBatch(); + prepst.executeBatch(); + // Enable stage array binding and insert the same timestamp values as above + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind + prepst.setObject(1, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ); + prepst.setObject(2, testTzs[i], SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ); + prepst.addBatch(); + prepst.executeBatch(); + } + try (ResultSet rs = statement.executeQuery("select * from TS")) { + // Get results for each timestamp value tested + for (int i = 0; i < testTzs.length; i++) { + // Assert that the first row of inserts with payload binding matches the second row of + // inserts that used stage array binding + assertTrue(rs.next()); + Timestamp expectedNTZTs = rs.getTimestamp(1); + Timestamp expectedLTZTs = rs.getTimestamp(2); + assertTrue(rs.next()); + assertEquals(expectedNTZTs, rs.getTimestamp(1)); + assertEquals(expectedLTZTs, rs.getTimestamp(2)); + } + } + } finally { + statement.execute("ALTER SESSION UNSET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD;"); } - - // clean up - statement.execute("ALTER SESSION UNSET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD;"); - rs.close(); - statement.close(); } } @@ -290,26 +292,29 @@ public void testSetObjectMethodWithLargeBigIntegerColumn() { @Test public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - statement.execute("alter session set TIMESTAMP_INPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FFTZH'"); - statement.execute( - "create or replace table testStageBindTypes (c1 date, c2 datetime, c3 
timestamp)"); - java.util.Date today = new java.util.Date(); - java.sql.Date sqldate = new java.sql.Date(today.getDate()); - java.sql.Timestamp todaySQL = new java.sql.Timestamp(today.getTime()); - PreparedStatement prepSt = - connection.prepareStatement("insert into testStageBindTypes values (?, ?, ?)"); - for (int i = 1; i < 30000; i++) { - prepSt.setDate(1, sqldate); - prepSt.setDate(2, sqldate); - prepSt.setTimestamp(3, todaySQL); - prepSt.addBatch(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set TIMESTAMP_INPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FFTZH'"); + statement.execute( + "create or replace table testStageBindTypes (c1 date, c2 datetime, c3 timestamp)"); + java.util.Date today = new java.util.Date(); + java.sql.Date sqldate = new java.sql.Date(today.getDate()); + java.sql.Timestamp todaySQL = new java.sql.Timestamp(today.getTime()); + try (PreparedStatement prepSt = + connection.prepareStatement("insert into testStageBindTypes values (?, ?, ?)")) { + for (int i = 1; i < 30000; i++) { + prepSt.setDate(1, sqldate); + prepSt.setDate(2, sqldate); + prepSt.setTimestamp(3, todaySQL); + prepSt.addBatch(); + } + prepSt.executeBatch(); // should not throw a parsing error. + } + } finally { + statement.execute("drop table if exists testStageBindTypes"); + statement.execute("alter session unset TIMESTAMP_INPUT_FORMAT"); } - prepSt.executeBatch(); // should not throw a parsing error. 
- statement.execute("drop table if exists testStageBindTypes"); - statement.execute("alter session unset TIMESTAMP_INPUT_FORMAT"); - statement.close(); } } @@ -322,34 +327,36 @@ public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { @Test @Ignore public void testCallStatement() throws SQLException { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); - statement.executeQuery( - "ALTER SESSION SET USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS=true"); - statement.executeQuery( - "create or replace procedure\n" - + "TEST_SP_CALL_STMT_ENABLED(in1 float, in2 variant)\n" - + "returns string language javascript as $$\n" - + "let res = snowflake.execute({sqlText: 'select ? c1, ? c2', binds:[IN1, JSON.stringify(IN2)]});\n" - + "res.next();\n" - + "return res.getColumnValueAsString(1) + ' ' + res.getColumnValueAsString(2) + ' ' + IN2;\n" - + "$$;"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.executeQuery( + "ALTER SESSION SET USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS=true"); + statement.executeQuery( + "create or replace procedure\n" + + "TEST_SP_CALL_STMT_ENABLED(in1 float, in2 variant)\n" + + "returns string language javascript as $$\n" + + "let res = snowflake.execute({sqlText: 'select ? c1, ? 
c2', binds:[IN1, JSON.stringify(IN2)]});\n" + + "res.next();\n" + + "return res.getColumnValueAsString(1) + ' ' + res.getColumnValueAsString(2) + ' ' + IN2;\n" + + "$$;"); - PreparedStatement prepStatement = - connection.prepareStatement("call TEST_SP_CALL_STMT_ENABLED(?, to_variant(?))"); - prepStatement.setDouble(1, 1); - prepStatement.setString(2, "[2,3]"); + try (PreparedStatement prepStatement = + connection.prepareStatement("call TEST_SP_CALL_STMT_ENABLED(?, to_variant(?))")) { + prepStatement.setDouble(1, 1); + prepStatement.setString(2, "[2,3]"); - ResultSet rs = prepStatement.executeQuery(); - String result = "1 \"[2,3]\" [2,3]"; - while (rs.next()) { - assertEquals(result, rs.getString(1)); + try (ResultSet rs = prepStatement.executeQuery()) { + String result = "1 \"[2,3]\" [2,3]"; + while (rs.next()) { + assertEquals(result, rs.getString(1)); + } + } + } + } finally { + statement.executeQuery( + "drop procedure if exists TEST_SP_CALL_STMT_ENABLED(float, variant)"); } - - statement.executeQuery("drop procedure if exists TEST_SP_CALL_STMT_ENABLED(float, variant)"); - rs.close(); - prepStatement.close(); - statement.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java index 71f2fe3d5..efb8ef944 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java @@ -48,7 +48,8 @@ public PreparedStatement2IT() { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testStageBatchDates() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { Date dEpoch = new Date(0); Date dAfterEpoch = new Date(24 * 60 * 60 * 1000); Date dBeforeEpoch = new Date(-1 * 24 * 60 * 60 * 1000); @@ -59,16 +60,12 @@ public void testStageBatchDates() 
throws SQLException { int[] countResult; try { - connection - .createStatement() - .execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); + statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement("INSERT INTO test_prepst_date(id, d) VALUES(?,?)")) { // First, run with non-stage binding - connection - .createStatement() - .execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); for (int i = 0; i < dates.length; i++) { prepStatement.setInt(1, i); prepStatement.setDate(2, dates[i]); @@ -80,22 +77,19 @@ public void testStageBatchDates() throws SQLException { } Date[] nonStageResult = new Date[dates.length]; - ResultSet rsNonStage = - connection - .createStatement() - .executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC"); - for (int i = 0; i < nonStageResult.length; i++) { - rsNonStage.next(); - nonStageResult[i] = rsNonStage.getDate(2); - } + try (ResultSet rsNonStage = + statement.executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC")) { - connection.createStatement().execute("DELETE FROM test_prepst_date WHERE 1=1"); + for (int i = 0; i < nonStageResult.length; i++) { + assertTrue(rsNonStage.next()); + nonStageResult[i] = rsNonStage.getDate(2); + } + } + statement.execute("DELETE FROM test_prepst_date WHERE 1=1"); // Now, run with stage binding - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage // bind for (int i = 0; i < dates.length; i++) { prepStatement.setInt(1, i); @@ -108,35 +102,33 @@ public void testStageBatchDates() throws SQLException { } Date[] stageResult = new Date[dates.length]; - ResultSet rsStage = - connection - .createStatement() - 
.executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC"); - for (int i = 0; i < stageResult.length; i++) { - rsStage.next(); - stageResult[i] = rsStage.getDate(2); - } + try (ResultSet rsStage = + statement.executeQuery("SELECT * FROM test_prepst_date ORDER BY id ASC")) { + for (int i = 0; i < stageResult.length; i++) { + assertTrue(rsStage.next()); + stageResult[i] = rsStage.getDate(2); + } - for (int i = 0; i < dates.length; i++) { - assertEquals( - "Stage binding date should match non-stage binding date", - nonStageResult[i], - stageResult[i]); + for (int i = 0; i < dates.length; i++) { + assertEquals( + "Stage binding date should match non-stage binding date", + nonStageResult[i], + stageResult[i]); + } } } } finally { - connection.createStatement().execute("DROP TABLE IF EXISTS test_prepst_date"); + statement.execute("DROP TABLE IF EXISTS test_prepst_date"); } } } @Test public void testBindWithNullValue() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute( - "create or replace table testBindNull(cola date, colb time, colc timestamp, cold number)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table testBindNull(cola date, colb time, colc timestamp, cold number)"); try (PreparedStatement prepStatement = connection.prepareStatement("insert into testBindNull values (?, ?, ?, ?)")) { @@ -146,9 +138,8 @@ public void testBindWithNullValue() throws SQLException { prepStatement.setBigDecimal(4, null); prepStatement.addBatch(); prepStatement.executeBatch(); - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from testBindNull")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from testBindNull")) { + assertTrue(resultSet.next()); Date date = resultSet.getDate(1); assertNull(date); assertTrue(resultSet.wasNull()); @@ -165,7 +156,7 @@ public void 
testBindWithNullValue() throws SQLException { assertNull(bg); assertTrue(resultSet.wasNull()); } - connection.createStatement().execute("TRUNCATE table testbindnull"); + statement.execute("TRUNCATE table testbindnull"); prepStatement.setDate(1, null, Calendar.getInstance()); prepStatement.setTime(2, null, Calendar.getInstance()); prepStatement.setTimestamp(3, null, Calendar.getInstance()); @@ -174,9 +165,8 @@ public void testBindWithNullValue() throws SQLException { prepStatement.addBatch(); prepStatement.executeBatch(); - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from testBindNull")) { - resultSet.next(); + try (ResultSet resultSet = statement.executeQuery("select * from testBindNull")) { + assertTrue(resultSet.next()); Date date = resultSet.getDate(1); assertNull(date); assertTrue(resultSet.wasNull()); @@ -195,20 +185,20 @@ public void testBindWithNullValue() throws SQLException { @Test public void testPrepareDDL() throws SQLException { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { try { try (PreparedStatement prepStatement = connection.prepareStatement("create or replace table testprepareddl(cola number)")) { prepStatement.execute(); } - try (ResultSet resultSet = - connection.createStatement().executeQuery("show tables like 'testprepareddl'")) { + try (ResultSet resultSet = statement.executeQuery("show tables like 'testprepareddl'")) { // result should only have one row since table is created assertThat(resultSet.next(), is(true)); assertThat(resultSet.next(), is(false)); } } finally { - connection.createStatement().execute("drop table if exists testprepareddl"); + statement.execute("drop table if exists testprepareddl"); } } } @@ -236,7 +226,7 @@ public void testPrepareTCL() throws SQLException { for (String testCase : testCases) { try (PreparedStatement prepStatement = connection.prepareStatement(testCase)) { try (ResultSet 
resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getString(1), is("Statement executed successfully.")); } } @@ -266,26 +256,26 @@ public void testPrepareShowCommand() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPrepareTimeout() throws SQLException, InterruptedException { - try (Connection adminCon = getSnowflakeAdminConnection()) { - adminCon.createStatement().execute("alter system set enable_combined_describe=true"); + try (Connection adminCon = getSnowflakeAdminConnection(); + Statement adminStatement = adminCon.createStatement()) { + adminStatement.execute("alter system set enable_combined_describe=true"); try { - try (Connection connection = init()) { - connection.createStatement().execute("create or replace table t(c1 string) as select 1"); - connection - .createStatement() - .execute("alter session set jdbc_enable_combined_describe=true"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table t(c1 string) as select 1"); + statement.execute("alter session set jdbc_enable_combined_describe=true"); try (PreparedStatement prepStatement = connection.prepareStatement("select c1 from t order by c1 limit 1")) { Thread.sleep(5000); try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getInt(1), is(1)); } } - connection.createStatement().execute("drop table if exists t"); + statement.execute("drop table if exists t"); } } finally { - adminCon.createStatement().execute("alter system set enable_combined_describe=default"); + adminStatement.execute("alter system set enable_combined_describe=default"); } } } @@ -293,40 +283,41 @@ public void testPrepareTimeout() throws SQLException, InterruptedException { /** Test case to make sure 2 non null bind refs was not 
constant folded into one */ @Test public void testSnow36284() throws Exception { - Connection connection = init(); - String query = "select * from (values ('a'), ('b')) x where x.COLUMN1 in (?,?);"; - PreparedStatement preparedStatement = connection.prepareStatement(query); - preparedStatement.setString(1, "a"); - preparedStatement.setString(2, "b"); - ResultSet rs = preparedStatement.executeQuery(); - int rowcount = 0; - Set valuesReturned = Sets.newHashSetWithExpectedSize(2); - while (rs.next()) { - rowcount++; - valuesReturned.add(rs.getString(1)); + + try (Connection connection = init(); + PreparedStatement preparedStatement = connection.prepareStatement(query)) { + preparedStatement.setString(1, "a"); + preparedStatement.setString(2, "b"); + try (ResultSet rs = preparedStatement.executeQuery()) { + int rowcount = 0; + Set valuesReturned = Sets.newHashSetWithExpectedSize(2); + while (rs.next()) { + rowcount++; + valuesReturned.add(rs.getString(1)); + } + assertEquals("Should get back 2 rows", 2, rowcount); + assertEquals("", valuesReturned, Sets.newHashSet("a", "b")); + } } - assertEquals("Should get back 2 rows", 2, rowcount); - assertEquals("", valuesReturned, Sets.newHashSet("a", "b")); } /** Test for coalesce with bind and null arguments in a prepared statement */ @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSnow35923() throws Exception { - try (Connection connection = init()) { - connection - .createStatement() - .execute("alter session set " + "optimizer_eliminate_scans_for_constant_select=false"); - connection.createStatement().execute("create or replace table inc(a int, b int)"); - connection - .createStatement() - .execute("insert into inc(a, b) values (1, 2), " + "(NULL, 4), (5,NULL), (7,8)"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + "optimizer_eliminate_scans_for_constant_select=false"); + 
statement.execute("create or replace table inc(a int, b int)"); + statement.execute("insert into inc(a, b) values (1, 2), " + "(NULL, 4), (5,NULL), (7,8)"); // Query used to cause an incident. - PreparedStatement preparedStatement = - connection.prepareStatement("SELECT coalesce(?, NULL) from inc;"); - preparedStatement.setInt(1, 0); - ResultSet rs = preparedStatement.executeQuery(); + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT coalesce(?, NULL) from inc;")) { + preparedStatement.setInt(1, 0); + try (ResultSet rs = preparedStatement.executeQuery()) {} + } } } @@ -337,162 +328,180 @@ public void testSnow35923() throws Exception { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testBindObjectLiteral() throws Exception { - try (Connection conn = init()) { - Statement stmt = conn.createStatement(); + long t1Id = 0; + long t2Id = 0; + String t1 = null; - String sqlText = "create or replace table identifier(?) (c1 number)"; - SnowflakePreparedStatementV1 pStmt = - (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - String t1 = "bindObjectTable1"; - // Bind the table name - pStmt.setString(1, t1); - ResultSet result = pStmt.executeQuery(); + try (Connection conn = init(); + Statement stmt = conn.createStatement()) { + String sqlText = "create or replace table identifier(?) 
(c1 number)"; + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + t1 = "bindObjectTable1"; + // Bind the table name + pStmt.setString(1, t1); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify the table has been created and get the table ID stmt.execute("select parse_json(system$dict_id('table', '" + t1 + "')):entityId;"); - result = stmt.getResultSet(); - - long t1Id = 0; - if (result.next()) { - t1Id = Long.valueOf(result.getString(1)); + try (ResultSet result = stmt.getResultSet()) { + if (result.next()) { + t1Id = Long.valueOf(result.getString(1)); + } + assertTrue(t1Id != 0); } - assertTrue(t1Id != 0); - // Mix of object literal binds and value binds sqlText = "insert into identifier(?) values (1), (2), (3)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - // Bind by object IDs - pStmt.setLong(1, t1Id); - - result = pStmt.executeQuery(); - + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + // Bind by object IDs + pStmt.setLong(1, t1Id); + try (ResultSet result = pStmt.executeQuery()) {} + } // Perform some selection sqlText = "select * from identifier(?) order by 1"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t1); - result = pStmt.executeQuery(); - // Verify 3 rows have been inserted - for (int i = 0; i < 3; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t1); + try (ResultSet result = pStmt.executeQuery()) { + // Verify 3 rows have been inserted + for (int i = 0; i < 3; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); - // Alter Table sqlText = "alter table identifier(?) 
add column c2 number"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - pStmt.setLong(1, t1Id); - result = pStmt.executeQuery(); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setLong(1, t1Id); + try (ResultSet result = pStmt.executeQuery()) {} + } // Describe sqlText = "desc table identifier(?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t1); - result = pStmt.executeQuery(); - // Verify two columns have been created - for (int i = 0; i < 2; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t1); + try (ResultSet result = pStmt.executeQuery()) { + // Verify two columns have been created + for (int i = 0; i < 2; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); // Create another table String t2 = "bindObjectTable2"; sqlText = "create or replace table identifier(?) 
(c1 number)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t2); - result = pStmt.executeQuery(); - + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t2); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify the table has been created and get the table ID stmt.execute("select parse_json(system$dict_id('table', '" + t2 + "')):entityId;"); - result = stmt.getResultSet(); - - long t2Id = 0; - if (result.next()) { - t2Id = Long.valueOf(result.getString(1)); + try (ResultSet result = stmt.getResultSet()) { + if (result.next()) { + t2Id = Long.valueOf(result.getString(1)); + } + assertTrue(t2Id != 0); } - assertTrue(t2Id != 0); - // Mix object binds with value binds sqlText = "insert into identifier(?) values (?), (?), (?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, t2); - pStmt.setInt(2, 1); - pStmt.setInt(3, 2); - pStmt.setInt(4, 3); - result = pStmt.executeQuery(); - + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, t2); + pStmt.setInt(2, 1); + pStmt.setInt(3, 2); + pStmt.setInt(4, 3); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify that 3 rows have been inserted sqlText = "select * from identifier(?) 
order by 1"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - pStmt.setLong(1, t2Id); - result = pStmt.executeQuery(); - for (int i = 0; i < 3; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setLong(1, t2Id); + try (ResultSet result = pStmt.executeQuery()) { + for (int i = 0; i < 3; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); // Multiple Object Binds sqlText = "select t2.c1 from identifier(?) as t1, identifier(?) as t2 " + "where t1.c1 = t2.c1 and t1.c1 > (?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setParameter("resolve_object_ids", true); - pStmt.setString(1, t1); - pStmt.setLong(2, t2Id); - pStmt.setInt(3, 1); - result = pStmt.executeQuery(); - for (int i = 0; i < 2; i++) { - assertTrue(result.next()); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setString(1, t1); + pStmt.setLong(2, t2Id); + pStmt.setInt(3, 1); + try (ResultSet result = pStmt.executeQuery()) { + for (int i = 0; i < 2; i++) { + assertTrue(result.next()); + } + assertFalse(result.next()); + } } - assertFalse(result.next()); // Drop Tables sqlText = "drop table identifier(?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - pStmt.setString(1, "bindObjectTable1"); - result = pStmt.executeQuery(); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setString(1, "bindObjectTable1"); + try (ResultSet result = pStmt.executeQuery()) {} + } sqlText = "drop table identifier(?)"; - pStmt = (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText); - 
pStmt.setParameter("resolve_object_ids", true); - pStmt.setLong(1, t2Id); - result = pStmt.executeQuery(); + try (SnowflakePreparedStatementV1 pStmt = + (SnowflakePreparedStatementV1) conn.prepareStatement(sqlText)) { + pStmt.setParameter("resolve_object_ids", true); + pStmt.setLong(1, t2Id); + try (ResultSet result = pStmt.executeQuery()) {} + } // Verify that the tables have been dropped stmt.execute("show tables like 'bindobjecttable%'"); - result = stmt.getResultSet(); - assertFalse(result.next()); + try (ResultSet result = stmt.getResultSet()) { + assertFalse(result.next()); + } } } @Test public void testBindTimestampTZViaString() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute( - "alter session set timestamp_tz_output_format='YYYY-MM" - + "-DD HH24:MI:SS.FF9 TZHTZM'"); - connection - .createStatement() - .execute("create or replace table testbindtstz(cola timestamp_tz)"); - - try (PreparedStatement preparedStatement = - connection.prepareStatement("insert into testbindtstz values(?)")) { - preparedStatement.setString(1, "2017-11-30T18:17:05.123456789+08:00"); - int count = preparedStatement.executeUpdate(); - assertThat(count, is(1)); - } - try (ResultSet resultSet = - connection.createStatement().executeQuery("select * from testbindtstz")) { - assertTrue(resultSet.next()); - assertThat(resultSet.getString(1), is("2017-11-30 18:17:05.123456789 +0800")); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "alter session set timestamp_tz_output_format='YYYY-MM" + "-DD HH24:MI:SS.FF9 TZHTZM'"); + statement.execute("create or replace table testbindtstz(cola timestamp_tz)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testbindtstz values(?)")) { + preparedStatement.setString(1, "2017-11-30T18:17:05.123456789+08:00"); + int count = preparedStatement.executeUpdate(); + assertThat(count, 
is(1)); + } + try (ResultSet resultSet = statement.executeQuery("select * from testbindtstz")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), is("2017-11-30 18:17:05.123456789 +0800")); + } + } finally { + statement.execute("drop table if exists testbindtstz"); } - connection.createStatement().execute("drop table if exists testbindtstz"); } } @@ -503,41 +512,40 @@ public void testBindTimestampTZViaString() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testBindTimestampTZViaStringBatch() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute( - "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind - connection - .createStatement() - .execute("create or replace table testbindtstz(cola timestamp_tz, colb timestamp_ntz)"); - - try (PreparedStatement preparedStatement = - connection.prepareStatement("insert into testbindtstz values(?,?)")) { - - preparedStatement.setString(1, "2017-11-30 18:17:05.123456789 +08:00"); - preparedStatement.setString(2, "2017-11-30 18:17:05.123456789"); - preparedStatement.addBatch(); - preparedStatement.setString(1, "2017-05-03 16:44:42.0"); - preparedStatement.setString(2, "2017-05-03 16:44:42.0"); - preparedStatement.addBatch(); - int[] count = preparedStatement.executeBatch(); - assertThat(count[0], is(1)); - - try (ResultSet resultSet = - connection - .createStatement() - .executeQuery("select * from testbindtstz order by 1 desc")) { - assertTrue(resultSet.next()); - assertThat(resultSet.getString(1), is("Thu, 30 Nov 2017 18:17:05 +0800")); - assertThat(resultSet.getString(2), is("Thu, 30 Nov 2017 18:17:05 Z")); - - assertTrue(resultSet.next()); - assertThat(resultSet.getString(1), is("Wed, 03 May 2017 16:44:42 -0700")); - assertThat(resultSet.getString(2), is("Wed, 03 May 2017 16:44:42 Z")); + try (Connection connection = init(); + Statement statement = 
connection.createStatement()) { + try { + statement.execute( + "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind + statement.execute( + "create or replace table testbindtstz(cola timestamp_tz, colb timestamp_ntz)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testbindtstz values(?,?)")) { + + preparedStatement.setString(1, "2017-11-30 18:17:05.123456789 +08:00"); + preparedStatement.setString(2, "2017-11-30 18:17:05.123456789"); + preparedStatement.addBatch(); + preparedStatement.setString(1, "2017-05-03 16:44:42.0"); + preparedStatement.setString(2, "2017-05-03 16:44:42.0"); + preparedStatement.addBatch(); + int[] count = preparedStatement.executeBatch(); + assertThat(count[0], is(1)); + + try (ResultSet resultSet = + statement.executeQuery("select * from testbindtstz order by 1 desc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), is("Thu, 30 Nov 2017 18:17:05 +0800")); + assertThat(resultSet.getString(2), is("Thu, 30 Nov 2017 18:17:05 Z")); + + assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), is("Wed, 03 May 2017 16:44:42 -0700")); + assertThat(resultSet.getString(2), is("Wed, 03 May 2017 16:44:42 Z")); + } } + } finally { + statement.execute("drop table if exists testbindtstz"); } - connection.createStatement().execute("drop table if exists testbindtstz"); } } @@ -549,14 +557,11 @@ public void testBindTimestampTZViaStringBatch() throws SQLException { */ @Test public void testSnow41620() throws Exception { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { // Create a table and insert 3 records - connection - .createStatement() - .execute("CREATE or REPLACE TABLE SNOW41620 (c1 varchar(20)," + "c2 int" + " )"); - connection - .createStatement() - .execute("insert into SNOW41620 values('value1', 1), ('value2', 2), ('value3', 3)"); + statement.execute("CREATE 
or REPLACE TABLE SNOW41620 (c1 varchar(20)," + "c2 int" + " )"); + statement.execute("insert into SNOW41620 values('value1', 1), ('value2', 2), ('value3', 3)"); String PARAMETERIZED_QUERY = "SELECT t0.C1, " @@ -564,28 +569,24 @@ public void testSnow41620() throws Exception { + "CASE WHEN t0.C1 IN (?, ?) THEN t0.C2 ELSE null END " + "FROM SNOW41620 t0"; - ResultSet bindStmtResultSet; try (PreparedStatement pst = connection.prepareStatement(PARAMETERIZED_QUERY)) { // bind values pst.setObject(1, "value1"); pst.setObject(2, "value3"); pst.setObject(3, "value2"); pst.setObject(4, "value3"); - bindStmtResultSet = pst.executeQuery(); - - // Execute the same query with bind values replaced in the sql - String DIRECT_QUERY = - "SELECT t0.C1, " - + "CASE WHEN t0.C1 IN ('value1', 'value3') THEN t0.C2 ELSE null END," - + "CASE WHEN t0.C1 IN ('value2', 'value3') THEN t0.C2 ELSE null END " - + "FROM SNOW41620 t0"; - try (PreparedStatement pst1 = connection.prepareStatement(DIRECT_QUERY)) { - ResultSet directStmtResultSet = pst1.executeQuery(); - - checkResultSetEqual(bindStmtResultSet, directStmtResultSet); - - bindStmtResultSet.close(); - directStmtResultSet.close(); + try (ResultSet bindStmtResultSet = pst.executeQuery()) { + + // Execute the same query with bind values replaced in the sql + String DIRECT_QUERY = + "SELECT t0.C1, " + + "CASE WHEN t0.C1 IN ('value1', 'value3') THEN t0.C2 ELSE null END," + + "CASE WHEN t0.C1 IN ('value2', 'value3') THEN t0.C2 ELSE null END " + + "FROM SNOW41620 t0"; + try (PreparedStatement pst1 = connection.prepareStatement(DIRECT_QUERY); + ResultSet directStmtResultSet = pst1.executeQuery()) { + checkResultSetEqual(bindStmtResultSet, directStmtResultSet); + } } } } @@ -650,8 +651,9 @@ public void testPreparedStatementWithSkipParsing() throws Exception { @Test public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { - try (Connection con = init()) { - con.createStatement().execute("create or replace table t(c1 int)"); + 
try (Connection con = init(); + Statement statement = con.createStatement()) { + statement.execute("create or replace table t(c1 int)"); try { try (PreparedStatement stmt = con.unwrap(SnowflakeConnectionV1.class) @@ -661,13 +663,13 @@ public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { assertThat(ret, is(1)); } try (PreparedStatement stmt = - con.unwrap(SnowflakeConnectionV1.class).prepareStatement("select * from t", true)) { - ResultSet rs = stmt.executeQuery(); + con.unwrap(SnowflakeConnectionV1.class).prepareStatement("select * from t", true); + ResultSet rs = stmt.executeQuery()) { assertThat(rs.next(), is(true)); assertThat(rs.getInt(1), is(123)); } } finally { - con.createStatement().execute("drop table if exists t"); + statement.execute("drop table if exists t"); } } } @@ -685,11 +687,12 @@ public void testSnow44393() throws Exception { .execute("alter session set timestamp_ntz_output_format='YYYY-MM-DD HH24:MI:SS'")); try (PreparedStatement stmt = con.prepareStatement("select to_timestamp_ntz(?, 3)")) { stmt.setBigDecimal(1, new BigDecimal("1261440000000")); - ResultSet resultSet = stmt.executeQuery(); - resultSet.next(); + try (ResultSet resultSet = stmt.executeQuery()) { + assertTrue(resultSet.next()); - String res = resultSet.getString(1); - assertThat(res, is("2009-12-22 00:00:00")); + String res = resultSet.getString(1); + assertThat(res, is("2009-12-22 00:00:00")); + } } } } @@ -775,36 +778,38 @@ public void testAddBatchNumericNullFloatMixed() throws Exception { @Test public void testInvalidUsageOfApi() throws Exception { - Connection connection = init(); - final PreparedStatement preparedStatement = connection.prepareStatement("select 1"); - final int expectedCode = ErrorCode.UNSUPPORTED_STATEMENT_TYPE_IN_EXECUTION_API.getMessageCode(); - - assertException( - new RunnableWithSQLException() { - @Override - public void run() throws SQLException { - preparedStatement.executeUpdate("select 1"); - } - }, - expectedCode); - - 
assertException( - new RunnableWithSQLException() { - @Override - public void run() throws SQLException { - preparedStatement.execute("select 1"); - } - }, - expectedCode); - - assertException( - new RunnableWithSQLException() { - @Override - public void run() throws SQLException { - preparedStatement.addBatch("select 1"); - } - }, - expectedCode); + try (Connection connection = init(); + PreparedStatement preparedStatement = connection.prepareStatement("select 1")) { + final int expectedCode = + ErrorCode.UNSUPPORTED_STATEMENT_TYPE_IN_EXECUTION_API.getMessageCode(); + + assertException( + new RunnableWithSQLException() { + @Override + public void run() throws SQLException { + preparedStatement.executeUpdate("select 1"); + } + }, + expectedCode); + + assertException( + new RunnableWithSQLException() { + @Override + public void run() throws SQLException { + preparedStatement.execute("select 1"); + } + }, + expectedCode); + + assertException( + new RunnableWithSQLException() { + @Override + public void run() throws SQLException { + preparedStatement.addBatch("select 1"); + } + }, + expectedCode); + } } private void assertException(RunnableWithSQLException runnable, int expectedCode) { diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java index 84e5f20ea..f7ca395de 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java @@ -41,23 +41,20 @@ public PreparedStatement2LatestIT() { @Test public void testPrepareUDTF() throws Exception { - try (Connection connection = init()) { + try (Connection connection = init(); + Statement statement = connection.createStatement()) { try { - connection - .createStatement() - .execute("create or replace table employee(id number, address text)"); - connection - .createStatement() - .execute( - "create or replace function 
employee_detail(sid number, addr text)\n" - + " returns table(id number, address text)\n" - + "LANGUAGE SQL\n" - + "as\n" - + "$$\n" - + "select *\n" - + "from employee\n" - + "where id=sid\n" - + "$$;"); + statement.execute("create or replace table employee(id number, address text)"); + statement.execute( + "create or replace function employee_detail(sid number, addr text)\n" + + " returns table(id number, address text)\n" + + "LANGUAGE SQL\n" + + "as\n" + + "$$\n" + + "select *\n" + + "from employee\n" + + "where id=sid\n" + + "$$;"); // should resolve successfully try (PreparedStatement prepStatement = @@ -87,17 +84,15 @@ public void testPrepareUDTF() throws Exception { } // create a udf with same name but different arguments and return type - connection - .createStatement() - .execute( - "create or replace function employee_detail(name text , addr text)\n" - + " returns table(id number)\n" - + "LANGUAGE SQL\n" - + "as\n" - + "$$\n" - + "select id\n" - + "from employee\n" - + "$$;"); + statement.execute( + "create or replace function employee_detail(name text , addr text)\n" + + " returns table(id number)\n" + + "LANGUAGE SQL\n" + + "as\n" + + "$$\n" + + "select id\n" + + "from employee\n" + + "$$;"); try (PreparedStatement prepStatement = connection.prepareStatement("select * from table(employee_detail(?, 'abc'))")) { @@ -105,10 +100,8 @@ public void testPrepareUDTF() throws Exception { prepStatement.execute(); } } finally { - connection - .createStatement() - .execute("drop function if exists employee_detail(number, text)"); - connection.createStatement().execute("drop function if exists employee_detail(text, text)"); + statement.execute("drop function if exists employee_detail(number, text)"); + statement.execute("drop function if exists employee_detail(text, text)"); } } } @@ -119,38 +112,34 @@ public void testPrepareUDTF() throws Exception { */ @Test public void testSelectWithBinding() throws Throwable { - try (Connection connection = init()) { - connection 
- .createStatement() - .execute("create or replace table TESTNULL(created_time timestamp_ntz, mid int)"); - PreparedStatement ps; - ResultSet rs; + try (Connection connection = init(); + Statement statement = connection.createStatement()) { try { + statement.execute("create or replace table TESTNULL(created_time timestamp_ntz, mid int)"); // skip bind parameter index check if prepare fails and defer the error checks to execute - ps = + try (PreparedStatement ps = connection.prepareStatement( - "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?, 3) and MID = ?"); - ps.setObject(1, 0); - ps.setObject(2, null); - rs = ps.executeQuery(); - assertFalse(rs.next()); - rs.close(); - ps.close(); + "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?, 3) and MID = ?")) { + ps.setObject(1, 0); + ps.setObject(2, null); + try (ResultSet rs = ps.executeQuery()) { + assertFalse(rs.next()); + } + } // describe is success and do the index range check - ps = + try (PreparedStatement ps = connection.prepareStatement( - "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?::NUMBER, 3) and MID = ?"); - ps.setObject(1, 0); - ps.setObject(2, null); - - rs = ps.executeQuery(); - assertFalse(rs.next()); - rs.close(); - ps.close(); + "SELECT 1 FROM TESTNULL WHERE CREATED_TIME = TO_TIMESTAMP(?::NUMBER, 3) and MID = ?")) { + ps.setObject(1, 0); + ps.setObject(2, null); + try (ResultSet rs = ps.executeQuery()) { + assertFalse(rs.next()); + } + } } finally { - connection.createStatement().execute("drop table if exists TESTNULL"); + statement.execute("drop table if exists TESTNULL"); } } } @@ -175,7 +164,7 @@ public void testConstOptLimitBind() throws SQLException { prepStatement.setInt(1, 10); prepStatement.setInt(2, 0); try (ResultSet resultSet = prepStatement.executeQuery()) { - resultSet.next(); + assertTrue(resultSet.next()); assertThat(resultSet.getInt(1), is(1)); assertThat(resultSet.next(), is(false)); } @@ -198,8 +187,9 @@ public void testTableFuncBindInput() 
throws SQLException { @Test public void testExecuteLargeBatch() throws SQLException { - try (Connection con = init()) { - try (Statement statement = con.createStatement()) { + try (Connection con = init(); + Statement statement = con.createStatement()) { + try { statement.execute("create or replace table mytab(id int)"); try (PreparedStatement pstatement = con.prepareStatement("insert into mytab(id) values (?)")) { @@ -212,117 +202,138 @@ public void testExecuteLargeBatch() throws SQLException { pstatement.executeLargeBatch(); con.commit(); try (ResultSet resultSet = statement.executeQuery("select * from mytab")) { - resultSet.next(); + assertTrue(resultSet.next()); assertEquals(4, resultSet.getInt(1)); } - statement.execute("drop table if exists mytab"); } + } finally { + statement.execute("drop table if exists mytab"); } } } @Test public void testRemoveExtraDescribeCalls() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_uuid_with_bind(c1 number)"); - - PreparedStatement preparedStatement = - connection.prepareStatement("insert into test_uuid_with_bind values (?)"); - preparedStatement.setInt(1, 5); - assertEquals(1, preparedStatement.executeUpdate()); - String queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - // Calling getMetadata() should no longer require an additional server call because we have the - // metadata form the executeUpdate - String queryId2 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - // Assert the query IDs are the same. This will be the case if there is no additional describe - // call for getMetadata(). 
- assertEquals(queryId1, queryId2); - - preparedStatement.addBatch(); - - preparedStatement = - connection.prepareStatement("select * from test_uuid_with_bind where c1 = ?"); - assertFalse(preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - preparedStatement.setInt(1, 5); - - ResultSet resultSet = preparedStatement.executeQuery(); - assertThat(resultSet.next(), is(true)); - queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - queryId2 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - String queryId3 = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); - // Assert all 3 query IDs are the same because only 1 server call was executed - assertEquals(queryId1, queryId2); - assertEquals(queryId1, queryId3); - - resultSet.close(); - preparedStatement.close(); - - statement.execute("drop table if exists test_uuid_with_bind"); - connection.close(); + String queryId1 = null; + String queryId2 = null; + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_uuid_with_bind(c1 number)"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_uuid_with_bind values (?)")) { + preparedStatement.setInt(1, 5); + assertEquals(1, preparedStatement.executeUpdate()); + queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + // Calling getMetadata() should no longer require an additional server call because we + // have + // the + // metadata form the executeUpdate + queryId2 = + preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); + // Assert the query IDs are the same. This will be the case if there is no additional + // describe + // call for getMetadata(). 
+ assertEquals(queryId1, queryId2); + + preparedStatement.addBatch(); + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("select * from test_uuid_with_bind where c1 = ?")) { + assertFalse( + preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + preparedStatement.setInt(1, 5); + + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertThat(resultSet.next(), is(true)); + queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + queryId2 = + preparedStatement + .getMetaData() + .unwrap(SnowflakeResultSetMetaData.class) + .getQueryID(); + String queryId3 = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); + // Assert all 3 query IDs are the same because only 1 server call was executed + assertEquals(queryId1, queryId2); + assertEquals(queryId1, queryId3); + } + } + } finally { + statement.execute("drop table if exists test_uuid_with_bind"); + } + } } @Test public void testRemoveExtraDescribeCallsSanityCheck() throws SQLException { - Connection connection = init(); - PreparedStatement preparedStatement = - connection.prepareStatement( - "create or replace table test_uuid_with_bind(c1 number, c2 string)"); - preparedStatement.execute(); - String queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - preparedStatement = - connection.prepareStatement("insert into test_uuid_with_bind values (?, ?)"); - assertFalse(preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - preparedStatement.setInt(1, 5); - preparedStatement.setString(2, "hello"); - preparedStatement.addBatch(); - preparedStatement.setInt(1, 7); - preparedStatement.setString(2, "hello1"); - preparedStatement.addBatch(); - String queryId2 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - // These query IDs should not match because they are from 2 different prepared statements - 
assertNotEquals(queryId1, queryId2); - preparedStatement.executeBatch(); - String queryId3 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); - // Another execute call was created, so prepared statement has new query ID - assertNotEquals(queryId2, queryId3); - // Calling getMetadata() should no longer require an additional server call because we have the - // metadata form the executeUpdate - String queryId4 = - preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); - // Assert the query IDs for the 2 identical getMetadata() calls are the same. They should match - // since metadata no longer gets overwritten after successive query calls. - assertEquals(queryId2, queryId4); - - connection.createStatement().execute("drop table if exists test_uuid_with_bind"); - preparedStatement.close(); - connection.close(); + String queryId1; + try (Connection connection = init()) { + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "create or replace table test_uuid_with_bind(c1 number, c2 string)")) { + preparedStatement.execute(); + queryId1 = preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into test_uuid_with_bind values (?, ?)")) { + assertFalse( + preparedStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + preparedStatement.setInt(1, 5); + preparedStatement.setString(2, "hello"); + preparedStatement.addBatch(); + preparedStatement.setInt(1, 7); + preparedStatement.setString(2, "hello1"); + preparedStatement.addBatch(); + String queryId2 = + preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); + // These query IDs should not match because they are from 2 different prepared statements + assertNotEquals(queryId1, queryId2); + preparedStatement.executeBatch(); + String queryId3 = 
preparedStatement.unwrap(SnowflakePreparedStatement.class).getQueryID(); + // Another execute call was created, so prepared statement has new query ID + assertNotEquals(queryId2, queryId3); + // Calling getMetadata() should no longer require an additional server call because we + // have + // the + // metadata form the executeUpdate + String queryId4 = + preparedStatement.getMetaData().unwrap(SnowflakeResultSetMetaData.class).getQueryID(); + // Assert the query IDs for the 2 identical getMetadata() calls are the same. They should + // match + // since metadata no longer gets overwritten after successive query calls. + assertEquals(queryId2, queryId4); + connection.createStatement().execute("drop table if exists test_uuid_with_bind"); + } + } } @Test public void testAlreadyDescribedMultipleResults() throws SQLException { - Connection connection = init(); - PreparedStatement prepStatement = connection.prepareStatement(insertSQL); - bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); - prepStatement.execute(); - // The statement above has already been described since it has been executed - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - prepStatement = connection.prepareStatement(selectSQL); - // Assert the statement, once it has been re-created, has already described set to false - assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - prepStatement.setInt(1, 1); - ResultSet rs = prepStatement.executeQuery(); - assertTrue(rs.next()); - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - prepStatement = connection.prepareStatement(selectAllSQL); - // Assert the statement, once it has been re-created, has already described set to false - assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - rs = prepStatement.executeQuery(); - assertTrue(rs.next()); - 
assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + try (Connection connection = init()) { + try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { + bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); + prepStatement.execute(); + // The statement above has already been described since it has been executed + assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + } + try (PreparedStatement prepStatement = connection.prepareStatement(selectSQL)) { + // Assert the statement, once it has been re-created, has already described set to false + assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + prepStatement.setInt(1, 1); + try (ResultSet rs = prepStatement.executeQuery()) { + assertTrue(rs.next()); + assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + } + } + try (PreparedStatement prepStatement = connection.prepareStatement(selectAllSQL)) { + // Assert the statement, once it has been re-created, has already described set to false + assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + try (ResultSet rs = prepStatement.executeQuery()) { + assertTrue(rs.next()); + assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + } + } + } } /** @@ -333,40 +344,48 @@ public void testAlreadyDescribedMultipleResults() throws SQLException { */ @Test public void testConsecutiveBatchInsertError() throws SQLException { - try (Connection connection = init()) { - connection - .createStatement() - .execute("create or replace table testStageArrayBind(c1 integer, c2 string)"); - PreparedStatement prepStatement = - connection.prepareStatement("insert into testStageArrayBind values (?, ?)"); - // Assert to begin with that before the describe call, array binding is not supported - 
assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); - assertFalse(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); - // Insert enough rows to hit the default binding array threshold - for (int i = 0; i < 35000; i++) { - prepStatement.setInt(1, i); - prepStatement.setString(2, "test" + i); - prepStatement.addBatch(); - } - prepStatement.executeBatch(); - // After executing the first batch, verify that array bind support is still true - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); - for (int i = 0; i < 35000; i++) { - prepStatement.setInt(1, i); - prepStatement.setString(2, "test" + i); - prepStatement.addBatch(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table testStageArrayBind(c1 integer, c2 string)"); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into testStageArrayBind values (?, ?)")) { + // Assert to begin with that before the describe call, array binding is not supported + assertFalse( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isAlreadyDescribed()); + assertFalse( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); + // Insert enough rows to hit the default binding array threshold + for (int i = 0; i < 35000; i++) { + prepStatement.setInt(1, i); + prepStatement.setString(2, "test" + i); + prepStatement.addBatch(); + } + prepStatement.executeBatch(); + // After executing the first batch, verify that array bind support is still true + assertTrue( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); + for (int i = 0; i < 35000; i++) { + prepStatement.setInt(1, i); + prepStatement.setString(2, "test" + i); + prepStatement.addBatch(); + } + prepStatement.executeBatch(); + // After executing the second batch, verify that array bind 
support is still true + assertTrue( + prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); + } + } finally { + statement.execute("drop table if exists testStageArrayBind"); } - prepStatement.executeBatch(); - // After executing the second batch, verify that array bind support is still true - assertTrue(prepStatement.unwrap(SnowflakePreparedStatementV1.class).isArrayBindSupported()); } } @Test public void testToString() throws SQLException { - try (Connection connection = init()) { - PreparedStatement prepStatement = - connection.prepareStatement("select current_version() --testing toString()"); + try (Connection connection = init(); + PreparedStatement prepStatement = + connection.prepareStatement("select current_version() --testing toString()")) { // Query ID is going to be null since we didn't execute the statement yet assertEquals( diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java index e3ec67b83..f80a00528 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java @@ -14,8 +14,8 @@ public class PreparedStatementFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { - try (Connection connection = getConnection()) { - PreparedStatement preparedStatement = connection.prepareStatement("select ?"); + try (Connection connection = getConnection(); + PreparedStatement preparedStatement = connection.prepareStatement("select ?")) { expectFeatureNotSupportedException( () -> preparedStatement.setAsciiStream(1, new BaseJDBCTest.FakeInputStream())); expectFeatureNotSupportedException( diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java index a1dfbb81c..883fe0c4d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java @@ -9,6 +9,7 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.sql.Statement; import java.util.Map; import net.snowflake.client.ConditionalIgnoreRule; import net.snowflake.client.RunningOnGithubAction; @@ -29,26 +30,31 @@ public class PreparedStatementLargeUpdateLatestIT extends BaseJDBCTest { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testLargeUpdate() throws Throwable { - try (Connection con = getConnection()) { - long expectedUpdateRows = (long) Integer.MAX_VALUE + 10L; - con.createStatement().execute("create or replace table test_large_update(c1 boolean)"); - PreparedStatement st = - con.prepareStatement( - "insert into test_large_update select true from table(generator(rowcount=>" - + expectedUpdateRows - + "))"); - PreparedStatement spyp = spy(st); - // Mock internal method which returns rowcount - Mockito.doReturn(expectedUpdateRows) - .when((SnowflakePreparedStatementV1) spyp) - .executeUpdateInternal( - Mockito.any(String.class), - Mockito.any(Map.class), - Mockito.any(boolean.class), - Mockito.any(ExecTimeTelemetryData.class)); - long updatedRows = spyp.executeLargeUpdate(); - assertEquals(expectedUpdateRows, updatedRows); - con.createStatement().execute("drop table if exists test_large_update"); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + try { + long expectedUpdateRows = (long) Integer.MAX_VALUE + 10L; + statement.execute("create or replace table test_large_update(c1 boolean)"); + try (PreparedStatement st = + con.prepareStatement( + "insert into test_large_update select true from 
table(generator(rowcount=>" + + expectedUpdateRows + + "))"); + PreparedStatement spyp = spy(st)) { + // Mock internal method which returns rowcount + Mockito.doReturn(expectedUpdateRows) + .when((SnowflakePreparedStatementV1) spyp) + .executeUpdateInternal( + Mockito.any(String.class), + Mockito.any(Map.class), + Mockito.any(boolean.class), + Mockito.any(ExecTimeTelemetryData.class)); + long updatedRows = spyp.executeLargeUpdate(); + assertEquals(expectedUpdateRows, updatedRows); + } + } finally { + statement.execute("drop table if exists test_large_update"); + } } } @@ -60,26 +66,30 @@ public void testLargeUpdate() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testExecuteLargeBatchOverIntMax() throws SQLException { - try (Connection connection = getConnection()) { - connection - .createStatement() - .execute("create or replace table over_int_table (val string, id int)"); - PreparedStatement pstmt = connection.prepareStatement("UPDATE over_int_table SET ID=200"); - PreparedStatement spyp = spy(pstmt); - long numRows = Integer.MAX_VALUE + 10L; - // Mock internal method which returns rowcount - Mockito.doReturn(numRows) - .when((SnowflakePreparedStatementV1) spyp) - .executeUpdateInternal( - Mockito.any(String.class), - Mockito.any(Map.class), - Mockito.any(boolean.class), - Mockito.any(ExecTimeTelemetryData.class)); - pstmt.addBatch(); - long[] queryResult = spyp.executeLargeBatch(); - assertEquals(1, queryResult.length); - assertEquals(numRows, queryResult[0]); - connection.createStatement().execute("drop table if exists over_int_table"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table over_int_table (val string, id int)"); + try (PreparedStatement pstmt = + connection.prepareStatement("UPDATE over_int_table SET ID=200"); + PreparedStatement spyp = spy(pstmt)) { + long numRows = 
Integer.MAX_VALUE + 10L; + // Mock internal method which returns rowcount + Mockito.doReturn(numRows) + .when((SnowflakePreparedStatementV1) spyp) + .executeUpdateInternal( + Mockito.any(String.class), + Mockito.any(Map.class), + Mockito.any(boolean.class), + Mockito.any(ExecTimeTelemetryData.class)); + pstmt.addBatch(); + long[] queryResult = spyp.executeLargeBatch(); + assertEquals(1, queryResult.length); + assertEquals(numRows, queryResult[0]); + } + } finally { + statement.execute("drop table if exists over_int_table"); + } } } } From ff0adbd12e494381fe95dec1305c68f629412c6f Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Sat, 27 Apr 2024 04:08:32 +0900 Subject: [PATCH 05/54] SNOW-1213117: Wrap connection, statement and result set in try with resources(4/4) (#1724) --- .../client/jdbc/ServiceNameTest.java | 21 +- .../SnowflakeChunkDownloaderLatestIT.java | 23 +- .../client/jdbc/SnowflakeDriverIT.java | 3690 ++++++++--------- .../client/jdbc/SnowflakeDriverLatestIT.java | 1773 ++++---- .../SnowflakeResultSetSerializableIT.java | 573 ++- .../snowflake/client/jdbc/StatementIT.java | 762 ++-- .../client/jdbc/StatementLargeUpdateIT.java | 25 +- .../client/jdbc/StatementLatestIT.java | 306 +- .../net/snowflake/client/jdbc/StreamIT.java | 167 +- .../snowflake/client/jdbc/StreamLatestIT.java | 315 +- .../storage/SnowflakeS3ClientLatestIT.java | 39 +- .../client/jdbc/telemetry/TelemetryIT.java | 60 +- .../loader/FlatfileReadMultithreadIT.java | 430 +- .../client/loader/LoaderLatestIT.java | 229 +- .../client/loader/LoaderMultipleBatchIT.java | 78 +- .../client/loader/LoaderTimestampIT.java | 186 +- .../log/JDK14LoggerWithClientLatestIT.java | 20 +- .../ConnectionPoolingDataSourceIT.java | 32 +- 18 files changed, 4129 insertions(+), 4600 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java index f32af5470..bd51ef533 
100644 --- a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java @@ -127,16 +127,21 @@ public void testAddServiceNameToRequestHeader() throws Throwable { props.setProperty(SFSessionProperty.USER.getPropertyKey(), "fakeuser"); props.setProperty(SFSessionProperty.PASSWORD.getPropertyKey(), "fakepassword"); props.setProperty(SFSessionProperty.INSECURE_MODE.getPropertyKey(), Boolean.TRUE.toString()); - SnowflakeConnectionV1 con = + try (SnowflakeConnectionV1 con = new SnowflakeConnectionV1( - "jdbc:snowflake://http://fakeaccount.snowflakecomputing.com", props); - assertThat(con.getSfSession().getServiceName(), is(INITIAL_SERVICE_NAME)); + "jdbc:snowflake://http://fakeaccount.snowflakecomputing.com", props)) { + assertThat(con.getSfSession().getServiceName(), is(INITIAL_SERVICE_NAME)); - SnowflakeStatementV1 stmt = (SnowflakeStatementV1) con.createStatement(); - stmt.execute("SELECT 1"); - assertThat( - stmt.getConnection().unwrap(SnowflakeConnectionV1.class).getSfSession().getServiceName(), - is(NEW_SERVICE_NAME)); + try (SnowflakeStatementV1 stmt = (SnowflakeStatementV1) con.createStatement()) { + stmt.execute("SELECT 1"); + assertThat( + stmt.getConnection() + .unwrap(SnowflakeConnectionV1.class) + .getSfSession() + .getServiceName(), + is(NEW_SERVICE_NAME)); + } + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java index 76c3b0466..1af7e1534 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java @@ -37,19 +37,20 @@ public void testChunkDownloaderRetry() throws SQLException, InterruptedException SnowflakeChunkDownloader snowflakeChunkDownloaderSpy = null; - try (Connection connection = getConnection(properties)) { - Statement statement = 
connection.createStatement(); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { // execute a query that will require chunk downloading - ResultSet resultSet = + try (ResultSet resultSet = statement.executeQuery( - "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))"); - List resultSetSerializables = - ((SnowflakeResultSet) resultSet).getResultSetSerializables(100 * 1024 * 1024); - SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); - SnowflakeChunkDownloader downloader = - new SnowflakeChunkDownloader((SnowflakeResultSetSerializableV1) resultSetSerializable); - snowflakeChunkDownloaderSpy = Mockito.spy(downloader); - snowflakeChunkDownloaderSpy.getNextChunkToConsume(); + "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))")) { + List resultSetSerializables = + ((SnowflakeResultSet) resultSet).getResultSetSerializables(100 * 1024 * 1024); + SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); + SnowflakeChunkDownloader downloader = + new SnowflakeChunkDownloader((SnowflakeResultSetSerializableV1) resultSetSerializable); + snowflakeChunkDownloaderSpy = Mockito.spy(downloader); + snowflakeChunkDownloaderSpy.getNextChunkToConsume(); + } } catch (SnowflakeSQLException exception) { // verify that request was retried twice before reaching max retries Mockito.verify(snowflakeChunkDownloaderSpy, Mockito.times(2)).getResultStreamProvider(); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java index aa82813f0..13bcee4e5 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -116,27 +116,26 @@ public static void setUp() throws Throwable { @AfterClass public static void tearDown() throws SQLException { - try 
(Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.execute("drop table if exists clustered_jdbc"); statement.execute("drop table if exists orders_jdbc"); - statement.close(); } } public static Connection getConnection(int injectSocketTimeout) throws SQLException { Connection connection = AbstractDriverIT.getConnection(injectSocketTimeout); - Statement statement = connection.createStatement(); - statement.execute( - "alter session set " - + "TIMEZONE='America/Los_Angeles'," - + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," - + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," - + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); - statement.close(); + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + } return connection; } @@ -149,33 +148,38 @@ public static Connection getConnection() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testOauthConnection() throws SQLException { Map params = getConnectionParameters(); - Connection con = getConnection("s3testaccount"); - Statement statement = con.createStatement(); - statement.execute("use role accountadmin"); - statement.execute( - "create or replace security integration jdbc_oauth_integration\n" - + " type=oauth\n" - + 
" oauth_client=CUSTOM\n" - + " oauth_client_type=CONFIDENTIAL\n" - + " oauth_redirect_uri='https://localhost.com/oauth'\n" - + " oauth_issue_refresh_tokens=true\n" - + " enabled=true oauth_refresh_token_validity=86400;"); - String role = params.get("role"); - ResultSet rs = - statement.executeQuery( - "select system$it('create_oauth_access_token', 'JDBC_OAUTH_INTEGRATION', '" - + role - + "')"); - rs.next(); - String token = rs.getString(1); - con.close(); + String role = null; + String token = null; + + try (Connection con = getConnection("s3testaccount"); + Statement statement = con.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute( + "create or replace security integration jdbc_oauth_integration\n" + + " type=oauth\n" + + " oauth_client=CUSTOM\n" + + " oauth_client_type=CONFIDENTIAL\n" + + " oauth_redirect_uri='https://localhost.com/oauth'\n" + + " oauth_issue_refresh_tokens=true\n" + + " enabled=true oauth_refresh_token_validity=86400;"); + role = params.get("role"); + try (ResultSet rs = + statement.executeQuery( + "select system$it('create_oauth_access_token', 'JDBC_OAUTH_INTEGRATION', '" + + role + + "')")) { + assertTrue(rs.next()); + token = rs.getString(1); + } + } Properties props = new Properties(); props.put("authenticator", ClientAuthnDTO.AuthenticatorType.OAUTH.name()); props.put("token", token); props.put("role", role); - con = getConnection("s3testaccount", props); - con.createStatement().execute("select 1"); - con.close(); + try (Connection con = getConnection("s3testaccount", props); + Statement statement = con.createStatement()) { + statement.execute("select 1"); + } } @Ignore @@ -195,16 +199,10 @@ public void testConnections() throws Throwable { futures.add( executorService.submit( () -> { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - ResultSetMetaData resultSetMetaData; - - try { - connection = getConnection(); - statement = connection.createStatement(); - 
resultSet = statement.executeQuery("SELECT system$sleep(10) % 1"); - resultSetMetaData = resultSet.getMetaData(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT system$sleep(10) % 1")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); // assert column count assertEquals(1, resultSetMetaData.getColumnCount()); @@ -220,10 +218,6 @@ public void testConnections() throws Throwable { } logger.info("Query " + queryIdx + " passed "); - - statement.close(); - } finally { - closeSQLObjects(resultSet, statement, connection); } return true; })); @@ -239,18 +233,11 @@ public void testConnections() throws Throwable { /** Test show columns */ @Test public void testShowColumns() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - Properties paramProperties = new Properties(); - connection = getConnection(paramProperties); - statement = connection.createStatement(); - resultSet = statement.executeQuery("show columns in clustered_jdbc"); + Properties paramProperties = new Properties(); + try (Connection connection = getConnection(paramProperties); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("show columns in clustered_jdbc")) { assertEquals("number of columns", 2, countRows(resultSet)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @@ -264,51 +251,39 @@ private int countRows(ResultSet rset) throws Throwable { @Test public void testRowsPerResultset() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - try { - connection = getConnection(); + try (Connection connection = getConnection()) { connection.createStatement().execute("alter session set rows_per_resultset=2048"); - statement = connection.createStatement(); - resultSet = 
statement.executeQuery("SELECT * FROM orders_jdbc"); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - int numColumns = resultSetMetaData.getColumnCount(); - assertEquals(9, numColumns); - assertEquals("number of columns", 73, countRows(resultSet)); - statement.close(); - } finally { - closeSQLObjects(resultSet, statement, connection); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT * FROM orders_jdbc")) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + int numColumns = resultSetMetaData.getColumnCount(); + assertEquals(9, numColumns); + assertEquals("number of columns", 73, countRows(resultSet)); + } } } @Test public void testDDLs() throws Throwable { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - - statement.execute("CREATE OR REPLACE TABLE testDDLs(version number, name string)"); - - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("CREATE OR REPLACE TABLE testDDLs(version number, name string)"); + } finally { statement.execute("DROP TABLE testDDLs"); } - closeSQLObjects(statement, connection); } } private long getCurrentTransaction(Connection connection) throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute(getCurrenTransactionStmt); - ResultSet rs = statement.getResultSet(); - if (rs.next()) { - String txnId = rs.getString(1); - return txnId != null ? Long.valueOf(txnId) : 0L; + try (ResultSet rs = statement.getResultSet()) { + if (rs.next()) { + String txnId = rs.getString(1); + return txnId != null ? 
Long.valueOf(txnId) : 0L; + } } } @@ -318,57 +293,51 @@ private long getCurrentTransaction(Connection connection) throws SQLException { /** Tests autocommit */ @Test public void testAutocommit() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // 1. test commit - connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); - assertEquals(Connection.TRANSACTION_READ_COMMITTED, connection.getTransactionIsolation()); - connection.setAutoCommit(false); // disable autocommit - assertFalse(connection.getAutoCommit()); - - assertEquals(0, getCurrentTransaction(connection)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // 1. test commit + connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); + assertEquals(Connection.TRANSACTION_READ_COMMITTED, connection.getTransactionIsolation()); + connection.setAutoCommit(false); // disable autocommit + assertFalse(connection.getAutoCommit()); - // create a table, this should not start a transaction - statement.executeUpdate("CREATE OR REPLACE TABLE AUTOCOMMIT_API_TEST (i int)"); - assertEquals(0, getCurrentTransaction(connection)); + assertEquals(0, getCurrentTransaction(connection)); - // insert into it this should start a transaction. 
- statement.executeUpdate("INSERT INTO AUTOCOMMIT_API_TEST VALUES (1)"); - assertNotEquals(0, getCurrentTransaction(connection)); + // create a table, this should not start a transaction + statement.executeUpdate("CREATE OR REPLACE TABLE AUTOCOMMIT_API_TEST (i int)"); + assertEquals(0, getCurrentTransaction(connection)); - // commit it using the api - connection.commit(); - assertFalse(connection.getAutoCommit()); - assertEquals(0, getCurrentTransaction(connection)); - resultSet = statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1"); - assertTrue(resultSet.next()); - assertEquals(1, resultSet.getInt(1)); - resultSet.close(); - - // 2. test rollback == - // delete from the table, should start a transaction. - statement.executeUpdate("DELETE FROM AUTOCOMMIT_API_TEST"); - assertNotEquals(0, getCurrentTransaction(connection)); - - // roll it back using the api - connection.rollback(); - assertFalse(connection.getAutoCommit()); - assertEquals(0, getCurrentTransaction(connection)); - resultSet = statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1"); - assertTrue(resultSet.next()); - assertEquals(1, resultSet.getInt(1)); - } finally { - if (statement != null) { + // insert into it this should start a transaction. + statement.executeUpdate("INSERT INTO AUTOCOMMIT_API_TEST VALUES (1)"); + assertNotEquals(0, getCurrentTransaction(connection)); + + // commit it using the api + connection.commit(); + assertFalse(connection.getAutoCommit()); + assertEquals(0, getCurrentTransaction(connection)); + try (ResultSet resultSet = + statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1")) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + // 2. test rollback == + // delete from the table, should start a transaction. 
+ statement.executeUpdate("DELETE FROM AUTOCOMMIT_API_TEST"); + assertNotEquals(0, getCurrentTransaction(connection)); + + // roll it back using the api + connection.rollback(); + assertFalse(connection.getAutoCommit()); + assertEquals(0, getCurrentTransaction(connection)); + try (ResultSet resultSet = + statement.executeQuery("SELECT COUNT(*) FROM AUTOCOMMIT_API_TEST WHERE i = 1")) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + } finally { statement.execute("DROP TABLE AUTOCOMMIT_API_TEST"); } - closeSQLObjects(resultSet, statement, connection); } } @@ -405,396 +374,362 @@ private void assertConstraintResults( @Test public void testBoolean() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - - DatabaseMetaData metadata = connection.getMetaData(); - - // Create a table with boolean columns - statement.execute("create or replace table testBooleanT1(c1 boolean)"); - - // Insert values into the table - statement.execute("insert into testBooleanT1 values(true), (false), (null)"); - - // Get values from the table - PreparedStatement preparedStatement = - connection.prepareStatement("select c1 from testBooleanT1"); - - // I. Test ResultSetMetaData interface - resultSet = preparedStatement.executeQuery(); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - - // Verify the column type is Boolean - assertEquals(Types.BOOLEAN, resultSetMetaData.getColumnType(1)); - // II. 
Test DatabaseMetadata interface - ResultSet columnMetaDataResultSet = - metadata.getColumns( - null, // catalog - null, // schema - "TESTBOOLEANT1", // table - null // column - ); - - resultSetMetaData = columnMetaDataResultSet.getMetaData(); - - // assert column count - assertEquals(24, resultSetMetaData.getColumnCount()); - - assertTrue(columnMetaDataResultSet.next()); - assertEquals(Types.BOOLEAN, columnMetaDataResultSet.getInt(5)); - } finally // cleanup - { - // drop the table - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); + + DatabaseMetaData metadata = connection.getMetaData(); + + // Create a table with boolean columns + statement.execute("create or replace table testBooleanT1(c1 boolean)"); + + // Insert values into the table + statement.execute("insert into testBooleanT1 values(true), (false), (null)"); + + // Get values from the table + try (PreparedStatement preparedStatement = + connection.prepareStatement("select c1 from testBooleanT1")) { + + // I. Test ResultSetMetaData interface + try (ResultSet resultSet = preparedStatement.executeQuery()) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // Verify the column type is Boolean + assertEquals(Types.BOOLEAN, resultSetMetaData.getColumnType(1)); + + // II. 
Test DatabaseMetadata interface + try (ResultSet columnMetaDataResultSet = + metadata.getColumns( + null, // catalog + null, // schema + "TESTBOOLEANT1", // table + null // column + )) { + resultSetMetaData = columnMetaDataResultSet.getMetaData(); + // assert column count + assertEquals(24, resultSetMetaData.getColumnCount()); + + assertTrue(columnMetaDataResultSet.next()); + assertEquals(Types.BOOLEAN, columnMetaDataResultSet.getInt(5)); + } + } + } + } finally { statement.execute("drop table testBooleanT1"); } - closeSQLObjects(resultSet, statement, connection); } } @Test public void testConstraints() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - - DatabaseMetaData metadata = connection.getMetaData(); - - // Create primary key tables - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsP1(c1 number unique, c2 " - + "number, constraint cons0 primary key (c1, c2))"); + ResultSet manualResultSet = null; - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsP2(c1 number " - + "constraint cons1 primary key, c2 number)"); - - // Create foreign key tables - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsF1(c1 number, c2 number, " - + "constraint cons3 foreign key (c1, c2) references " - + "testConstraintsP1(c1, c2))"); - - statement.execute( - "CREATE OR REPLACE TABLE testConstraintsF2(c1 number, c2 number, " - + "constraint cons4 foreign key (c1, c2) references " - + "testConstraintsP1(c1, c2), constraint cons5 " - + "foreign key (c2) references testConstraintsP2(c1))"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - // show primary keys - 
resultSet = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP1"); + DatabaseMetaData metadata = connection.getMetaData(); - // primary key for testConstraintsP1 should contain two rows - assertConstraintResults(resultSet, 2, 6, "testConstraintsP1", null); + // Create primary key tables + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsP1(c1 number unique, c2 " + + "number, constraint cons0 primary key (c1, c2))"); - resultSet = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP2"); + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsP2(c1 number " + + "constraint cons1 primary key, c2 number)"); - // primary key for testConstraintsP2 contains 1 row - assertConstraintResults(resultSet, 1, 6, "testConstraintsP2", null); - resultSet.close(); - resultSet.next(); + // Create foreign key tables + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsF1(c1 number, c2 number, " + + "constraint cons3 foreign key (c1, c2) references " + + "testConstraintsP1(c1, c2))"); - // Show imported keys - resultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF1"); + statement.execute( + "CREATE OR REPLACE TABLE testConstraintsF2(c1 number, c2 number, " + + "constraint cons4 foreign key (c1, c2) references " + + "testConstraintsP1(c1, c2), constraint cons5 " + + "foreign key (c2) references testConstraintsP2(c1))"); - assertConstraintResults(resultSet, 2, 14, null, "testConstraintsF1"); + // show primary keys + try (ResultSet resultSet = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP1")) { - resultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF2"); + // primary key for testConstraintsP1 should contain two rows + assertConstraintResults(resultSet, 2, 6, "testConstraintsP1", null); + } - assertConstraintResults(resultSet, 3, 14, null, "testConstraintsF2"); - resultSet.close(); - resultSet.next(); + ResultSet resultSet1 = metadata.getPrimaryKeys(null, null, "TESTCONSTRAINTSP2"); - // show exported keys - resultSet = 
metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP1"); + // primary key for testConstraintsP2 contains 1 row + assertConstraintResults(resultSet1, 1, 6, "testConstraintsP2", null); + resultSet1.close(); + assertFalse(resultSet1.next()); - assertConstraintResults(resultSet, 4, 14, "testConstraintsP1", null); + // Show imported keys + try (ResultSet resultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF1")) { + assertConstraintResults(resultSet, 2, 14, null, "testConstraintsF1"); + } - resultSet = metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP2"); + manualResultSet = metadata.getImportedKeys(null, null, "TESTCONSTRAINTSF2"); - assertConstraintResults(resultSet, 1, 14, "testConstraintsP2", null); - resultSet.close(); - resultSet.next(); + assertConstraintResults(manualResultSet, 3, 14, null, "testConstraintsF2"); + manualResultSet.close(); + assertFalse(manualResultSet.next()); - // show cross references - resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF1"); + // show exported keys + try (ResultSet resultSet = metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP1")) { + assertConstraintResults(resultSet, 4, 14, "testConstraintsP1", null); + } - assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF1"); + manualResultSet = metadata.getExportedKeys(null, null, "TESTCONSTRAINTSP2"); - resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF2"); + assertConstraintResults(manualResultSet, 1, 14, "testConstraintsP2", null); + manualResultSet.close(); + assertFalse(manualResultSet.next()); - assertConstraintResults(resultSet, 1, 14, "testConstraintsP2", "testConstraintsF2"); + // show cross references + try (ResultSet resultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF1")) { + assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF1"); + } 
- resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF2"); + try (ResultSet resultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF2")) { + assertConstraintResults(resultSet, 1, 14, "testConstraintsP2", "testConstraintsF2"); + } - assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF2"); + try (ResultSet resultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP1", null, null, "TESTCONSTRAINTSF2")) { + assertConstraintResults(resultSet, 2, 14, "testConstraintsP1", "testConstraintsF2"); + } - resultSet = - metadata.getCrossReference( - null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF1"); + manualResultSet = + metadata.getCrossReference( + null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF1"); - assertFalse( - "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty", - resultSet.next()); - resultSet.close(); - resultSet.next(); - } finally { - if (statement != null) { + assertFalse( + "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty", + manualResultSet.next()); + manualResultSet.close(); + assertFalse(manualResultSet.next()); + } finally { statement.execute("DROP TABLE TESTCONSTRAINTSF1"); statement.execute("DROP TABLE TESTCONSTRAINTSF2"); statement.execute("DROP TABLE TESTCONSTRAINTSP1"); statement.execute("DROP TABLE TESTCONSTRAINTSP2"); } - closeSQLObjects(resultSet, statement, connection); } } @Test public void testQueryWithMaxRows() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; final int maxRows = 30; - - try { - connection = getConnection(); - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.setMaxRows(maxRows); - resultSet = statement.executeQuery("SELECT * FROM 
orders_jdbc"); + try (ResultSet resultSet = statement.executeQuery("SELECT * FROM orders_jdbc")) { - // assert column count - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertEquals(9, resultSetMetaData.getColumnCount()); - assertEquals(maxRows, countRows(resultSet)); - } finally { - closeSQLObjects(resultSet, statement, connection); + // assert column count + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + assertEquals(9, resultSetMetaData.getColumnCount()); + assertEquals(maxRows, countRows(resultSet)); + } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCancelQueryBySystemFunction() throws Throwable { - Statement statement = null; - ResultSet resultSet = null; - - final Connection connection = getConnection(); - - try { - // Get the current session identifier - Statement getSessionIdStmt = connection.createStatement(); + try (Connection connection = getConnection(); + Statement getSessionIdStmt = connection.createStatement()) { getSessionIdStmt.setMaxRows(30); - resultSet = getSessionIdStmt.executeQuery("SELECT current_session()"); - assertTrue(resultSet.next()); - final long sessionId = resultSet.getLong(1); - Timer timer = new Timer(); - timer.schedule( - new TimerTask() { - @Override - public void run() { - try { - PreparedStatement cancelAll; - cancelAll = connection.prepareStatement("call system$cancel_all_queries(?)"); - - // bind integer - cancelAll.setLong(1, sessionId); - cancelAll.executeQuery(); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "Cancel failed with exception {}", ex); + try (ResultSet resultSet = getSessionIdStmt.executeQuery("SELECT current_session()")) { + assertTrue(resultSet.next()); + final long sessionId = resultSet.getLong(1); + Timer timer = new Timer(); + timer.schedule( + new TimerTask() { + @Override + public void run() { + try { + PreparedStatement cancelAll; + cancelAll = connection.prepareStatement("call 
system$cancel_all_queries(?)"); + + // bind integer + cancelAll.setLong(1, sessionId); + cancelAll.executeQuery(); + } catch (SQLException ex) { + logger.log(Level.SEVERE, "Cancel failed with exception {}", ex); + } } - } - }, - 5000); - + }, + 5000); + } // execute a query for 120s - statement = connection.createStatement(); - statement.setMaxRows(30); - - resultSet = statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))"); - + try (Statement statement = connection.createStatement()) { + statement.setMaxRows(30); + try (ResultSet resultSet = + statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))")) {} + } fail("should raise an exception"); } catch (SQLException ex) { // assert the sqlstate is what we expect (QUERY CANCELLED) assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testDBMetadata() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); - + int cnt = 0; + try (Connection connection = getConnection()) { + try (Statement statement = connection.createStatement()) { + statement.execute("alter SESSION set CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX=true"); + } // get database metadata DatabaseMetaData metaData = connection.getMetaData(); // the following will issue - ResultSet databaseSet = metaData.getCatalogs(); - assertTrue("databases shouldn't be empty", databaseSet.next()); - - // "show schemas in [databaseName]" - ResultSet schemaSet = metaData.getSchemas(connection.getCatalog(), connection.getSchema()); - assertTrue("schemas shouldn't be empty", schemaSet.next()); - assertTrue( - "database should be " + connection.getCatalog(), - 
connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); - assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); - - // snow tables in a schema - ResultSet tableSet = - metaData.getTables( - connection.getCatalog(), connection.getSchema(), ORDERS_JDBC, null); // types - assertTrue( - String.format( - "table %s should exists in db: %s, schema: %s", - ORDERS_JDBC, connection.getCatalog(), connection.getSchema()), - tableSet.next()); - assertTrue( - "database should be " + connection.getCatalog(), - connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); - assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); - assertTrue( - "table should be orders_jdbc", ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3))); - - ResultSet tableMetaDataResultSet = + try (ResultSet databaseSet = metaData.getCatalogs()) { + assertTrue("databases shouldn't be empty", databaseSet.next()); + + // "show schemas in [databaseName]" + ResultSet schemaSet = metaData.getSchemas(connection.getCatalog(), connection.getSchema()); + assertTrue("schemas shouldn't be empty", schemaSet.next()); + assertTrue( + "database should be " + connection.getCatalog(), + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + assertTrue( + "schema should be " + connection.getSchema(), + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + // snow tables in a schema + try (ResultSet tableSet = + metaData.getTables( + connection.getCatalog(), connection.getSchema(), ORDERS_JDBC, null)) { // types + assertTrue( + String.format( + "table %s should exists in db: %s, schema: %s", + ORDERS_JDBC, connection.getCatalog(), connection.getSchema()), + tableSet.next()); + assertTrue( + "database should be " + connection.getCatalog(), + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + assertTrue( + "schema should 
be " + connection.getSchema(), + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + assertTrue( + "table should be orders_jdbc", ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3))); + } + } + + try (ResultSet tableMetaDataResultSet = metaData.getTables( null, // catalog null, // schema ORDERS_JDBC, // table - null); // types + null)) { // types - ResultSetMetaData resultSetMetaData = tableMetaDataResultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = tableMetaDataResultSet.getMetaData(); - assertEquals(10, resultSetMetaData.getColumnCount()); + assertEquals(10, resultSetMetaData.getColumnCount()); - // assert we get 1 rows - int cnt = 0; - while (tableMetaDataResultSet.next()) { - assertTrue(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))); - ++cnt; + // assert we get 1 rows + cnt = 0; + while (tableMetaDataResultSet.next()) { + assertTrue(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))); + ++cnt; + } + assertEquals("number of tables", 1, cnt); } - assertEquals("number of tables", 1, cnt); - - tableMetaDataResultSet.close(); - // test pattern - tableMetaDataResultSet = + try (ResultSet tableMetaDataResultSet = metaData.getTables( null, // catalog null, // schema "%", // table - null); // types - - resultSetMetaData = tableMetaDataResultSet.getMetaData(); + null)) { // types - // assert column count - assertEquals(10, resultSetMetaData.getColumnCount()); + ResultSetMetaData resultSetMetaData = tableMetaDataResultSet.getMetaData(); - // assert we get orders_jdbc - boolean found = false; - while (tableMetaDataResultSet.next()) { - // assert the table name - if (ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))) { - found = true; - break; + // assert column count + assertEquals(10, resultSetMetaData.getColumnCount()); + + // assert we get orders_jdbc + boolean found = false; + while (tableMetaDataResultSet.next()) { + // assert the table name + if 
(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))) { + found = true; + break; + } } + assertTrue("orders_jdbc not found", found); } - assertTrue("orders_jdbc not found", found); - - tableMetaDataResultSet.close(); // get column metadata - ResultSet columnMetaDataResultSet = metaData.getColumns(null, null, ORDERS_JDBC, null); + try (ResultSet columnMetaDataResultSet = metaData.getColumns(null, null, ORDERS_JDBC, null)) { - resultSetMetaData = columnMetaDataResultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = columnMetaDataResultSet.getMetaData(); - // assert column count - assertEquals(24, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(24, resultSetMetaData.getColumnCount()); - // assert we get 9 rows - cnt = 0; - while (columnMetaDataResultSet.next()) { - // SNOW-16881: assert database name - assertTrue(connection.getCatalog().equalsIgnoreCase(columnMetaDataResultSet.getString(1))); + // assert we get 9 rows + cnt = 0; + while (columnMetaDataResultSet.next()) { + // SNOW-16881: assert database name + assertTrue( + connection.getCatalog().equalsIgnoreCase(columnMetaDataResultSet.getString(1))); - // assert the table name and column name, data type and type name - assertTrue(ORDERS_JDBC.equalsIgnoreCase(columnMetaDataResultSet.getString(3))); + // assert the table name and column name, data type and type name + assertTrue(ORDERS_JDBC.equalsIgnoreCase(columnMetaDataResultSet.getString(3))); - assertTrue(columnMetaDataResultSet.getString(4).startsWith("C")); + assertTrue(columnMetaDataResultSet.getString(4).startsWith("C")); - assertEquals(Types.VARCHAR, columnMetaDataResultSet.getInt(5)); + assertEquals(Types.VARCHAR, columnMetaDataResultSet.getInt(5)); - assertTrue("VARCHAR".equalsIgnoreCase(columnMetaDataResultSet.getString(6))); + assertTrue("VARCHAR".equalsIgnoreCase(columnMetaDataResultSet.getString(6))); - if (cnt == 0) { - // assert comment - assertEquals("JDBC", columnMetaDataResultSet.getString(12)); 
+ if (cnt == 0) { + // assert comment + assertEquals("JDBC", columnMetaDataResultSet.getString(12)); - // assert nullable - assertEquals(DatabaseMetaData.columnNoNulls, columnMetaDataResultSet.getInt(11)); + // assert nullable + assertEquals(DatabaseMetaData.columnNoNulls, columnMetaDataResultSet.getInt(11)); - // assert is_nullable - assertEquals("NO", columnMetaDataResultSet.getString(18)); + // assert is_nullable + assertEquals("NO", columnMetaDataResultSet.getString(18)); + } + ++cnt; } - ++cnt; + assertEquals(9, cnt); } - assertEquals(9, cnt); - - columnMetaDataResultSet.close(); // create a table with mix cases - statement = connection.createStatement(); - statement.execute("create or replace table \"testDBMetadata\" (a timestamp_ltz)"); - columnMetaDataResultSet = metaData.getColumns(null, null, "testDBMetadata", null); + try (Statement statement = connection.createStatement()) { + statement.execute("create or replace table \"testDBMetadata\" (a timestamp_ltz)"); + try (ResultSet columnMetaDataResultSet = + metaData.getColumns(null, null, "testDBMetadata", null)) { - // assert we get 1 row - cnt = 0; - while (columnMetaDataResultSet.next()) { - // assert the table name and column name, data type and type name - assertTrue("testDBMetadata".equalsIgnoreCase(columnMetaDataResultSet.getString(3))); + // assert we get 1 row + cnt = 0; + while (columnMetaDataResultSet.next()) { + // assert the table name and column name, data type and type name + assertTrue("testDBMetadata".equalsIgnoreCase(columnMetaDataResultSet.getString(3))); - assertEquals(Types.TIMESTAMP, columnMetaDataResultSet.getInt(5)); + assertEquals(Types.TIMESTAMP, columnMetaDataResultSet.getInt(5)); - assertTrue(columnMetaDataResultSet.getString(4).equalsIgnoreCase("a")); - cnt++; - } - assertEquals(1, cnt); - } finally { - if (statement != null) { - statement.execute("DROP TABLE IF EXISTS \"testDBMetadata\""); + assertTrue(columnMetaDataResultSet.getString(4).equalsIgnoreCase("a")); + cnt++; + } + 
assertEquals(1, cnt); + } } - closeSQLObjects(statement, connection); + connection.createStatement().execute("DROP TABLE IF EXISTS \"testDBMetadata\""); } } @@ -804,57 +739,53 @@ public void testPutWithWildcardGCP() throws Throwable { Properties _connectionProperties = new Properties(); _connectionProperties.put("inject_wait_in_put", 5); _connectionProperties.put("ssl", "off"); - Connection connection = - getConnection( - DONT_INJECT_SOCKET_TIMEOUT, _connectionProperties, false, false, "gcpaccount"); - Statement statement = connection.createStatement(); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - // replace file name with wildcard character - sourceFilePath = sourceFilePath.replace("orders_100.csv", "orders_10*.csv"); - - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + try (Connection connection = + getConnection( + DONT_INJECT_SOCKET_TIMEOUT, _connectionProperties, false, false, "gcpaccount"); + Statement statement = connection.createStatement()) { + try { + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + // replace file name with wildcard character + sourceFilePath = sourceFilePath.replace("orders_100.csv", "orders_10*.csv"); - try { - statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); - statement.execute("CREATE OR REPLACE STAGE wildcard_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage")); + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + statement.execute("CREATE OR REPLACE STAGE wildcard_stage"); + assertTrue( + "Failed to 
put a file", + statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage")); - findFile(statement, "ls @wildcard_stage/"); + findFile(statement, "ls @wildcard_stage/"); - assertTrue( - "Failed to get files", - statement.execute( - "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + assertTrue( + "Failed to get files", + statement.execute( + "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - File downloaded; - // download the files we just uploaded to stage - for (int i = 0; i < fileNames.length; i++) { - // Make sure that the downloaded file exists, it should be gzip compressed - downloaded = new File(destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); - assert (downloaded.exists()); + File downloaded; + // download the files we just uploaded to stage + for (int i = 0; i < fileNames.length; i++) { + // Make sure that the downloaded file exists, it should be gzip compressed + downloaded = new File(destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); + assert (downloaded.exists()); - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); - p.waitFor(); + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); + p.waitFor(); - String individualFilePath = sourceFilePath.replace("orders_10*.csv", fileNames[i]); + String individualFilePath = sourceFilePath.replace("orders_10*.csv", fileNames[i]); - File original = new File(individualFilePath); - File unzipped = new File(destFolderCanonicalPathWithSeparator + fileNames[i]); - assert (original.length() == unzipped.length()); - assert (FileUtils.contentEquals(original, unzipped)); + File original = new File(individualFilePath); + File unzipped = new File(destFolderCanonicalPathWithSeparator + fileNames[i]); + assert (original.length() == unzipped.length()); + assert (FileUtils.contentEquals(original, unzipped)); + } + 
} finally { + statement.execute("DROP STAGE IF EXISTS wildcard_stage"); } - - } finally { - statement.execute("DROP STAGE IF EXISTS wildcard_stage"); - statement.close(); - connection.close(); } } @@ -868,110 +799,104 @@ public void testPutWithWildcardGCP() throws Throwable { private void copyContentFrom(File file1, File file2) throws Exception { FileInputStream inputStream = new FileInputStream(file1); FileOutputStream outputStream = new FileOutputStream(file2); - FileChannel fIn = inputStream.getChannel(); - FileChannel fOut = outputStream.getChannel(); - fOut.transferFrom(fIn, 0, fIn.size()); - fIn.position(0); - fOut.transferFrom(fIn, fIn.size(), fIn.size()); - fOut.close(); - fIn.close(); + try (FileChannel fIn = inputStream.getChannel(); + FileChannel fOut = outputStream.getChannel()) { + fOut.transferFrom(fIn, 0, fIn.size()); + fIn.position(0); + fOut.transferFrom(fIn, fIn.size(), fIn.size()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGetLargeFileGCP() throws Throwable { - Connection connection = getConnection("gcpaccount"); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { + try { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + File largeTempFile = tmpFolder.newFile("largeFile.csv"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { + bw.write("Creating large test file for GCP PUT/GET test"); + bw.write(System.lineSeparator()); + bw.write("Creating large test file for GCP PUT/GET test"); + 
bw.write(System.lineSeparator()); + } + File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - File largeTempFile = tmpFolder.newFile("largeFile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile)); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.close(); - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - - String sourceFilePath = largeTempFile.getCanonicalPath(); - - try { - // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, - // which is a large file - for (int i = 0; i < 12; i++) { - copyContentFrom(largeTempFile, largeTempFile2); - copyContentFrom(largeTempFile2, largeTempFile); - } + String sourceFilePath = largeTempFile.getCanonicalPath(); - statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, + // which is a large file + for (int i = 0; i < 12; i++) { + copyContentFrom(largeTempFile, largeTempFile2); + copyContentFrom(largeTempFile2, largeTempFile); + } - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE largefile_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE largefile_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @largefile_stage/"); + // check that file exists in stage after PUT + findFile(statement, "ls @largefile_stage/"); - // create a new table with columns matching 
CSV file - statement.execute("create or replace table large_table (colA string)"); - // copy rows from file into table - statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); - // copy back from table into different stage - statement.execute("create or replace stage extra_stage"); - statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); + // create a new table with columns matching CSV file + statement.execute("create or replace table large_table (colA string)"); + // copy rows from file into table + statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); + // copy back from table into different stage + statement.execute("create or replace stage extra_stage"); + statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); - - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - p.waitFor(); - - // compare the original file with the file that's been uploaded, copied into a table, copied - // back into a stage, - // downloaded, and unzipped - File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); - } finally { - statement.execute("DROP STAGE IF EXISTS largefile_stage"); - statement.execute("DROP STAGE IF EXISTS extra_stage"); - statement.execute("DROP TABLE IF EXISTS large_table"); - statement.close(); - connection.close(); + // get file from new stage + assertTrue( + 
"Failed to get files", + statement.execute( + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + assert (downloaded.exists()); + + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + p.waitFor(); + + // compare the original file with the file that's been uploaded, copied into a table, copied + // back into a stage, + // downloaded, and unzipped + File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); + assert (largeTempFile.length() == unzipped.length()); + assert (FileUtils.contentEquals(largeTempFile, unzipped)); + } finally { + statement.execute("DROP STAGE IF EXISTS largefile_stage"); + statement.execute("DROP STAGE IF EXISTS extra_stage"); + statement.execute("DROP TABLE IF EXISTS large_table"); + } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutOverwrite() throws Throwable { - Connection connection = null; - Statement statement = null; - // create 2 files: an original, and one that will overwrite the original File file1 = tmpFolder.newFile("testfile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(file1)); - bw.write("Writing original file content. This should get overwritten."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { + bw.write("Writing original file content. This should get overwritten."); + } File file2 = tmpFolder2.newFile("testfile.csv"); - bw = new BufferedWriter(new FileWriter(file2)); - bw.write("This is all new! This should be the result of the overwriting."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { + bw.write("This is all new! 
This should be the result of the overwriting."); + } String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); @@ -982,51 +907,49 @@ public void testPutOverwrite() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE testing_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE testing_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); + // check that file exists in stage after PUT + findFile(statement, "ls @testing_stage/"); - // put another file in same stage with same filename with overwrite = true - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true")); + // put another file in same stage with same filename with overwrite = true + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true")); - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); + // check that file exists in stage after PUT + 
findFile(statement, "ls @testing_stage/"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - assert (downloaded.exists()); + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + assert (downloaded.exists()); - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - p.waitFor(); + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + p.waitFor(); - File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); - assert (FileUtils.contentEqualsIgnoreEOL(file2, unzipped, null)); - } finally { - statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); - statement.close(); + File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); + assert (FileUtils.contentEqualsIgnoreEOL(file2, unzipped, null)); + } finally { + statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); + } } } } @@ -1034,20 +957,14 @@ public void testPutOverwrite() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPut() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - 
connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - // load file test - // create a unique data file name by using current timestamp in millis + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { try { + // load file test + // create a unique data file name by using current timestamp in millis statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); // test external table load statement.execute("CREATE OR REPLACE TABLE testLoadToLocalFS(a number)"); @@ -1060,361 +977,293 @@ public void testPut() throws Throwable { + getFullPathFileInResource(TEST_DATA_FILE) + " @%testLoadToLocalFS/orders parallel=10")); - resultSet = statement.getResultSet(); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = statement.getResultSet()) { - // assert column count - assertTrue(resultSetMetaData.getColumnCount() > 0); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertTrue(resultSet.next()); // one row - assertFalse(resultSet.next()); + // assert column count + assertTrue(resultSetMetaData.getColumnCount() > 0); + assertTrue(resultSet.next()); // one row + assertFalse(resultSet.next()); + } findFile( statement, "ls @%testLoadToLocalFS/ pattern='.*orders/" + TEST_DATA_FILE + ".g.*'"); // remove files - resultSet = + try (ResultSet resultSet = statement.executeQuery( - "rm @%testLoadToLocalFS/ pattern='.*orders/" + TEST_DATA_FILE + ".g.*'"); - - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertTrue(resultSetMetaData.getColumnCount() >= 1); + "rm @%testLoadToLocalFS/ pattern='.*orders/" + TEST_DATA_FILE + ".g.*'")) { + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + + // assert column count + assertTrue(resultSetMetaData.getColumnCount() >= 1); + + // assert we get 1 row for the file we copied + assertTrue(resultSet.next()); + 
assertNotNull(resultSet.getString(1)); + assertFalse(resultSet.next()); + try { + resultSet.getString(1); // no more row + fail("must fail"); + } catch (SQLException ex) { + assertEquals( + (int) ErrorCode.COLUMN_DOES_NOT_EXIST.getMessageCode(), ex.getErrorCode()); + } - // assert we get 1 row for the file we copied - assertTrue(resultSet.next()); - assertNotNull(resultSet.getString(1)); - assertFalse(resultSet.next()); - try { - resultSet.getString(1); // no more row - fail("must fail"); - } catch (SQLException ex) { - assertEquals((int) ErrorCode.COLUMN_DOES_NOT_EXIST.getMessageCode(), ex.getErrorCode()); + Thread.sleep(100); } - - Thread.sleep(100); - // show files again - resultSet = statement.executeQuery("ls @%testLoadToLocalFS/ pattern='.*orders/orders.*'"); - - // assert we get 0 row - assertFalse(resultSet.next()); + try (ResultSet resultSet = + statement.executeQuery("ls @%testLoadToLocalFS/ pattern='.*orders/orders.*'")) { + // assert we get 0 row + assertFalse(resultSet.next()); + } } finally { statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); - statement.close(); } - - } finally { - closeSQLObjects(resultSet, statement, connection); } } } static void findFile(Statement statement, String checkSQL) throws Throwable { boolean fileFound = false; - ResultSet resultSet = null; // tolerate at most 60 tries for the following loop for (int numSecs = 0; numSecs <= 60; numSecs++) { // show files - resultSet = statement.executeQuery(checkSQL); + try (ResultSet resultSet = statement.executeQuery(checkSQL)) { + + if (resultSet.next()) { + fileFound = true; + break; + } + // give enough time for s3 eventual consistency for US region + Thread.sleep(1000); + assertTrue("Could not find a file", fileFound); - if (resultSet.next()) { - fileFound = true; - break; + // assert the first column not null + assertNotNull("Null result", resultSet.getString(1)); } - // give enough time for s3 eventual consistency for US region - Thread.sleep(1000); } - 
assertTrue("Could not find a file", fileFound); - - // assert the first column not null - assertNotNull("Null result", resultSet.getString(1)); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSQLError42S02() throws SQLException { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // execute a bad query - try { - resultSet = statement.executeQuery("SELECT * FROM nonexistence"); - + try (ResultSet resultSet = statement.executeQuery("SELECT * FROM nonexistence")) { fail("SQL exception not raised"); } catch (SQLException ex1) { // assert the sqlstate "42S02" which means BASE_TABLE_OR_VIEW_NOT_FOUND assertEquals("sqlstate mismatch", "42S02", ex1.getSQLState()); } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testExplainPlan() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); - // test explain plan: sorry not available for general but debugging purpose only - resultSet = statement.executeQuery("EXPLAIN PLAN FOR SELECT c1 FROM orders_jdbc"); + // test explain plan: sorry not available for general but debugging purpose only + ResultSet resultSet = + statement.executeQuery("EXPLAIN PLAN FOR SELECT c1 FROM orders_jdbc")) { ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); assertTrue("must return more than 4 columns", resultSetMetaData.getColumnCount() >= 4); assertTrue("must return more than 3 rows", 
countRows(resultSet) > 3); - - statement.close(); - - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testTimestampParsing() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - resultSet = - statement.executeQuery( - "select to_timestamp('2013-05-08T15:39:20.123-07:00') from orders_jdbc"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select to_timestamp('2013-05-08T15:39:20.123-07:00') from orders_jdbc")) { assertTrue(resultSet.next()); assertEquals("Wed, 08 May 2013 15:39:20 -0700", resultSet.getString(1)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testDateParsing() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - statement = connection.createStatement(); - resultSet = statement.executeQuery("select to_date('0001-01-01')"); - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("select to_date('0001-01-01')")) { assertTrue(resultSet.next()); assertEquals("0001-01-01", resultSet.getString(1)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testTimeParsing() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - statement = connection.createStatement(); - resultSet = statement.executeQuery("select to_time('15:39:20.123') from orders_jdbc"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + 
statement.executeQuery("select to_time('15:39:20.123') from orders_jdbc")) { assertTrue(resultSet.next()); assertEquals("15:39:20", resultSet.getString(1)); - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testClientSideSorting() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; + ResultSetMetaData resultSetMetaData = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // turn on sorting mode statement.execute("set-sf-property sort on"); - resultSet = statement.executeQuery("SELECT c3 FROM orders_jdbc"); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = statement.executeQuery("SELECT c3 FROM orders_jdbc")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert the values for the first 5 rows - for (int i = 0; i < 5; i++) { - assertTrue(resultSet.next()); + // assert the values for the first 5 rows + for (int i = 0; i < 5; i++) { + assertTrue(resultSet.next()); - // assert each column is 'F' - assertEquals("F", resultSet.getString(1)); + // assert each column is 'F' + assertEquals("F", resultSet.getString(1)); + } } - // turn off sorting mode statement.execute("set-sf-property sort off"); - resultSet = statement.executeQuery("SELECT c3 FROM orders_jdbc order by c3 desc"); + try (ResultSet resultSet = + statement.executeQuery("SELECT c3 FROM orders_jdbc order by c3 desc")) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, 
resultSetMetaData.getColumnCount()); - // assert the values for the first 4 rows - for (int i = 0; i < 4; i++) { - assertTrue(resultSet.next()); + // assert the values for the first 4 rows + for (int i = 0; i < 4; i++) { + assertTrue(resultSet.next()); - // assert each column is 'P' - assertEquals("P", resultSet.getString(1)); + // assert each column is 'P' + assertEquals("P", resultSet.getString(1)); + } } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testUpdateCount() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // create test table - statement.execute("CREATE OR REPLACE TABLE testUpdateCount(version number, name string)"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // create test table + statement.execute("CREATE OR REPLACE TABLE testUpdateCount(version number, name string)"); - // insert two rows - int numRows = - statement.executeUpdate("INSERT INTO testUpdateCount values (1, 'a'), (2, 'b')"); + // insert two rows + int numRows = + statement.executeUpdate("INSERT INTO testUpdateCount values (1, 'a'), (2, 'b')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 2, numRows); - } finally { - if (statement != null) { + assertEquals("Unexpected number of rows inserted: " + numRows, 2, numRows); + } finally { statement.execute("DROP TABLE if exists testUpdateCount"); } - closeSQLObjects(null, statement, connection); } } @Test public void testSnow4245() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - // set timestamp format - statement.execute("alter session set timestamp_input_format = 'YYYY-MM-DD HH24:MI:SS';"); - - // create test table with different time zone 
flavors - String createSQL = - "create or replace table testSnow4245(t timestamp with local time " - + "zone,ntz timestamp without time zone,tz timestamp with time zone)"; - statement.execute(createSQL); - - // populate - int numRows = - statement.executeUpdate( - "insert into testSnow4245 values(NULL,NULL,NULL)," - + "('2013-06-04 01:00:04','2013-06-04 01:00:04','2013-06-04 01:00:04')," - + "('2013-06-05 23:00:05','2013-06-05 23:00:05','2013-06-05 23:00:05')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 3, numRows); - - // query the data - resultSet = - statement.executeQuery( - "SELECT * FROM testSnow4245 order by 1 " - + "nulls first, 2 nulls first, 3 nulls first"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // set timestamp format + statement.execute("alter session set timestamp_input_format = 'YYYY-MM-DD HH24:MI:SS';"); + + // create test table with different time zone flavors + String createSQL = + "create or replace table testSnow4245(t timestamp with local time " + + "zone,ntz timestamp without time zone,tz timestamp with time zone)"; + statement.execute(createSQL); + + // populate + int numRows = + statement.executeUpdate( + "insert into testSnow4245 values(NULL,NULL,NULL)," + + "('2013-06-04 01:00:04','2013-06-04 01:00:04','2013-06-04 01:00:04')," + + "('2013-06-05 23:00:05','2013-06-05 23:00:05','2013-06-05 23:00:05')"); + assertEquals("Unexpected number of rows inserted: " + numRows, 3, numRows); + + // query the data + try (ResultSet resultSet = + statement.executeQuery( + "SELECT * FROM testSnow4245 order by 1 " + + "nulls first, 2 nulls first, 3 nulls first")) { - int i = 0; - // assert we get 3 rows + int i = 0; + // assert we get 3 rows - while (resultSet.next()) { - // assert each column is not null except the first row + while (resultSet.next()) { + // assert each column is not null except the first row - if (i == 0) { - for (int j = 1; j < 4; j++) { - 
assertNull(resultSet.getString(j), resultSet.getString(j)); - } - } else { - for (int j = 1; j < 4; j++) { - assertNotNull(resultSet.getString(j), resultSet.getString(j)); + if (i == 0) { + for (int j = 1; j < 4; j++) { + assertNull(resultSet.getString(j), resultSet.getString(j)); + } + } else { + for (int j = 1; j < 4; j++) { + assertNotNull(resultSet.getString(j), resultSet.getString(j)); + } + } + i = i + 1; } } - i = i + 1; - } - } finally { - if (statement != null) { + } finally { statement.execute("drop table testSnow4245"); } - closeSQLObjects(resultSet, statement, connection); } } /** SNOW-4394 - Four bytes UTF-8 characters are not returned correctly. */ @Test public void testSnow4394() throws Throwable { - Connection connection = null; - Statement statement = null; - String tableName = String.format("snow4394_%s", UUID.randomUUID().toString()).replaceAll("-", "_"); - try { - connection = getConnection(); - - statement = connection.createStatement(); - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // create test table - statement.execute(String.format("CREATE OR REPLACE TABLE %s(str string)", tableName)); - - String data = "What is \ud83d\ude12?"; - // insert two rows - int numRows = - statement.executeUpdate( - String.format("INSERT INTO %s(str) values('%s')", tableName, data)); - assertEquals("Unexpected number of rows inserted: " + numRows, 1, numRows); - - ResultSet rset = statement.executeQuery(String.format("SELECT str FROM %s", tableName)); - String ret = null; - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret, data, ret); - } finally { - if (statement != null) { + try { + statement.execute(String.format("CREATE OR REPLACE TABLE %s(str string)", tableName)); + + String data = "What is \ud83d\ude12?"; + // insert two rows + int numRows = + statement.executeUpdate( + String.format("INSERT INTO %s(str) values('%s')", tableName, 
data)); + assertEquals("Unexpected number of rows inserted: " + numRows, 1, numRows); + + try (ResultSet rset = + statement.executeQuery(String.format("SELECT str FROM %s", tableName))) { + String ret = null; + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret, data, ret); + } + } finally { statement.execute(String.format("DROP TABLE if exists %s", tableName)); - statement.close(); } - closeSQLObjects(null, statement, connection); } } @@ -1447,1116 +1296,1016 @@ private void addBindBatch(PreparedStatement preparedStatement, java.sql.Date sql @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void test31448() throws Throwable { - Connection connection = getConnection(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + statement.execute( + "alter session set enable_fix_31448_2=2, " + "error_on_generic_pruner=true;"); - Statement statement = connection.createStatement(); - - statement.execute("alter session set enable_fix_31448_2=2, " + "error_on_generic_pruner=true;"); - - statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); - - statement.execute("create or replace table " + "bug56658(iv number, tsv timestamp_ntz)"); - statement.execute( - "insert into bug56658 select seq8(), " - + "timestampadd(day, seq8(), '1970-01-13 00:00:00'::timestamp_ntz)\n" - + "from table(generator(rowcount=>20))"); - - connection - .unwrap(SnowflakeConnectionV1.class) - .getSfSession() - .setTimestampMappedType(SnowflakeType.TIMESTAMP_NTZ); - Timestamp ts = buildTimestamp(1970, 0, 15, 10, 14, 30, 0); - PreparedStatement preparedStatement = - connection.prepareStatement( - "select iv, tsv from bug56658 where tsv" + " >= ? and tsv <= ? 
order by iv;"); - statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); - Timestamp ts2 = buildTimestamp(1970, 0, 18, 10, 14, 30, 0); - preparedStatement.setTimestamp(1, ts); - preparedStatement.setTimestamp(2, ts2); - preparedStatement.executeQuery(); + statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); + + statement.execute("create or replace table " + "bug56658(iv number, tsv timestamp_ntz)"); + statement.execute( + "insert into bug56658 select seq8(), " + + "timestampadd(day, seq8(), '1970-01-13 00:00:00'::timestamp_ntz)\n" + + "from table(generator(rowcount=>20))"); + + connection + .unwrap(SnowflakeConnectionV1.class) + .getSfSession() + .setTimestampMappedType(SnowflakeType.TIMESTAMP_NTZ); + Timestamp ts = buildTimestamp(1970, 0, 15, 10, 14, 30, 0); + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "select iv, tsv from bug56658 where tsv" + " >= ? and tsv <= ? order by iv;")) { + statement.execute("alter session set timestamp_type_mapping=timestamp_ntz"); + Timestamp ts2 = buildTimestamp(1970, 0, 18, 10, 14, 30, 0); + preparedStatement.setTimestamp(1, ts); + preparedStatement.setTimestamp(2, ts2); + preparedStatement.executeQuery(); + } + } } @Test public void testBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - preparedStatement = connection.prepareStatement("SELECT ?, ?"); - - // bind integer - preparedStatement.setInt(1, 1); - preparedStatement.setString(2, "hello"); - resultSet = preparedStatement.executeQuery(); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - - // assert we get 1 
rows - assertTrue(resultSet.next()); - - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - - // bind float - preparedStatement.setDouble(1, 1.2); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(1)); - - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("double", 1.2, resultSet.getDouble(1), 0); - assertEquals("string", "hello", resultSet.getString(2)); - - // bind string - preparedStatement.setString(1, "hello"); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); + ResultSetMetaData resultSetMetaData = null; + Timestamp ts = null; + Time tm = null; + java.sql.Date sqlDate = null; + int[] updateCounts; + try (Connection connection = getConnection()) { + try (PreparedStatement preparedStatement = connection.prepareStatement("SELECT ?, ?")) { - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + // bind integer + preparedStatement.setInt(1, 1); + preparedStatement.setString(2, "hello"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("string1", "hello", resultSet.getString(1)); - assertEquals("string2", "hello", resultSet.getString(2)); + resultSetMetaData = resultSet.getMetaData(); - // bind date - java.sql.Date sqlDate = java.sql.Date.valueOf("2014-08-26"); - preparedStatement.setDate(1, sqlDate); - resultSet = preparedStatement.executeQuery(); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - 
resultSetMetaData = resultSet.getMetaData(); + // assert we get 1 rows + assertTrue(resultSet.next()); - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(1)); + assertEquals("integer", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind float + preparedStatement.setDouble(1, 1.2); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("string", "2014-08-26", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(1)); - // bind timestamp - Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); - preparedStatement.setTimestamp(1, ts); - resultSet = preparedStatement.executeQuery(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("double", 1.2, resultSet.getDouble(1), 0); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind string + preparedStatement.setString(1, "hello"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - resultSetMetaData = resultSet.getMetaData(); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(1)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("string1", "hello", resultSet.getString(1)); + assertEquals("string2", "hello", resultSet.getString(2)); + } + // bind date + sqlDate = java.sql.Date.valueOf("2014-08-26"); + preparedStatement.setDate(1, 
sqlDate); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals( - "Incorrect timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(1)); - // bind time - Time tm = new Time(12345678); // 03:25:45.678 - preparedStatement.setTime(1, tm); - resultSet = preparedStatement.executeQuery(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("string", "2014-08-26", resultSet.getString(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind timestamp + ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); + preparedStatement.setTimestamp(1, ts); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.TIME, resultSetMetaData.getColumnType(1)); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("Incorrect time", "03:25:45", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals( + "Incorrect timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + // bind time + tm = new Time(12345678); // 03:25:45.678 + preparedStatement.setTime(1, tm); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - 
preparedStatement.close(); + // assert column count + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.TIME, resultSetMetaData.getColumnType(1)); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("Incorrect time", "03:25:45", resultSet.getString(1)); + assertEquals("string", "hello", resultSet.getString(2)); + } + } // bind in where clause - preparedStatement = - connection.prepareStatement("SELECT * FROM orders_jdbc WHERE to_number(c1) = ?"); + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * FROM orders_jdbc WHERE to_number(c1) = ?")) { - preparedStatement.setInt(1, 100); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(9, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + preparedStatement.setInt(1, 100); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("c1", "100", resultSet.getString(1)); - assertEquals("c2", "147004", resultSet.getString(2)); + // assert column count + assertEquals(9, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - preparedStatement.close(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("c1", "100", resultSet.getString(1)); + assertEquals("c2", "147004", resultSet.getString(2)); + } + } // bind in insert statement // create a test table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate( - "create or replace table testBind(a int, b string, c double, d date, " - + "e timestamp, f time, g date)"); - - preparedStatement = - 
connection.prepareStatement( - "insert into testBind(a, b, c, d, e, f) values(?, ?, ?, ?, ?, ?)"); + try (Statement regularStatement = connection.createStatement()) { + regularStatement.executeUpdate( + "create or replace table testBind(a int, b string, c double, d date, " + + "e timestamp, f time, g date)"); - preparedStatement.setInt(1, 1); - preparedStatement.setString(2, "hello"); - preparedStatement.setDouble(3, 1.2); - preparedStatement.setDate(4, sqlDate); - preparedStatement.setTimestamp(5, ts); - preparedStatement.setTime(6, tm); - int rowCount = preparedStatement.executeUpdate(); - - // update count should be 1 - assertEquals("update count", 1, rowCount); - - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBind"); - - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); - - // bind in update statement - preparedStatement = connection.prepareStatement("update testBind set b=? 
where a=?"); - - preparedStatement.setString(1, "world"); - preparedStatement.setInt(2, 1); - preparedStatement.execute(); + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testBind(a, b, c, d, e, f) values(?, ?, ?, ?, ?, ?)")) { + + preparedStatement.setInt(1, 1); + preparedStatement.setString(2, "hello"); + preparedStatement.setDouble(3, 1.2); + preparedStatement.setDate(4, sqlDate); + preparedStatement.setTimestamp(5, ts); + preparedStatement.setTime(6, tm); + int rowCount = preparedStatement.executeUpdate(); + + // update count should be 1 + assertEquals("update count", 1, rowCount); + + // test the inserted rows + try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { + + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + assertNull("date", resultSet.getString(7)); + } + } + // bind in update statement + try (PreparedStatement preparedStatement = + connection.prepareStatement("update testBind set b=? 
where a=?")) { + preparedStatement.setString(1, "world"); + preparedStatement.setInt(2, 1); + preparedStatement.execute(); + } - preparedStatement.close(); + // test the updated rows + try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 1, resultSet.getInt(1)); + assertEquals("string", "world", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + assertNull("date", resultSet.getString(7)); + } + // array bind for insert + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testBind (a, b, c, d, e, f, g) " + + "values(?, ?, ?, ?, ?, ?, current_date())")) { + + preparedStatement.setInt(1, 2); + preparedStatement.setString(2, "hello"); + preparedStatement.setDouble(3, 1.2); + preparedStatement.setDate(4, sqlDate); + preparedStatement.setTimestamp(5, ts); + preparedStatement.setTime(6, tm); + preparedStatement.addBatch(); + + preparedStatement.setInt(1, 3); + preparedStatement.setString(2, "hello"); + preparedStatement.setDouble(3, 1.2); + preparedStatement.setDate(4, sqlDate); + preparedStatement.setTimestamp(5, ts); + preparedStatement.setTime(6, tm); + preparedStatement.addBatch(); + + updateCounts = preparedStatement.executeBatch(); + + // GS optimizes this into one insert execution, but we expand the + // return count into an array + assertEquals("Number of update counts", 2, updateCounts.length); + + // update count should be 1 for each + assertEquals("update count", 1, updateCounts[0]); + assertEquals("update count", 1, updateCounts[1]); + } + // test the inserted rows + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBind where a = 2")) { - // 
test the updated rows - resultSet = regularStatement.executeQuery("select * from testBind"); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 2, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + } - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "world", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); - - // array bind for insert - preparedStatement = - connection.prepareStatement( - "insert into testBind (a, b, c, d, e, f, g) " - + "values(?, ?, ?, ?, ?, ?, current_date())"); - - preparedStatement.setInt(1, 2); - preparedStatement.setString(2, "hello"); - preparedStatement.setDouble(3, 1.2); - preparedStatement.setDate(4, sqlDate); - preparedStatement.setTimestamp(5, ts); - preparedStatement.setTime(6, tm); - preparedStatement.addBatch(); - - preparedStatement.setInt(1, 3); - preparedStatement.setString(2, "hello"); - preparedStatement.setDouble(3, 1.2); - preparedStatement.setDate(4, sqlDate); - preparedStatement.setTimestamp(5, ts); - preparedStatement.setTime(6, tm); - preparedStatement.addBatch(); - - int[] updateCounts = preparedStatement.executeBatch(); - - // GS optimizes this into one insert execution, but we expand the - // return count into an array - assertEquals("Number of update counts", 2, updateCounts.length); - - // update count should be 1 for each - 
assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); - - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBind where a = 2"); - - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 2, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBind where a = 3")) { - resultSet = regularStatement.executeQuery("select * from testBind where a = 3"); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("int", 3, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("double", 1.2, resultSet.getDouble(3), 0); + assertEquals("date", "2014-08-26", resultSet.getString(4)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); + assertEquals("time", "03:25:45", resultSet.getString(6)); + } - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("int", 3, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - - // describe mode - preparedStatement = - connection.prepareStatement("select * from testBind WHERE to_number(a) = ?"); - - resultSetMetaData = preparedStatement.getMetaData(); - assertEquals(7, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, 
resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(3)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(4)); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(5)); - assertEquals(Types.TIME, resultSetMetaData.getColumnType(6)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(7)); - - preparedStatement.close(); - preparedStatement = connection.prepareStatement("select ?, ?"); - - resultSetMetaData = preparedStatement.getMetaData(); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - - preparedStatement.close(); - preparedStatement = connection.prepareStatement("select ?, ?"); + // describe mode + try (PreparedStatement preparedStatement = + connection.prepareStatement("select * from testBind WHERE to_number(a) = ?")) { + + resultSetMetaData = preparedStatement.getMetaData(); + assertEquals(7, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(3)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(4)); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(5)); + assertEquals(Types.TIME, resultSetMetaData.getColumnType(6)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(7)); + } - preparedStatement.setInt(1, 1); - preparedStatement.setString(2, "hello"); - ResultSet result = preparedStatement.executeQuery(); + try (PreparedStatement preparedStatement = connection.prepareStatement("select ?, ?")) { + resultSetMetaData = preparedStatement.getMetaData(); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + 
assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + } - resultSetMetaData = result.getMetaData(); - assertEquals(2, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + try (PreparedStatement preparedStatement = connection.prepareStatement("select ?, ?")) { - preparedStatement.close(); + preparedStatement.setInt(1, 1); + preparedStatement.setString(2, "hello"); + ResultSet result = preparedStatement.executeQuery(); - // test null binding - preparedStatement = connection.prepareStatement("select ?"); + resultSetMetaData = result.getMetaData(); + assertEquals(2, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + } - preparedStatement.setNull(1, Types.VARCHAR); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + // test null binding + try (PreparedStatement preparedStatement = connection.prepareStatement("select ?")) { - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); + preparedStatement.setNull(1, Types.VARCHAR); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertNull(resultSet.getObject(1)); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1)); - preparedStatement.setNull(1, Types.INTEGER); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(1)); + } + preparedStatement.setNull(1, Types.INTEGER); + try (ResultSet resultSet = 
preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertNull(resultSet.getObject(1)); - - preparedStatement.close(); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(1)); + } + } + } // bind in insert statement // create a test table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate( - "create or replace table testBind1(c1 double, c2 string, c3 date, " - + "c4 date, c5 string, c6 date, c7 string, c8 string, " - + "c9 string, c10 string, c11 string, c12 date, c13 string, " - + "c14 float, c15 string, c16 string, c17 string, c18 string," - + "c19 string, c20 date, c21 string)"); - - // array bind for insert - preparedStatement = - connection.prepareStatement( - "insert into testBind1 (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, " - + "c12, c13, c14, c15, c16, c17, c18, c19, c20, c21) values " - + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?," - + " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); + try (Statement regularStatement = connection.createStatement()) { + regularStatement.executeUpdate( + "create or replace table testBind1(c1 double, c2 string, c3 date, " + + "c4 date, c5 string, c6 date, c7 string, c8 string, " + + "c9 string, c10 string, c11 string, c12 date, c13 string, " + + "c14 float, c15 string, c16 string, c17 string, c18 string," + + "c19 string, c20 date, c21 string)"); + + // array bind for insert + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testBind1 (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, " + + "c12, c13, c14, c15, c16, c17, c18, c19, c20, c21) values " + + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?," + + " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { - for (int idx = 0; idx < 16; idx++) { - 
addBindBatch(preparedStatement, sqlDate); - } + for (int idx = 0; idx < 16; idx++) { + addBindBatch(preparedStatement, sqlDate); + } - updateCounts = preparedStatement.executeBatch(); + updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 16, updateCounts.length); + // GS optimizes this into one insert execution + assertEquals("Number of update counts", 16, updateCounts.length); - for (int idx = 0; idx < 16; idx++) { - assertEquals("update count", 1, updateCounts[idx]); - } - } finally { - if (regularStatement != null) { - regularStatement.execute("DROP TABLE testBind"); - regularStatement.close(); + for (int idx = 0; idx < 16; idx++) { + assertEquals("update count", 1, updateCounts[idx]); + } + } } - - closeSQLObjects(resultSet, preparedStatement, connection); + connection.createStatement().execute("DROP TABLE testBind"); } } @Test public void testTableBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - // select * from table(?) - preparedStatement = connection.prepareStatement("SELECT * from table(?)"); - ResultSetMetaData resultSetMetaData = preparedStatement.getMetaData(); - // we do not have any metadata, without a specified table - assertEquals(0, resultSetMetaData.getColumnCount()); - - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - assertEquals(9, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); - - preparedStatement.close(); - - // select * from table(?) where c1 = 1 - preparedStatement = connection.prepareStatement("SELECT * from table(?) 
where c1 = 1"); - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - assertEquals(9, resultSetMetaData.getColumnCount()); - assertTrue(resultSet.next()); - assertFalse(resultSet.next()); - - preparedStatement.close(); - - // select * from table(?) where c1 = 2 order by c3 - preparedStatement = connection.prepareStatement("SELECT * from table(?) order by c3"); - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - assertEquals(9, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); - - preparedStatement.close(); - - regularStatement = connection.createStatement(); - regularStatement.execute("create or replace table testTableBind(c integer, d string)"); - - // insert into table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate("insert into testTableBind (c, d) values (1, 'one')"); - - // select c1, c from table(?), testTableBind - preparedStatement = connection.prepareStatement("SELECT * from table(?), testTableBind"); - preparedStatement.setString(1, ORDERS_JDBC); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - assertEquals(11, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); + ResultSetMetaData resultSetMetaData = null; - preparedStatement.close(); - - // select * from table(?), table(?) 
- preparedStatement = connection.prepareStatement("SELECT * from table(?), table(?)"); - preparedStatement.setString(1, ORDERS_JDBC); - preparedStatement.setString(2, "testTableBind"); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + // select * from table(?) + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?)")) { + resultSetMetaData = preparedStatement.getMetaData(); + // we do not have any metadata, without a specified table + assertEquals(0, resultSetMetaData.getColumnCount()); + + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + assertEquals(9, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - assertEquals(11, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); + // select * from table(?) where c1 = 1 + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?) where c1 = 1")) { + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - preparedStatement.close(); + assertEquals(9, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.next()); + assertFalse(resultSet.next()); + } + } - // select tab1.c1, tab2.c from table(?) as a, table(?) as b - preparedStatement = - connection.prepareStatement("SELECT a.c1, b.c from table(?) as a, table(?) 
as b"); - preparedStatement.setString(1, ORDERS_JDBC); - preparedStatement.setString(2, "testTableBind"); - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); + // select * from table(?) where c1 = 2 order by c3 + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?) order by c3")) { + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(9, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - assertEquals(2, resultSetMetaData.getColumnCount()); - // assert we have 73 rows - for (int i = 0; i < 73; i++) { - assertTrue(resultSet.next()); - } - assertFalse(resultSet.next()); + regularStatement.execute("create or replace table testTableBind(c integer, d string)"); + // insert into table + regularStatement.executeUpdate("insert into testTableBind (c, d) values (1, 'one')"); + // select c1, c from table(?), testTableBind + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?), testTableBind")) { + preparedStatement.setString(1, ORDERS_JDBC); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(11, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - preparedStatement.close(); + // select * from table(?), table(?) 
+ try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * from table(?), table(?)")) { + preparedStatement.setString(1, ORDERS_JDBC); + preparedStatement.setString(2, "testTableBind"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(11, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } - } finally { - if (regularStatement != null) { + // select tab1.c1, tab2.c from table(?) as a, table(?) as b + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT a.c1, b.c from table(?) as a, table(?) as b")) { + preparedStatement.setString(1, ORDERS_JDBC); + preparedStatement.setString(2, "testTableBind"); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + assertEquals(2, resultSetMetaData.getColumnCount()); + // assert we have 73 rows + for (int i = 0; i < 73; i++) { + assertTrue(resultSet.next()); + } + assertFalse(resultSet.next()); + } + } + } finally { regularStatement.execute("DROP TABLE testTableBind"); } - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test public void testBindInWithClause() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - // create a test table - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table testBind2(a int, b string, c double, " - + "d date, e timestamp, f time, g date)"); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + // create a test table + regularStatement.execute( + "create or replace table testBind2(a int, b string, c 
double, " + + "d date, e timestamp, f time, g date)"); - // bind in where clause - preparedStatement = - connection.prepareStatement( - "WITH V AS (SELECT * FROM testBind2 WHERE a = ?) " + "SELECT count(*) FROM V"); + // bind in where clause + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "WITH V AS (SELECT * FROM testBind2 WHERE a = ?) " + "SELECT count(*) FROM V")) { - preparedStatement.setInt(1, 100); - resultSet = preparedStatement.executeQuery(); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + preparedStatement.setInt(1, 100); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 rows - assertTrue(resultSet.next()); - preparedStatement.close(); - } finally { - if (regularStatement != null) { + // assert we get 1 rows + assertTrue(resultSet.next()); + } + } + } finally { regularStatement.execute("DROP TABLE testBind2"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test public void testBindTimestampNTZ() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - // create a test table - regularStatement = connection.createStatement(); - regularStatement.executeUpdate( - "create or replace table testBindTimestampNTZ(a timestamp_ntz)"); - - regularStatement.execute("alter session set client_timestamp_type_mapping='timestamp_ntz'"); - - // bind in where clause - preparedStatement = connection.prepareStatement("insert into testBindTimestampNTZ values(?)"); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + 
try { + // create a test table + regularStatement.executeUpdate( + "create or replace table testBindTimestampNTZ(a timestamp_ntz)"); - Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); - preparedStatement.setTimestamp(1, ts); + regularStatement.execute("alter session set client_timestamp_type_mapping='timestamp_ntz'"); - int updateCount = preparedStatement.executeUpdate(); + // bind in where clause + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testBindTimestampNTZ values(?)")) { - // update count should be 1 - assertEquals("update count", 1, updateCount); + Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); + preparedStatement.setTimestamp(1, ts); - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBindTimestampNTZ"); + int updateCount = preparedStatement.executeUpdate(); - // assert we get 1 rows - assertTrue(resultSet.next()); - assertEquals("timestamp", "Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1)); + // update count should be 1 + assertEquals("update count", 1, updateCount); - regularStatement.executeUpdate("truncate table testBindTimestampNTZ"); + // test the inserted rows + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBindTimestampNTZ")) { - preparedStatement.setTimestamp( - 1, ts, Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"))); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertEquals("timestamp", "Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1)); - updateCount = preparedStatement.executeUpdate(); + regularStatement.executeUpdate("truncate table testBindTimestampNTZ"); - // update count should be 1 - assertEquals("update count", 1, updateCount); + preparedStatement.setTimestamp( + 1, ts, Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"))); - // test the inserted rows - resultSet = regularStatement.executeQuery("select * from testBindTimestampNTZ"); + updateCount = 
preparedStatement.executeUpdate(); - // assert we get 1 rows - assertTrue(resultSet.next()); + // update count should be 1 + assertEquals("update count", 1, updateCount); + } + // test the inserted rows + try (ResultSet resultSet = + regularStatement.executeQuery("select * from testBindTimestampNTZ")) { - preparedStatement.close(); - } finally { - if (regularStatement != null) { + // assert we get 1 rows + assertTrue(resultSet.next()); + } + } + } finally { regularStatement.execute("DROP TABLE testBindTimestampNTZ"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test public void testNullBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - try { - connection = getConnection(); - - regularStatement = connection.createStatement(); - regularStatement.execute("create or replace table testNullBind(a double)"); - - // array bind with nulls - preparedStatement = connection.prepareStatement("insert into testNullBind (a) values(?)"); + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table testNullBind(a double)"); - preparedStatement.setDouble(1, 1.2); - preparedStatement.addBatch(); + // array bind with nulls + try (PreparedStatement preparedStatement = + connection.prepareStatement("insert into testNullBind (a) values(?)")) { + preparedStatement.setDouble(1, 1.2); + preparedStatement.addBatch(); - preparedStatement.setObject(1, null); - preparedStatement.addBatch(); + preparedStatement.setObject(1, null); + preparedStatement.addBatch(); - int[] updateCounts = preparedStatement.executeBatch(); + int[] updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + // GS optimizes this into one insert execution + 
assertEquals("Number of update counts", 2, updateCounts.length); - // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + // update count should be 1 + assertEquals("update count", 1, updateCounts[0]); + assertEquals("update count", 1, updateCounts[1]); - preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - preparedStatement.setObject(1, null); - preparedStatement.addBatch(); + preparedStatement.setObject(1, null); + preparedStatement.addBatch(); - preparedStatement.setDouble(1, 1.2); - preparedStatement.addBatch(); + preparedStatement.setDouble(1, 1.2); + preparedStatement.addBatch(); - updateCounts = preparedStatement.executeBatch(); + updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + // GS optimizes this into one insert execution + assertEquals("Number of update counts", 2, updateCounts.length); - // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + // update count should be 1 + assertEquals("update count", 1, updateCounts[0]); + assertEquals("update count", 1, updateCounts[1]); - preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - preparedStatement.setObject(1, null); - preparedStatement.addBatch(); + preparedStatement.setObject(1, null); + preparedStatement.addBatch(); - updateCounts = preparedStatement.executeBatch(); + updateCounts = preparedStatement.executeBatch(); - // GS optimizes this into one insert execution - assertEquals("Number of update counts", 1, updateCounts.length); + // GS optimizes this into one insert execution + assertEquals("Number of update counts", 1, updateCounts.length); - // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); + // update count should be 1 + assertEquals("update count", 1, updateCounts[0]); 
- preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - // this test causes query count in GS not to be decremented because - // the exception is thrown before registerQC. Discuss with Johnston - // to resolve the issue before enabling the test. - try { - preparedStatement.setObject(1, "Null", Types.DOUBLE); - preparedStatement.addBatch(); - preparedStatement.executeBatch(); - fail("must fail in executeBatch()"); - } catch (SnowflakeSQLException ex) { - assertEquals(2086, ex.getErrorCode()); - } + // this test causes query count in GS not to be decremented because + // the exception is thrown before registerQC. Discuss with Johnston + // to resolve the issue before enabling the test. + try { + preparedStatement.setObject(1, "Null", Types.DOUBLE); + preparedStatement.addBatch(); + preparedStatement.executeBatch(); + fail("must fail in executeBatch()"); + } catch (SnowflakeSQLException ex) { + assertEquals(2086, ex.getErrorCode()); + } - preparedStatement.clearBatch(); + preparedStatement.clearBatch(); - try { - preparedStatement.setString(1, "hello"); - preparedStatement.addBatch(); + try { + preparedStatement.setString(1, "hello"); + preparedStatement.addBatch(); - preparedStatement.setDouble(1, 1.2); - preparedStatement.addBatch(); - fail("must fail"); - } catch (SnowflakeSQLException ex) { - assertEquals( - (int) ErrorCode.ARRAY_BIND_MIXED_TYPES_NOT_SUPPORTED.getMessageCode(), - ex.getErrorCode()); - } - } finally { - if (regularStatement != null) { + preparedStatement.setDouble(1, 1.2); + preparedStatement.addBatch(); + fail("must fail"); + } catch (SnowflakeSQLException ex) { + assertEquals( + (int) ErrorCode.ARRAY_BIND_MIXED_TYPES_NOT_SUPPORTED.getMessageCode(), + ex.getErrorCode()); + } + } + } finally { regularStatement.execute("DROP TABLE testNullBind"); - regularStatement.close(); } - - closeSQLObjects(preparedStatement, connection); } } @Test public void testSnow12603() throws Throwable { - Connection connection = null; - PreparedStatement 
preparedStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); + ResultSetMetaData resultSetMetaData = null; + try (Connection connection = getConnection()) { + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT ?, ?, ?, ?, ?, ?")) { - preparedStatement = connection.prepareStatement("SELECT ?, ?, ?, ?, ?, ?"); + java.sql.Date sqlDate = java.sql.Date.valueOf("2014-08-26"); - java.sql.Date sqlDate = java.sql.Date.valueOf("2014-08-26"); + Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); - Timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); + preparedStatement.setObject(1, 1); + preparedStatement.setObject(2, "hello"); + preparedStatement.setObject(3, new BigDecimal("1.3")); + preparedStatement.setObject(4, Float.valueOf("1.3")); + preparedStatement.setObject(5, sqlDate); + preparedStatement.setObject(6, ts); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - preparedStatement.setObject(1, 1); - preparedStatement.setObject(2, "hello"); - preparedStatement.setObject(3, new BigDecimal("1.3")); - preparedStatement.setObject(4, Float.valueOf("1.3")); - preparedStatement.setObject(5, sqlDate); - preparedStatement.setObject(6, ts); - resultSet = preparedStatement.executeQuery(); + resultSetMetaData = resultSet.getMetaData(); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // assert column count + assertEquals(6, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); + + // assert we get 1 rows + assertTrue(resultSet.next()); - // assert column count - assertEquals(6, 
resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); + assertEquals("integer", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); + assertEquals("double", 1.3, resultSet.getDouble(4), 0); + assertEquals("date", "2014-08-26", resultSet.getString(5)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + + preparedStatement.setObject(1, 1, Types.INTEGER); + preparedStatement.setObject(2, "hello", Types.VARCHAR); + preparedStatement.setObject(3, new BigDecimal("1.3"), Types.DECIMAL); + preparedStatement.setObject(4, Float.valueOf("1.3"), Types.DOUBLE); + preparedStatement.setObject(5, sqlDate, Types.DATE); + preparedStatement.setObject(6, ts, Types.TIMESTAMP); + } + try (ResultSet resultSet = preparedStatement.executeQuery()) { - // assert we get 1 rows - assertTrue(resultSet.next()); + resultSetMetaData = resultSet.getMetaData(); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); - - preparedStatement.setObject(1, 1, Types.INTEGER); - preparedStatement.setObject(2, "hello", Types.VARCHAR); - preparedStatement.setObject(3, new BigDecimal("1.3"), Types.DECIMAL); - preparedStatement.setObject(4, 
Float.valueOf("1.3"), Types.DOUBLE); - preparedStatement.setObject(5, sqlDate, Types.DATE); - preparedStatement.setObject(6, ts, Types.TIMESTAMP); - - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(6, resultSetMetaData.getColumnCount()); - assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); - assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); - assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); - assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); - assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); - assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); - - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert column count + assertEquals(6, resultSetMetaData.getColumnCount()); + assertEquals(Types.BIGINT, resultSetMetaData.getColumnType(1)); + assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(2)); + assertEquals(Types.DECIMAL, resultSetMetaData.getColumnType(3)); + assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4)); + assertEquals(Types.DATE, resultSetMetaData.getColumnType(5)); + assertEquals(Types.TIMESTAMP, resultSetMetaData.getColumnType(6)); + + // assert we get 1 rows + assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); - } finally { - closeSQLObjects(resultSet, preparedStatement, connection); + assertEquals("integer", 1, resultSet.getInt(1)); + assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); + 
assertEquals("double", 1.3, resultSet.getDouble(4), 0); + assertEquals("date", "2014-08-26", resultSet.getString(5)); + assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + } + } } } /** SNOW-6290: timestamp value is shifted by local timezone */ @Test public void testSnow6290() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // create test table - statement.execute("CREATE OR REPLACE TABLE testSnow6290(ts timestamp)"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // create test table + statement.execute("CREATE OR REPLACE TABLE testSnow6290(ts timestamp)"); - PreparedStatement preparedStatement = - connection.prepareStatement("INSERT INTO testSnow6290(ts) values(?)"); + PreparedStatement preparedStatement = + connection.prepareStatement("INSERT INTO testSnow6290(ts) values(?)"); - Timestamp ts = new Timestamp(System.currentTimeMillis()); + Timestamp ts = new Timestamp(System.currentTimeMillis()); - preparedStatement.setTimestamp(1, ts); - preparedStatement.executeUpdate(); + preparedStatement.setTimestamp(1, ts); + preparedStatement.executeUpdate(); - ResultSet res = statement.executeQuery("select ts from testSnow6290"); + ResultSet res = statement.executeQuery("select ts from testSnow6290"); - assertTrue("expect a row", res.next()); + assertTrue("expect a row", res.next()); - Timestamp tsFromDB = res.getTimestamp(1); + Timestamp tsFromDB = res.getTimestamp(1); - assertEquals("timestamp mismatch", ts.getTime(), tsFromDB.getTime()); - } finally { - if (statement != null) { + assertEquals("timestamp mismatch", ts.getTime(), tsFromDB.getTime()); + } finally { statement.execute("DROP TABLE if exists testSnow6290"); - statement.close(); } - closeSQLObjects(statement, connection); } } /** SNOW-6986: null sql shouldn't be allowed */ @Test public 
void testInvalidSQL() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // execute DDLs statement.executeQuery(null); - statement.close(); fail("expected exception, but no exception"); } catch (SnowflakeSQLException ex) { assertEquals((int) ErrorCode.INVALID_SQL.getMessageCode(), ex.getErrorCode()); - } finally { - closeSQLObjects(statement, connection); } } @Test public void testGetObject() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; ResultSetMetaData resultSetMetaData; - try { - connection = getConnection(); - - preparedStatement = connection.prepareStatement("SELECT ?"); - + try (Connection connection = getConnection(); + PreparedStatement preparedStatement = connection.prepareStatement("SELECT ?")) { // bind integer preparedStatement.setInt(1, 1); - resultSet = preparedStatement.executeQuery(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - assertEquals( - "column class name=BigDecimal", - Long.class.getName(), - resultSetMetaData.getColumnClassName(1)); - - // assert we get 1 rows - assertTrue(resultSet.next()); + assertEquals( + "column class name=BigDecimal", + Long.class.getName(), + resultSetMetaData.getColumnClassName(1)); - assertTrue("integer", resultSet.getObject(1) instanceof Long); + // assert we get 1 rows + assertTrue(resultSet.next()); + assertTrue("integer", resultSet.getObject(1) instanceof Long); + } preparedStatement.setString(1, "hello"); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = 
resultSet.getMetaData(); - assertEquals( - "column class name=String", - String.class.getName(), - resultSetMetaData.getColumnClassName(1)); + assertEquals( + "column class name=String", + String.class.getName(), + resultSetMetaData.getColumnClassName(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert we get 1 rows + assertTrue(resultSet.next()); - assertTrue("string", resultSet.getObject(1) instanceof String); + assertTrue("string", resultSet.getObject(1) instanceof String); + } preparedStatement.setDouble(1, 1.2); - resultSet = preparedStatement.executeQuery(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - assertEquals( - "column class name=Double", - Double.class.getName(), - resultSetMetaData.getColumnClassName(1)); + assertEquals( + "column class name=Double", + Double.class.getName(), + resultSetMetaData.getColumnClassName(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert we get 1 rows + assertTrue(resultSet.next()); - assertTrue("double", resultSet.getObject(1) instanceof Double); + assertTrue("double", resultSet.getObject(1) instanceof Double); + } preparedStatement.setTimestamp(1, new Timestamp(0)); - resultSet = preparedStatement.executeQuery(); + try (ResultSet resultSet = preparedStatement.executeQuery()) { - resultSetMetaData = resultSet.getMetaData(); + resultSetMetaData = resultSet.getMetaData(); - assertEquals( - "column class name=Timestamp", - Timestamp.class.getName(), - resultSetMetaData.getColumnClassName(1)); + assertEquals( + "column class name=Timestamp", + Timestamp.class.getName(), + resultSetMetaData.getColumnClassName(1)); - // assert we get 1 rows - assertTrue(resultSet.next()); + // assert we get 1 rows + assertTrue(resultSet.next()); - assertTrue("timestamp", resultSet.getObject(1) instanceof Timestamp); + assertTrue("timestamp", resultSet.getObject(1) instanceof 
Timestamp); + } preparedStatement.setDate(1, new java.sql.Date(0)); - resultSet = preparedStatement.executeQuery(); - - resultSetMetaData = resultSet.getMetaData(); - - assertEquals( - "column class name=Date", - java.sql.Date.class.getName(), - resultSetMetaData.getColumnClassName(1)); - - // assert we get 1 rows - assertTrue(resultSet.next()); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); - assertTrue("date", resultSet.getObject(1) instanceof java.sql.Date); + assertEquals( + "column class name=Date", + java.sql.Date.class.getName(), + resultSetMetaData.getColumnClassName(1)); - preparedStatement.close(); + // assert we get 1 rows + assertTrue(resultSet.next()); - } finally { - closeSQLObjects(resultSet, preparedStatement, connection); + assertTrue("date", resultSet.getObject(1) instanceof java.sql.Date); + } } } @Test public void testGetDoubleForNull() throws Throwable { - Connection connection = null; - Statement stmt = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - stmt = connection.createStatement(); - resultSet = stmt.executeQuery("select cast(null as int) as null_int"); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement(); + ResultSet resultSet = stmt.executeQuery("select cast(null as int) as null_int")) { assertTrue(resultSet.next()); assertEquals("0 for null", 0, resultSet.getDouble(1), 0.0001); - } finally { - closeSQLObjects(resultSet, stmt, connection); } } // SNOW-27438 @Test public void testGetDoubleForNaN() throws Throwable { - Connection connection = null; - Statement stmt = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - stmt = connection.createStatement(); - resultSet = stmt.executeQuery("select 'nan'::float"); + try (Connection connection = getConnection(); + Statement stmt = connection.createStatement(); + ResultSet resultSet = stmt.executeQuery("select 'nan'::float")) { 
assertTrue(resultSet.next()); assertThat("NaN for NaN", resultSet.getDouble(1), equalTo(Double.NaN)); - } finally { - closeSQLObjects(resultSet, stmt, connection); } } @Test public void testPutViaExecuteQuery() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - // load file test - // create a unique data file name by using current timestamp in millis + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { try { + // load file test + // create a unique data file name by using current timestamp in millis // test external table load statement.execute("CREATE OR REPLACE TABLE testPutViaExecuteQuery(a number)"); // put files - resultSet = + try (ResultSet resultSet = statement.executeQuery( "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) - + " @%testPutViaExecuteQuery/orders parallel=10"); - - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + + " @%testPutViaExecuteQuery/orders parallel=10")) { - // assert column count - assertTrue(resultSetMetaData.getColumnCount() > 0); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // assert we get 1 rows - for (int i = 0; i < 1; i++) { - assertTrue(resultSet.next()); + // assert column count + assertTrue(resultSetMetaData.getColumnCount() > 0); + // assert we get 1 rows + for (int i = 0; i < 1; i++) { + assertTrue(resultSet.next()); + } } } finally { statement.execute("DROP TABLE IF EXISTS testPutViaExecuteQuery"); - statement.close(); } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Ignore("takes 7 min. 
enable this for long running tests") @Test public void testSnow16332() throws Throwable { - Connection conn = null; - Connection connWithNwError = null; - Statement stmt = null; - Statement stmtWithNwError = null; - - try { - // use v1 query request API and inject 200ms socket timeout for first - // http request to simulate network failure - conn = getConnection(); - stmt = conn.createStatement(); - - // create a table - stmt.execute("CREATE OR REPLACE TABLE SNOW16332 (i int)"); - - // make sure QC is JIT optimized. Change the GS JVM args to include - // -Xcomp or -XX:CompileThreshold = < a number smaller than the - // stmtCounter - - int stmtCounter = 2000; - while (stmtCounter > 0) { - // insert into it this should start a transaction. - stmt.executeUpdate("INSERT INTO SNOW16332 VALUES (" + stmtCounter + ")"); - --stmtCounter; - } + // use v1 query request API and inject 200ms socket timeout for first + // http request to simulate network failure + try (Connection conn = getConnection(); + Statement stmt = conn.createStatement()) { + try { + // create a table + stmt.execute("CREATE OR REPLACE TABLE SNOW16332 (i int)"); + + // make sure QC is JIT optimized. Change the GS JVM args to include + // -Xcomp or -XX:CompileThreshold = < a number smaller than the + // stmtCounter + + int stmtCounter = 2000; + while (stmtCounter > 0) { + // insert into it this should start a transaction. 
+ stmt.executeUpdate("INSERT INTO SNOW16332 VALUES (" + stmtCounter + ")"); + --stmtCounter; + } - connWithNwError = getConnection(500); // inject socket timeout in ms - stmtWithNwError = connWithNwError.createStatement(); + try (Connection connWithNwError = getConnection(500)) { // inject socket timeout in ms + try (Statement stmtWithNwError = connWithNwError.createStatement()) { - // execute dml - stmtWithNwError.executeUpdate( - "INSERT INTO SNOW16332 " + "SELECT seq8() " + "FROM table(generator(timeLimit => 1))"); + // execute dml + stmtWithNwError.executeUpdate( + "INSERT INTO SNOW16332 " + + "SELECT seq8() " + + "FROM table(generator(timeLimit => 1))"); - // and execute another dml - stmtWithNwError.executeUpdate( - "INSERT INTO SNOW16332 " + "SELECT seq8() " + "FROM table(generator(timeLimit => 1))"); - } finally { - if (stmt != null) { + // and execute another dml + stmtWithNwError.executeUpdate( + "INSERT INTO SNOW16332 " + + "SELECT seq8() " + + "FROM table(generator(timeLimit => 1))"); + } + } + } finally { stmt.executeQuery("DROP TABLE SNOW16332"); } - closeSQLObjects(stmt, conn); - closeSQLObjects(stmtWithNwError, connWithNwError); } } @Test public void testV1Query() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - // use v1 query request API and inject 200ms socket timeout for first - // http request to simulate network failure - connection = getConnection(200); // inject socket timeout = 200ms - - statement = connection.createStatement(); + ResultSetMetaData resultSetMetaData = null; + // use v1 query request API and inject 200ms socket timeout for first + // http request to simulate network failure + try (Connection connection = getConnection(200); // inject socket timeout = 200m + Statement statement = connection.createStatement()) { // execute query - resultSet = - statement.executeQuery("SELECT count(*) FROM table(generator(rowCount => 100000000))"); - ResultSetMetaData 
resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = + statement.executeQuery("SELECT count(*) FROM table(generator(rowCount => 100000000))")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 row - for (int i = 0; i < 1; i++) { - assertTrue(resultSet.next()); - assertTrue(resultSet.getInt(1) > 0); + // assert we get 1 row + for (int i = 0; i < 1; i++) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getInt(1) > 0); + } } // Test parsing for timestamp with timezone value that has new encoding // where timezone index follows timestamp value - resultSet = statement.executeQuery("SELECT 'Fri, 23 Oct 2015 12:35:38 -0700'::timestamp_tz"); - resultSetMetaData = resultSet.getMetaData(); + try (ResultSet resultSet = + statement.executeQuery("SELECT 'Fri, 23 Oct 2015 12:35:38 -0700'::timestamp_tz")) { + resultSetMetaData = resultSet.getMetaData(); - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); - // assert we get 1 row - for (int i = 0; i < 1; i++) { - assertTrue(resultSet.next()); - assertEquals("Fri, 23 Oct 2015 12:35:38 -0700", resultSet.getString(1)); + // assert we get 1 row + for (int i = 0; i < 1; i++) { + assertTrue(resultSet.next()); + assertEquals("Fri, 23 Oct 2015 12:35:38 -0700", resultSet.getString(1)); + } } - } finally { - closeSQLObjects(resultSet, statement, connection); } } @Test public void testCancelQuery() throws Throwable { - ResultSet resultSet = null; - - final Connection connection = getConnection(); - - final Statement statement = connection.createStatement(); - - // schedule a cancel in 5 seconds - try { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + // schedule a cancel in 
5 seconds Timer timer = new Timer(); timer.schedule( new TimerTask() { @@ -2572,162 +2321,143 @@ public void run() { 5000); // now run a query for 120 seconds - resultSet = statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))"); - fail("should be canceled"); - } catch (SQLException ex) { - // assert the sqlstate is what we expect (QUERY CANCELLED) - assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); - } finally { - closeSQLObjects(resultSet, statement, connection); + try (ResultSet resultSet = + statement.executeQuery("SELECT count(*) FROM TABLE(generator(timeLimit => 120))")) { + fail("should be canceled"); + } catch (SQLException ex) { + // assert the sqlstate is what we expect (QUERY CANCELLED) + assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); + } } } /** SNOW-14774: timestamp_ntz value should use client time zone to adjust the epoch time. */ @Test public void testSnow14774() throws Throwable { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - + Calendar calendar = null; + Timestamp tsInUTC = null; + Timestamp tsInLA = null; + SimpleDateFormat sdf = null; + String tsStrInLA = null; + String tsStrInUTC = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { // 30 minutes past daylight saving change (from 2am to 3am) - ResultSet res = statement.executeQuery("select '2015-03-08 03:30:00'::timestamp_ntz"); + try (ResultSet res = statement.executeQuery("select '2015-03-08 03:30:00'::timestamp_ntz")) { - res.next(); + assertTrue(res.next()); - // get timestamp in UTC - Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); - Timestamp tsInUTC = res.getTimestamp(1, calendar); + // get timestamp in UTC + calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + tsInUTC = res.getTimestamp(1, calendar); - 
SimpleDateFormat sdf = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss"); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - String tsStrInUTC = sdf.format(tsInUTC); + sdf = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss"); + sdf.setTimeZone(TimeZone.getTimeZone("UTC")); + tsStrInUTC = sdf.format(tsInUTC); - // get timestamp in LA timezone - calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - Timestamp tsInLA = res.getTimestamp(1, calendar); + // get timestamp in LA timezone + calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsInLA = res.getTimestamp(1, calendar); - sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - String tsStrInLA = sdf.format(tsInLA); - - // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsStrInLA = sdf.format(tsInLA); + // the timestamp in LA and in UTC should be the same + assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + } // 30 minutes before daylight saving change - res = statement.executeQuery("select '2015-03-08 01:30:00'::timestamp_ntz"); + try (ResultSet res = statement.executeQuery("select '2015-03-08 01:30:00'::timestamp_ntz")) { - res.next(); + assertTrue(res.next()); - // get timestamp in UTC - calendar.setTimeZone(TimeZone.getTimeZone("UTC")); - tsInUTC = res.getTimestamp(1, calendar); + // get timestamp in UTC + calendar.setTimeZone(TimeZone.getTimeZone("UTC")); + tsInUTC = res.getTimestamp(1, calendar); - sdf.setTimeZone(TimeZone.getTimeZone("UTC")); - tsStrInUTC = sdf.format(tsInUTC); + sdf.setTimeZone(TimeZone.getTimeZone("UTC")); + tsStrInUTC = sdf.format(tsInUTC); - // get timestamp in LA timezone - calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - tsInLA = res.getTimestamp(1, calendar); + // get timestamp in LA timezone + calendar.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsInLA = 
res.getTimestamp(1, calendar); - sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); - tsStrInLA = sdf.format(tsInLA); + sdf.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles")); + tsStrInLA = sdf.format(tsInLA); - // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); - } finally { - closeSQLObjects(null, statement, connection); + // the timestamp in LA and in UTC should be the same + assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + } } } /** SNOW-19172: getMoreResults should return false after executeQuery */ @Test public void testSnow19172() throws SQLException { - Connection connection = null; - Statement statement = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.executeQuery("select 1"); assertTrue(!statement.getMoreResults()); - - } finally { - closeSQLObjects(statement, connection); } } @Test public void testSnow19819() throws Throwable { - Connection connection; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - connection = getConnection(); - try { - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table testSnow19819(\n" - + "s string,\n" - + "v variant,\n" - + "t timestamp_ltz)\n"); - - preparedStatement = - connection.prepareStatement( - "insert into testSnow19819 (s, v, t)\n" + "select ?, parse_json(?), to_timestamp(?)"); - - preparedStatement.setString(1, "foo"); - preparedStatement.setString(2, "{ }"); - preparedStatement.setString(3, "2016-05-12 12:15:00"); - preparedStatement.addBatch(); - - preparedStatement.setString(1, "foo2"); - preparedStatement.setString(2, "{ \"a\": 1 }"); - preparedStatement.setString(3, "2016-05-12 12:16:00"); - preparedStatement.addBatch(); - - 
preparedStatement.executeBatch(); - - resultSet = - connection.createStatement().executeQuery("SELECT s, v, t FROM testSnow19819 ORDER BY 1"); - assertThat("next result", resultSet.next()); - assertThat("String", resultSet.getString(1), equalTo("foo")); - assertThat("Variant", resultSet.getString(2), equalTo("{}")); - assertThat("next result", resultSet.next()); - assertThat("String", resultSet.getString(1), equalTo("foo2")); - assertThat("Variant", resultSet.getString(2), equalTo("{\n \"a\": 1\n}")); - assertThat("no more result", !resultSet.next()); - } finally { - if (regularStatement != null) { - regularStatement.execute("DROP TABLE testSnow19819"); + try (Connection connection = getConnection()) { + try (Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute( + "create or replace table testSnow19819(\n" + + "s string,\n" + + "v variant,\n" + + "t timestamp_ltz)\n"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement( + "insert into testSnow19819 (s, v, t)\n" + + "select ?, parse_json(?), to_timestamp(?)")) { + + preparedStatement.setString(1, "foo"); + preparedStatement.setString(2, "{ }"); + preparedStatement.setString(3, "2016-05-12 12:15:00"); + preparedStatement.addBatch(); + + preparedStatement.setString(1, "foo2"); + preparedStatement.setString(2, "{ \"a\": 1 }"); + preparedStatement.setString(3, "2016-05-12 12:16:00"); + preparedStatement.addBatch(); + + preparedStatement.executeBatch(); + + try (ResultSet resultSet = + connection + .createStatement() + .executeQuery("SELECT s, v, t FROM testSnow19819 ORDER BY 1")) { + assertThat("next result", resultSet.next()); + assertThat("String", resultSet.getString(1), equalTo("foo")); + assertThat("Variant", resultSet.getString(2), equalTo("{}")); + assertThat("next result", resultSet.next()); + assertThat("String", resultSet.getString(1), equalTo("foo2")); + assertThat("Variant", resultSet.getString(2), equalTo("{\n \"a\": 1\n}")); + 
assertThat("no more result", !resultSet.next()); + } + } + } finally { + regularStatement.execute("DROP TABLE testSnow19819"); + } } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) public void testClientInfo() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet res = null; - - try { - System.setProperty( - "snowflake.client.info", - "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\", \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\", \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); - - connection = getConnection(); - - statement = connection.createStatement(); - - res = statement.executeQuery("select current_session_client_info()"); + System.setProperty( + "snowflake.client.info", + "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\", \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\", \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet res = statement.executeQuery("select current_session_client_info()")) { assertTrue("result expected", res.next()); @@ -2746,115 +2476,87 @@ public void testClientInfo() throws Throwable { "SnowflakeSourceSuite", clientInfoJSON.get("spark.app.name").asText()); - } finally { - System.clearProperty("snowflake.client.info"); closeSQLObjects(res, statement, connection); } + System.clearProperty("snowflake.client.info"); } @Test public void testLargeResultSet() throws Throwable { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - // create statement - statement = connection.createStatement(); - + try (Connection connection = getConnection(); + // create statement + Statement statement = 
connection.createStatement()) { String sql = "SELECT random()||random(), randstr(1000, random()) FROM table(generator(rowcount =>" + " 10000))"; - ResultSet result = statement.executeQuery(sql); - - int cnt = 0; - while (result.next()) { - ++cnt; + try (ResultSet result = statement.executeQuery(sql)) { + int cnt = 0; + while (result.next()) { + ++cnt; + } + assertEquals(10000, cnt); } - assertEquals(10000, cnt); - } finally { - closeSQLObjects(null, statement, connection); } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testSnow26503() throws Throwable { - Connection connection = null; - Connection snowflakeConnection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - Statement regularStatementSF = null; - ResultSet resultSet = null; ResultSetMetaData resultSetMetaData; + String queryId = null; + try (Connection connection = getConnection(); + // create a test table + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute( + "create or replace table testBind2(a int) as select * from values(1),(2),(8),(10)"); + + // test binds in BETWEEN predicate + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * FROM testBind2 WHERE a between ? 
and ?")) { + preparedStatement.setInt(1, 3); + preparedStatement.setInt(2, 9); + // test that the query succeeds; used to fail with incident + try (ResultSet resultSet = preparedStatement.executeQuery()) { + resultSetMetaData = resultSet.getMetaData(); + + // assert column count + assertEquals(1, resultSetMetaData.getColumnCount()); + + // assert we get 1 row + assertTrue(resultSet.next()); + } + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT last_query_id()"); + ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + queryId = resultSet.getString(1); + } - try { - connection = getConnection(); - - // create a test table - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table testBind2(a int) as select * from values(1),(2),(8),(10)"); - - // test binds in BETWEEN predicate - preparedStatement = - connection.prepareStatement("SELECT * FROM testBind2 WHERE a between ? 
and ?"); - - preparedStatement.setInt(1, 3); - preparedStatement.setInt(2, 9); - // test that the query succeeds; used to fail with incident - resultSet = preparedStatement.executeQuery(); - resultSetMetaData = resultSet.getMetaData(); - - // assert column count - assertEquals(1, resultSetMetaData.getColumnCount()); - - // assert we get 1 row - assertTrue(resultSet.next()); - - resultSet.close(); - preparedStatement.close(); - preparedStatement = connection.prepareStatement("SELECT last_query_id()"); - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - String queryId = resultSet.getString(1); - - resultSet.close(); - preparedStatement.close(); - - // check that the bind values can be retrieved using system$get_bind_values - snowflakeConnection = getSnowflakeAdminConnection(); - - regularStatementSF = snowflakeConnection.createStatement(); - regularStatementSF.execute("create or replace warehouse wh26503 warehouse_size=xsmall"); - - preparedStatement = - snowflakeConnection.prepareStatement( - "select bv:\"1\":\"value\"::string, bv:\"2\":\"value\"::string from (select" - + " parse_json(system$get_bind_values(?)) bv)"); - preparedStatement.setString(1, queryId); - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - - // check that the bind values are correct - assertEquals(3, resultSet.getInt(1)); - assertEquals(9, resultSet.getInt(2)); - - } finally { - if (regularStatement != null) { + // check that the bind values can be retrieved using system$get_bind_values + try (Connection snowflakeConnection = getSnowflakeAdminConnection()) { + try (Statement regularStatementSF = snowflakeConnection.createStatement()) { + regularStatementSF.execute("create or replace warehouse wh26503 warehouse_size=xsmall"); + + try (PreparedStatement preparedStatement = + snowflakeConnection.prepareStatement( + "select bv:\"1\":\"value\"::string, bv:\"2\":\"value\"::string from (select" + + " parse_json(system$get_bind_values(?)) bv)")) { + 
preparedStatement.setString(1, queryId); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + + // check that the bind values are correct + assertEquals(3, resultSet.getInt(1)); + assertEquals(9, resultSet.getInt(2)); + } + } + } + snowflakeConnection.createStatement().execute("DROP warehouse wh26503"); + } + } finally { regularStatement.execute("DROP TABLE testBind2"); - regularStatement.close(); - } - - if (regularStatementSF != null) { - regularStatementSF.execute("DROP warehouse wh26503"); - regularStatementSF.close(); - } - - closeSQLObjects(resultSet, preparedStatement, connection); - - if (snowflakeConnection != null) { - snowflakeConnection.close(); } } } @@ -2865,60 +2567,49 @@ public void testSnow26503() throws Throwable { */ @Test public void testSnow28530() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - - try { - connection = getConnection(); - regularStatement = connection.createStatement(); - regularStatement.execute("create or replace table t(a number, b number)"); - - ///////////////////////////////////////// - // bind variables in a view definition + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { try { - preparedStatement = - connection.prepareStatement("create or replace view v as select * from t where a=?"); - preparedStatement.setInt(1, 1); - preparedStatement.execute(); - - // we shouldn't reach here - fail("Bind variable in view definition did not cause a user error"); - } catch (SnowflakeSQLException e) { - assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); - } + regularStatement.execute("create or replace table t(a number, b number)"); + + ///////////////////////////////////////// + // bind variables in a view definition + try (PreparedStatement preparedStatement = + connection.prepareStatement("create or 
replace view v as select * from t where a=?")) { + preparedStatement.setInt(1, 1); + preparedStatement.execute(); + + // we shouldn't reach here + fail("Bind variable in view definition did not cause a user error"); + } catch (SnowflakeSQLException e) { + assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); + } - ///////////////////////////////////////////// - // bind variables in a scalar UDF definition - try { - preparedStatement = + ///////////////////////////////////////////// + // bind variables in a scalar UDF definition + try (PreparedStatement preparedStatement = connection.prepareStatement( - "create or replace function f(n number) returns number as " + "'n + ?'"); - preparedStatement.execute(); - fail("Bind variable in scalar UDF definition did not cause a user " + "error"); - } catch (SnowflakeSQLException e) { - assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); - } + "create or replace function f(n number) returns number as " + "'n + ?'")) { + preparedStatement.execute(); + fail("Bind variable in scalar UDF definition did not cause a user " + "error"); + } catch (SnowflakeSQLException e) { + assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); + } - /////////////////////////////////////////// - // bind variables in a table UDF definition - try { - preparedStatement = + /////////////////////////////////////////// + // bind variables in a table UDF definition + try (PreparedStatement preparedStatement = connection.prepareStatement( "create or replace function tf(n number) returns table(b number) as" - + " 'select b from t where a=?'"); - preparedStatement.execute(); - fail("Bind variable in table UDF definition did not cause a user " + "error"); - } catch (SnowflakeSQLException e) { - assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); - } - } finally { - if (regularStatement != null) { + + " 'select b from t 
where a=?'")) { + preparedStatement.execute(); + fail("Bind variable in table UDF definition did not cause a user " + "error"); + } catch (SnowflakeSQLException e) { + assertEquals(ERROR_CODE_BIND_VARIABLE_NOT_ALLOWED_IN_VIEW_OR_UDF_DEF, e.getErrorCode()); + } + } finally { regularStatement.execute("drop table t"); - regularStatement.close(); } - - closeSQLObjects(null, preparedStatement, connection); } } @@ -2928,54 +2619,47 @@ public void testSnow28530() throws Throwable { */ @Test public void testSnow31104() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - Properties paramProperties = new Properties(); - paramProperties.put("TYPESYSTEM_WIDEN_CONSTANTS_EXACTLY", Boolean.TRUE.toString()); - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); + Properties paramProperties = new Properties(); + paramProperties.put("TYPESYSTEM_WIDEN_CONSTANTS_EXACTLY", Boolean.TRUE.toString()); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { // Repeat a couple of test cases from snow-31104.sql // We don't need to repeat all of them; we just need to verify // that string bind refs and null bind refs are treated the same as // string and null constants. + try { + regularStatement.execute("create or replace table t(n number)"); - regularStatement.execute("create or replace table t(n number)"); - - regularStatement.executeUpdate( - "insert into t values (1), (90000000000000000000000000000000000000)"); - - preparedStatement = connection.prepareStatement("select n, n > ? 
from t order by 1"); - preparedStatement.setString(1, "1"); - - // this should not produce a user error - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - assertFalse(resultSet.getBoolean(2)); - resultSet.next(); - assertTrue(resultSet.getBoolean(2)); + regularStatement.executeUpdate( + "insert into t values (1), (90000000000000000000000000000000000000)"); - preparedStatement = - connection.prepareStatement("select n, '1' in (?, '256', n, 10) from t order by 1"); - preparedStatement.setString(1, null); + try (PreparedStatement preparedStatement = + connection.prepareStatement("select n, n > ? from t order by 1")) { + preparedStatement.setString(1, "1"); - resultSet = preparedStatement.executeQuery(); - resultSet.next(); - assertTrue(resultSet.getBoolean(2)); - resultSet.next(); - assertNull(resultSet.getObject(2)); - } finally { - if (regularStatement != null) { + // this should not produce a user error + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + assertFalse(resultSet.getBoolean(2)); + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(2)); + } + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("select n, '1' in (?, '256', n, 10) from t order by 1")) { + preparedStatement.setString(1, null); + + try (ResultSet resultSet = preparedStatement.executeQuery()) { + assertTrue(resultSet.next()); + assertTrue(resultSet.getBoolean(2)); + assertTrue(resultSet.next()); + assertNull(resultSet.getObject(2)); + } + } + } finally { regularStatement.execute("drop table t"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @@ -2983,22 +2667,17 @@ public void testSnow31104() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGet() throws Throwable { - Connection connection = null; - Statement statement = null; List accounts = Arrays.asList(null, 
"s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - try { statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); @@ -3028,10 +2707,7 @@ public void testPutGet() throws Throwable { assert (original.length() == unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testGetPut_stage"); - statement.close(); } - } finally { - closeSQLObjects(null, statement, connection); } } } @@ -3046,22 +2722,17 @@ public void testPutGet() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGetToUnencryptedStage() throws Throwable { - Connection connection = null; - Statement statement = null; List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i)); - - statement = connection.createStatement(); - - String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + 
String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - try { statement.execute("alter session set ENABLE_UNENCRYPTED_INTERNAL_STAGES=true"); statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); statement.execute( @@ -3095,10 +2766,7 @@ public void testPutGetToUnencryptedStage() throws Throwable { assert (original.length() == unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testPutGet_unencstage"); - statement.close(); } - } finally { - closeSQLObjects(null, statement, connection); } } } @@ -3114,39 +2782,33 @@ public void testNotClosedSession() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testToTimestampNullBind() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - - try { - connection = getConnection(); - - preparedStatement = - connection.prepareStatement( - "select 3 where to_timestamp_ltz(?, 3) = '1970-01-01 00:00:12.345" - + " +000'::timestamp_ltz"); - + try (Connection connection = getConnection(); + PreparedStatement preparedStatement = + connection.prepareStatement( + "select 3 where to_timestamp_ltz(?, 3) = '1970-01-01 00:00:12.345" + + " +000'::timestamp_ltz")) { // First test, normal usage. preparedStatement.setInt(1, 12345); - ResultSet resultSet = preparedStatement.executeQuery(); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - // Assert column count. - assertEquals(1, resultSetMetaData.getColumnCount()); - // Assert this returned a 3. 
- assertTrue(resultSet.next()); - assertEquals(3, resultSet.getInt(1)); - assertFalse(resultSet.next()); - - // Second test, input is null. - preparedStatement.setNull(1, Types.INTEGER); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + // Assert column count. + assertEquals(1, resultSetMetaData.getColumnCount()); + // Assert this returned a 3. + assertTrue(resultSet.next()); + assertEquals(3, resultSet.getInt(1)); + assertFalse(resultSet.next()); - resultSet = preparedStatement.executeQuery(); - // Assert no rows returned. - assertFalse(resultSet.next()); - } finally { - closeSQLObjects(preparedStatement, connection); + // Second test, input is null. + preparedStatement.setNull(1, Types.INTEGER); + } + try (ResultSet resultSet = preparedStatement.executeQuery()) { + // Assert no rows returned. + assertFalse(resultSet.next()); + } } + // NOTE: Don't add new tests here. Instead, add it to other appropriate test class or create a + // new + // one. This class is too large to have more tests. } - // NOTE: Don't add new tests here. Instead, add it to other appropriate test class or create a new - // one. This class is too large to have more tests. 
} diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index e76e5c60e..01bff3e2e 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -106,40 +106,37 @@ public void testStaticVersionMatchesManifest() { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) public void testClientInfoConnectionProperty() throws Throwable { - Connection connection = null; - Statement statement = null; - ResultSet res = null; - - try { - Properties props = new Properties(); - props.put( - "snowflakeClientInfo", - "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\"," - + " \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\"," - + " \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); - connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); - statement = connection.createStatement(); - res = statement.executeQuery("select current_session_client_info()"); + String clientInfoJSONStr = null; + JsonNode clientInfoJSON = null; + Properties props = new Properties(); + props.put( + "snowflakeClientInfo", + "{\"spark.version\":\"3.0.0\", \"spark.snowflakedb.version\":\"2.8.5\"," + + " \"spark.app.name\":\"SnowflakeSourceSuite\", \"scala.version\":\"2.12.11\"," + + " \"java.version\":\"1.8.0_221\", \"snowflakedb.jdbc.version\":\"3.13.2\"}"); + try (Connection connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); + Statement statement = connection.createStatement(); + ResultSet res = statement.executeQuery("select current_session_client_info()")) { assertTrue(res.next()); - String clientInfoJSONStr = res.getString(1); - JsonNode clientInfoJSON = mapper.readTree(clientInfoJSONStr); + clientInfoJSONStr = res.getString(1); + clientInfoJSON = 
mapper.readTree(clientInfoJSONStr); // assert that spart version and spark app are found assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); assertEquals( "spark app mismatch", "SnowflakeSourceSuite", clientInfoJSON.get("spark.app.name").asText()); - connection.close(); - - // Test that when session property is set, connection parameter overrides it - System.setProperty( - "snowflake.client.info", - "{\"spark.version\":\"fake\", \"spark.snowflakedb.version\":\"fake\"," - + " \"spark.app.name\":\"fake\", \"scala.version\":\"fake\"," - + " \"java.version\":\"fake\", \"snowflakedb.jdbc.version\":\"fake\"}"); - connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); - statement = connection.createStatement(); - res = statement.executeQuery("select current_session_client_info()"); + } + + // Test that when session property is set, connection parameter overrides it + System.setProperty( + "snowflake.client.info", + "{\"spark.version\":\"fake\", \"spark.snowflakedb.version\":\"fake\"," + + " \"spark.app.name\":\"fake\", \"scala.version\":\"fake\"," + + " \"java.version\":\"fake\", \"snowflakedb.jdbc.version\":\"fake\"}"); + try (Connection connection = getConnection(DONT_INJECT_SOCKET_TIMEOUT, props, false, false); + Statement statement = connection.createStatement(); + ResultSet res = statement.executeQuery("select current_session_client_info()")) { assertTrue(res.next()); clientInfoJSONStr = res.getString(1); clientInfoJSON = mapper.readTree(clientInfoJSONStr); @@ -149,21 +146,19 @@ public void testClientInfoConnectionProperty() throws Throwable { "spark app mismatch", "SnowflakeSourceSuite", clientInfoJSON.get("spark.app.name").asText()); - - } finally { - System.clearProperty("snowflake.client.info"); - closeSQLObjects(res, statement, connection); } + System.clearProperty("snowflake.client.info"); } @Test public void testGetSessionID() throws Throwable { - Connection con = getConnection(); - String 
sessionID = con.unwrap(SnowflakeConnection.class).getSessionID(); - Statement statement = con.createStatement(); - ResultSet rset = statement.executeQuery("select current_session()"); - rset.next(); - assertEquals(sessionID, rset.getString(1)); + try (Connection con = getConnection(); + Statement statement = con.createStatement(); + ResultSet rset = statement.executeQuery("select current_session()")) { + String sessionID = con.unwrap(SnowflakeConnection.class).getSessionID(); + assertTrue(rset.next()); + assertEquals(sessionID, rset.getString(1)); + } } @Test @@ -172,32 +167,33 @@ public void testPutThreshold() throws SQLException { try (Connection connection = getConnection()) { // assert that threshold equals default 200 from server side SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - Statement statement = connection.createStatement(); - SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); - statement.execute("CREATE OR REPLACE STAGE PUTTHRESHOLDSTAGE"); - String command = - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @PUTTHRESHOLDSTAGE"; - SnowflakeFileTransferAgent agent = - new SnowflakeFileTransferAgent(command, sfSession, sfStatement); - assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); - // assert that setting threshold via put statement directly sets the big file threshold - // appropriately - String commandWithPut = command + " threshold=314572800"; - agent = new SnowflakeFileTransferAgent(commandWithPut, sfSession, sfStatement); - assertEquals(314572800, agent.getBigFileThreshold()); - // assert that after put statement, threshold goes back to previous session threshold - agent = new SnowflakeFileTransferAgent(command, sfSession, sfStatement); - assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); - // Attempt to set threshold to an invalid value such as a negative number - String commandWithInvalidThreshold = command + " threshold=-1"; - try 
{ - agent = new SnowflakeFileTransferAgent(commandWithInvalidThreshold, sfSession, sfStatement); - } - // assert invalid value causes exception to be thrown of type INVALID_PARAMETER_VALUE - catch (SQLException e) { - assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); + try (Statement statement = connection.createStatement()) { + SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); + statement.execute("CREATE OR REPLACE STAGE PUTTHRESHOLDSTAGE"); + String command = + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @PUTTHRESHOLDSTAGE"; + SnowflakeFileTransferAgent agent = + new SnowflakeFileTransferAgent(command, sfSession, sfStatement); + assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); + // assert that setting threshold via put statement directly sets the big file threshold + // appropriately + String commandWithPut = command + " threshold=314572800"; + agent = new SnowflakeFileTransferAgent(commandWithPut, sfSession, sfStatement); + assertEquals(314572800, agent.getBigFileThreshold()); + // assert that after put statement, threshold goes back to previous session threshold + agent = new SnowflakeFileTransferAgent(command, sfSession, sfStatement); + assertEquals(200 * 1024 * 1024, agent.getBigFileThreshold()); + // Attempt to set threshold to an invalid value such as a negative number + String commandWithInvalidThreshold = command + " threshold=-1"; + try { + agent = + new SnowflakeFileTransferAgent(commandWithInvalidThreshold, sfSession, sfStatement); + } + // assert invalid value causes exception to be thrown of type INVALID_PARAMETER_VALUE + catch (SQLException e) { + assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); + } } - statement.close(); } catch (SQLException ex) { throw ex; } @@ -207,99 +203,12 @@ public void testPutThreshold() throws SQLException { @Test @Ignore public void testGCPFileTransferMetadataWithOneFile() throws Throwable { - Connection connection = null; 
File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); - try { - connection = getConnection("gcpaccount"); - Statement statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); - - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // Test put file with internal compression - String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); - List metadatas1 = sfAgent1.getFileTransferMetadatas(); - - String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { - InputStream inputStream = new FileInputStream(srcPath1); - - assert (oneMetadata.isForOneFile()); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .build()); - } - - // Test Put file with external compression - String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); - List metadatas2 = sfAgent2.getFileTransferMetadatas(); - - String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { - String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; - Process p = - Runtime.getRuntime() - .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); - p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); - - InputStream 
gzInputStream = new FileInputStream(gzfilePath); - assert (oneMetadata.isForOneFile()); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(gzInputStream) - .setRequireCompress(false) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .build()); - } - // Download two files and verify their content. - assertTrue( - "Failed to get files", - statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); - - // Make sure that the downloaded files are EQUAL, - // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); - } - } - } - - /** Test API for Kafka connector for FileTransferMetadata */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { - Connection connection = null; - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - - List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); - for (String accountName : supportedAccounts) { + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { try { - connection = getConnection(accountName); - Statement statement = connection.createStatement(); - // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE " + testStageName); @@ -315,6 +224,7 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) 
{ InputStream inputStream = new FileInputStream(srcPath1); + assert (oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -322,8 +232,6 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { .setRequireCompress(true) .setNetworkTimeoutInMilli(0) .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand1) .build()); } @@ -344,7 +252,7 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { p.waitFor(); InputStream gzInputStream = new FileInputStream(gzfilePath); - + assert (oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -352,8 +260,6 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { .setRequireCompress(false) .setNetworkTimeoutInMilli(0) .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand2) .build()); } @@ -368,9 +274,96 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + statement.execute("DROP STAGE if exists " + testStageName); + } + } + } + + /** Test API for Kafka connector for FileTransferMetadata */ + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + + List supportedAccounts = 
Arrays.asList("s3testaccount", "azureaccount"); + for (String accountName : supportedAccounts) { + try (Connection connection = getConnection(accountName); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // Test put file with internal compression + String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); + List metadatas1 = sfAgent1.getFileTransferMetadatas(); + + String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { + InputStream inputStream = new FileInputStream(srcPath1); + + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand1) + .build()); + } + + // Test Put file with external compression + String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); + List metadatas2 = sfAgent2.getFileTransferMetadatas(); + + String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { + String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; + Process p = + Runtime.getRuntime() + .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); + p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); + 
p.waitFor(); + + InputStream gzInputStream = new FileInputStream(gzfilePath); + + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(gzInputStream) + .setRequireCompress(false) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand2) + .build()); + } + + // Download two files and verify their content. + assertTrue( + "Failed to get files", + statement.execute( + "GET @" + + testStageName + + " 'file://" + + destFolderCanonicalPath + + "/' parallel=8")); + + // Make sure that the downloaded files are EQUAL, + // they should be gzip compressed + assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -380,45 +373,42 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGCPFileTransferMetadataNegativeOnlySupportPut() throws Throwable { - Connection connection = null; int expectExceptionCount = 1; int actualExceptionCount = -1; - try { - connection = getConnection("gcpaccount"); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + // Put one file to the stage + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + statement.execute("put file://" + srcPath + " @" + testStageName); - // Put one file to the stage - 
String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - statement.execute("put file://" + srcPath + " @" + testStageName); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String getCommand = "get @" + testStageName + " file://" + destFolderCanonicalPath; - String getCommand = "get @" + testStageName + " file://" + destFolderCanonicalPath; + // The GET can be executed in normal way. + statement.execute(getCommand); - // The GET can be executed in normal way. - statement.execute(getCommand); + // Start negative test for GET. + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); - // Start negative test for GET. - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(getCommand, sfSession, new SFStatement(sfSession)); + // Below function call should fail. + actualExceptionCount = 0; + sfAgent.getFileTransferMetadatas(); + fail("Above function should raise exception for GET"); - // Below function call should fail. 
- actualExceptionCount = 0; - sfAgent.getFileTransferMetadatas(); - fail("Above function should raise exception for GET"); - } catch (Exception ex) { - System.out.println("Negative test to hit expected exception: " + ex.getMessage()); - actualExceptionCount++; - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + } catch (Exception ex) { + System.out.println("Negative test to hit expected exception: " + ex.getMessage()); + actualExceptionCount++; + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } assertEquals(expectExceptionCount, actualExceptionCount); @@ -494,20 +484,17 @@ public void testGetPropertyInfo() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutOverwriteFalseNoDigest() throws Throwable { - Connection connection = null; - Statement statement = null; // create 2 files: an original, and one that will overwrite the original File file1 = tmpFolder.newFile("testfile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(file1)); - bw.write("Writing original file content. This should get overwritten."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { + bw.write("Writing original file content. This should get overwritten."); + } File file2 = tmpFolder2.newFile("testfile.csv"); - bw = new BufferedWriter(new FileWriter(file2)); - bw.write("This is all new! This should be the result of the overwriting."); - bw.close(); - + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { + bw.write("This is all new! 
This should be the result of the overwriting."); + } String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); @@ -520,50 +507,48 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i), paramProperties); - - statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE testing_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); - - // put another file in same stage with same filename with overwrite = true - assertTrue( - "Failed to put a file", - statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false")); - - // check that file exists in stage after PUT - findFile(statement, "ls @testing_stage/"); - - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - assertTrue(downloaded.exists()); - - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - p.waitFor(); + try (Connection connection = getConnection(accounts.get(i), paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE testing_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + 
sourceFilePathOriginal + " @testing_stage")); + // check that file exists in stage after PUT + findFile(statement, "ls @testing_stage/"); + + // put another file in same stage with same filename with overwrite = true + assertTrue( + "Failed to put a file", + statement.execute( + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false")); + + // check that file exists in stage after PUT + findFile(statement, "ls @testing_stage/"); + + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + assertTrue(downloaded.exists()); + + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); + p.waitFor(); - // 2nd file should never be uploaded - File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); - assertTrue(FileUtils.contentEqualsIgnoreEOL(file1, unzipped, null)); - } finally { - statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); - statement.close(); + // 2nd file should never be uploaded + File unzipped = new File(destFolderCanonicalPathWithSeparator + "testfile.csv"); + assertTrue(FileUtils.contentEqualsIgnoreEOL(file1, unzipped, null)); + } finally { + statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); + } } } } @@ -576,14 +561,12 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutDisable() throws Throwable { - Connection connection = null; - Statement statement = null; // create a file File file = tmpFolder.newFile("testfile99.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(file)); - bw.write("This content won't be 
uploaded as PUT is disabled."); - bw.close(); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) { + bw.write("This content won't be uploaded as PUT is disabled."); + } String sourceFilePathOriginal = file.getCanonicalPath(); @@ -592,19 +575,14 @@ public void testPutDisable() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i), paramProperties); - - statement = connection.createStatement(); - + try (Connection connection = getConnection(accounts.get(i), paramProperties); + Statement statement = connection.createStatement()) { statement.execute("PUT file://" + sourceFilePathOriginal + " @testPutGet_disable_stage"); assertTrue("Shouldn't come here", false); } catch (Exception ex) { // Expected assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); - } finally { - statement.close(); } } } @@ -617,8 +595,6 @@ public void testPutDisable() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGetDisable() throws Throwable { - Connection connection = null; - Statement statement = null; // create a folder File destFolder = tmpFolder.newFolder(); @@ -629,10 +605,8 @@ public void testGetDisable() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { - try { - connection = getConnection(accounts.get(i), paramProperties); - - statement = connection.createStatement(); + try (Connection connection = getConnection(accounts.get(i), paramProperties); + Statement statement = connection.createStatement()) { statement.execute( "GET @testPutGet_disable_stage 'file://" + destFolderCanonicalPath + "' parallel=8"); @@ -641,8 +615,6 @@ public void testGetDisable() throws Throwable { } catch (Exception ex) { // Expected 
assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); - } finally { - statement.close(); } } } @@ -653,161 +625,164 @@ public void testGetDisable() throws Throwable { */ @Test public void testSnow76376() throws Throwable { - Connection connection = null; - PreparedStatement preparedStatement = null; - Statement regularStatement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - regularStatement = connection.createStatement(); - regularStatement.execute( - "create or replace table t(a int) as select * from values" + "(1),(2),(8),(10)"); - - preparedStatement = - connection.prepareStatement("SELECT * FROM t " + "ORDER BY a LIMIT " + "? OFFSET ?"); - - //////////////////////////// - // both NULL - preparedStatement.setNull(1, 4); // int - preparedStatement.setNull(2, 4); // int - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET and LIMIT set " + "to NULL"); - } - - //////////////////////////// - // both empty string - preparedStatement.setString(1, ""); - preparedStatement.setString(2, ""); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET and LIMIT set " + "to empty string"); - } + try (Connection connection = getConnection(); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute( + 
"create or replace table t(a int) as select * from values" + "(1),(2),(8),(10)"); + + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT * FROM t " + "ORDER BY a LIMIT " + "? OFFSET ?")) { + + //////////////////////////// + // both NULL + preparedStatement.setNull(1, 4); // int + preparedStatement.setNull(2, 4); // int + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with OFFSET and LIMIT set " + "to NULL"); + } - //////////////////////////// - // only LIMIT NULL - preparedStatement.setNull(1, 4); // int - preparedStatement.setInt(2, 2); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with LIMIT set to NULL"); - } + //////////////////////////// + // both empty string + preparedStatement.setString(1, ""); + preparedStatement.setString(2, ""); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail( + "Could not execute preparedStatement with OFFSET and LIMIT set " + + "to empty string"); + } - //////////////////////////// - // only LIMIT empty string - 
preparedStatement.setString(1, ""); - preparedStatement.setInt(2, 2); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - resultSet.next(); - assertEquals(10, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with LIMIT set to empty " + "string"); - } + //////////////////////////// + // only LIMIT NULL + preparedStatement.setNull(1, 4); // int + preparedStatement.setInt(2, 2); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with LIMIT set to NULL"); + } - //////////////////////////// - // only OFFSET NULL - preparedStatement.setInt(1, 3); // int - preparedStatement.setNull(2, 4); - - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET set to NULL"); - } + //////////////////////////// + // only LIMIT empty string + preparedStatement.setString(1, ""); + preparedStatement.setInt(2, 2); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(10, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with LIMIT set to empty " + "string"); + } - //////////////////////////// - // only OFFSET empty string - preparedStatement.setInt(1, 3); // int - preparedStatement.setNull(2, 4); - - if (preparedStatement.execute()) { - resultSet = 
preparedStatement.getResultSet(); - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); - resultSet.next(); - assertEquals(2, resultSet.getInt(1)); - resultSet.next(); - assertEquals(8, resultSet.getInt(1)); - } else { - fail("Could not execute preparedStatement with OFFSET set to empty " + "string"); - } + //////////////////////////// + // only OFFSET NULL + preparedStatement.setInt(1, 3); // int + preparedStatement.setNull(2, 4); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with OFFSET set to NULL"); + } - //////////////////////////// - // OFFSET and LIMIT NULL for constant select query - preparedStatement = - connection.prepareStatement("SELECT 1 FROM t " + "ORDER BY a LIMIT " + "? 
OFFSET ?"); - preparedStatement.setNull(1, 4); // int - preparedStatement.setNull(2, 4); // int - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - for (int i = 0; i < 4; i++) { - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); + //////////////////////////// + // only OFFSET empty string + preparedStatement.setInt(1, 3); // int + preparedStatement.setNull(2, 4); + + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(2, resultSet.getInt(1)); + assertTrue(resultSet.next()); + assertEquals(8, resultSet.getInt(1)); + } + } else { + fail("Could not execute preparedStatement with OFFSET set to empty " + "string"); + } } - } else { - fail("Could not execute constant preparedStatement with OFFSET and " + "LIMIT set to NULL"); - } + //////////////////////////// + // OFFSET and LIMIT NULL for constant select query + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT 1 FROM t " + "ORDER BY a LIMIT " + "? 
OFFSET ?")) { + preparedStatement.setNull(1, 4); // int + preparedStatement.setNull(2, 4); // int + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + for (int i = 0; i < 4; i++) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + } + } else { + fail( + "Could not execute constant preparedStatement with OFFSET and " + + "LIMIT set to NULL"); + } - //////////////////////////// - // OFFSET and LIMIT empty string for constant select query - preparedStatement.setString(1, ""); // int - preparedStatement.setString(2, ""); // int - if (preparedStatement.execute()) { - resultSet = preparedStatement.getResultSet(); - for (int i = 0; i < 4; i++) { - resultSet.next(); - assertEquals(1, resultSet.getInt(1)); + //////////////////////////// + // OFFSET and LIMIT empty string for constant select query + preparedStatement.setString(1, ""); // int + preparedStatement.setString(2, ""); // int + if (preparedStatement.execute()) { + try (ResultSet resultSet = preparedStatement.getResultSet()) { + for (int i = 0; i < 4; i++) { + assertTrue(resultSet.next()); + assertEquals(1, resultSet.getInt(1)); + } + } + } else { + fail( + "Could not execute constant preparedStatement with OFFSET and " + + "LIMIT set to empty string"); + } } - } else { - fail( - "Could not execute constant preparedStatement with OFFSET and " - + "LIMIT set to empty string"); - } - - } finally { - if (regularStatement != null) { + } finally { regularStatement.execute("drop table t"); - regularStatement.close(); } - - closeSQLObjects(resultSet, preparedStatement, connection); } } @@ -820,46 +795,37 @@ public void testSnow76376() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeoOutputTypes() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - try { - Properties paramProperties = new Properties(); - - 
paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); - paramProperties.put("ENABLE_GEOGRAPHY_TYPE", true); - - connection = getConnection(paramProperties); + Properties paramProperties = new Properties(); - regularStatement = connection.createStatement(); + paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); + paramProperties.put("ENABLE_GEOGRAPHY_TYPE", true); - regularStatement.execute("create or replace table t_geo(geo geography);"); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo(geo geography);"); - regularStatement.execute("insert into t_geo values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); + regularStatement.execute( + "insert into t_geo values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); - testGeoOutputTypeSingle( - regularStatement, false, "geoJson", "OBJECT", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, false, "geoJson", "OBJECT", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle( - regularStatement, true, "geoJson", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, true, "geoJson", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle( - regularStatement, false, "wkt", "VARCHAR", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, false, "wkt", "VARCHAR", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle( - regularStatement, true, "wkt", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); + testGeoOutputTypeSingle( + regularStatement, true, "wkt", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); - testGeoOutputTypeSingle(regularStatement, false, "wkb", "BINARY", "[B", Types.BINARY); + testGeoOutputTypeSingle(regularStatement, false, "wkb", "BINARY", "[B", Types.BINARY); - testGeoOutputTypeSingle(regularStatement, true, "wkb", 
"GEOGRAPHY", "[B", Types.BINARY); - } finally { - if (regularStatement != null) { + testGeoOutputTypeSingle(regularStatement, true, "wkb", "GEOGRAPHY", "[B", Types.BINARY); + } finally { regularStatement.execute("drop table t_geo"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -872,16 +838,13 @@ private void testGeoOutputTypeSingle( String expectedColumnClassName, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - regularStatement.execute( - "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - resultSet = regularStatement.executeQuery("select * from t_geo"); + regularStatement.execute( + "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); + try (ResultSet resultSet = regularStatement.executeQuery("select * from t_geo")) { ResultSetMetaData metadata = resultSet.getMetaData(); assertEquals(1, metadata.getColumnCount()); @@ -890,51 +853,34 @@ private void testGeoOutputTypeSingle( assertEquals(expectedColumnTypeName, metadata.getColumnTypeName(1)); assertEquals(expectedColumnClassName, metadata.getColumnClassName(1)); assertEquals(expectedColumnType, metadata.getColumnType(1)); - - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeoMetadata() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - - paramProperties.put("ENABLE_FIX_182763", true); - - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); + Properties paramProperties = new Properties(); - regularStatement.execute("create or 
replace table t_geo(geo geography);"); + paramProperties.put("ENABLE_FIX_182763", true); - testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo(geo geography);"); - testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); + testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + testGeoMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); + testGeoMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); - testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); + testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); - } finally { - if (regularStatement != null) { + testGeoMetadataSingle(connection, regularStatement, "wkb", Types.BINARY); + } finally { regularStatement.execute("drop table t_geo"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -945,13 +891,11 @@ private void testGeoMetadataSingle( String outputFormat, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - DatabaseMetaData md = connection.getMetaData(); - resultSet = md.getColumns(null, null, "T_GEO", null); + DatabaseMetaData md = connection.getMetaData(); + try (ResultSet resultSet = md.getColumns(null, null, 
"T_GEO", null)) { ResultSetMetaData metadata = resultSet.getMetaData(); assertEquals(24, metadata.getColumnCount()); @@ -960,48 +904,32 @@ private void testGeoMetadataSingle( assertEquals(expectedColumnType, resultSet.getInt(5)); assertEquals("GEOGRAPHY", resultSet.getString(6)); - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeometryOutputTypes() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - - paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); - paramProperties.put("ENABLE_GEOMETRY_TYPE", true); - - connection = getConnection(paramProperties); - - regularStatement = connection.createStatement(); + Properties paramProperties = new Properties(); - regularStatement.execute("create or replace table t_geo2(geo geometry);"); + paramProperties.put("ENABLE_USER_DEFINED_TYPE_EXPANSION", true); + paramProperties.put("ENABLE_GEOMETRY_TYPE", true); - regularStatement.execute( - "insert into t_geo2 values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo2(geo geometry);"); - testGeometryOutputTypeSingle( - regularStatement, true, "geoJson", "GEOMETRY", "java.lang.String", Types.VARCHAR); + regularStatement.execute( + "insert into t_geo2 values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); - testGeometryOutputTypeSingle( - regularStatement, true, "wkt", "GEOMETRY", "java.lang.String", Types.VARCHAR); + testGeometryOutputTypeSingle( + regularStatement, true, "geoJson", "GEOMETRY", "java.lang.String", Types.VARCHAR); - } finally { - if (regularStatement != null) { + testGeometryOutputTypeSingle( + regularStatement, true, "wkt", "GEOMETRY", "java.lang.String", 
Types.VARCHAR); + } finally { regularStatement.execute("drop table t_geo2"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -1014,15 +942,13 @@ private void testGeometryOutputTypeSingle( String expectedColumnClassName, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - regularStatement.execute( - "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); + regularStatement.execute( + "alter session set ENABLE_UDT_EXTERNAL_TYPE_NAMES=" + enableExternalTypeNames); - resultSet = regularStatement.executeQuery("select * from t_geo2"); + try (ResultSet resultSet = regularStatement.executeQuery("select * from t_geo2")) { ResultSetMetaData metadata = resultSet.getMetaData(); @@ -1032,41 +958,25 @@ private void testGeometryOutputTypeSingle( assertEquals(expectedColumnTypeName, metadata.getColumnTypeName(1)); assertEquals(expectedColumnClassName, metadata.getColumnClassName(1)); assertEquals(expectedColumnType, metadata.getColumnType(1)); - - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testGeometryMetadata() throws Throwable { - Connection connection = null; - Statement regularStatement = null; - - try { - Properties paramProperties = new Properties(); - - connection = getConnection(paramProperties); - regularStatement = connection.createStatement(); - - regularStatement.execute("create or replace table t_geo2(geo geometry);"); + Properties paramProperties = new Properties(); - testGeometryMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); + try (Connection connection = getConnection(paramProperties); + Statement regularStatement = 
connection.createStatement()) { + try { + regularStatement.execute("create or replace table t_geo2(geo geometry);"); - testGeometryMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + testGeometryMetadataSingle(connection, regularStatement, "geoJson", Types.VARCHAR); - } finally { - if (regularStatement != null) { + testGeometryMetadataSingle(connection, regularStatement, "wkt", Types.VARCHAR); + } finally { regularStatement.execute("drop table t_geo2"); - regularStatement.close(); - } - - if (connection != null) { - connection.close(); } } } @@ -1077,13 +987,11 @@ private void testGeometryMetadataSingle( String outputFormat, int expectedColumnType) throws Throwable { - ResultSet resultSet = null; - try { - regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); + regularStatement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='" + outputFormat + "'"); - DatabaseMetaData md = connection.getMetaData(); - resultSet = md.getColumns(null, null, "T_GEO2", null); + DatabaseMetaData md = connection.getMetaData(); + try (ResultSet resultSet = md.getColumns(null, null, "T_GEO2", null)) { ResultSetMetaData metadata = resultSet.getMetaData(); assertEquals(24, metadata.getColumnCount()); @@ -1092,10 +1000,6 @@ private void testGeometryMetadataSingle( assertEquals(expectedColumnType, resultSet.getInt(5)); assertEquals("GEOMETRY", resultSet.getString(6)); - } finally { - if (resultSet != null) { - resultSet.close(); - } } } @@ -1183,77 +1087,75 @@ private void putAndGetFile(Statement statement) throws Throwable { public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - Connection connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); - - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = 
destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - - File largeTempFile = tmpFolder.newFile("largeFile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile)); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.write("Creating large test file for GCP PUT/GET test"); - bw.write(System.lineSeparator()); - bw.close(); - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + + File largeTempFile = tmpFolder.newFile("largeFile.csv"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { + bw.write("Creating large test file for GCP PUT/GET test"); + bw.write(System.lineSeparator()); + bw.write("Creating large test file for GCP PUT/GET test"); + bw.write(System.lineSeparator()); + } + File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - String sourceFilePath = largeTempFile.getCanonicalPath(); + String sourceFilePath = largeTempFile.getCanonicalPath(); - try { - // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, - // which is a large file - for (int i = 0; i < 12; i++) { - copyContentFrom(largeTempFile, largeTempFile2); - copyContentFrom(largeTempFile2, largeTempFile); - } + // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, + // which is a large file + for (int i = 0; i < 12; i++) { + copyContentFrom(largeTempFile, largeTempFile2); + copyContentFrom(largeTempFile2, largeTempFile); + } - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE 
largefile_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE largefile_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @largefile_stage/"); + // check that file exists in stage after PUT + findFile(statement, "ls @largefile_stage/"); - // create a new table with columns matching CSV file - statement.execute("create or replace table large_table (colA string)"); - // copy rows from file into table - statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); - // copy back from table into different stage - statement.execute("create or replace stage extra_stage"); - statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); + // create a new table with columns matching CSV file + statement.execute("create or replace table large_table (colA string)"); + // copy rows from file into table + statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); + // copy back from table into different stage + statement.execute("create or replace stage extra_stage"); + statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + // Make sure that the downloaded 
file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + assert (downloaded.exists()); - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - p.waitFor(); + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + p.waitFor(); - // compare the original file with the file that's been uploaded, copied into a table, copied - // back into a stage, - // downloaded, and unzipped - File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); - } finally { - statement.execute("DROP STAGE IF EXISTS largefile_stage"); - statement.execute("DROP STAGE IF EXISTS extra_stage"); - statement.execute("DROP TABLE IF EXISTS large_table"); - statement.close(); - connection.close(); + // compare the original file with the file that's been uploaded, copied into a table, copied + // back into a stage, + // downloaded, and unzipped + File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); + assert (largeTempFile.length() == unzipped.length()); + assert (FileUtils.contentEquals(largeTempFile, unzipped)); + } finally { + statement.execute("DROP STAGE IF EXISTS largefile_stage"); + statement.execute("DROP STAGE IF EXISTS extra_stage"); + statement.execute("DROP TABLE IF EXISTS large_table"); + } } } @@ -1261,77 +1163,75 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutGetLargeFileAzure() throws Throwable { Properties paramProperties = new Properties(); - Connection connection = getConnection("azureaccount", paramProperties); - Statement statement = 
connection.createStatement(); - - File destFolder = tmpFolder.newFolder(); - String destFolderCanonicalPath = destFolder.getCanonicalPath(); - String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - - File largeTempFile = tmpFolder.newFile("largeFile.csv"); - BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile)); - bw.write("Creating large test file for Azure PUT/GET test"); - bw.write(System.lineSeparator()); - bw.write("Creating large test file for Azure PUT/GET test"); - bw.write(System.lineSeparator()); - bw.close(); - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + try (Connection connection = getConnection("azureaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + File destFolder = tmpFolder.newFolder(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; + + File largeTempFile = tmpFolder.newFile("largeFile.csv"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { + bw.write("Creating large test file for Azure PUT/GET test"); + bw.write(System.lineSeparator()); + bw.write("Creating large test file for Azure PUT/GET test"); + bw.write(System.lineSeparator()); + } + File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); - String sourceFilePath = largeTempFile.getCanonicalPath(); + String sourceFilePath = largeTempFile.getCanonicalPath(); - try { - // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, - // which is a large file - for (int i = 0; i < 12; i++) { - copyContentFrom(largeTempFile, largeTempFile2); - copyContentFrom(largeTempFile2, largeTempFile); - } + // copy info from 1 file to another and continue doubling file size until we reach ~1.5GB, + // which is a large file + for (int i = 0; i < 12; i++) { + copyContentFrom(largeTempFile, largeTempFile2); + 
copyContentFrom(largeTempFile2, largeTempFile); + } - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE largefile_stage"); - assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE largefile_stage"); + assertTrue( + "Failed to put a file", + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); - // check that file exists in stage after PUT - findFile(statement, "ls @largefile_stage/"); + // check that file exists in stage after PUT + findFile(statement, "ls @largefile_stage/"); - // create a new table with columns matching CSV file - statement.execute("create or replace table large_table (colA string)"); - // copy rows from file into table - statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); - // copy back from table into different stage - statement.execute("create or replace stage extra_stage"); - statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); + // create a new table with columns matching CSV file + statement.execute("create or replace table large_table (colA string)"); + // copy rows from file into table + statement.execute("copy into large_table from @largefile_stage/largeFile.csv.gz"); + // copy back from table into different stage + statement.execute("create or replace stage extra_stage"); + statement.execute("copy into @extra_stage/bigFile.csv.gz from large_table single=true"); - // get file from new stage - assertTrue( - "Failed to get files", - statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + // get file from new stage + assertTrue( + "Failed to get files", + statement.execute( + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); - // Make sure that the downloaded file exists; it should be gzip compressed - File downloaded 
= new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + // Make sure that the downloaded file exists; it should be gzip compressed + File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + assert (downloaded.exists()); - // unzip the file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - p.waitFor(); + // unzip the file + Process p = + Runtime.getRuntime() + .exec("gzip -d " + destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); + p.waitFor(); - // compare the original file with the file that's been uploaded, copied into a table, copied - // back into a stage, - // downloaded, and unzipped - File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); - } finally { - statement.execute("DROP STAGE IF EXISTS largefile_stage"); - statement.execute("DROP STAGE IF EXISTS extra_stage"); - statement.execute("DROP TABLE IF EXISTS large_table"); - statement.close(); - connection.close(); + // compare the original file with the file that's been uploaded, copied into a table, copied + // back into a stage, + // downloaded, and unzipped + File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); + assert (largeTempFile.length() == unzipped.length()); + assert (FileUtils.contentEquals(largeTempFile, unzipped)); + } finally { + statement.execute("DROP STAGE IF EXISTS largefile_stage"); + statement.execute("DROP STAGE IF EXISTS extra_stage"); + statement.execute("DROP TABLE IF EXISTS large_table"); + } } } @@ -1345,115 +1245,114 @@ public void testPutGetLargeFileAzure() throws Throwable { private void copyContentFrom(File file1, File file2) throws Exception { FileInputStream inputStream = new FileInputStream(file1); FileOutputStream outputStream = new FileOutputStream(file2); 
- FileChannel fIn = inputStream.getChannel(); - FileChannel fOut = outputStream.getChannel(); - fOut.transferFrom(fIn, 0, fIn.size()); - fIn.position(0); - fOut.transferFrom(fIn, fIn.size(), fIn.size()); - fOut.close(); - fIn.close(); + try (FileChannel fIn = inputStream.getChannel(); + FileChannel fOut = outputStream.getChannel()) { + fOut.transferFrom(fIn, 0, fIn.size()); + fIn.position(0); + fOut.transferFrom(fIn, fIn.size(), fIn.size()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testPutS3RegionalUrl() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(accountName); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Test put file with internal compression + String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent1 = + new SnowflakeFileTransferAgent(putCommand1, sfSession, new SFStatement(sfSession)); + List metadatas1 = sfAgent1.getFileTransferMetadatas(); - // Test put file with internal compression - String putCommand1 = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent1 = - new SnowflakeFileTransferAgent(putCommand1, sfSession, new 
SFStatement(sfSession)); - List metadatas1 = sfAgent1.getFileTransferMetadatas(); - - String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); + String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { - InputStream inputStream = new FileInputStream(srcPath1); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand1) - .setUseS3RegionalUrl(false) - .build()); - } + for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { + InputStream inputStream = new FileInputStream(srcPath1); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand1) + .setUseS3RegionalUrl(false) + .build()); + } - for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { - InputStream inputStream = new FileInputStream(srcPath1); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand1) - .setUseS3RegionalUrl(true) - .build()); - } + for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { + InputStream inputStream = new FileInputStream(srcPath1); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + 
.setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand1) + .setUseS3RegionalUrl(true) + .build()); + } - // Test Put file with external compression - String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent2 = - new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); - List metadatas2 = sfAgent2.getFileTransferMetadatas(); + // Test Put file with external compression + String putCommand2 = "put file:///dummy/path/file2.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent2 = + new SnowflakeFileTransferAgent(putCommand2, sfSession, new SFStatement(sfSession)); + List metadatas2 = sfAgent2.getFileTransferMetadatas(); - String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); - for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { - String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; - Process p = - Runtime.getRuntime() - .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); - p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); - p.waitFor(); + String srcPath2 = getFullPathFileInResource(TEST_DATA_FILE_2); + for (SnowflakeFileTransferMetadata oneMetadata : metadatas2) { + String gzfilePath = destFolderCanonicalPath + "/tmp_compress.gz"; + Process p = + Runtime.getRuntime() + .exec("cp -fr " + srcPath2 + " " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); + p = Runtime.getRuntime().exec("gzip " + destFolderCanonicalPath + "/tmp_compress"); + p.waitFor(); - InputStream gzInputStream = new FileInputStream(gzfilePath); + InputStream gzInputStream = new FileInputStream(gzfilePath); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - 
.setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(gzInputStream) - .setRequireCompress(false) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand2) - .build()); - } - - // Download two files and verify their content. - assertTrue( - "Failed to get files", - statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(gzInputStream) + .setRequireCompress(false) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand2) + .build()); + } - // Make sure that the downloaded files are EQUAL, - // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + // Download two files and verify their content. 
+ assertTrue( + "Failed to get files", + statement.execute( + "GET @" + + testStageName + + " 'file://" + + destFolderCanonicalPath + + "/' parallel=8")); + + // Make sure that the downloaded files are EQUAL, + // they should be gzip compressed + assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -1466,66 +1365,62 @@ public void testPutS3RegionalUrl() throws Throwable { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { - Connection connection = null; String clientName = "clientName"; String clientKey = "clientKey"; List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(accountName); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - - // Test put file with internal compression - String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - List metadata = sfAgent.getFileTransferMetadatas(); + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + 
testStageName; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); - String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadata) { - InputStream inputStream = new FileInputStream(srcPath1); + String srcPath1 = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath1); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .setSFSession(sfSession) - .setCommand(putCommand) - .setStreamingIngestClientName(clientName) - .setStreamingIngestClientKey(clientKey) - .build()); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .setSFSession(sfSession) + .setCommand(putCommand) + .setStreamingIngestClientName(clientName) + .setStreamingIngestClientKey(clientKey) + .build()); - SnowflakeStorageClient client = - StorageClientFactory.getFactory() - .createClient( - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), - 1, - null, - /* session= */ null); - - String location = - ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); - int idx = location.indexOf('/'); - String remoteStageLocation = location.substring(0, idx); - String path = location.substring(idx + 1) + "file1.gz"; - StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); - - // Verify that we are able to fetch 
the metadata - assertEquals(clientName, client.getStreamingIngestClientName(meta)); - assertEquals(clientKey, client.getStreamingIngestClientKey(meta)); - } - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + SnowflakeStorageClient client = + StorageClientFactory.getFactory() + .createClient( + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo(), + 1, + null, + /* session= */ null); + + String location = + ((SnowflakeFileTransferMetadataV1) oneMetadata).getStageInfo().getLocation(); + int idx = location.indexOf('/'); + String remoteStageLocation = location.substring(0, idx); + String path = location.substring(idx + 1) + "file1.gz"; + StorageObjectMetadata meta = client.getObjectMetadata(remoteStageLocation, path); + + // Verify that we are able to fetch the metadata + assertEquals(clientName, client.getStreamingIngestClientName(meta)); + assertEquals(clientKey, client.getStreamingIngestClientKey(meta)); + } + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -1533,38 +1428,36 @@ public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { @Test(expected = SnowflakeSQLException.class) public void testNoSpaceLeftOnDeviceException() throws SQLException { - Connection connection = null; List supportedAccounts = Arrays.asList("gcpaccount", "s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); + try (Connection connection = getConnection(accountName)) { SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - Statement statement = connection.createStatement(); - SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); - statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); - statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " 
@testPutGet_stage"); - String command = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file:///tmp'"; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(command, sfSession, sfStatement); - StageInfo info = sfAgent.getStageInfo(); - SnowflakeStorageClient client = - StorageClientFactory.getFactory().createClient(info, 1, null, /* session= */ null); - - client.handleStorageException( - new StorageException( + try (Statement statement = connection.createStatement()) { + try { + SFStatement sfStatement = statement.unwrap(SnowflakeStatementV1.class).getSfStatement(); + statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); + statement.execute( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @testPutGet_stage"); + String command = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file:///tmp'"; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(command, sfSession, sfStatement); + StageInfo info = sfAgent.getStageInfo(); + SnowflakeStorageClient client = + StorageClientFactory.getFactory().createClient(info, 1, null, /* session= */ null); + + client.handleStorageException( + new StorageException( + client.getMaxRetries(), + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), client.getMaxRetries(), - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - client.getMaxRetries(), - "download", - null, - command, - null); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists testPutGet_stage"); - connection.close(); + "download", + null, + command, + null); + } finally { + statement.execute("DROP STAGE if exists testPutGet_stage"); + } } } } @@ -1573,51 +1466,47 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void 
testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); - try { - // set parameter for presignedUrl upload instead of downscoped token - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", false); - connection = getConnection("gcpaccount", paramProperties); - Statement statement = connection.createStatement(); + // set parameter for presignedUrl upload instead of downscoped token + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", false); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE " + testStageName); + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); - // Test put file with internal compression - String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; - SnowflakeFileTransferAgent sfAgent = - new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); - List metadata = sfAgent.getFileTransferMetadatas(); - - String srcPath = getFullPathFileInResource(TEST_DATA_FILE); - for (SnowflakeFileTransferMetadata oneMetadata : metadata) { - InputStream inputStream = new FileInputStream(srcPath); - 
- assert (oneMetadata.isForOneFile()); - SnowflakeFileTransferAgent.uploadWithoutConnection( - SnowflakeFileTransferConfig.Builder.newInstance() - .setSnowflakeFileTransferMetadata(oneMetadata) - .setUploadStream(inputStream) - .setRequireCompress(true) - .setNetworkTimeoutInMilli(0) - .setOcspMode(OCSPMode.FAIL_OPEN) - .build()); - } + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath); - assertTrue( - "Failed to get files", - statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); - assert (isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + assert (oneMetadata.isForOneFile()); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) + .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .build()); + } + + assertTrue( + "Failed to get files", + statement.execute( + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + assert (isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } @@ -1785,39 +1674,47 @@ public void testHTAPStatementParameterCaching() throws SQLException { + TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT") + " set ENABLE_SNOW_654741_FOR_TESTING=true"); } - Connection con = getConnection(); - Statement statement = con.createStatement(); - // Set up a test table with time, date, and timestamp values - statement.execute("create or replace table timetable (t1 time, t2 timestamp, t3 date)"); - 
statement.execute( - "insert into timetable values ('13:53:11', '2023-08-17 13:53:33', '2023-08-17')"); - // Set statement- level parameters that will affect the output (set output format params) - statement - .unwrap(SnowflakeStatement.class) - .setParameter("TIME_OUTPUT_FORMAT", "HH12:MI:SS.FF AM"); - statement.unwrap(SnowflakeStatement.class).setParameter("DATE_OUTPUT_FORMAT", "DD-MON-YYYY"); - statement - .unwrap(SnowflakeStatement.class) - .setParameter("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS"); - ResultSet resultSet = statement.executeQuery("select * from timetable"); - resultSet.next(); - // Assert that the values match the format of the specified statement parameter output format - // values - assertEquals("01:53:11.000000000 PM", resultSet.getString(1)); - assertEquals("2023-08-17T13:53:33", resultSet.getString(2)); - assertEquals("17-Aug-2023", resultSet.getString(3)); - // Set a different statement parameter value for DATE_OUTPUT_FORMAT - statement.unwrap(SnowflakeStatement.class).setParameter("DATE_OUTPUT_FORMAT", "MM/DD/YYYY"); - resultSet = statement.executeQuery("select * from timetable"); - resultSet.next(); - // Verify it matches the new statement parameter specified output format - assertEquals("08/17/2023", resultSet.getString(3)); - statement.execute("drop table if exists timetable"); - statement.close(); - con.close(); + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + // Set up a test table with time, date, and timestamp values + try { + statement.execute("create or replace table timetable (t1 time, t2 timestamp, t3 date)"); + statement.execute( + "insert into timetable values ('13:53:11', '2023-08-17 13:53:33', '2023-08-17')"); + // Set statement- level parameters that will affect the output (set output format params) + statement + .unwrap(SnowflakeStatement.class) + .setParameter("TIME_OUTPUT_FORMAT", "HH12:MI:SS.FF AM"); + statement + .unwrap(SnowflakeStatement.class) + 
.setParameter("DATE_OUTPUT_FORMAT", "DD-MON-YYYY"); + statement + .unwrap(SnowflakeStatement.class) + .setParameter("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD\"T\"HH24:MI:SS"); + try (ResultSet resultSet = statement.executeQuery("select * from timetable")) { + assertTrue(resultSet.next()); + // Assert that the values match the format of the specified statement parameter output + // format + // values + assertEquals("01:53:11.000000000 PM", resultSet.getString(1)); + assertEquals("2023-08-17T13:53:33", resultSet.getString(2)); + assertEquals("17-Aug-2023", resultSet.getString(3)); + } + + // Set a different statement parameter value for DATE_OUTPUT_FORMAT + statement.unwrap(SnowflakeStatement.class).setParameter("DATE_OUTPUT_FORMAT", "MM/DD/YYYY"); + try (ResultSet resultSet = statement.executeQuery("select * from timetable")) { + assertTrue(resultSet.next()); + // Verify it matches the new statement parameter specified output format + assertEquals("08/17/2023", resultSet.getString(3)); + } + } finally { + statement.execute("drop table if exists timetable"); + } + } // cleanup - try (Connection con2 = getSnowflakeAdminConnection()) { - statement = con2.createStatement(); + try (Connection con2 = getSnowflakeAdminConnection(); + Statement statement = con2.createStatement()) { statement.execute( "alter account " + TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT") @@ -1828,40 +1725,36 @@ public void testHTAPStatementParameterCaching() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testS3PutInGS() throws Throwable { - Connection connection = null; File destFolder = tmpFolder.newFolder(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); - try { - Properties paramProperties = new Properties(); - connection = getConnection("s3testaccount", paramProperties); - Statement statement = connection.createStatement(); - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE 
STAGE " + testStageName); - - // put file using GS system commmand, this is internal GS behavior - final String fileName = "testFile.json"; - final String content = "testName: testS3PutInGs"; - String putSystemCall = - String.format( - "call system$it('PUT_FILE_TO_STAGE', '%s', '%s', '%s', '%s')", - testStageName, fileName, content, "false"); - statement.execute(putSystemCall); - - // get file using jdbc - String getCall = - String.format("GET @%s 'file://%s/'", testStageName, destFolderCanonicalPath); - statement.execute(getCall); - - InputStream downloadedFileStream = - new FileInputStream(destFolderCanonicalPath + "/" + fileName); - String downloadedFile = IOUtils.toString(downloadedFileStream, StandardCharsets.UTF_8); - assertTrue( - "downloaded content does not equal uploaded content", content.equals(downloadedFile)); - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + Properties paramProperties = new Properties(); + try (Connection connection = getConnection("s3testaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); + + // put file using GS system commmand, this is internal GS behavior + final String fileName = "testFile.json"; + final String content = "testName: testS3PutInGs"; + String putSystemCall = + String.format( + "call system$it('PUT_FILE_TO_STAGE', '%s', '%s', '%s', '%s')", + testStageName, fileName, content, "false"); + statement.execute(putSystemCall); + + // get file using jdbc + String getCall = + String.format("GET @%s 'file://%s/'", testStageName, destFolderCanonicalPath); + statement.execute(getCall); + + InputStream downloadedFileStream = + new FileInputStream(destFolderCanonicalPath + "/" + fileName); + String downloadedFile = IOUtils.toString(downloadedFileStream, StandardCharsets.UTF_8); + assertTrue( + 
"downloaded content does not equal uploaded content", content.equals(downloadedFile)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java index 20f986542..f9c2bb66d 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java @@ -56,17 +56,16 @@ public Connection init() throws SQLException { public Connection init(@Nullable Properties properties) throws SQLException { Connection conn = BaseJDBCTest.getConnection(properties); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - - // Set up theses parameters as smaller values in order to generate - // multiple file chunks with small data volumes. - stmt.execute("alter session set result_first_chunk_max_size = 512"); - stmt.execute("alter session set result_min_chunk_size = 512"); - stmt.execute("alter session set arrow_result_rb_flush_size = 512"); - stmt.execute("alter session set result_chunk_size_multiplier = 1.2"); - stmt.close(); - + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + + // Set up theses parameters as smaller values in order to generate + // multiple file chunks with small data volumes. 
+ stmt.execute("alter session set result_first_chunk_max_size = 512"); + stmt.execute("alter session set result_min_chunk_size = 512"); + stmt.execute("alter session set arrow_result_rb_flush_size = 512"); + stmt.execute("alter session set result_chunk_size_multiplier = 1.2"); + } return conn; } @@ -123,12 +122,11 @@ private List serializeResultSet( // Write object to file String tmpFileName = tmpFolder.getRoot().getPath() + "_result_" + i + "." + fileNameAppendix; - FileOutputStream fo = new FileOutputStream(tmpFileName); - ObjectOutputStream so = new ObjectOutputStream(fo); - so.writeObject(entry); - so.flush(); - so.close(); - + try (FileOutputStream fo = new FileOutputStream(tmpFileName); + ObjectOutputStream so = new ObjectOutputStream(fo)) { + so.writeObject(entry); + so.flush(); + } result.add(tmpFileName); } @@ -161,67 +159,68 @@ private String deserializeResultSetWithProperties(List files, Properties for (String filename : files) { // Read Object from file - FileInputStream fi = new FileInputStream(filename); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 resultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); + try (FileInputStream fi = new FileInputStream(filename); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 resultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); - if (developPrint) { - System.out.println( - "\nFormat: " - + resultSetChunk.getQueryResultFormat() - + " UncompChunksize: " - + resultSetChunk.getUncompressedDataSizeInBytes() - + " firstChunkContent: " - + (resultSetChunk.getFirstChunkStringData() == null ? 
" null " : " not null ")); - for (SnowflakeResultSetSerializableV1.ChunkFileMetadata chunkFileMetadata : - resultSetChunk.chunkFileMetadatas) { + if (developPrint) { System.out.println( - "RowCount=" - + chunkFileMetadata.getRowCount() - + ", cpsize=" - + chunkFileMetadata.getCompressedByteSize() - + ", uncpsize=" - + chunkFileMetadata.getUncompressedByteSize() - + ", URL= " - + chunkFileMetadata.getFileURL()); + "\nFormat: " + + resultSetChunk.getQueryResultFormat() + + " UncompChunksize: " + + resultSetChunk.getUncompressedDataSizeInBytes() + + " firstChunkContent: " + + (resultSetChunk.getFirstChunkStringData() == null ? " null " : " not null ")); + for (SnowflakeResultSetSerializableV1.ChunkFileMetadata chunkFileMetadata : + resultSetChunk.chunkFileMetadatas) { + System.out.println( + "RowCount=" + + chunkFileMetadata.getRowCount() + + ", cpsize=" + + chunkFileMetadata.getCompressedByteSize() + + ", uncpsize=" + + chunkFileMetadata.getUncompressedByteSize() + + ", URL= " + + chunkFileMetadata.getFileURL()); + } } - } - // Read data from object - ResultSet rs = - resultSetChunk.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(props) - .setSfFullURL(sfFullURL) - .build()); - - // print result set meta data - ResultSetMetaData metadata = rs.getMetaData(); - int colCount = metadata.getColumnCount(); - if (developPrint) { - for (int j = 1; j <= colCount; j++) { - System.out.print(" table: " + metadata.getTableName(j)); - System.out.print(" schema: " + metadata.getSchemaName(j)); - System.out.print(" type: " + metadata.getColumnTypeName(j)); - System.out.print(" name: " + metadata.getColumnName(j)); - System.out.print(" precision: " + metadata.getPrecision(j)); - System.out.println(" scale:" + metadata.getScale(j)); - } - } + // Read data from object + try (ResultSet rs = + resultSetChunk.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + 
.setProxyProperties(props) + .setSfFullURL(sfFullURL) + .build())) { + + // print result set meta data + ResultSetMetaData metadata = rs.getMetaData(); + int colCount = metadata.getColumnCount(); + if (developPrint) { + for (int j = 1; j <= colCount; j++) { + System.out.print(" table: " + metadata.getTableName(j)); + System.out.print(" schema: " + metadata.getSchemaName(j)); + System.out.print(" type: " + metadata.getColumnTypeName(j)); + System.out.print(" name: " + metadata.getColumnName(j)); + System.out.print(" precision: " + metadata.getPrecision(j)); + System.out.println(" scale:" + metadata.getScale(j)); + } + } - // Print and count data - while (rs.next()) { - for (int i = 1; i <= colCount; i++) { - rs.getObject(i); - if (rs.wasNull()) { - builder.append("\"").append("null").append("\","); - } else { - builder.append("\"").append(rs.getString(i)).append("\","); + // Print and count data + while (rs.next()) { + for (int i = 1; i <= colCount; i++) { + rs.getObject(i); + if (rs.wasNull()) { + builder.append("\"").append("null").append("\","); + } else { + builder.append("\"").append(rs.getString(i)).append("\","); + } + } + builder.append("\n"); } } - builder.append("\n"); } } @@ -275,15 +274,15 @@ private void testBasicTableHarness( } String sqlSelect = "select * from table_basic " + whereClause; - ResultSet rs = + try (ResultSet rs = async ? 
statement.unwrap(SnowflakeStatement.class).executeAsyncQuery(sqlSelect) - : statement.executeQuery(sqlSelect); + : statement.executeQuery(sqlSelect)) { - fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); + fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); - originalResultCSVString = generateCSVResult(rs); - rs.close(); + originalResultCSVString = generateCSVResult(rs); + } } String chunkResultString = deserializeResultSet(fileNameList); @@ -370,25 +369,14 @@ private void testTimestampHarness( throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init()) { - connection - .createStatement() - .execute("alter session set DATE_OUTPUT_FORMAT = '" + format_date + "'"); - connection - .createStatement() - .execute("alter session set TIME_OUTPUT_FORMAT = '" + format_time + "'"); - connection - .createStatement() - .execute("alter session set TIMESTAMP_NTZ_OUTPUT_FORMAT = '" + format_ntz + "'"); - connection - .createStatement() - .execute("alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT = '" + format_ltz + "'"); - connection - .createStatement() - .execute("alter session set TIMESTAMP_TZ_OUTPUT_FORMAT = '" + format_tz + "'"); - connection.createStatement().execute("alter session set TIMEZONE = '" + timezone + "'"); - - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("alter session set DATE_OUTPUT_FORMAT = '" + format_date + "'"); + statement.execute("alter session set TIME_OUTPUT_FORMAT = '" + format_time + "'"); + statement.execute("alter session set TIMESTAMP_NTZ_OUTPUT_FORMAT = '" + format_ntz + "'"); + statement.execute("alter session set TIMESTAMP_LTZ_OUTPUT_FORMAT = '" + format_ltz + "'"); + statement.execute("alter session set TIMESTAMP_TZ_OUTPUT_FORMAT = '" + format_tz + "'"); + statement.execute("alter session set TIMEZONE = 
'" + timezone + "'"); statement.execute( "Create or replace table all_timestamps (" @@ -403,30 +391,28 @@ private void testTimestampHarness( + ")"); if (rowCount > 0) { - connection - .createStatement() - .execute( - "insert into all_timestamps " - + "select seq4(), '2015-10-25' , " - + "'23:59:59.123456789', '23:59:59', '23:59:59.123', '23:59:59.123456', " - + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," - + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," - + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," - + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," - + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," - + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'" - + " from table(generator(rowcount=>" - + rowCount - + "))"); + statement.execute( + "insert into all_timestamps " + + "select seq4(), '2015-10-25' , " + + "'23:59:59.123456789', '23:59:59', '23:59:59.123', '23:59:59.123456', " + + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," + + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," + + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," + + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'," + + " '2014-01-11 06:12:13.123456789', '2014-01-11 06:12:13'," + + " '2014-01-11 06:12:13.123', '2014-01-11 06:12:13.123456'" + + " from table(generator(rowcount=>" + + rowCount + + "))"); } String sqlSelect = "select * from all_timestamps " + whereClause; - ResultSet rs = statement.executeQuery(sqlSelect); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { - fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); + fileNameList = serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, "txt"); - originalResultCSVString = generateCSVResult(rs); - rs.close(); + originalResultCSVString = generateCSVResult(rs); + } } String chunkResultString = deserializeResultSet(fileNameList); @@ -464,9 +450,8 @@ public void testTimestamp() 
throws Throwable { public void testBasicTableWithSerializeObjectsAfterReadResultSet() throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute("create or replace schema testschema"); statement.execute( @@ -481,16 +466,15 @@ public void testBasicTableWithSerializeObjectsAfterReadResultSet() throws Throwa + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - - originalResultCSVString = generateCSVResult(rs); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { - // In previous test, the serializable objects are serialized before - // reading the ResultSet. This test covers the case that serializes the - // object after reading the result set. - fileNameList = serializeResultSet((SnowflakeResultSet) rs, 1 * 1024 * 1024, "txt"); + originalResultCSVString = generateCSVResult(rs); - rs.close(); + // In previous test, the serializable objects are serialized before + // reading the ResultSet. This test covers the case that serializes the + // object after reading the result set. 
+ fileNameList = serializeResultSet((SnowflakeResultSet) rs, 1 * 1024 * 1024, "txt"); + } } String chunkResultString = deserializeResultSet(fileNameList); @@ -511,29 +495,29 @@ private synchronized List splitResultSetSerializables( for (String filename : files) { // Read Object from file - FileInputStream fi = new FileInputStream(filename); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 resultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); - - // Get ResultSet from object - ResultSet rs = - resultSetChunk.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(new Properties()) - .setSfFullURL(sfFullURL) - .build()); - - String[] filePathParts = filename.split(File.separator); - String appendix = filePathParts[filePathParts.length - 1]; - - List thisFileList = - serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, appendix); - for (int i = 0; i < thisFileList.size(); i++) { - resultFileList.add(thisFileList.get(i)); + try (FileInputStream fi = new FileInputStream(filename); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 resultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); + + // Get ResultSet from object + try (ResultSet rs = + resultSetChunk.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + .setProxyProperties(new Properties()) + .setSfFullURL(sfFullURL) + .build())) { + + String[] filePathParts = filename.split(File.separator); + String appendix = filePathParts[filePathParts.length - 1]; + + List thisFileList = + serializeResultSet((SnowflakeResultSet) rs, maxSizeInBytes, appendix); + for (int i = 0; i < thisFileList.size(); i++) { + resultFileList.add(thisFileList.get(i)); + } + } } - rs.close(); } if (developPrint) { @@ -550,8 +534,8 @@ public void testSplitResultSetSerializable() throws Throwable { List fileNameList = 
null; String originalResultCSVString = null; int rowCount = 90000; - try (Connection connection = init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -565,12 +549,12 @@ public void testSplitResultSetSerializable() throws Throwable { + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { - fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); + fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); - originalResultCSVString = generateCSVResult(rs); - rs.close(); + originalResultCSVString = generateCSVResult(rs); + } } // Split deserializedResultSet by 3M, the result should be the same @@ -613,28 +597,30 @@ private void hackToSetupWrongURL(List resultSetS @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCloseUnconsumedResultSet() throws Throwable { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table table_basic " + " (int_c int, string_c string(128))"); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table table_basic " + " (int_c int, string_c string(128))"); - int rowCount = 100000; - statement.execute( - "insert into table_basic select " - + "seq4(), " - + "'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" - + " from table(generator(rowcount=>" - + rowCount - + "))"); + int rowCount = 100000; + statement.execute( + "insert into table_basic select " + + "seq4(), " + + 
"'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" + + " from table(generator(rowcount=>" + + rowCount + + "))"); - int testCount = 5; - while (testCount-- > 0) { - String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - rs.close(); + int testCount = 5; + while (testCount-- > 0) { + String sqlSelect = "select * from table_basic "; + try (ResultSet rs = statement.executeQuery(sqlSelect)) {} + ; + } + } finally { + statement.execute("drop table if exists table_basic"); } - statement.execute("drop table if exists table_basic"); } } @@ -645,50 +631,50 @@ public void testNegativeWithChunkFileNotExist() throws Throwable { Properties properties = new Properties(); properties.put("networkTimeout", 10000); // 10000 millisec try (Connection connection = init(properties)) { - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table table_basic " + " (int_c int, string_c string(128))"); - - int rowCount = 300; - statement.execute( - "insert into table_basic select " - + "seq4(), " - + "'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" - + " from table(generator(rowcount=>" - + rowCount - + "))"); - - String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - - // Test case 1: Generate one Serializable object - List resultSetSerializables = - ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024); - - hackToSetupWrongURL(resultSetSerializables); - - // Expected to hit credential issue when access the result. 
- assertEquals(resultSetSerializables.size(), 1); - try { - SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); + try (Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table table_basic " + " (int_c int, string_c string(128))"); - ResultSet resultSet = - resultSetSerializable.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(new Properties()) - .setSfFullURL(sfFullURL) - .build()); + int rowCount = 300; + statement.execute( + "insert into table_basic select " + + "seq4(), " + + "'arrow_1234567890arrow_1234567890arrow_1234567890arrow_1234567890'" + + " from table(generator(rowcount=>" + + rowCount + + "))"); - while (resultSet.next()) { - resultSet.getString(1); + String sqlSelect = "select * from table_basic "; + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // Test case 1: Generate one Serializable object + List resultSetSerializables = + ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024); + + hackToSetupWrongURL(resultSetSerializables); + + // Expected to hit credential issue when access the result. 
+ assertEquals(resultSetSerializables.size(), 1); + try { + SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0); + + ResultSet resultSet = + resultSetSerializable.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + .setProxyProperties(new Properties()) + .setSfFullURL(sfFullURL) + .build()); + + while (resultSet.next()) { + resultSet.getString(1); + } + fail( + "error should happen when accessing the data because the " + + "file URL is corrupted."); + } catch (SQLException ex) { + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), ex.getErrorCode()); + } } - fail("error should happen when accessing the data because the " + "file URL is corrupted."); - } catch (SQLException ex) { - assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), ex.getErrorCode()); } - - rs.close(); } } @@ -790,8 +776,8 @@ public void testCustomProxyWithFiles() throws Throwable { } private void generateTestFiles() throws Throwable { - try (Connection connection = init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -806,10 +792,11 @@ private void generateTestFiles() throws Throwable { + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - developPrint = true; - serializeResultSet((SnowflakeResultSet) rs, 2 * 1024 * 1024, "txt"); - System.exit(-1); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + developPrint = true; + serializeResultSet((SnowflakeResultSet) rs, 2 * 1024 * 1024, "txt"); + System.exit(-1); + } } } @@ -821,8 +808,8 @@ public void testRetrieveMetadata() throws Throwable { long expectedTotalRowCount = 0; long expectedTotalCompressedSize = 0; long expectedTotalUncompressedSize = 0; - try (Connection connection = 
init()) { - Statement statement = connection.createStatement(); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -836,34 +823,31 @@ public void testRetrieveMetadata() throws Throwable { + "))"); String sqlSelect = "select * from table_basic "; - ResultSet rs = statement.executeQuery(sqlSelect); - - // Split deserializedResultSet by 3M - fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); - - // Only one serializable object is generated with 100M data. - assertEquals(fileNameList.size(), 1); - - FileInputStream fi = new FileInputStream(fileNameList.get(0)); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 wholeResultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); - expectedTotalRowCount = wholeResultSetChunk.getRowCount(); - expectedTotalCompressedSize = wholeResultSetChunk.getCompressedDataSizeInBytes(); - expectedTotalUncompressedSize = wholeResultSetChunk.getUncompressedDataSizeInBytes(); - - if (developPrint) { - System.out.println( - "Total statistic: RowCount=" - + expectedTotalRowCount - + " CompSize=" - + expectedTotalCompressedSize - + " UncompSize=" - + expectedTotalUncompressedSize); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // Split deserializedResultSet by 3M + fileNameList = serializeResultSet((SnowflakeResultSet) rs, 100 * 1024 * 1024, "txt"); + + // Only one serializable object is generated with 100M data. 
+ assertEquals(fileNameList.size(), 1); + + try (FileInputStream fi = new FileInputStream(fileNameList.get(0)); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 wholeResultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); + expectedTotalRowCount = wholeResultSetChunk.getRowCount(); + expectedTotalCompressedSize = wholeResultSetChunk.getCompressedDataSizeInBytes(); + expectedTotalUncompressedSize = wholeResultSetChunk.getUncompressedDataSizeInBytes(); + } + if (developPrint) { + System.out.println( + "Total statistic: RowCount=" + + expectedTotalRowCount + + " CompSize=" + + expectedTotalCompressedSize + + " UncompSize=" + + expectedTotalUncompressedSize); + } } - - rs.close(); } assertEquals(expectedTotalRowCount, rowCount); assertThat(expectedTotalCompressedSize, greaterThan((long) 0)); @@ -938,56 +922,55 @@ private boolean isMetadataConsistent( for (String filename : files) { // Read Object from file - FileInputStream fi = new FileInputStream(filename); - ObjectInputStream si = new ObjectInputStream(fi); - SnowflakeResultSetSerializableV1 resultSetChunk = - (SnowflakeResultSetSerializableV1) si.readObject(); - fi.close(); - - // Accumulate statistic from metadata - actualRowCountFromMetadata += resultSetChunk.getRowCount(); - actualTotalCompressedSize += resultSetChunk.getCompressedDataSizeInBytes(); - actualTotalUncompressedSize += resultSetChunk.getUncompressedDataSizeInBytes(); - chunkFileCount += resultSetChunk.chunkFileCount; - - // Get actual row count from result set. - // sfFullURL is used to support private link URL. - // This test case is not for private link env, so just use a valid URL for testing purpose. - ResultSet rs = - resultSetChunk.getResultSet( - SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() - .setProxyProperties(props) - .setSfFullURL(sfFullURL) - .build()); - - // Accumulate the actual row count from result set. 
- while (rs.next()) { - actualRowCount++; - } - } + try (FileInputStream fi = new FileInputStream(filename); + ObjectInputStream si = new ObjectInputStream(fi)) { + SnowflakeResultSetSerializableV1 resultSetChunk = + (SnowflakeResultSetSerializableV1) si.readObject(); + + // Accumulate statistic from metadata + actualRowCountFromMetadata += resultSetChunk.getRowCount(); + actualTotalCompressedSize += resultSetChunk.getCompressedDataSizeInBytes(); + actualTotalUncompressedSize += resultSetChunk.getUncompressedDataSizeInBytes(); + chunkFileCount += resultSetChunk.chunkFileCount; + + // Get actual row count from result set. + // sfFullURL is used to support private link URL. + // This test case is not for private link env, so just use a valid URL for testing purpose. + try (ResultSet rs = + resultSetChunk.getResultSet( + SnowflakeResultSetSerializable.ResultSetRetrieveConfig.Builder.newInstance() + .setProxyProperties(props) + .setSfFullURL(sfFullURL) + .build())) { - if (developPrint) { - System.out.println( - "isMetadataConsistent: FileCount=" - + files.size() - + " RowCounts=" - + expectedTotalRowCount - + " " - + actualRowCountFromMetadata - + " (" - + actualRowCount - + ") CompSize=" - + expectedTotalCompressedSize - + " " - + actualTotalCompressedSize - + " UncompSize=" - + expectedTotalUncompressedSize - + " " - + actualTotalUncompressedSize - + " chunkFileCount=" - + chunkFileCount); + // Accumulate the actual row count from result set. 
+ while (rs.next()) { + actualRowCount++; + } + } + } + if (developPrint) { + System.out.println( + "isMetadataConsistent: FileCount=" + + files.size() + + " RowCounts=" + + expectedTotalRowCount + + " " + + actualRowCountFromMetadata + + " (" + + actualRowCount + + ") CompSize=" + + expectedTotalCompressedSize + + " " + + actualTotalCompressedSize + + " UncompSize=" + + expectedTotalUncompressedSize + + " " + + actualTotalUncompressedSize + + " chunkFileCount=" + + chunkFileCount); + } } - return actualRowCount == expectedTotalRowCount && actualRowCountFromMetadata == expectedTotalRowCount && actualTotalCompressedSize == expectedTotalCompressedSize diff --git a/src/test/java/net/snowflake/client/jdbc/StatementIT.java b/src/test/java/net/snowflake/client/jdbc/StatementIT.java index 82c9725b4..bf62a4948 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementIT.java @@ -41,9 +41,9 @@ public class StatementIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @@ -51,209 +51,205 @@ public static Connection getConnection() throws SQLException { @Test public void testFetchDirection() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(ResultSet.FETCH_FORWARD, statement.getFetchDirection()); - try { - statement.setFetchDirection(ResultSet.FETCH_REVERSE); - } catch (SQLFeatureNotSupportedException e) { - assertTrue(true); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + 
assertEquals(ResultSet.FETCH_FORWARD, statement.getFetchDirection()); + try { + statement.setFetchDirection(ResultSet.FETCH_REVERSE); + } catch (SQLFeatureNotSupportedException e) { + assertTrue(true); + } } - statement.close(); - connection.close(); } @Ignore("Not working for setFetchSize") @Test public void testFetchSize() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(50, statement.getFetchSize()); - statement.setFetchSize(1); - ResultSet rs = statement.executeQuery("select * from JDBC_STATEMENT"); - assertEquals(1, getSizeOfResultSet(rs)); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + assertEquals(50, statement.getFetchSize()); + statement.setFetchSize(1); + ResultSet rs = statement.executeQuery("select * from JDBC_STATEMENT"); + assertEquals(1, getSizeOfResultSet(rs)); + } } @Test public void testMaxRows() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; - assertEquals(0, statement.getMaxRows()); - - // statement.setMaxRows(1); - // assertEquals(1, statement.getMaxRows()); - ResultSet rs = statement.executeQuery(sqlSelect); - int resultSizeCount = getSizeOfResultSet(rs); - // assertEquals(1, resultSizeCount); - - statement.setMaxRows(0); - rs = statement.executeQuery(sqlSelect); - // assertEquals(3, getSizeOfResultSet(rs)); - - statement.setMaxRows(-1); - rs = statement.executeQuery(sqlSelect); - // assertEquals(3, getSizeOfResultSet(rs)); - statement.close(); - - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; + assertEquals(0, statement.getMaxRows()); + + // statement.setMaxRows(1); 
+ // assertEquals(1, statement.getMaxRows()); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + int resultSizeCount = getSizeOfResultSet(rs); + // assertEquals(1, resultSizeCount); + } + statement.setMaxRows(0); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // assertEquals(3, getSizeOfResultSet(rs)); + } + statement.setMaxRows(-1); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + // assertEquals(3, getSizeOfResultSet(rs)); + } + } } @Test public void testQueryTimeOut() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(0, statement.getQueryTimeout()); - statement.setQueryTimeout(5); - assertEquals(5, statement.getQueryTimeout()); - try { - statement.executeQuery("select count(*) from table(generator(timeLimit => 100))"); - } catch (SQLException e) { - assertTrue(true); - assertEquals(SqlState.QUERY_CANCELED, e.getSQLState()); - assertEquals("SQL execution canceled", e.getMessage()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + assertEquals(0, statement.getQueryTimeout()); + statement.setQueryTimeout(5); + assertEquals(5, statement.getQueryTimeout()); + try { + statement.executeQuery("select count(*) from table(generator(timeLimit => 100))"); + } catch (SQLException e) { + assertTrue(true); + assertEquals(SqlState.QUERY_CANCELED, e.getSQLState()); + assertEquals("SQL execution canceled", e.getMessage()); + } } - statement.close(); - connection.close(); } @Test public void testStatementClose() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - assertEquals(connection, statement.getConnection()); - assertTrue(!statement.isClosed()); - statement.close(); - assertTrue(statement.isClosed()); - connection.close(); + try (Connection connection = getConnection()) { + Statement statement = connection.createStatement(); + 
assertEquals(connection, statement.getConnection()); + assertTrue(!statement.isClosed()); + statement.close(); + assertTrue(statement.isClosed()); + } } @Test public void testExecuteSelect() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; - boolean success = statement.execute(sqlSelect); - assertTrue(success); - String queryID1 = statement.unwrap(SnowflakeStatement.class).getQueryID(); - assertNotNull(queryID1); - - ResultSet rs = statement.getResultSet(); - assertEquals(3, getSizeOfResultSet(rs)); - assertEquals(-1, statement.getUpdateCount()); - assertEquals(-1L, statement.getLargeUpdateCount()); - String queryID2 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - assertEquals(queryID2, queryID1); - - rs = statement.executeQuery(sqlSelect); - assertEquals(3, getSizeOfResultSet(rs)); - String queryID4 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); - assertNotEquals(queryID4, queryID1); - rs.close(); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + + String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; + boolean success = statement.execute(sqlSelect); + assertTrue(success); + String queryID1 = statement.unwrap(SnowflakeStatement.class).getQueryID(); + assertNotNull(queryID1); + + try (ResultSet rs = statement.getResultSet()) { + assertEquals(3, getSizeOfResultSet(rs)); + assertEquals(-1, statement.getUpdateCount()); + assertEquals(-1L, statement.getLargeUpdateCount()); + String queryID2 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + assertEquals(queryID2, queryID1); + } + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + assertEquals(3, getSizeOfResultSet(rs)); + String queryID4 = rs.unwrap(SnowflakeResultSet.class).getQueryID(); + assertNotEquals(queryID4, queryID1); + } + } } 
@Test public void testExecuteInsert() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("create or replace table test_insert(cola number)"); - - String insertSQL = "insert into test_insert values(2),(3)"; - int updateCount; - boolean success; - updateCount = statement.executeUpdate(insertSQL); - assertEquals(2, updateCount); - - success = statement.execute(insertSQL); - assertFalse(success); - assertEquals(2, statement.getUpdateCount()); - assertEquals(2L, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); - - ResultSet rs = statement.executeQuery("select count(*) from test_insert"); - rs.next(); - assertEquals(4, rs.getInt(1)); - rs.close(); - - assertTrue(statement.execute("select 1")); - ResultSet rs0 = statement.getResultSet(); - rs0.next(); - assertEquals(rs0.getInt(1), 1); - rs0.close(); - - statement.execute("drop table if exists test_insert"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_insert(cola number)"); + + String insertSQL = "insert into test_insert values(2),(3)"; + int updateCount; + boolean success; + updateCount = statement.executeUpdate(insertSQL); + assertEquals(2, updateCount); + + success = statement.execute(insertSQL); + assertFalse(success); + assertEquals(2, statement.getUpdateCount()); + assertEquals(2L, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + + try (ResultSet rs = statement.executeQuery("select count(*) from test_insert")) { + assertTrue(rs.next()); + assertEquals(4, rs.getInt(1)); + } + + assertTrue(statement.execute("select 1")); + try (ResultSet rs0 = statement.getResultSet()) { + assertTrue(rs0.next()); + assertEquals(rs0.getInt(1), 1); + } + } finally { + statement.execute("drop table if exists test_insert"); + } + } } @Test 
public void testExecuteUpdateAndDelete() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - statement.execute( - "create or replace table test_update(cola number, colb string) " + "as select 1, 'str1'"); - - statement.execute("insert into test_update values(2, 'str2')"); - - int updateCount; - boolean success; - updateCount = statement.executeUpdate("update test_update set COLB = 'newStr' where COLA = 1"); - assertEquals(1, updateCount); - - success = statement.execute("update test_update set COLB = 'newStr' where COLA = 2"); - assertFalse(success); - assertEquals(1, statement.getUpdateCount()); - assertEquals(1L, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); - - updateCount = statement.executeUpdate("delete from test_update where colA = 1"); - assertEquals(1, updateCount); - - success = statement.execute("delete from test_update where colA = 2"); - assertFalse(success); - assertEquals(1, statement.getUpdateCount()); - assertEquals(1L, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); - - statement.execute("drop table if exists test_update"); - statement.close(); - - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table test_update(cola number, colb string) " + + "as select 1, 'str1'"); + + statement.execute("insert into test_update values(2, 'str2')"); + + int updateCount; + boolean success; + updateCount = + statement.executeUpdate("update test_update set COLB = 'newStr' where COLA = 1"); + assertEquals(1, updateCount); + + success = statement.execute("update test_update set COLB = 'newStr' where COLA = 2"); + assertFalse(success); + assertEquals(1, statement.getUpdateCount()); + assertEquals(1L, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + + updateCount = statement.executeUpdate("delete 
from test_update where colA = 1"); + assertEquals(1, updateCount); + + success = statement.execute("delete from test_update where colA = 2"); + assertFalse(success); + assertEquals(1, statement.getUpdateCount()); + assertEquals(1L, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + } finally { + statement.execute("drop table if exists test_update"); + } + } } @Test public void testExecuteMerge() throws SQLException { - Connection connection = getConnection(); - String mergeSQL = - "merge into target using source on target.id = source.id " - + "when matched and source.sb =22 then update set ta = 'newStr' " - + "when not matched then insert (ta, tb) values (source.sa, source.sb)"; - Statement statement = connection.createStatement(); - statement.execute("create or replace table target(id integer, ta string, tb integer)"); - statement.execute("create or replace table source(id integer, sa string, sb integer)"); - statement.execute("insert into target values(1, 'str', 1)"); - statement.execute("insert into target values(2, 'str', 2)"); - statement.execute("insert into target values(3, 'str', 3)"); - statement.execute("insert into source values(1, 'str1', 11)"); - statement.execute("insert into source values(2, 'str2', 22)"); - statement.execute("insert into source values(3, 'str3', 33)"); - - int updateCount = statement.executeUpdate(mergeSQL); - - assertEquals(1, updateCount); - - statement.execute("drop table if exists target"); - statement.execute("drop table if exists source"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String mergeSQL = + "merge into target using source on target.id = source.id " + + "when matched and source.sb =22 then update set ta = 'newStr' " + + "when not matched then insert (ta, tb) values (source.sa, source.sb)"; + try { + statement.execute("create or replace table target(id integer, ta string, tb integer)"); + 
statement.execute("create or replace table source(id integer, sa string, sb integer)"); + statement.execute("insert into target values(1, 'str', 1)"); + statement.execute("insert into target values(2, 'str', 2)"); + statement.execute("insert into target values(3, 'str', 3)"); + statement.execute("insert into source values(1, 'str1', 11)"); + statement.execute("insert into source values(2, 'str2', 22)"); + statement.execute("insert into source values(3, 'str3', 33)"); + + int updateCount = statement.executeUpdate(mergeSQL); + + assertEquals(1, updateCount); + } finally { + statement.execute("drop table if exists target"); + statement.execute("drop table if exists source"); + } + } } /** @@ -263,8 +259,8 @@ public void testExecuteMerge() throws SQLException { */ @Test public void testAutogenerateKey() throws Throwable { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { statement.execute("create or replace table t(c1 int)"); statement.execute("insert into t values(1)", Statement.NO_GENERATED_KEYS); try { @@ -274,154 +270,161 @@ public void testAutogenerateKey() throws Throwable { // nop } // empty result - ResultSet rset = statement.getGeneratedKeys(); - assertFalse(rset.next()); - rset.close(); + try (ResultSet rset = statement.getGeneratedKeys()) { + assertFalse(rset.next()); + } } } @Test public void testExecuteMultiInsert() throws SQLException { - Connection connection = getConnection(); - String multiInsertionSQL = - " insert all " - + "into foo " - + "into foo1 " - + "into bar (b1, b2, b3) values (s3, s2, s1) " - + "select s1, s2, s3 from source"; - - Statement statement = connection.createStatement(); - assertFalse( - statement.execute("create or replace table foo (f1 integer, f2 integer, f3 integer)")); - assertFalse( - statement.execute("create or replace table foo1 (f1 integer, f2 integer, f3 
integer)")); - assertFalse( - statement.execute("create or replace table bar (b1 integer, b2 integer, b3 integer)")); - assertFalse( - statement.execute("create or replace table source(s1 integer, s2 integer, s3 integer)")); - assertFalse(statement.execute("insert into source values(1, 2, 3)")); - assertFalse(statement.execute("insert into source values(11, 22, 33)")); - assertFalse(statement.execute("insert into source values(111, 222, 333)")); - - int updateCount = statement.executeUpdate(multiInsertionSQL); - assertEquals(9, updateCount); - - statement.execute("drop table if exists foo"); - statement.execute("drop table if exists foo1"); - statement.execute("drop table if exists bar"); - statement.execute("drop table if exists source"); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiInsertionSQL = + " insert all " + + "into foo " + + "into foo1 " + + "into bar (b1, b2, b3) values (s3, s2, s1) " + + "select s1, s2, s3 from source"; + + try { + assertFalse( + statement.execute("create or replace table foo (f1 integer, f2 integer, f3 integer)")); + assertFalse( + statement.execute("create or replace table foo1 (f1 integer, f2 integer, f3 integer)")); + assertFalse( + statement.execute("create or replace table bar (b1 integer, b2 integer, b3 integer)")); + assertFalse( + statement.execute( + "create or replace table source(s1 integer, s2 integer, s3 integer)")); + assertFalse(statement.execute("insert into source values(1, 2, 3)")); + assertFalse(statement.execute("insert into source values(11, 22, 33)")); + assertFalse(statement.execute("insert into source values(111, 222, 333)")); + + int updateCount = statement.executeUpdate(multiInsertionSQL); + assertEquals(9, updateCount); + } finally { + statement.execute("drop table if exists foo"); + statement.execute("drop table if exists foo1"); + statement.execute("drop table if exists bar"); + 
statement.execute("drop table if exists source"); + } + } } @Test public void testExecuteBatch() throws Exception { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - connection.setAutoCommit(false); - // mixed of ddl/dml in batch - statement.addBatch("create or replace table test_batch(a string, b integer)"); - statement.addBatch("insert into test_batch values('str1', 1), ('str2', 2)"); - statement.addBatch( - "update test_batch set test_batch.b = src.b + 5 from " - + "(select 'str1' as a, 2 as b) src where test_batch.a = src.a"); - - int[] rowCounts = statement.executeBatch(); - connection.commit(); - - assertThat(rowCounts.length, is(3)); - assertThat(rowCounts[0], is(0)); - assertThat(rowCounts[1], is(2)); - assertThat(rowCounts[2], is(1)); - - List batchQueryIDs = statement.unwrap(SnowflakeStatement.class).getBatchQueryIDs(); - assertEquals(3, batchQueryIDs.size()); - assertEquals(statement.unwrap(SnowflakeStatement.class).getQueryID(), batchQueryIDs.get(2)); - - ResultSet resultSet = statement.executeQuery("select * from test_batch order by b asc"); - resultSet.next(); - assertThat(resultSet.getInt("B"), is(2)); - resultSet.next(); - assertThat(resultSet.getInt("B"), is(7)); - statement.clearBatch(); - - // one of the batch is query instead of ddl/dml - // it should continuing processing - try { - statement.addBatch("insert into test_batch values('str3', 3)"); - statement.addBatch("select * from test_batch"); - statement.addBatch("select * from test_batch_not_exist"); - statement.addBatch("insert into test_batch values('str4', 4)"); - statement.executeBatch(); - fail(); - } catch (BatchUpdateException e) { - rowCounts = e.getUpdateCounts(); - assertThat(e.getErrorCode(), is(ERROR_CODE_DOMAIN_OBJECT_DOES_NOT_EXIST)); - assertThat(rowCounts[0], is(1)); - assertThat(rowCounts[1], is(Statement.SUCCESS_NO_INFO)); - assertThat(rowCounts[2], is(Statement.EXECUTE_FAILED)); - assertThat(rowCounts[3], is(1)); - - 
connection.rollback(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + connection.setAutoCommit(false); + // mixed of ddl/dml in batch + statement.addBatch("create or replace table test_batch(a string, b integer)"); + statement.addBatch("insert into test_batch values('str1', 1), ('str2', 2)"); + statement.addBatch( + "update test_batch set test_batch.b = src.b + 5 from " + + "(select 'str1' as a, 2 as b) src where test_batch.a = src.a"); + + int[] rowCounts = statement.executeBatch(); + connection.commit(); + + assertThat(rowCounts.length, is(3)); + assertThat(rowCounts[0], is(0)); + assertThat(rowCounts[1], is(2)); + assertThat(rowCounts[2], is(1)); + + List batchQueryIDs = statement.unwrap(SnowflakeStatement.class).getBatchQueryIDs(); + assertEquals(3, batchQueryIDs.size()); + assertEquals(statement.unwrap(SnowflakeStatement.class).getQueryID(), batchQueryIDs.get(2)); + + try (ResultSet resultSet = + statement.executeQuery("select * from test_batch order by b asc")) { + assertTrue(resultSet.next()); + assertThat(resultSet.getInt("B"), is(2)); + assertTrue(resultSet.next()); + assertThat(resultSet.getInt("B"), is(7)); + statement.clearBatch(); + + // one of the batch is query instead of ddl/dml + // it should continuing processing + try { + statement.addBatch("insert into test_batch values('str3', 3)"); + statement.addBatch("select * from test_batch"); + statement.addBatch("select * from test_batch_not_exist"); + statement.addBatch("insert into test_batch values('str4', 4)"); + statement.executeBatch(); + fail(); + } catch (BatchUpdateException e) { + rowCounts = e.getUpdateCounts(); + assertThat(e.getErrorCode(), is(ERROR_CODE_DOMAIN_OBJECT_DOES_NOT_EXIST)); + assertThat(rowCounts[0], is(1)); + assertThat(rowCounts[1], is(Statement.SUCCESS_NO_INFO)); + assertThat(rowCounts[2], is(Statement.EXECUTE_FAILED)); + assertThat(rowCounts[3], is(1)); + + connection.rollback(); + } + + 
statement.clearBatch(); + + statement.addBatch( + "put file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @%test_batch auto_compress=false"); + File tempFolder = tmpFolder.newFolder("test_downloads_folder"); + statement.addBatch("get @%test_batch file://" + tempFolder); + + rowCounts = statement.executeBatch(); + assertThat(rowCounts.length, is(2)); + assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); + assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); + statement.clearBatch(); + } + } finally { + statement.execute("drop table if exists test_batch"); + } } - - statement.clearBatch(); - - statement.addBatch( - "put file://" - + getFullPathFileInResource(TEST_DATA_FILE) - + " @%test_batch auto_compress=false"); - File tempFolder = tmpFolder.newFolder("test_downloads_folder"); - statement.addBatch("get @%test_batch file://" + tempFolder); - - rowCounts = statement.executeBatch(); - assertThat(rowCounts.length, is(2)); - assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); - assertThat(rowCounts[0], is(Statement.SUCCESS_NO_INFO)); - statement.clearBatch(); - - statement.execute("drop table if exists test_batch"); - statement.close(); - connection.close(); } @Test public void testExecuteLargeBatch() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - /** - * Generate a table with several rows and 1 column named test_large_batch Note: to truly test - * that executeLargeBatch works with a number of rows greater than MAX_INT, replace rowcount => - * 15 in next code line with rowcount => 2147483648, or some other number larger than MAX_INT. - * Test will take about 15 minutes to run. 
- */ - statement.execute( - "create or replace table test_large_batch (a number) as (select * from (select 5 from table" - + "(generator(rowcount => 15)) v));"); - // update values in table so that all rows are updated - statement.addBatch("update test_large_batch set a = 7 where a = 5;"); - long[] rowsUpdated = statement.executeLargeBatch(); - assertThat(rowsUpdated.length, is(1)); - long testVal = 15L; - assertThat(rowsUpdated[0], is(testVal)); - statement.clearBatch(); - /** - * To test SQLException for integer overflow when using executeBatch() for row updates of larger - * than MAX_INT, uncomment the following lines of code. Test will take about 15 minutes to run. - * - *

statement.execute("create or replace table test_large_batch (a number) as (select * from - * (select 5 from table" + "(generator(rowcount => 2147483648)) v));"); - * statement.addBatch("update test_large_batch set a = 7 where a = 5;"); try { int[] rowsUpdated - * = statement.executeBatch(); fail(); } catch (SnowflakeSQLException e) { assertEquals((int) - * ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getMessageCode(), e.getErrorCode()); - * assertEquals(ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getSqlState(), e.getSQLState()); } - * statement.clearBatch(); - */ - statement.execute("drop table if exists test_large_batch"); - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + /** + * Generate a table with several rows and 1 column named test_large_batch Note: to truly test + * that executeLargeBatch works with a number of rows greater than MAX_INT, replace rowcount + * => 15 in next code line with rowcount => 2147483648, or some other number larger than + * MAX_INT. Test will take about 15 minutes to run. + */ + try { + statement.execute( + "create or replace table test_large_batch (a number) as (select * from (select 5 from table" + + "(generator(rowcount => 15)) v));"); + // update values in table so that all rows are updated + statement.addBatch("update test_large_batch set a = 7 where a = 5;"); + long[] rowsUpdated = statement.executeLargeBatch(); + assertThat(rowsUpdated.length, is(1)); + long testVal = 15L; + assertThat(rowsUpdated[0], is(testVal)); + statement.clearBatch(); + + /** + * To test SQLException for integer overflow when using executeBatch() for row updates of + * larger than MAX_INT, uncomment the following lines of code. Test will take about 15 + * minutes to run. + * + *

statement.execute("create or replace table test_large_batch (a number) as (select * + * from (select 5 from table" + "(generator(rowcount => 2147483648)) v));"); + * statement.addBatch("update test_large_batch set a = 7 where a = 5;"); try { int[] + * rowsUpdated = statement.executeBatch(); fail(); } catch (SnowflakeSQLException e) { + * assertEquals((int) ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getMessageCode(), + * e.getErrorCode()); assertEquals(ErrorCode.EXECUTE_BATCH_INTEGER_OVERFLOW.getSqlState(), + * e.getSQLState()); } statement.clearBatch(); + */ + } finally { + statement.execute("drop table if exists test_large_batch"); + } + } } /** @@ -433,7 +436,6 @@ public void testExecuteLargeBatch() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testExecuteUpdateZeroCount() throws SQLException { try (Connection connection = getConnection()) { - String[] testCommands = { "use role accountadmin", "use database testdb", @@ -457,25 +459,26 @@ public void testExecuteUpdateZeroCount() throws SQLException { }; try { for (String testCommand : testCommands) { - Statement statement = connection.createStatement(); - int updateCount = statement.executeUpdate(testCommand); - assertThat(updateCount, is(0)); - statement.close(); + try (Statement statement = connection.createStatement()) { + int updateCount = statement.executeUpdate(testCommand); + assertThat(updateCount, is(0)); + } } } finally { - Statement statement = connection.createStatement(); - statement.execute("use role accountadmin"); - statement.execute("drop table if exists testExecuteUpdate"); - statement.execute("drop role if exists testrole"); - statement.execute("drop user if exists testuser"); + try (Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute("drop table if exists testExecuteUpdate"); + statement.execute("drop role if exists testrole"); + statement.execute("drop user if 
exists testuser"); + } } } } @Test public void testExecuteUpdateFail() throws Exception { - try (Connection connection = getConnection()) { - + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { String[] testCommands = { "list @~", "ls @~", @@ -488,7 +491,6 @@ public void testExecuteUpdateFail() throws Exception { for (String testCommand : testCommands) { try { - Statement statement = connection.createStatement(); statement.executeUpdate(testCommand); fail("TestCommand: " + testCommand + " is expected to be failed to execute"); } catch (SQLException e) { @@ -503,30 +505,29 @@ public void testExecuteUpdateFail() throws Exception { @Test public void testTelemetryBatch() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - ResultSet rs; - String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; - statement.execute(sqlSelect); - - rs = statement.getResultSet(); - assertEquals(3, getSizeOfResultSet(rs)); - assertEquals(-1, statement.getUpdateCount()); - assertEquals(-1L, statement.getLargeUpdateCount()); + Telemetry telemetryClient = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - rs = statement.executeQuery(sqlSelect); - assertEquals(3, getSizeOfResultSet(rs)); - rs.close(); + String sqlSelect = "select seq4() from table(generator(rowcount=>3))"; + statement.execute(sqlSelect); - Telemetry telemetryClient = - ((SnowflakeStatementV1) statement).connection.getSfSession().getTelemetryClient(); + try (ResultSet rs = statement.getResultSet()) { + assertEquals(3, getSizeOfResultSet(rs)); + assertEquals(-1, statement.getUpdateCount()); + assertEquals(-1L, statement.getLargeUpdateCount()); + } - // there should be logs ready to be sent - assertTrue(((TelemetryClient) telemetryClient).bufferSize() > 0); + try (ResultSet rs = statement.executeQuery(sqlSelect)) { + assertEquals(3, 
getSizeOfResultSet(rs)); + } - statement.close(); + telemetryClient = + ((SnowflakeStatementV1) statement).connection.getSfSession().getTelemetryClient(); + // there should be logs ready to be sent + assertTrue(((TelemetryClient) telemetryClient).bufferSize() > 0); + } // closing the statement should flush the buffer, however, flush is async, // sleep some time before check buffer size try { @@ -534,54 +535,50 @@ public void testTelemetryBatch() throws SQLException { } catch (Throwable e) { } assertEquals(((TelemetryClient) telemetryClient).bufferSize(), 0); - connection.close(); } @Test public void testMultiStmtNotEnabled() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - String multiStmtQuery = - "create or replace temporary table test_multi (cola int);\n" - + "insert into test_multi VALUES (1), (2);\n" - + "select cola from test_multi order by cola asc"; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + String multiStmtQuery = + "create or replace temporary table test_multi (cola int);\n" + + "insert into test_multi VALUES (1), (2);\n" + + "select cola from test_multi order by cola asc"; - try { - statement.execute(multiStmtQuery); - fail("Using a multi-statement query without the parameter set should fail"); - } catch (SnowflakeSQLException ex) { - assertEquals(SqlState.FEATURE_NOT_SUPPORTED, ex.getSQLState()); + try { + statement.execute(multiStmtQuery); + fail("Using a multi-statement query without the parameter set should fail"); + } catch (SnowflakeSQLException ex) { + assertEquals(SqlState.FEATURE_NOT_SUPPORTED, ex.getSQLState()); + } } - - statement.close(); - connection.close(); } @Test public void testCallStoredProcedure() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace procedure SP()\n" - + "returns string not 
null\n" - + "language javascript\n" - + "as $$\n" - + " snowflake.execute({sqlText:'select seq4() from table(generator(rowcount=>5))'});\n" - + " return 'done';\n" - + "$$"); - - assertTrue(statement.execute("call SP()")); - ResultSet rs = statement.getResultSet(); - assertNotNull(rs); - assertTrue(rs.next()); - assertEquals("done", rs.getString(1)); - assertFalse(rs.next()); - assertFalse(statement.getMoreResults()); - assertEquals(-1, statement.getUpdateCount()); - assertEquals(-1L, statement.getLargeUpdateCount()); - - statement.close(); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace procedure SP()\n" + + "returns string not null\n" + + "language javascript\n" + + "as $$\n" + + " snowflake.execute({sqlText:'select seq4() from table(generator(rowcount=>5))'});\n" + + " return 'done';\n" + + "$$"); + + assertTrue(statement.execute("call SP()")); + try (ResultSet rs = statement.getResultSet()) { + assertNotNull(rs); + assertTrue(rs.next()); + assertEquals("done", rs.getString(1)); + assertFalse(rs.next()); + assertFalse(statement.getMoreResults()); + assertEquals(-1, statement.getUpdateCount()); + assertEquals(-1L, statement.getLargeUpdateCount()); + } + } } @Test @@ -612,8 +609,8 @@ public void testCreateStatementWithParameters() throws Throwable { @Test public void testUnwrapper() throws Throwable { - try (Connection connection = getConnection()) { - Statement statement = connection.createStatement(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { if (statement.isWrapperFor(SnowflakeStatementV1.class)) { statement.execute("select 1"); SnowflakeStatement sfstatement = statement.unwrap(SnowflakeStatement.class); @@ -632,10 +629,9 @@ public void testUnwrapper() throws Throwable { @Test public void testQueryIdIsNullOnFreshStatement() throws SQLException { - try (Connection con = 
getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java index cdd3527f7..d041b1694 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java @@ -13,18 +13,21 @@ public class StatementLargeUpdateIT extends BaseJDBCTest { @Test public void testLargeUpdate() throws Throwable { - try (Connection con = getConnection()) { + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { long expectedUpdateRows = (long) Integer.MAX_VALUE + 10L; - con.createStatement().execute("create or replace table test_large_update(c1 boolean)"); - Statement st = con.createStatement(); - long updatedRows = - st.executeLargeUpdate( - "insert into test_large_update select true from table(generator(rowcount=>" - + expectedUpdateRows - + "))"); - assertEquals(expectedUpdateRows, updatedRows); - assertEquals(expectedUpdateRows, st.getLargeUpdateCount()); - con.createStatement().execute("drop table if exists test_large_update"); + try { + statement.execute("create or replace table test_large_update(c1 boolean)"); + long updatedRows = + statement.executeLargeUpdate( + "insert into test_large_update select true from table(generator(rowcount=>" + + expectedUpdateRows + + "))"); + assertEquals(expectedUpdateRows, updatedRows); + assertEquals(expectedUpdateRows, statement.getLargeUpdateCount()); + } finally { + statement.execute("drop table if exists test_large_update"); + } } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java index 0a003957c..56bd318b8 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java @@ -46,9 +46,9 @@ public class StatementLatestIT extends BaseJDBCTest { public static Connection getConnection() throws SQLException { Connection conn = BaseJDBCTest.getConnection(); - Statement stmt = conn.createStatement(); - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - stmt.close(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } return conn; } @@ -56,111 +56,119 @@ public static Connection getConnection() throws SQLException { @Test public void testExecuteCreateAndDrop() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - - boolean success = statement.execute("create or replace table test_create(colA integer)"); - assertFalse(success); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - int rowCount = statement.executeUpdate("create or replace table test_create_2(colA integer)"); - assertEquals(0, rowCount); - assertEquals(0, statement.getUpdateCount()); + boolean success = statement.execute("create or replace table test_create(colA integer)"); + assertFalse(success); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); - success = statement.execute("drop table if exists TEST_CREATE"); - assertFalse(success); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); 
+ int rowCount = statement.executeUpdate("create or replace table test_create_2(colA integer)"); + assertEquals(0, rowCount); + assertEquals(0, statement.getUpdateCount()); - rowCount = statement.executeUpdate("drop table if exists TEST_CREATE_2"); - assertEquals(0, rowCount); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertNull(statement.getResultSet()); + success = statement.execute("drop table if exists TEST_CREATE"); + assertFalse(success); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); - statement.close(); - connection.close(); + rowCount = statement.executeUpdate("drop table if exists TEST_CREATE_2"); + assertEquals(0, rowCount); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertNull(statement.getResultSet()); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testCopyAndUpload() throws Exception { - - Connection connection = null; - Statement statement = null; File tempFolder = tmpFolder.newFolder("test_downloads_folder"); List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { String fileName = "test_copy.csv"; URL resource = StatementIT.class.getResource(fileName); - connection = getConnection(accounts.get(i)); - statement = connection.createStatement(); - - statement.execute("create or replace table test_copy(c1 number, c2 number, c3 string)"); - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - - String path = resource.getFile(); - - // put files - ResultSet rset = statement.executeQuery("PUT file://" + path + " @%test_copy"); - try { - rset.getString(1); - fail("Should raise No row found exception, because no next() is called."); - } catch (SQLException ex) { - assertThat( - 
"No row found error", ex.getErrorCode(), equalTo(ROW_DOES_NOT_EXIST.getMessageCode())); - } - int cnt = 0; - while (rset.next()) { - assertThat("uploaded file name", rset.getString(1), equalTo(fileName)); - ++cnt; + try (Connection connection = getConnection(accounts.get(i)); + Statement statement = connection.createStatement()) { + try { + statement.execute("create or replace table test_copy(c1 number, c2 number, c3 string)"); + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + + String path = resource.getFile(); + + // put files + try (ResultSet rset = statement.executeQuery("PUT file://" + path + " @%test_copy")) { + try { + rset.getString(1); + fail("Should raise No row found exception, because no next() is called."); + } catch (SQLException ex) { + assertThat( + "No row found error", + ex.getErrorCode(), + equalTo(ROW_DOES_NOT_EXIST.getMessageCode())); + } + int cnt = 0; + while (rset.next()) { + assertThat("uploaded file name", rset.getString(1), equalTo(fileName)); + ++cnt; + } + assertEquals(0, statement.getUpdateCount()); + assertEquals(0, statement.getLargeUpdateCount()); + assertThat("number of files", cnt, equalTo(1)); + int numRows = statement.executeUpdate("copy into test_copy"); + assertEquals(2, numRows); + assertEquals(2, statement.getUpdateCount()); + assertEquals(2L, statement.getLargeUpdateCount()); + + // get files + statement.executeQuery( + "get @%test_copy 'file://" + tempFolder.getCanonicalPath() + "' parallel=8"); + + // Make sure that the downloaded file exists, it should be gzip compressed + File downloaded = + new File(tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); + assert (downloaded.exists()); + } + // unzip the new file + Process p = + Runtime.getRuntime() + .exec( + "gzip -d " + + tempFolder.getCanonicalPath() + + File.separator + + fileName + + ".gz"); + p.waitFor(); + File newCopy = new File(tempFolder.getCanonicalPath() + File.separator + fileName); + // check 
that the get worked by uploading new file again to a different table and + // comparing it + // to original table + statement.execute("create or replace table test_copy_2(c1 number, c2 number, c3 string)"); + + // put copy of file + statement.executeQuery("PUT file://" + newCopy.getPath() + " @%test_copy_2"); + // assert that the result set is empty when you subtract each table from the other + try (ResultSet rset = + statement.executeQuery( + "select * from @%test_copy minus select * from @%test_copy_2")) { + assertFalse(rset.next()); + } + try (ResultSet rset = + statement.executeQuery( + "select * from @%test_copy_2 minus select * from @%test_copy")) { + assertFalse(rset.next()); + } + } finally { + statement.execute("drop table if exists test_copy"); + statement.execute("drop table if exists test_copy_2"); + } } - assertEquals(0, statement.getUpdateCount()); - assertEquals(0, statement.getLargeUpdateCount()); - assertThat("number of files", cnt, equalTo(1)); - int numRows = statement.executeUpdate("copy into test_copy"); - assertEquals(2, numRows); - assertEquals(2, statement.getUpdateCount()); - assertEquals(2L, statement.getLargeUpdateCount()); - - // get files - statement.executeQuery( - "get @%test_copy 'file://" + tempFolder.getCanonicalPath() + "' parallel=8"); - - // Make sure that the downloaded file exists, it should be gzip compressed - File downloaded = new File(tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); - assert (downloaded.exists()); - - // unzip the new file - Process p = - Runtime.getRuntime() - .exec("gzip -d " + tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); - p.waitFor(); - File newCopy = new File(tempFolder.getCanonicalPath() + File.separator + fileName); - - // check that the get worked by uploading new file again to a different table and comparing it - // to original table - statement.execute("create or replace table test_copy_2(c1 number, c2 number, c3 string)"); - - // put copy of file - rset 
= statement.executeQuery("PUT file://" + newCopy.getPath() + " @%test_copy_2"); - // assert that the result set is empty when you subtract each table from the other - rset = statement.executeQuery("select * from @%test_copy minus select * from @%test_copy_2"); - assertFalse(rset.next()); - rset = statement.executeQuery("select * from @%test_copy_2 minus select * from @%test_copy"); - assertFalse(rset.next()); - - statement.execute("drop table if exists test_copy"); - statement.execute("drop table if exists test_copy_2"); } - - statement.close(); - connection.close(); } /** @@ -170,36 +178,34 @@ public void testCopyAndUpload() throws Exception { */ @Test public void testExecuteOpenResultSets() throws SQLException { - Connection con = getConnection(); - Statement statement = con.createStatement(); - ResultSet resultSet; + try (Connection con = getConnection()) { + try (Statement statement = con.createStatement()) { + for (int i = 0; i < 10; i++) { + statement.execute("select 1"); + statement.getResultSet(); + } - for (int i = 0; i < 10; i++) { - statement.execute("select 1"); - statement.getResultSet(); - } + assertEquals(9, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); + } - assertEquals(9, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); - statement.close(); + try (Statement statement = con.createStatement()) { + for (int i = 0; i < 10; i++) { + statement.execute("select 1"); + ResultSet resultSet = statement.getResultSet(); + resultSet.close(); + } - statement = con.createStatement(); - for (int i = 0; i < 10; i++) { - statement.execute("select 1"); - resultSet = statement.getResultSet(); - resultSet.close(); + assertEquals(0, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); + } } - - assertEquals(0, statement.unwrap(SnowflakeStatementV1.class).getOpenResultSets().size()); - - statement.close(); - con.close(); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnGithubAction.class) public void testPreparedStatementLogging() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + try { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); sfSession.setPreparedStatementLogging(true); @@ -229,7 +235,7 @@ public void testPreparedStatementLogging() throws SQLException { int bindValues = BindUploader.arrayBindValueCount(bindings); assertEquals(8008, bindValues); pstatement.executeBatch(); - + } finally { stmt.execute("drop table if exists mytab"); } } @@ -240,29 +246,27 @@ public void testSchemaWith255CharactersDoesNotCauseException() throws SQLExcepti String schemaName = TestUtil.GENERATED_SCHEMA_PREFIX + SnowflakeUtil.randomAlphaNumeric(255 - TestUtil.GENERATED_SCHEMA_PREFIX.length()); - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - stmt.execute("create schema " + schemaName); - stmt.execute("use schema " + schemaName); - stmt.execute("drop schema " + schemaName); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + stmt.execute("create schema " + schemaName); + stmt.execute("use schema " + schemaName); + stmt.execute("drop schema " + schemaName); } } /** Added in > 3.14.4 */ @Test public void testQueryIdIsSetOnFailedQueryExecute() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - try { - stmt.execute("use database not_existing_database"); - fail("Statement should fail with exception"); - } catch (SnowflakeSQLException e) { - String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); - TestUtil.assertValidQueryId(queryID); - assertEquals(queryID, e.getQueryId()); - } + try (Connection con = getConnection(); + Statement stmt = 
con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); + try { + stmt.execute("use database not_existing_database"); + fail("Statement should fail with exception"); + } catch (SnowflakeSQLException e) { + String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); + TestUtil.assertValidQueryId(queryID); + assertEquals(queryID, e.getQueryId()); } } } @@ -270,17 +274,16 @@ public void testQueryIdIsSetOnFailedQueryExecute() throws SQLException { /** Added in > 3.14.4 */ @Test public void testQueryIdIsSetOnFailedExecuteUpdate() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - try { - stmt.executeUpdate("update not_existing_table set a = 1 where id = 42"); - fail("Statement should fail with exception"); - } catch (SnowflakeSQLException e) { - String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); - TestUtil.assertValidQueryId(queryID); - assertEquals(queryID, e.getQueryId()); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); + try { + stmt.executeUpdate("update not_existing_table set a = 1 where id = 42"); + fail("Statement should fail with exception"); + } catch (SnowflakeSQLException e) { + String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); + TestUtil.assertValidQueryId(queryID); + assertEquals(queryID, e.getQueryId()); } } } @@ -288,17 +291,16 @@ public void testQueryIdIsSetOnFailedExecuteUpdate() throws SQLException { /** Added in > 3.14.4 */ @Test public void testQueryIdIsSetOnFailedExecuteQuery() throws SQLException { - try (Connection con = getConnection()) { - try (Statement stmt = con.createStatement()) { - assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); - try { - stmt.executeQuery("select * from not_existing_table"); - fail("Statement 
should fail with exception"); - } catch (SnowflakeSQLException e) { - String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); - TestUtil.assertValidQueryId(queryID); - assertEquals(queryID, e.getQueryId()); - } + try (Connection con = getConnection(); + Statement stmt = con.createStatement()) { + assertNull(stmt.unwrap(SnowflakeStatement.class).getQueryID()); + try { + stmt.executeQuery("select * from not_existing_table"); + fail("Statement should fail with exception"); + } catch (SnowflakeSQLException e) { + String queryID = stmt.unwrap(SnowflakeStatement.class).getQueryID(); + TestUtil.assertValidQueryId(queryID); + assertEquals(queryID, e.getQueryId()); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StreamIT.java b/src/test/java/net/snowflake/client/jdbc/StreamIT.java index f36fd5d34..d1762904d 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamIT.java @@ -32,40 +32,32 @@ public class StreamIT extends BaseJDBCTest { @Test public void testUploadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - - statement = connection.createStatement(); - - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); - - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); - - // select from the file to make sure the data is uploaded - ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX); - - String ret = null; - - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", 
"hello", ret); - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); + + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX)) { + String ret = null; + + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } @@ -80,38 +72,37 @@ public void testUploadStream() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDownloadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { - try { - connection = getConnection(accountName); - statement = connection.createStatement(); - ResultSet rset = - statement.executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~/" + DEST_PREFIX); - assertTrue(rset.next()); - assertEquals("UPLOADED", rset.getString(7)); - - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); - StringWriter writer = new StringWriter(); - IOUtils.copy(out, writer, "UTF-8"); - String output = 
writer.toString(); - // the first 2 characters - assertEquals("1|", output.substring(0, 2)); - - // the number of lines - String[] lines = output.split("\n"); - assertEquals(28, lines.length); - } finally { - if (statement != null) { + try (Connection connection = getConnection(accountName); + Statement statement = connection.createStatement()) { + try { + try (ResultSet rset = + statement.executeQuery( + "PUT file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @~/" + + DEST_PREFIX)) { + assertTrue(rset.next()); + assertEquals("UPLOADED", rset.getString(7)); + + InputStream out = + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + // the first 2 characters + assertEquals("1|", output.substring(0, 2)); + + // the number of lines + String[] lines = output.split("\n"); + assertEquals(28, lines.length); + } + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } } @@ -119,42 +110,34 @@ public void testDownloadStream() throws Throwable { @Test public void testCompressAndUploadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/" + "testCompressAndUploadStream"; - Connection connection = null; - Statement statement = null; - ResultSet resultSet = null; - - try { - connection = getConnection(); - - statement = connection.createStatement(); - - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); - - // upload the data to user stage under testCompressAndUploadStream - // with name hello.txt - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnectionV1.class) - .uploadStream( - "~", DEST_PREFIX, 
outputStream.asByteSource().openStream(), "hello.txt", true); - - // select from the file to make sure the data is uploaded - ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); + + // upload the data to user stage under testCompressAndUploadStream + // with name hello.txt + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnectionV1.class) + .uploadStream( + "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", true); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = statement.executeQuery("SELECT $1 FROM @~/" + DEST_PREFIX)) { + + String ret = null; + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } - String ret = null; - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); - } finally { - if (statement != null) { + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(resultSet, statement, connection); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java index f7e3d0d74..3ab179b70 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java @@ -47,58 +47,56 @@ public class StreamLatestIT extends BaseJDBCTest { */ @Test public void testUnusualStageName() throws Throwable { - Connection connection = getConnection(); - Statement statement = 
connection.createStatement(); + String ret = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { - try { - statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); + try { + statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); - FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); - outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); - outputStream.flush(); + FileBackedOutputStream outputStream = new FileBackedOutputStream(1000000); + outputStream.write("hello".getBytes(StandardCharsets.UTF_8)); + outputStream.flush(); - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "'@%\"ice cream (nice)\"'", - null, outputStream.asByteSource().openStream(), "hello.txt", false); - - // select from the file to make sure the data is uploaded - ResultSet rset = statement.executeQuery("SELECT $1 FROM '@%\"ice cream (nice)\"/'"); - - String ret = null; - - while (rset.next()) { - ret = rset.getString(1); - } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); - - statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); - - // upload the data to user stage under testUploadStream with name hello.txt - connection - .unwrap(SnowflakeConnection.class) - .uploadStream( - "$$@%\"ice cream (nice)\"$$", - null, outputStream.asByteSource().openStream(), "hello.txt", false); - - // select from the file to make sure the data is uploaded - rset = statement.executeQuery("SELECT $1 FROM $$@%\"ice cream (nice)\"/$$"); - - ret = null; - - while (rset.next()) { - ret = rset.getString(1); + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "'@%\"ice cream (nice)\"'", + null, 
outputStream.asByteSource().openStream(), "hello.txt", false); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = statement.executeQuery("SELECT $1 FROM '@%\"ice cream (nice)\"/'")) { + ret = null; + + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } + statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); + + // upload the data to user stage under testUploadStream with name hello.txt + connection + .unwrap(SnowflakeConnection.class) + .uploadStream( + "$$@%\"ice cream (nice)\"$$", + null, outputStream.asByteSource().openStream(), "hello.txt", false); + + // select from the file to make sure the data is uploaded + try (ResultSet rset = + statement.executeQuery("SELECT $1 FROM $$@%\"ice cream (nice)\"/$$")) { + + ret = null; + + while (rset.next()) { + ret = rset.getString(1); + } + assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + } + } finally { + statement.execute("DROP TABLE IF EXISTS \"ice cream (nice)\""); } - rset.close(); - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); - - } finally { - statement.execute("DROP TABLE IF EXISTS \"ice cream (nice)\""); - statement.close(); - connection.close(); } } @@ -106,28 +104,25 @@ public void testUnusualStageName() throws Throwable { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; - try { - Properties paramProperties = new Properties(); - paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - connection = getConnection("gcpaccount", paramProperties); - statement = connection.createStatement(); - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", 
DEST_PREFIX + "/abc.gz", true); - fail("should throw a storage provider exception for blob not found"); - } catch (Exception ex) { - assertTrue(ex instanceof SQLException); - assertTrue( - "Wrong exception message: " + ex.getMessage(), - ex.getMessage().matches(".*Blob.*not found in bucket.*")); - } finally { - if (statement != null) { + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); + + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + + try { + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("~", DEST_PREFIX + "/abc.gz", true); + fail("should throw a storage provider exception for blob not found"); + } catch (Exception ex) { + assertTrue(ex instanceof SQLException); + assertTrue( + "Wrong exception message: " + ex.getMessage(), + ex.getMessage().matches(".*Blob.*not found in bucket.*")); + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } @@ -135,121 +130,111 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { @Ignore public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOException { final String DEST_PREFIX = "testUploadStream"; - Connection connection = null; - Statement statement = null; - connection = getConnection("gcpaccount"); - statement = connection.createStatement(); - statement.execute("create or replace stage testgcpstage"); - ResultSet rset = - statement.executeQuery( - "PUT file://" - + getFullPathFileInResource(TEST_DATA_FILE) - + " @testgcpstage/" - + DEST_PREFIX); - assertTrue(rset.next()); - assertEquals("Error message:" + rset.getString(8), "UPLOADED", rset.getString(7)); - - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("@testgcpstage", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); - StringWriter writer = new StringWriter(); - 
IOUtils.copy(out, writer, "UTF-8"); - String output = writer.toString(); - // the first 2 characters - assertEquals("1|", output.substring(0, 2)); - // the number of lines - String[] lines = output.split("\n"); - assertEquals(28, lines.length); - - statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); - closeSQLObjects(statement, connection); + try (Connection connection = getConnection("gcpaccount"); + Statement statement = connection.createStatement()) { + statement.execute("create or replace stage testgcpstage"); + try (ResultSet rset = + statement.executeQuery( + "PUT file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @testgcpstage/" + + DEST_PREFIX)) { + assertTrue(rset.next()); + assertEquals("Error message:" + rset.getString(8), "UPLOADED", rset.getString(7)); + + InputStream out = + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("@testgcpstage", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + // the first 2 characters + assertEquals("1|", output.substring(0, 2)); + + // the number of lines + String[] lines = output.split("\n"); + assertEquals(28, lines.length); + } + statement.execute("rm @~/" + DEST_PREFIX); + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testDownloadToStreamGCS() throws SQLException, IOException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; - Connection connection = null; - Statement statement = null; Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); - try { - connection = getConnection("gcpaccount", paramProperties); - statement = connection.createStatement(); - ResultSet rset = - statement.executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~/" + DEST_PREFIX); - assertTrue(rset.next()); - assertEquals("UPLOADED", 
rset.getString(7)); - - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); - StringWriter writer = new StringWriter(); - IOUtils.copy(out, writer, "UTF-8"); - String output = writer.toString(); - // the first 2 characters - assertEquals("1|", output.substring(0, 2)); - // the number of lines - String[] lines = output.split("\n"); - assertEquals(28, lines.length); - } finally { - if (statement != null) { + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement(); + ResultSet rset = + statement.executeQuery( + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~/" + DEST_PREFIX)) { + try { + assertTrue(rset.next()); + assertEquals("UPLOADED", rset.getString(7)); + + InputStream out = + connection + .unwrap(SnowflakeConnection.class) + .downloadStream("~", DEST_PREFIX + "/" + TEST_DATA_FILE + ".gz", true); + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + // the first 2 characters + assertEquals("1|", output.substring(0, 2)); + + // the number of lines + String[] lines = output.split("\n"); + assertEquals(28, lines.length); + } finally { statement.execute("rm @~/" + DEST_PREFIX); - statement.close(); } - closeSQLObjects(statement, connection); } } @Test public void testSpecialCharactersInFileName() throws SQLException, IOException { - Connection connection = null; - Statement statement = null; - try { - connection = getConnection(); - statement = connection.createStatement(); - - // Create a temporary file with special characters in the name and write to it - File specialCharFile = tmpFolder.newFile("(special char@).txt"); - BufferedWriter bw = new BufferedWriter(new FileWriter(specialCharFile)); - bw.write("Creating test file for downloadStream test"); - bw.close(); - - String sourceFilePath = 
specialCharFile.getCanonicalPath(); - String sourcePathEscaped; - if (System.getProperty("file.separator").equals("\\")) { - // windows separator needs to be escaped because of quotes - sourcePathEscaped = sourceFilePath.replace("\\", "\\\\"); - } else { - sourcePathEscaped = sourceFilePath; - } - - // create a stage to put the file in - statement.execute("CREATE OR REPLACE STAGE downloadStream_stage"); - statement.execute( - "PUT 'file://" + sourcePathEscaped + "' @~/downloadStream_stage auto_compress=false"); - - // download file stream - InputStream out = - connection - .unwrap(SnowflakeConnection.class) - .downloadStream("~", "/downloadStream_stage/" + specialCharFile.getName(), false); - - // Read file stream and check the result - StringWriter writer = new StringWriter(); - IOUtils.copy(out, writer, "UTF-8"); - String output = writer.toString(); - assertEquals("Creating test file for downloadStream test", output); - } finally { - if (statement != null) { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + // Create a temporary file with special characters in the name and write to it + File specialCharFile = tmpFolder.newFile("(special char@).txt"); + try (BufferedWriter bw = new BufferedWriter(new FileWriter(specialCharFile))) { + bw.write("Creating test file for downloadStream test"); + } + + String sourceFilePath = specialCharFile.getCanonicalPath(); + String sourcePathEscaped; + if (System.getProperty("file.separator").equals("\\")) { + // windows separator needs to be escaped because of quotes + sourcePathEscaped = sourceFilePath.replace("\\", "\\\\"); + } else { + sourcePathEscaped = sourceFilePath; + } + + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE downloadStream_stage"); + statement.execute( + "PUT 'file://" + sourcePathEscaped + "' @~/downloadStream_stage auto_compress=false"); + + // download file stream + try (InputStream out = + connection + 
.unwrap(SnowflakeConnection.class) + .downloadStream("~", "/downloadStream_stage/" + specialCharFile.getName(), false)) { + + // Read file stream and check the result + StringWriter writer = new StringWriter(); + IOUtils.copy(out, writer, "UTF-8"); + String output = writer.toString(); + assertEquals("Creating test file for downloadStream test", output); + } + } finally { statement.execute("DROP STAGE IF EXISTS downloadStream_stage"); - statement.close(); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java index ab6276ec3..de241162f 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java @@ -72,26 +72,29 @@ public void testS3Client256Encryption() throws SQLException { @Test @Ignore public void testS3ConnectionWithProxyEnvVariablesSet() throws SQLException { - Connection connection = null; String testStageName = "s3TestStage"; - try { - connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery("select 1"); - assertTrue(resultSet.next()); - statement.execute("create or replace stage " + testStageName); - resultSet = - connection - .createStatement() - .executeQuery( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @" + testStageName); - while (resultSet.next()) { - assertEquals("UPLOADED", resultSet.getString("status")); + + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try (ResultSet resultSet = statement.executeQuery("select 1")) { + assertTrue(resultSet.next()); } - } finally { - if (connection != null) { - connection.createStatement().execute("DROP STAGE if exists " + testStageName); - connection.close(); + try { + statement.execute("create or replace 
stage " + testStageName); + try (ResultSet resultSet = + connection + .createStatement() + .executeQuery( + "PUT file://" + + getFullPathFileInResource(TEST_DATA_FILE) + + " @" + + testStageName)) { + while (resultSet.next()) { + assertEquals("UPLOADED", resultSet.getString("status")); + } + } + } finally { + statement.execute("DROP STAGE if exists " + testStageName); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java index c9b9b6272..3efa9d168 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java @@ -249,15 +249,15 @@ private TelemetryClient createJWTSessionlessTelemetry() private void setUpPublicKey() throws SQLException, IOException { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("use role accountadmin"); - String pathfile = getFullPathFileInResource("rsa_key.pub"); - String pubKey = new String(Files.readAllBytes(Paths.get(pathfile))); - pubKey = pubKey.replace("-----BEGIN PUBLIC KEY-----", ""); - pubKey = pubKey.replace("-----END PUBLIC KEY-----", ""); - statement.execute(String.format("alter user %s set rsa_public_key='%s'", testUser, pubKey)); - connection.close(); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + String pathfile = getFullPathFileInResource("rsa_key.pub"); + String pubKey = new String(Files.readAllBytes(Paths.get(pathfile))); + pubKey = pubKey.replace("-----BEGIN PUBLIC KEY-----", ""); + pubKey = pubKey.replace("-----END PUBLIC KEY-----", ""); + statement.execute(String.format("alter user %s set rsa_public_key='%s'", testUser, pubKey)); + } } // Helper function to create a sessionless 
telemetry using OAuth @@ -279,26 +279,28 @@ private TelemetryClient createOAuthSessionlessTelemetry() // Helper function to set up and get OAuth token private String getOAuthToken() throws SQLException { Map parameters = getConnectionParameters(); - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute("use role accountadmin"); - statement.execute( - "create or replace security integration telemetry_oauth_integration\n" - + " type=oauth\n" - + " oauth_client=CUSTOM\n" - + " oauth_client_type=CONFIDENTIAL\n" - + " oauth_redirect_uri='https://localhost.com/oauth'\n" - + " oauth_issue_refresh_tokens=true\n" - + " enabled=true oauth_refresh_token_validity=86400;"); - String role = parameters.get("role"); - ResultSet resultSet = - statement.executeQuery( - "select system$it('create_oauth_access_token', 'TELEMETRY_OAUTH_INTEGRATION', '" - + role - + "')"); - resultSet.next(); - String token = resultSet.getString(1); - connection.close(); + String token = null; + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("use role accountadmin"); + statement.execute( + "create or replace security integration telemetry_oauth_integration\n" + + " type=oauth\n" + + " oauth_client=CUSTOM\n" + + " oauth_client_type=CONFIDENTIAL\n" + + " oauth_redirect_uri='https://localhost.com/oauth'\n" + + " oauth_issue_refresh_tokens=true\n" + + " enabled=true oauth_refresh_token_validity=86400;"); + String role = parameters.get("role"); + try (ResultSet resultSet = + statement.executeQuery( + "select system$it('create_oauth_access_token', 'TELEMETRY_OAUTH_INTEGRATION', '" + + role + + "')")) { + assertTrue(resultSet.next()); + token = resultSet.getString(1); + } + } return token; } } diff --git a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java index 573579e38..86f8caf5a 
100644 --- a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java +++ b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java @@ -6,6 +6,7 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -31,23 +32,23 @@ public class FlatfileReadMultithreadIT { @BeforeClass public static void setUpClass() throws Throwable { - Connection testConnection = AbstractDriverIT.getConnection(); - // NOTE: the stage object must be created right after the connection - // because the Loader API assumes the stage object exists in the default - // namespace of the connection. - testConnection - .createStatement() - .execute(String.format("CREATE OR REPLACE STAGE %s", TARGET_STAGE)); - TARGET_SCHEMA = testConnection.getSchema(); - TARGET_DB = testConnection.getCatalog(); + try (Connection testConnection = AbstractDriverIT.getConnection(); + // NOTE: the stage object must be created right after the connection + // because the Loader API assumes the stage object exists in the default + // namespace of the connection. 
+ Statement statement = testConnection.createStatement()) { + statement.execute(String.format("CREATE OR REPLACE STAGE %s", TARGET_STAGE)); + TARGET_SCHEMA = testConnection.getSchema(); + TARGET_DB = testConnection.getCatalog(); + } } @AfterClass public static void tearDownClass() throws Throwable { - Connection testConnection = AbstractDriverIT.getConnection(); - testConnection - .createStatement() - .execute(String.format("DROP STAGE IF EXISTS %s", TARGET_STAGE)); + try (Connection testConnection = AbstractDriverIT.getConnection(); + Statement statement = testConnection.createStatement()) { + statement.execute(String.format("DROP STAGE IF EXISTS %s", TARGET_STAGE)); + } } /** @@ -58,220 +59,215 @@ public static void tearDownClass() throws Throwable { @Test public void testIssueSimpleDateFormat() throws Throwable { final String targetTable = "TABLE_ISSUE_SIMPLEDATEFORMAT"; - Connection testConnection = AbstractDriverIT.getConnection(); - testConnection - .createStatement() - .execute( + try (Connection testConnection = AbstractDriverIT.getConnection(); + Statement statement = testConnection.createStatement()) { + try { + statement.execute( String.format( "CREATE OR REPLACE TABLE %s.%s.%s (" + "ID int, " + "C1 timestamp)", TARGET_DB, TARGET_SCHEMA, targetTable)); - try { - Thread t1 = - new Thread( - new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); - Thread t2 = - new Thread( - new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); - - t1.start(); - t2.start(); - t1.join(); - t2.join(); - ResultSet rs = - testConnection - .createStatement() - .executeQuery( - String.format( - "select count(*) from %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable)); - rs.next(); - assertThat("total number of records", rs.getInt(1), equalTo(NUM_RECORDS * 2)); - - } finally { - testConnection - .createStatement() - .execute( - String.format( - "DROP TABLE IF EXISTS %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable)); + 
Thread t1 = + new Thread( + new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); + Thread t2 = + new Thread( + new FlatfileRead(NUM_RECORDS, TARGET_DB, TARGET_SCHEMA, TARGET_STAGE, targetTable)); + + t1.start(); + t2.start(); + t1.join(); + t2.join(); + try (ResultSet rs = + statement.executeQuery( + String.format( + "select count(*) from %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable))) { + rs.next(); + assertThat("total number of records", rs.getInt(1), equalTo(NUM_RECORDS * 2)); + } + + } finally { + statement.execute( + String.format("DROP TABLE IF EXISTS %s.%s.%s", TARGET_DB, TARGET_SCHEMA, targetTable)); + } } } -} -class FlatfileRead implements Runnable { - private final int totalRows; - private final String dbName; - private final String schemaName; - private final String tableName; - private final String stageName; - - FlatfileRead( - int totalRows, String dbName, String schemaName, String stageName, String tableName) { - this.totalRows = totalRows; - this.dbName = dbName; - this.schemaName = schemaName; - this.stageName = stageName; - this.tableName = tableName; - } - - @Override - public void run() { - Connection testConnection = null; - Connection putConnection = null; - try { - testConnection = AbstractDriverIT.getConnection(); - putConnection = AbstractDriverIT.getConnection(); - } catch (SQLException e) { - e.printStackTrace(); + class FlatfileRead implements Runnable { + private final int totalRows; + private final String dbName; + private final String schemaName; + private final String tableName; + private final String stageName; + + FlatfileRead( + int totalRows, String dbName, String schemaName, String stageName, String tableName) { + this.totalRows = totalRows; + this.dbName = dbName; + this.schemaName = schemaName; + this.stageName = stageName; + this.tableName = tableName; } - ResultListener _resultListener = new ResultListener(); - - // init properties - Map prop = new HashMap<>(); - 
prop.put(LoaderProperty.tableName, this.tableName); - prop.put(LoaderProperty.schemaName, this.schemaName); - prop.put(LoaderProperty.databaseName, this.dbName); - prop.put(LoaderProperty.remoteStage, this.stageName); - prop.put(LoaderProperty.operation, Operation.INSERT); - - StreamLoader underTest = - (StreamLoader) LoaderFactory.createLoader(prop, putConnection, testConnection); - underTest.setProperty(LoaderProperty.startTransaction, true); - underTest.setProperty(LoaderProperty.truncateTable, false); - - underTest.setProperty(LoaderProperty.columns, Arrays.asList("ID", "C1")); - - underTest.setListener(_resultListener); - underTest.start(); - - Random rnd = new Random(); - for (int i = 0; i < this.totalRows; ++i) { - Object[] row = new Object[2]; - row[0] = i; - // random timestamp data - long ms = -946771200000L + (Math.abs(rnd.nextLong()) % (70L * 365 * 24 * 60 * 60 * 1000)); - row[1] = new Date(ms); - underTest.submitRow(row); + @Override + public void run() { + try (Connection testConnection = AbstractDriverIT.getConnection(); + Connection putConnection = AbstractDriverIT.getConnection()) { + + ResultListener _resultListener = new ResultListener(); + + // init properties + Map prop = new HashMap<>(); + prop.put(LoaderProperty.tableName, this.tableName); + prop.put(LoaderProperty.schemaName, this.schemaName); + prop.put(LoaderProperty.databaseName, this.dbName); + prop.put(LoaderProperty.remoteStage, this.stageName); + prop.put(LoaderProperty.operation, Operation.INSERT); + + StreamLoader underTest = + (StreamLoader) LoaderFactory.createLoader(prop, putConnection, testConnection); + underTest.setProperty(LoaderProperty.startTransaction, true); + underTest.setProperty(LoaderProperty.truncateTable, false); + + underTest.setProperty(LoaderProperty.columns, Arrays.asList("ID", "C1")); + + underTest.setListener(_resultListener); + underTest.start(); + + Random rnd = new Random(); + for (int i = 0; i < this.totalRows; ++i) { + Object[] row = new Object[2]; + 
row[0] = i; + // random timestamp data + long ms = -946771200000L + (Math.abs(rnd.nextLong()) % (70L * 365 * 24 * 60 * 60 * 1000)); + row[1] = new Date(ms); + underTest.submitRow(row); + } + + try { + underTest.finish(); + } catch (Exception e) { + e.printStackTrace(); + } + underTest.close(); + assertThat("must be no error", _resultListener.getErrorCount(), equalTo(0)); + assertThat( + "total number of rows", + _resultListener.getSubmittedRowCount(), + equalTo(this.totalRows)); + } catch (SQLException e) { + e.printStackTrace(); + } } - try { - underTest.finish(); - } catch (Exception e) { - e.printStackTrace(); + class ResultListener implements LoadResultListener { + + private final List errors = new ArrayList<>(); + + private final AtomicInteger errorCount = new AtomicInteger(0); + private final AtomicInteger errorRecordCount = new AtomicInteger(0); + + private final AtomicInteger counter = new AtomicInteger(0); + private final AtomicInteger processed = new AtomicInteger(0); + private final AtomicInteger deleted = new AtomicInteger(0); + private final AtomicInteger updated = new AtomicInteger(0); + private final AtomicInteger submittedRowCount = new AtomicInteger(0); + + private Object[] lastRecord = null; + + public boolean throwOnError = false; // should not trigger rollback + + @Override + public boolean needErrors() { + return true; + } + + @Override + public boolean needSuccessRecords() { + return true; + } + + @Override + public void addError(LoadingError error) { + errors.add(error); + } + + @Override + public boolean throwOnError() { + return throwOnError; + } + + public List getErrors() { + return errors; + } + + @Override + public void recordProvided(Operation op, Object[] record) { + lastRecord = record; + } + + @Override + public void addProcessedRecordCount(Operation op, int i) { + processed.addAndGet(i); + } + + @Override + public void addOperationRecordCount(Operation op, int i) { + counter.addAndGet(i); + if (op == Operation.DELETE) { + 
deleted.addAndGet(i); + } else if (op == Operation.MODIFY || op == Operation.UPSERT) { + updated.addAndGet(i); + } + } + + public Object[] getLastRecord() { + return lastRecord; + } + + @Override + public int getErrorCount() { + return errorCount.get(); + } + + @Override + public int getErrorRecordCount() { + return errorRecordCount.get(); + } + + @Override + public void resetErrorCount() { + errorCount.set(0); + } + + @Override + public void resetErrorRecordCount() { + errorRecordCount.set(0); + } + + @Override + public void addErrorCount(int count) { + errorCount.addAndGet(count); + } + + @Override + public void addErrorRecordCount(int count) { + errorRecordCount.addAndGet(count); + } + + @Override + public void resetSubmittedRowCount() { + submittedRowCount.set(0); + } + + @Override + public void addSubmittedRowCount(int count) { + submittedRowCount.addAndGet(count); + } + + @Override + public int getSubmittedRowCount() { + return submittedRowCount.get(); + } } - underTest.close(); - assertThat("must be no error", _resultListener.getErrorCount(), equalTo(0)); - assertThat( - "total number of rows", _resultListener.getSubmittedRowCount(), equalTo(this.totalRows)); - } -} - -class ResultListener implements LoadResultListener { - - private final List errors = new ArrayList<>(); - - private final AtomicInteger errorCount = new AtomicInteger(0); - private final AtomicInteger errorRecordCount = new AtomicInteger(0); - - private final AtomicInteger counter = new AtomicInteger(0); - private final AtomicInteger processed = new AtomicInteger(0); - private final AtomicInteger deleted = new AtomicInteger(0); - private final AtomicInteger updated = new AtomicInteger(0); - private final AtomicInteger submittedRowCount = new AtomicInteger(0); - - private Object[] lastRecord = null; - - public boolean throwOnError = false; // should not trigger rollback - - @Override - public boolean needErrors() { - return true; - } - - @Override - public boolean needSuccessRecords() { - 
return true; - } - - @Override - public void addError(LoadingError error) { - errors.add(error); - } - - @Override - public boolean throwOnError() { - return throwOnError; - } - - public List getErrors() { - return errors; - } - - @Override - public void recordProvided(Operation op, Object[] record) { - lastRecord = record; - } - - @Override - public void addProcessedRecordCount(Operation op, int i) { - processed.addAndGet(i); - } - - @Override - public void addOperationRecordCount(Operation op, int i) { - counter.addAndGet(i); - if (op == Operation.DELETE) { - deleted.addAndGet(i); - } else if (op == Operation.MODIFY || op == Operation.UPSERT) { - updated.addAndGet(i); - } - } - - public Object[] getLastRecord() { - return lastRecord; - } - - @Override - public int getErrorCount() { - return errorCount.get(); - } - - @Override - public int getErrorRecordCount() { - return errorRecordCount.get(); - } - - @Override - public void resetErrorCount() { - errorCount.set(0); - } - - @Override - public void resetErrorRecordCount() { - errorRecordCount.set(0); - } - - @Override - public void addErrorCount(int count) { - errorCount.addAndGet(count); - } - - @Override - public void addErrorRecordCount(int count) { - errorRecordCount.addAndGet(count); - } - - @Override - public void resetSubmittedRowCount() { - submittedRowCount.set(0); - } - - @Override - public void addSubmittedRowCount(int count) { - submittedRowCount.addAndGet(count); - } - - @Override - public int getSubmittedRowCount() { - return submittedRowCount.get(); } } diff --git a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java index 6ac489300..e10a606d4 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java @@ -4,10 +4,12 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.sql.PreparedStatement; import java.sql.ResultSet; +import java.sql.Statement; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -52,29 +54,30 @@ public void testLoaderUpsert() throws Exception { assertThat("error count", listener.getErrorCount(), equalTo(0)); assertThat("error record count", listener.getErrorRecordCount(), equalTo(0)); - ResultSet rs = + try (ResultSet rs = testConnection .createStatement() .executeQuery( String.format( - "SELECT C1, C4, C3" + " FROM \"%s\" WHERE ID=10001", TARGET_TABLE_NAME)); + "SELECT C1, C4, C3" + " FROM \"%s\" WHERE ID=10001", TARGET_TABLE_NAME))) { - rs.next(); - assertThat("C1 is not correct", rs.getString("C1"), equalTo("inserted\\,")); + assertTrue(rs.next()); + assertThat("C1 is not correct", rs.getString("C1"), equalTo("inserted\\,")); - long l = rs.getTimestamp("C4").getTime(); - assertThat("C4 is not correct", l, equalTo(d.getTime())); - assertThat( - "C3 is not correct", Double.toHexString((rs.getDouble("C3"))), equalTo("0x1.044ccp4")); - - rs = + long l = rs.getTimestamp("C4").getTime(); + assertThat("C4 is not correct", l, equalTo(d.getTime())); + assertThat( + "C3 is not correct", Double.toHexString((rs.getDouble("C3"))), equalTo("0x1.044ccp4")); + } + try (ResultSet rs = testConnection .createStatement() .executeQuery( - String.format("SELECT C1 AS N" + " FROM \"%s\" WHERE ID=39", TARGET_TABLE_NAME)); + String.format("SELECT C1 AS N" + " FROM \"%s\" WHERE ID=39", TARGET_TABLE_NAME))) { - rs.next(); - assertThat("N is not correct", rs.getString("N"), equalTo("modified")); + assertTrue(rs.next()); + assertThat("N is not correct", rs.getString("N"), equalTo("modified")); + } } @Test @@ -82,76 +85,79 @@ public void testLoaderUpsertWithErrorAndRollback() throws Exception { TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); 
tdcb.populate(); - PreparedStatement pstmt = + try (PreparedStatement pstmt = testConnection.prepareStatement( String.format( "INSERT INTO \"%s\"(ID,C1,C2,C3,C4,C5)" + " SELECT column1, column2, column3, column4," + " column5, parse_json(column6)" + " FROM VALUES(?,?,?,?,?,?)", - TARGET_TABLE_NAME)); - pstmt.setInt(1, 10001); - pstmt.setString(2, "inserted\\,"); - pstmt.setString(3, "something"); - pstmt.setDouble(4, 0x4.11_33p2); - pstmt.setDate(5, new java.sql.Date(new Date().getTime())); - pstmt.setObject(6, "{}"); - pstmt.execute(); - testConnection.commit(); - - TestDataConfigBuilder tdcbUpsert = new TestDataConfigBuilder(testConnection, putConnection); - tdcbUpsert - .setOperation(Operation.UPSERT) - .setTruncateTable(false) - .setStartTransaction(true) - .setPreserveStageFile(true) - .setColumns(Arrays.asList("ID", "C1", "C2", "C3", "C4", "C5")) - .setKeys(Collections.singletonList("ID")); - StreamLoader loader = tdcbUpsert.getStreamLoader(); - TestDataConfigBuilder.ResultListener listener = tdcbUpsert.getListener(); - listener.throwOnError = true; // should trigger rollback - loader.start(); - try { - - Object[] noerr = new Object[] {"10001", "inserted", "something", "42", new Date(), "{}"}; - loader.submitRow(noerr); - - Object[] err = new Object[] {"10002-", "inserted", "something", "42-", new Date(), "{}"}; - loader.submitRow(err); - - loader.finish(); - - fail("Test must raise Loader.DataError exception"); - } catch (Loader.DataError e) { - // we are good - assertThat( - "error message", - e.getMessage(), - allOf(containsString("10002-"), containsString("not recognized"))); + TARGET_TABLE_NAME))) { + pstmt.setInt(1, 10001); + pstmt.setString(2, "inserted\\,"); + pstmt.setString(3, "something"); + pstmt.setDouble(4, 0x4.11_33p2); + pstmt.setDate(5, new java.sql.Date(new Date().getTime())); + pstmt.setObject(6, "{}"); + pstmt.execute(); + testConnection.commit(); + + TestDataConfigBuilder tdcbUpsert = new TestDataConfigBuilder(testConnection, 
putConnection); + tdcbUpsert + .setOperation(Operation.UPSERT) + .setTruncateTable(false) + .setStartTransaction(true) + .setPreserveStageFile(true) + .setColumns(Arrays.asList("ID", "C1", "C2", "C3", "C4", "C5")) + .setKeys(Collections.singletonList("ID")); + StreamLoader loader = tdcbUpsert.getStreamLoader(); + TestDataConfigBuilder.ResultListener listener = tdcbUpsert.getListener(); + listener.throwOnError = true; // should trigger rollback + loader.start(); + try { + + Object[] noerr = new Object[] {"10001", "inserted", "something", "42", new Date(), "{}"}; + loader.submitRow(noerr); + + Object[] err = new Object[] {"10002-", "inserted", "something", "42-", new Date(), "{}"}; + loader.submitRow(err); + + loader.finish(); + + fail("Test must raise Loader.DataError exception"); + } catch (Loader.DataError e) { + // we are good + assertThat( + "error message", + e.getMessage(), + allOf(containsString("10002-"), containsString("not recognized"))); + } + + assertThat("processed", listener.processed.get(), equalTo(0)); + assertThat("submitted row", listener.getSubmittedRowCount(), equalTo(2)); + assertThat("updated/inserted", listener.updated.get(), equalTo(0)); + assertThat("error count", listener.getErrorCount(), equalTo(2)); + assertThat("error record count", listener.getErrorRecordCount(), equalTo(1)); + + try (ResultSet rs = + testConnection + .createStatement() + .executeQuery(String.format("SELECT COUNT(*) AS N FROM \"%s\"", TARGET_TABLE_NAME))) { + assertTrue(rs.next()); + assertThat("N", rs.getInt("N"), equalTo(10001)); + } + try (ResultSet rs = + testConnection + .createStatement() + .executeQuery( + String.format("SELECT C3 FROM \"%s\" WHERE id=10001", TARGET_TABLE_NAME))) { + assertTrue(rs.next()); + assertThat( + "C3. 
No commit should happen", + Double.toHexString((rs.getDouble("C3"))), + equalTo("0x1.044ccp4")); + } } - - assertThat("processed", listener.processed.get(), equalTo(0)); - assertThat("submitted row", listener.getSubmittedRowCount(), equalTo(2)); - assertThat("updated/inserted", listener.updated.get(), equalTo(0)); - assertThat("error count", listener.getErrorCount(), equalTo(2)); - assertThat("error record count", listener.getErrorRecordCount(), equalTo(1)); - - ResultSet rs = - testConnection - .createStatement() - .executeQuery(String.format("SELECT COUNT(*) AS N FROM \"%s\"", TARGET_TABLE_NAME)); - rs.next(); - assertThat("N", rs.getInt("N"), equalTo(10001)); - - rs = - testConnection - .createStatement() - .executeQuery(String.format("SELECT C3 FROM \"%s\" WHERE id=10001", TARGET_TABLE_NAME)); - rs.next(); - assertThat( - "C3. No commit should happen", - Double.toHexString((rs.getDouble("C3"))), - equalTo("0x1.044ccp4")); } /** @@ -163,44 +169,43 @@ public void testLoaderUpsertWithErrorAndRollback() throws Exception { @Test public void testKeyClusteringTable() throws Exception { String targetTableName = "CLUSTERED_TABLE"; + try (Statement statement = testConnection.createStatement()) { + // create table with spaces in column names + statement.execute( + String.format( + "CREATE OR REPLACE TABLE \"%s\" (" + + "ID int, " + + "\"Column1\" varchar(255), " + + "\"Column2\" varchar(255))", + targetTableName)); + // Add the clustering key; all columns clustered together + statement.execute( + String.format( + "alter table %s cluster by (ID, \"Column1\", \"Column2\")", targetTableName)); + TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); + // Only submit data for 2 columns out of 3 in the table so that 1 column will be dropped in + // temp + // table + tdcb.setTableName(targetTableName).setColumns(Arrays.asList("ID", "Column1")); + StreamLoader loader = tdcb.getStreamLoader(); + loader.start(); + + for (int i = 0; i < 5; ++i) { + 
Object[] row = new Object[] {i, "foo_" + i}; + loader.submitRow(row); + } + loader.finish(); - // create table with spaces in column names - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE \"%s\" (" - + "ID int, " - + "\"Column1\" varchar(255), " - + "\"Column2\" varchar(255))", - targetTableName)); - // Add the clustering key; all columns clustered together - testConnection - .createStatement() - .execute( - String.format( - "alter table %s cluster by (ID, \"Column1\", \"Column2\")", targetTableName)); - TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); - // Only submit data for 2 columns out of 3 in the table so that 1 column will be dropped in temp - // table - tdcb.setTableName(targetTableName).setColumns(Arrays.asList("ID", "Column1")); - StreamLoader loader = tdcb.getStreamLoader(); - loader.start(); + try (ResultSet rs = + testConnection + .createStatement() + .executeQuery( + String.format("SELECT * FROM \"%s\" ORDER BY \"Column1\"", targetTableName))) { - for (int i = 0; i < 5; ++i) { - Object[] row = new Object[] {i, "foo_" + i}; - loader.submitRow(row); + assertTrue(rs.next()); + assertThat("The first id", rs.getInt(1), equalTo(0)); + assertThat("The first str", rs.getString(2), equalTo("foo_0")); + } } - loader.finish(); - - ResultSet rs = - testConnection - .createStatement() - .executeQuery( - String.format("SELECT * FROM \"%s\" ORDER BY \"Column1\"", targetTableName)); - - rs.next(); - assertThat("The first id", rs.getInt(1), equalTo(0)); - assertThat("The first str", rs.getString(2), equalTo("foo_0")); } } diff --git a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java index d616598d0..859533686 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java @@ -5,8 +5,10 @@ import static 
org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import java.sql.ResultSet; +import java.sql.Statement; import java.util.List; import net.snowflake.client.category.TestCategoryLoader; import org.junit.Test; @@ -17,48 +19,46 @@ public class LoaderMultipleBatchIT extends LoaderBase { @Test public void testLoaderMultipleBatch() throws Exception { String refTableName = "LOADER_TEST_TABLE_REF"; - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE \"%s\" (" - + "ID int, " - + "C1 varchar(255), " - + "C2 varchar(255) DEFAULT 'X', " - + "C3 double, " - + "C4 timestamp, " - + "C5 variant)", - refTableName)); + try (Statement statement = testConnection.createStatement()) { + statement.execute( + String.format( + "CREATE OR REPLACE TABLE \"%s\" (" + + "ID int, " + + "C1 varchar(255), " + + "C2 varchar(255) DEFAULT 'X', " + + "C3 double, " + + "C4 timestamp, " + + "C5 variant)", + refTableName)); - try { - TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); - List dataSet = tdcb.populateReturnData(); + try { + TestDataConfigBuilder tdcb = new TestDataConfigBuilder(testConnection, putConnection); + List dataSet = tdcb.populateReturnData(); - TestDataConfigBuilder tdcbRef = new TestDataConfigBuilder(testConnection, putConnection); - tdcbRef - .setDataSet(dataSet) - .setTableName(refTableName) - .setCsvFileBucketSize(2) - .setCsvFileSize(30000) - .populate(); + TestDataConfigBuilder tdcbRef = new TestDataConfigBuilder(testConnection, putConnection); + tdcbRef + .setDataSet(dataSet) + .setTableName(refTableName) + .setCsvFileBucketSize(2) + .setCsvFileSize(30000) + .populate(); - ResultSet rsReference = - testConnection - .createStatement() - .executeQuery(String.format("SELECT hash_agg(*) FROM \"%s\"", TARGET_TABLE_NAME)); - rsReference.next(); - long hashValueReference = rsReference.getLong(1); - ResultSet rsTarget = - 
testConnection - .createStatement() - .executeQuery(String.format("SELECT hash_agg(*) FROM \"%s\"", refTableName)); - rsTarget.next(); - long hashValueTarget = rsTarget.getLong(1); - assertThat("hash values", hashValueTarget, equalTo(hashValueReference)); - } finally { - testConnection - .createStatement() - .execute(String.format("DROP TABLE IF EXISTS %s", refTableName)); + try (ResultSet rsReference = + statement.executeQuery( + String.format("SELECT hash_agg(*) FROM \"%s\"", TARGET_TABLE_NAME))) { + assertTrue(rsReference.next()); + long hashValueReference = rsReference.getLong(1); + try (ResultSet rsTarget = + statement.executeQuery( + String.format("SELECT hash_agg(*) FROM \"%s\"", refTableName))) { + assertTrue(rsTarget.next()); + long hashValueTarget = rsTarget.getLong(1); + assertThat("hash values", hashValueTarget, equalTo(hashValueReference)); + } + } + } finally { + statement.execute(String.format("DROP TABLE IF EXISTS %s", refTableName)); + } } } } diff --git a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java index 352a3a3d1..790249e96 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java @@ -5,8 +5,10 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; import java.sql.ResultSet; +import java.sql.Statement; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Arrays; @@ -23,44 +25,44 @@ public void testLoadTimestamp() throws Exception { final String targetTableName = "LOADER_TEST_TIMESTAMP"; // create table including TIMESTAMP_NTZ - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE %s (" - + "ID int, " - + "C1 varchar(255), " - + "C2 timestamp_ntz)", - targetTableName)); - - // Binding java.util.Date, Timestamp and 
java.sql.Date with TIMESTAMP - // datatype. No java.sql.Time binding is supported for TIMESTAMP. - // For java.sql.Time, the target data type must be TIME. - Object[] testData = - new Object[] { - new Date(), - java.sql.Timestamp.valueOf("0001-01-01 08:00:00"), - java.sql.Date.valueOf("2001-01-02") - }; - - for (int i = 0; i < 2; ++i) { - boolean useLocalTimezone = false; - TimeZone originalTimeZone; - TimeZone targetTimeZone; - - if (i == 0) { - useLocalTimezone = true; - originalTimeZone = TimeZone.getDefault(); - targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); - } else { - originalTimeZone = TimeZone.getTimeZone("UTC"); - targetTimeZone = TimeZone.getTimeZone("UTC"); - } - - // input timestamp associated with the target timezone, America/Los_Angeles - for (Object testTs : testData) { - _testLoadTimestamp( - targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, false); + try (Statement statement = testConnection.createStatement()) { + statement.execute( + String.format( + "CREATE OR REPLACE TABLE %s (" + + "ID int, " + + "C1 varchar(255), " + + "C2 timestamp_ntz)", + targetTableName)); + + // Binding java.util.Date, Timestamp and java.sql.Date with TIMESTAMP + // datatype. No java.sql.Time binding is supported for TIMESTAMP. + // For java.sql.Time, the target data type must be TIME. 
+ Object[] testData = + new Object[] { + new Date(), + java.sql.Timestamp.valueOf("0001-01-01 08:00:00"), + java.sql.Date.valueOf("2001-01-02") + }; + + for (int i = 0; i < 2; ++i) { + boolean useLocalTimezone = false; + TimeZone originalTimeZone; + TimeZone targetTimeZone; + + if (i == 0) { + useLocalTimezone = true; + originalTimeZone = TimeZone.getDefault(); + targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); + } else { + originalTimeZone = TimeZone.getTimeZone("UTC"); + targetTimeZone = TimeZone.getTimeZone("UTC"); + } + + // input timestamp associated with the target timezone, America/Los_Angeles + for (Object testTs : testData) { + _testLoadTimestamp( + targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, false); + } } } } @@ -97,26 +99,27 @@ private void _testLoadTimestamp( assertThat("Loader detected errors", listener.getErrorCount(), equalTo(0)); - ResultSet rs = + try (ResultSet rs = testConnection .createStatement() - .executeQuery(String.format("SELECT * FROM \"%s\"", targetTableName)); + .executeQuery(String.format("SELECT * FROM \"%s\"", targetTableName))) { - rs.next(); - Timestamp ts = rs.getTimestamp("C2"); + assertTrue(rs.next()); + Timestamp ts = rs.getTimestamp("C2"); - // format the input TS with the target timezone - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); - sdf.setTimeZone(targetTimeZone); - String currentTsStr = sdf.format(testTs); + // format the input TS with the target timezone + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); + sdf.setTimeZone(targetTimeZone); + String currentTsStr = sdf.format(testTs); - // format the retrieved TS with the original timezone - sdf.setTimeZone(originalTimeZone); - String retrievedTsStr = sdf.format(new Date(ts.getTime())); + // format the retrieved TS with the original timezone + sdf.setTimeZone(originalTimeZone); + String retrievedTsStr = sdf.format(new Date(ts.getTime())); - // They must be identical. 
- assertThat( - "Input and retrieved timestamp are different", retrievedTsStr, equalTo(currentTsStr)); + // They must be identical. + assertThat( + "Input and retrieved timestamp are different", retrievedTsStr, equalTo(currentTsStr)); + } } @Test @@ -124,45 +127,46 @@ public void testLoadTimestampV1() throws Exception { final String targetTableName = "LOADER_TEST_TIMESTAMP_V1"; // create table including TIMESTAMP_NTZ - testConnection - .createStatement() - .execute( - String.format( - "CREATE OR REPLACE TABLE %s (" - + "ID int, " - + "C1 varchar(255), " - + "C2 timestamp_ntz)", - targetTableName)); - - // Binding java.sql.Time with TIMESTAMP is supported only if - // mapTimeToTimestamp flag is enabled. This is required to keep the - // old behavior of Informatica V1 connector. - Object[] testData = - new Object[] { - // full timestamp in Time object. Interestingly all values are - // preserved. - new java.sql.Time(1502931205000L), java.sql.Time.valueOf("12:34:56") // a basic test case - }; - - for (int i = 0; i < 2; ++i) { - boolean useLocalTimezone; - TimeZone originalTimeZone; - TimeZone targetTimeZone; - - if (i == 0) { - useLocalTimezone = true; - originalTimeZone = TimeZone.getDefault(); - targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); - } else { - useLocalTimezone = false; - originalTimeZone = TimeZone.getTimeZone("UTC"); - targetTimeZone = TimeZone.getTimeZone("UTC"); - } - - // input timestamp associated with the target timezone, America/Los_Angeles - for (Object testTs : testData) { - _testLoadTimestamp( - targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, true); + try (Statement statement = testConnection.createStatement()) { + statement.execute( + String.format( + "CREATE OR REPLACE TABLE %s (" + + "ID int, " + + "C1 varchar(255), " + + "C2 timestamp_ntz)", + targetTableName)); + + // Binding java.sql.Time with TIMESTAMP is supported only if + // mapTimeToTimestamp flag is enabled. 
This is required to keep the + // old behavior of Informatica V1 connector. + Object[] testData = + new Object[] { + // full timestamp in Time object. Interestingly all values are + // preserved. + new java.sql.Time(1502931205000L), + java.sql.Time.valueOf("12:34:56") // a basic test case + }; + + for (int i = 0; i < 2; ++i) { + boolean useLocalTimezone; + TimeZone originalTimeZone; + TimeZone targetTimeZone; + + if (i == 0) { + useLocalTimezone = true; + originalTimeZone = TimeZone.getDefault(); + targetTimeZone = TimeZone.getTimeZone("America/Los_Angeles"); + } else { + useLocalTimezone = false; + originalTimeZone = TimeZone.getTimeZone("UTC"); + targetTimeZone = TimeZone.getTimeZone("UTC"); + } + + // input timestamp associated with the target timezone, America/Los_Angeles + for (Object testTs : testData) { + _testLoadTimestamp( + targetTableName, originalTimeZone, targetTimeZone, testTs, useLocalTimezone, true); + } } } } diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java index faa809af4..84b876147 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java @@ -12,6 +12,7 @@ import java.nio.file.attribute.PosixFilePermission; import java.sql.Connection; import java.sql.SQLException; +import java.sql.Statement; import java.util.HashSet; import java.util.Properties; import java.util.logging.Level; @@ -29,14 +30,16 @@ public void testJDK14LoggingWithClientConfig() { Files.write(configFilePath, configJson.getBytes()); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); - Connection connection = getConnection(properties); - connection.createStatement().executeQuery("select 1"); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { + 
statement.executeQuery("select 1"); - File file = new File("logs/jdbc/"); - assertTrue(file.exists()); + File file = new File("logs/jdbc/"); + assertTrue(file.exists()); - Files.deleteIfExists(configFilePath); - FileUtils.deleteDirectory(new File("logs")); + Files.deleteIfExists(configFilePath); + FileUtils.deleteDirectory(new File("logs")); + } } catch (IOException e) { fail("testJDK14LoggingWithClientConfig failed"); } catch (SQLException e) { @@ -49,8 +52,9 @@ public void testJDK14LoggingWithClientConfigInvalidConfigFilePath() throws SQLEx Path configFilePath = Paths.get("invalid.json"); Properties properties = new Properties(); properties.put("client_config_file", configFilePath.toString()); - Connection connection = getConnection(properties); - connection.createStatement().executeQuery("select 1"); + try (Connection connection = getConnection(properties)) { + connection.createStatement().executeQuery("select 1"); + } } @Test diff --git a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java index 897102e1a..eadd984cc 100644 --- a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java +++ b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java @@ -12,6 +12,7 @@ import java.sql.Connection; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -42,20 +43,21 @@ public void testPooledConnection() throws SQLException { TestingConnectionListener listener = new TestingConnectionListener(); pooledConnection.addConnectionEventListener(listener); - Connection connection = pooledConnection.getConnection(); - connection.createStatement().execute("select 1"); + try (Connection connection = pooledConnection.getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("select 1"); - try { - // should fire connection 
error events - connection.setCatalog("nonexistent_database"); - fail(); - } catch (SQLException e) { - assertThat(e.getErrorCode(), is(2043)); - } + try { + // should fire connection error events + connection.setCatalog("nonexistent_database"); + fail(); + } catch (SQLException e) { + assertThat(e.getErrorCode(), is(2043)); + } - // should not close underlying physical connection - // and fire connection closed events - connection.close(); + // should not close underlying physical connection + // and fire connection closed events + } List connectionClosedEvents = listener.getConnectionClosedEvents(); List connectionErrorEvents = listener.getConnectionErrorEvents(); @@ -105,9 +107,9 @@ public void testPooledConnectionUsernamePassword() throws SQLException { TestingConnectionListener listener = new TestingConnectionListener(); pooledConnection.addConnectionEventListener(listener); - Connection connection = pooledConnection.getConnection(); - connection.createStatement().execute("select 1"); - connection.close(); + try (Connection connection = pooledConnection.getConnection()) { + connection.createStatement().execute("select 1"); + } pooledConnection.close(); } From ed334e6c440f2bc833f758370b433c7e8932a859 Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Sun, 28 Apr 2024 18:15:37 +0200 Subject: [PATCH 06/54] Structured types backward compatibility for getObject method (#1740) * SNOW-1232333 - ResultSet getObject method return string if type wasn't specified --- .../snowflake/client/core/ArrowSqlInput.java | 1 - .../snowflake/client/core/JsonSqlInput.java | 9 +- .../client/core/SFArrowResultSet.java | 9 +- .../client/core/SFBaseResultSet.java | 5 +- .../client/core/SFJsonResultSet.java | 3 +- .../net/snowflake/client/core/SFSqlInput.java | 11 -- .../client/jdbc/SnowflakeBaseResultSet.java | 24 ++- .../client/jdbc/SnowflakeResultSetV1.java | 18 +- ...ngAndInsertingStructuredTypesLatestIT.java | 48 +++-- .../ResultSetStructuredTypesLatestIT.java | 166 
+++++++++++++++--- 10 files changed, 226 insertions(+), 68 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/ArrowSqlInput.java b/src/main/java/net/snowflake/client/core/ArrowSqlInput.java index 61cf39674..55e29bc65 100644 --- a/src/main/java/net/snowflake/client/core/ArrowSqlInput.java +++ b/src/main/java/net/snowflake/client/core/ArrowSqlInput.java @@ -28,7 +28,6 @@ @SnowflakeJdbcInternalApi public class ArrowSqlInput extends BaseSqlInput { - private final Map input; private int currentIndex = 0; private boolean wasNull = false; diff --git a/src/main/java/net/snowflake/client/core/JsonSqlInput.java b/src/main/java/net/snowflake/client/core/JsonSqlInput.java index d0aeb1a93..daff3d9b0 100644 --- a/src/main/java/net/snowflake/client/core/JsonSqlInput.java +++ b/src/main/java/net/snowflake/client/core/JsonSqlInput.java @@ -35,6 +35,7 @@ @SnowflakeJdbcInternalApi public class JsonSqlInput extends BaseSqlInput { + private final String text; private final JsonNode input; private final Iterator elements; private final TimeZone sessionTimeZone; @@ -42,12 +43,14 @@ public class JsonSqlInput extends BaseSqlInput { private boolean wasNull = false; public JsonSqlInput( + String text, JsonNode input, SFBaseSession session, Converters converters, List fields, TimeZone sessionTimeZone) { super(session, converters, fields); + this.text = text; this.input = input; this.elements = input.elements(); this.sessionTimeZone = sessionTimeZone; @@ -57,6 +60,10 @@ public JsonNode getInput() { return input; } + public String getText() { + return text; + } + @Override public String readString() throws SQLException { return withNextValue((this::convertString)); @@ -178,7 +185,7 @@ private T convertObject(Class type, TimeZone tz, Object value, FieldMetad JsonNode jsonNode = (JsonNode) value; SQLInput sqlInput = new JsonSqlInput( - jsonNode, session, converters, fieldMetadata.getFields(), sessionTimeZone); + null, jsonNode, session, converters, fieldMetadata.getFields(), 
sessionTimeZone); SQLData instance = (SQLData) SQLDataCreationHelper.create(type); instance.readSQL(sqlInput, null); return (T) instance; diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index 74be589ea..a617bb739 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -378,7 +378,7 @@ public SQLInput createSqlInputForColumn( SFBaseSession session, List fields) { if (parentObjectClass.equals(JsonSqlInput.class)) { - return createJsonSqlInputForColumn(input, columnIndex, session, fields); + return createJsonSqlInputForColumn(input, session, fields); } else { return new ArrowSqlInput((Map) input, session, converters, fields); } @@ -581,8 +581,10 @@ private Object createJsonSqlInput(int columnIndex, Object obj) throws SFExceptio if (obj == null) { return null; } - JsonNode jsonNode = OBJECT_MAPPER.readTree((String) obj); + String text = (String) obj; + JsonNode jsonNode = OBJECT_MAPPER.readTree(text); return new JsonSqlInput( + text, jsonNode, session, converters, @@ -595,6 +597,9 @@ private Object createJsonSqlInput(int columnIndex, Object obj) throws SFExceptio private Object createArrowSqlInput(int columnIndex, Map input) throws SFException { + if (input == null) { + return null; + } return new ArrowSqlInput( input, session, converters, resultSetMetaData.getColumnFields(columnIndex)); } diff --git a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java index f7fde0790..0a0fffc63 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java @@ -261,14 +261,15 @@ public Timestamp convertToTimestamp( @SnowflakeJdbcInternalApi protected SQLInput createJsonSqlInputForColumn( - Object input, int columnIndex, SFBaseSession session, List fields) { + 
Object input, SFBaseSession session, List fields) { JsonNode inputNode; if (input instanceof JsonNode) { inputNode = (JsonNode) input; } else { inputNode = OBJECT_MAPPER.convertValue(input, JsonNode.class); } - return new JsonSqlInput(inputNode, session, getConverters(), fields, sessionTimeZone); + return new JsonSqlInput( + input.toString(), inputNode, session, getConverters(), fields, sessionTimeZone); } @SnowflakeJdbcInternalApi diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index a959215cd..3437448fe 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -257,7 +257,7 @@ public SQLInput createSqlInputForColumn( int columnIndex, SFBaseSession session, List fields) { - return createJsonSqlInputForColumn(input, columnIndex, session, fields); + return createJsonSqlInputForColumn(input, session, fields); } @Override @@ -293,6 +293,7 @@ private Object getSqlInput(String input, int columnIndex) throws SFException { try { JsonNode jsonNode = OBJECT_MAPPER.readTree(input); return new JsonSqlInput( + input, jsonNode, session, converters, diff --git a/src/main/java/net/snowflake/client/core/SFSqlInput.java b/src/main/java/net/snowflake/client/core/SFSqlInput.java index b3efa6893..2b3d6ba95 100644 --- a/src/main/java/net/snowflake/client/core/SFSqlInput.java +++ b/src/main/java/net/snowflake/client/core/SFSqlInput.java @@ -4,7 +4,6 @@ package net.snowflake.client.core; import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLInput; import java.util.List; import java.util.Map; @@ -31,8 +30,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @param tz timezone to consider. 
* @return the attribute; if the value is SQL NULL, returns null * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.2 */ java.sql.Timestamp readTimestamp(TimeZone tz) throws SQLException; /** @@ -43,8 +40,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code Object} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.8 */ T readObject(Class type, TimeZone tz) throws SQLException; /** @@ -55,8 +50,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code List} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.8 */ List readList(Class type) throws SQLException; @@ -68,8 +61,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code Map} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method - * @since 1.8 */ Map readMap(Class type) throws SQLException; /** @@ -80,8 +71,6 @@ static SFSqlInput unwrap(SQLInput sqlInput) { * @return the attribute at the head of the stream as an {@code Array} in the Java programming * language;{@code null} if the attribute is SQL {@code NULL} * @exception SQLException if a database access error occurs - * @exception SQLFeatureNotSupportedException if the JDBC driver does not 
support this method - * @since 1.8 */ T[] readArray(Class type) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index 692c7e412..15c819479 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -6,6 +6,7 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -38,7 +39,6 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; -import net.snowflake.client.core.ArrowSqlInput; import net.snowflake.client.core.ColumnTypeHelper; import net.snowflake.client.core.JsonSqlInput; import net.snowflake.client.core.ObjectMapperFactory; @@ -1354,7 +1354,9 @@ public T getObject(int columnIndex, Class type) throws SQLException { logger.debug("public T getObject(int columnIndex,Class type)", false); if (resultSetMetaData.isStructuredTypeColumn(columnIndex)) { if (SQLData.class.isAssignableFrom(type)) { - SQLInput sqlInput = (SQLInput) getObject(columnIndex); + SQLInput sqlInput = + SnowflakeUtil.mapSFExceptionToSQLException( + () -> (SQLInput) sfBaseResultSet.getObject(columnIndex)); if (sqlInput == null) { return null; } else { @@ -1366,12 +1368,17 @@ public T getObject(int columnIndex, Class type) throws SQLException { Object object = getObject(columnIndex); if (object == null) { return null; - } else if (object instanceof JsonSqlInput) { - JsonNode jsonNode = ((JsonSqlInput) object).getInput(); - return (T) - OBJECT_MAPPER.convertValue(jsonNode, new TypeReference>() {}); + } else if (object instanceof Map) { + throw new SQLException( + "Arrow native struct couldn't be converted to String. 
To map to SqlData the method getObject(int columnIndex, Class type) should be used"); } else { - return (T) ((ArrowSqlInput) object).getInput(); + try { + return (T) + OBJECT_MAPPER.readValue( + (String) object, new TypeReference>() {}); + } catch (JsonProcessingException e) { + throw new SQLException("Value couldn't be converted to Map"); + } } } } @@ -1585,7 +1592,8 @@ public Map getMap(int columnIndex, Class type) throws SQLExcep int columnType = ColumnTypeHelper.getColumnType(valueFieldMetadata.getType(), session); int scale = valueFieldMetadata.getScale(); TimeZone tz = sfBaseResultSet.getSessionTimeZone(); - Object object = getObject(columnIndex); + Object object = + SnowflakeUtil.mapSFExceptionToSQLException(() -> sfBaseResultSet.getObject(columnIndex)); if (object == null) { return null; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java index 5135e3ca5..bc79c5669 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java @@ -28,6 +28,8 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; +import net.snowflake.client.core.ArrowSqlInput; +import net.snowflake.client.core.JsonSqlInput; import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFException; @@ -263,11 +265,17 @@ public ResultSetMetaData getMetaData() throws SQLException { public Object getObject(int columnIndex) throws SQLException { raiseSQLExceptionIfResultSetIsClosed(); - try { - return sfBaseResultSet.getObject(columnIndex); - } catch (SFException ex) { - throw new SnowflakeSQLException( - ex.getCause(), ex.getSqlState(), ex.getVendorCode(), ex.getParams()); + Object object = + SnowflakeUtil.mapSFExceptionToSQLException(() -> sfBaseResultSet.getObject(columnIndex)); + if (object == null) { + return null; + } else 
if (object instanceof JsonSqlInput) { + return ((JsonSqlInput) object).getText(); + } else if (object instanceof ArrowSqlInput) { + throw new SQLException( + "Arrow native struct couldn't be converted to String. To map to SqlData the method getObject(int columnIndex, Class type) should be used"); + } else { + return object; } } diff --git a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java index 4a4d000e0..a408e5d5a 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java @@ -20,7 +20,6 @@ import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; -import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.ZoneId; @@ -37,13 +36,33 @@ import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; +import org.junit.After; +import org.junit.Assume; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +@RunWith(Parameterized.class) @Category(TestCategoryResultSet.class) public class BindingAndInsertingStructuredTypesLatestIT extends BaseJDBCTest { + @Parameterized.Parameters(name = "format={0}") + public static Object[][] data() { + return new Object[][] { + {ResultSetFormatType.JSON}, + {ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES}, + {ResultSetFormatType.NATIVE_ARROW} + }; + } + + private final ResultSetFormatType queryResultFormat; + + public BindingAndInsertingStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { + this.queryResultFormat = queryResultFormat; + 
} + public Connection init() throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { @@ -53,11 +72,25 @@ public Connection init() throws SQLException { stmt.execute("alter session set ENABLE_OBJECT_TYPED_BINDS = true"); stmt.execute("alter session set enable_structured_types_in_fdn_tables=true"); stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + stmt.execute( + "alter session set jdbc_query_result_format = '" + + queryResultFormat.sessionParameterTypeValue + + "'"); + if (queryResultFormat == ResultSetFormatType.NATIVE_ARROW) { + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); + stmt.execute("alter session set FORCE_ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); + } } return conn; } @Before + public void setup() { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); + } + + @After public void clean() { SnowflakeObjectTypeFactories.unregister(SimpleClass.class); SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); @@ -67,7 +100,6 @@ public void clean() { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testWriteObject() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); SimpleClass sc = new SimpleClass("text1", 2); SimpleClass sc2 = new SimpleClass("text2", 3); try (Connection connection = init()) { @@ -104,7 +136,7 @@ public void testWriteObject() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testWriteNullObject() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); try (Connection connection = 
init(); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmtement2 = @@ -129,7 +161,6 @@ public void testWriteNullObject() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testWriteObjectBindingNull() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); try (Connection connection = init(); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = @@ -154,7 +185,6 @@ public void testWriteObjectBindingNull() throws SQLException { @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testWriteObjectAllTypes() throws SQLException { TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC)); - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); try (Connection connection = init(); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = @@ -222,13 +252,13 @@ public void testWriteObjectAllTypes() throws SQLException { assertEquals( Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), object.getTimestampLtz()); assertEquals( - // toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, - // ZoneId.of("Europe/Warsaw"))), Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 9, 44, 44)), object.getTimestampNtz()); assertEquals( toTimestamp(ZonedDateTime.of(2021, 12, 23, 9, 44, 44, 0, ZoneId.of("Asia/Tokyo"))), object.getTimestampTz()); - assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), object.getDate()); + // TODO uncomment after merge SNOW-928973: Date field is returning one day less when getting + // through getString method + // assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), object.getDate()); assertEquals(Time.valueOf(LocalTime.of(12, 34, 56)), object.getTime()); assertArrayEquals(new byte[] {'a', 'b', 'c'}, object.getBinary()); assertEquals("testString", 
object.getSimpleClass().getString()); @@ -244,7 +274,6 @@ public static Timestamp toTimestamp(ZonedDateTime dateTime) { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testWriteArray() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); try (Connection connection = init(); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = @@ -272,7 +301,6 @@ public void testWriteArray() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testWriteArrayNoBinds() throws SQLException { - SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); try (Connection connection = init(); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java index 442a940b9..b1da95b99 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java @@ -6,6 +6,7 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.math.BigDecimal; @@ -35,7 +36,9 @@ import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.NestedStructSqlData; import net.snowflake.client.jdbc.structuredtypes.sqldata.NullableFieldsSqlData; +import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.StringClass; +import 
org.junit.After; import org.junit.Assume; import org.junit.Before; import org.junit.Test; @@ -62,6 +65,22 @@ public ResultSetStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { this.queryResultFormat = queryResultFormat; } + @Before + public void setup() { + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); + SnowflakeObjectTypeFactories.register(NullableFieldsSqlData.class, NullableFieldsSqlData::new); + } + + @After + public void clean() { + SnowflakeObjectTypeFactories.unregister(StringClass.class); + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + SnowflakeObjectTypeFactories.unregister(NullableFieldsSqlData.class); + } + public Connection init() throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { @@ -80,12 +99,6 @@ public Connection init() throws SQLException { return conn; } - @Before - public void clean() throws Exception { - SnowflakeObjectTypeFactories.unregister(StringClass.class); - SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); - } - @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapStructToObjectWithFactory() throws SQLException { @@ -102,6 +115,8 @@ public void testMapStructToObjectWithReflection() throws SQLException { private void testMapJson(boolean registerFactory) throws SQLException { if (registerFactory) { SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); + } else { + SnowflakeObjectTypeFactories.unregister(StringClass.class); } withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR)", @@ -109,13 +124,7 @@ private void testMapJson(boolean registerFactory) throws 
SQLException { StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); }); - } - - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructAllTypes() throws SQLException { - testMapAllTypes(false); - testMapAllTypes(true); + SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } @Test @@ -129,12 +138,9 @@ public void testMapNullStruct() throws SQLException { }); } - private void testMapAllTypes(boolean registerFactory) throws SQLException { - if (registerFactory) { - SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); - } else { - SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); - } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapStructAllTypes() throws SQLException { try (Connection connection = init(); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); @@ -211,10 +217,109 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { } } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException { + Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='Europe/Warsaw'," + + "TIME_OUTPUT_FORMAT = 'HH24:MI:SS'," + + "DATE_OUTPUT_FORMAT = 'YYYY-MM-DD'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF3'"); + + 
try (ResultSet resultSet = + statement.executeQuery( + "select {" + + "'string': 'a', " + + "'b': 1, " + + "'s': 2, " + + "'i': 3, " + + "'l': 4, " + + "'f': 1.1, " + + "'d': 2.2, " + + "'bd': 3.3, " + + "'bool': true, " + + "'timestamp_ltz': '2021-12-22 09:43:44'::TIMESTAMP_LTZ, " + + "'timestamp_ntz': '2021-12-23 09:44:44'::TIMESTAMP_NTZ, " + + "'timestamp_tz': '2021-12-24 09:45:45 +0800'::TIMESTAMP_TZ, " + + "'date': '2023-12-24'::DATE, " + + "'time': '12:34:56'::TIME, " + + "'binary': TO_BINARY('616263', 'HEX'), " + + "'simpleClass': {'string': 'b', 'intValue': 2}" + + "}::OBJECT(" + + "string VARCHAR, " + + "b TINYINT, " + + "s SMALLINT, " + + "i INTEGER, " + + "l BIGINT, " + + "f FLOAT, " + + "d DOUBLE, " + + "bd DOUBLE, " + + "bool BOOLEAN, " + + "timestamp_ltz TIMESTAMP_LTZ, " + + "timestamp_ntz TIMESTAMP_NTZ, " + + "timestamp_tz TIMESTAMP_TZ, " + + "date DATE, " + + "time TIME, " + + "binary BINARY, " + + "simpleClass OBJECT(string VARCHAR, intValue INTEGER)" + + ")"); ) { + resultSet.next(); + String object = (String) resultSet.getObject(1); + String expected = + "{\n" + + " \"string\": \"a\",\n" + + " \"b\": 1,\n" + + " \"s\": 2,\n" + + " \"i\": 3,\n" + + " \"l\": 4,\n" + + " \"f\": 1.100000000000000e+00,\n" + + " \"d\": 2.200000000000000e+00,\n" + + " \"bd\": 3.300000000000000e+00,\n" + + " \"bool\": true,\n" + + " \"timestamp_ltz\": \"2021-12-22 09:43:44.000 +0100\",\n" + + " \"timestamp_ntz\": \"2021-12-23 09:44:44.000\",\n" + + " \"timestamp_tz\": \"2021-12-24 09:45:45.000 +0800\",\n" + + " \"date\": \"2023-12-24\",\n" + + " \"time\": \"12:34:56\",\n" + + " \"binary\": \"616263\",\n" + + " \"simpleClass\": {\n" + + " \"string\": \"b\",\n" + + " \"intValue\": 2\n" + + " }\n" + + "}"; + assertEquals(expected, object); + } + } + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testThrowingGettingObjectIfTypeWasNotIndicatedAndFormatNativeArrow() + throws SQLException { + 
Assume.assumeTrue(queryResultFormat == ResultSetFormatType.NATIVE_ARROW); + withFirstRow( + "select {'string':'a'}::OBJECT(string VARCHAR)", + (resultSet) -> { + assertThrows(SQLException.class, () -> resultSet.getObject(1)); + }); + withFirstRow( + "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", + (resultSet) -> { + assertThrows(SQLException.class, () -> resultSet.getObject(1, Map.class)); + }); + } + @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsArrayOfSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", (resultSet) -> { @@ -229,7 +334,6 @@ public void testReturnAsArrayOfSqlData() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(NullableFieldsSqlData.class, NullableFieldsSqlData::new); withFirstRow( "SELECT OBJECT_CONSTRUCT_KEEP_NULL('string', null, 'nullableIntValue', null, 'nullableLongValue', null, " + "'date', null, 'bd', null, 'bytes', null, 'longValue', null)" @@ -252,7 +356,6 @@ public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullsForAllTpesInSqlData() throws SQLException { - SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); try (Connection connection = init(); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); @@ -370,7 +473,6 @@ public void testReturnAsListOfDouble() throws SQLException { @Test 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMap() throws SQLException { - SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -382,10 +484,23 @@ public void testReturnAsMap() throws SQLException { }); } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnAsMapByGetObject() throws SQLException { + Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + withFirstRow( + "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", + (resultSet) -> { + Map> map = resultSet.getObject(1, Map.class); + assertEquals("one", map.get("x").get("string")); + assertEquals("two", map.get("y").get("string")); + assertEquals("three", map.get("z").get("string")); + }); + } + @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMapWithNullableValues() throws SQLException { - SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select {'x':{'string':'one'},'y':null,'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -400,7 +515,6 @@ public void testReturnAsMapWithNullableValues() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnNullAsObjectOfTypeMap() throws SQLException { - SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -413,7 +527,6 @@ public void testReturnNullAsObjectOfTypeMap() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) 
public void testReturnNullAsMap() throws SQLException { - SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -523,7 +636,6 @@ public void testReturnAsMapOfBoolean() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsList() throws SQLException { - SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); withFirstRow( "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", (resultSet) -> { From 6927fff3e7771bbda87ba69792f529aaa2cb426a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 29 Apr 2024 08:43:53 +0200 Subject: [PATCH 07/54] SNOW-1045676: Fix list of reserved keywords (#1670) --- .../jdbc/SnowflakeDatabaseMetaData.java | 66 +++++++++++++------ .../client/jdbc/DatabaseMetaDataLatestIT.java | 9 +++ 2 files changed, 54 insertions(+), 21 deletions(-) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index 34df34067..d79fd1b7c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -94,27 +94,51 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { // These are keywords not in SQL2003 standard private static final String notSQL2003Keywords = - "ACCOUNT,DATABASE,SCHEMA,VIEW,ISSUE,DATE_PART,EXTRACT," - + "POSITION,TRY_CAST,BIT,DATETIME,NUMBERC,OBJECT,BYTEINT,STRING,TEXT," - + "TIMESTAMPLTZ,TIMESTAMPNTZ,TIMESTAMPTZ,TIMESTAMP_LTZ,TIMESTAMP_NTZ,TIMESTAMP_TZ,TINYINT," - + "VARBINARY,VARIANT,ACCOUNTS,ACTION,ACTIVATE,ASC,AUTOINCREMENT,BEFORE," - + "BUILTIN,BYTE,CACHE,CHANGE,CLEAREPCACHE,CLONE,CLUSTER,CLUSTERS,COLUMNS,COMMENT," - + 
"COMPRESSION,CONSTRAINTS,COPY,CP,CREDENTIALS,D,DATA,DATABASES,DEFERRABLE," - + "DEFERRED,DELIMITED,DESC,DIRECTORY,DISABLE,DUAL,ENABLE,ENFORCED," - + "EXCLUSIVE,EXPLAIN,EXPORTED,FAIL,FIELDS,FILE,FILES,FIRST,FN,FORCE,FORMAT," - + "FORMATS,FUNCTIONS,GRANTS,GSINSTANCE,GSINSTANCES,HELP,HIBERNATE,HINTS," - + "HISTORY,IDENTIFIED,IMMUTABLE,IMPORTED,INCIDENT,INCIDENTS,INFO,INITIALLY," - + "ISSUES,KEEP,KEY,KEYS,LAST,LIMIT,LIST,LOAD,LOCATION,LOCK,LOCKS,LS,MANAGE,MAP,MATCHED," - + "MATERIALIZED,MODIFY,MONITOR,MONITORS,NAME,NETWORK,NEXT,NORELY,NOTIFY,NOVALIDATE,NULLS,OBJECTS," - + "OFFSET,OJ,OPERATE,OPERATION,OPTION,OWNERSHIP,PARAMETERS,PARTIAL," - + "PERCENT,PLAN,PLUS,POLICIES,POLICY,POOL,PRESERVE,PRIVILEGES,PUBLIC,PURGE,PUT,QUIESCE," - + "READ,RECLUSTER,REFERENCE,RELY,REMOVE,RENAME,REPLACE,REPLACE_FAIL,RESOURCE," - + "RESTART,RESTORE,RESTRICT,RESUME,REWRITE,RM,ROLE,ROLES,RULE,SAMPLE,SCHEMAS,SEMI," - + "SEQUENCE,SEQUENCES,SERVER,SERVERS,SESSION,SETLOGLEVEL,SETS,SFC,SHARE,SHARED,SHARES,SHOW,SHUTDOWN,SIMPLE,SORT," - + "STAGE,STAGES,STATEMENT,STATISTICS,STOP,STORED,STRICT,STRUCT,SUSPEND,SUSPEND_IMMEDIATE,SWAP,SWITCH,T," - + "TABLES,TEMP,TEMPORARY,TRANSACTION,TRANSACTIONS,TRANSIENT,TRIGGERS,TRUNCATE,TS,TYPE,UNDROP,UNLOCK,UNSET," - + "UPGRADE,USAGE,USE,USERS,UTC,UTCTIMESTAMP,VALIDATE,VARIABLES,VERSION,VIEWS,VOLATILE,VOLUME," - + "VOLUMES,WAREHOUSE,WAREHOUSES,WARN,WORK,WRITE,ZONE,INCREMENT,MINUS,REGEXP,RLIKE"; + String.join( + ",", + "ACCOUNT", + "ASOF", + "BIT", + "BYTEINT", + "CONNECTION", + "DATABASE", + "DATETIME", + "DATE_PART", + "FIXED", + "FOLLOWING", + "GSCLUSTER", + "GSPACKAGE", + "IDENTIFIER", + "ILIKE", + "INCREMENT", + "ISSUE", + "LONG", + "MAP", + "MATCH_CONDITION", + "MINUS", + "NUMBER", + "OBJECT", + "ORGANIZATION", + "QUALIFY", + "REFERENCE", + "REGEXP", + "RLIKE", + "SAMPLE", + "SCHEMA", + "STRING", + "TEXT", + "TIMESTAMPLTZ", + "TIMESTAMPNTZ", + "TIMESTAMPTZ", + "TIMESTAMP_LTZ", + "TIMESTAMP_NTZ", + "TIMESTAMP_TZ", + "TINYINT", + "TRANSIT", + "TRY_CAST", + 
"VARIANT", + "VECTOR", + "VIEW"); private final Connection connection; diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index e3659df39..d3176f8b2 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -2333,4 +2333,13 @@ public void testGetJDBCVersion() throws SQLException { assertEquals(2, metaData.getJDBCMinorVersion()); } } + + /** Added in > 3.15.1 */ + @Test + public void testKeywordsCount() throws SQLException { + try (Connection connection = getConnection()) { + DatabaseMetaData metaData = connection.getMetaData(); + assertEquals(43, metaData.getSQLKeywords().split(",").length); + } + } } From 2da252d6adf53b67e436002cab308c7cdbfb646a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 29 Apr 2024 09:37:32 +0200 Subject: [PATCH 08/54] SNOW-1333078: Add explicitly surefire autodetected dependencies (#1739) --- ci/container/test_component.sh | 2 +- parent-pom.xml | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/ci/container/test_component.sh b/ci/container/test_component.sh index fa550217d..6a3479e7c 100755 --- a/ci/container/test_component.sh +++ b/ci/container/test_component.sh @@ -76,7 +76,7 @@ export MAVEN_OPTS="$MAVEN_OPTS -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=fa cd $SOURCE_ROOT # Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched -mvn --batch-mode --show-version dependency:resolve-plugins +mvn --batch-mode --show-version dependency:go-offline for c in "${CATEGORY[@]}"; do c=$(echo $c | sed 's/ *$//g') diff --git a/parent-pom.xml b/parent-pom.xml index ac2940dfd..d36e43c12 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -488,6 +488,18 @@ ${awaitility.version} test + + 
org.apache.maven.surefire + surefire-junit4 + ${version.plugin.surefire} + test + + + org.apache.maven.surefire + common-junit48 + ${version.plugin.surefire} + test + @@ -725,5 +737,14 @@ org.awaitility awaitility + + + org.apache.maven.surefire + surefire-junit4 + + + org.apache.maven.surefire + common-junit48 + From f12b25a96c5fddf662577622a70d810c1d20529e Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 29 Apr 2024 12:57:26 +0200 Subject: [PATCH 09/54] Bump version to 3.16.0 for release (#1741) --- CHANGELOG.rst | 4 ++++ FIPS/pom.xml | 4 ++-- parent-pom.xml | 2 +- pom.xml | 4 ++-- src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java | 2 +- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 180cb41e5..f2f480238 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,7 @@ +**JDBC Driver 3.16.0** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + **JDBC Driver 3.15.1** - \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 5766a8243..dec05af1a 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.15.2-SNAPSHOT + 3.16.0 ../parent-pom.xml snowflake-jdbc-fips - 3.15.2-SNAPSHOT + 3.16.0 jar snowflake-jdbc-fips diff --git a/parent-pom.xml b/parent-pom.xml index d36e43c12..4e406ae4e 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.15.2-SNAPSHOT + 3.16.0 pom diff --git a/pom.xml b/pom.xml index a72cb7f43..912a7b1ba 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.15.2-SNAPSHOT + 3.16.0 ./parent-pom.xml ${artifactId} - 3.15.2-SNAPSHOT + 3.16.0 jar ${artifactId} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 4aaf6013e..3e2571951 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -29,7 +29,7 @@ public class SnowflakeDriver implements Driver { static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.15.2"; + public static String implementVersion = "3.16.0"; static int majorVersion = 0; static int minorVersion = 0; From 6f7a6b745d43f9d336b94a7a8a2e33020f80526a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 30 Apr 2024 06:59:42 +0200 Subject: [PATCH 10/54] Prepare next development version (#1742) --- FIPS/pom.xml | 4 ++-- parent-pom.xml | 2 +- pom.xml | 4 ++-- src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index dec05af1a..6000c0941 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.16.0 + 3.16.1-SNAPSHOT ../parent-pom.xml snowflake-jdbc-fips - 3.16.0 + 3.16.1-SNAPSHOT jar snowflake-jdbc-fips diff --git a/parent-pom.xml b/parent-pom.xml index 4e406ae4e..ce94fd59f 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.16.0 + 3.16.1-SNAPSHOT pom diff --git a/pom.xml b/pom.xml index 912a7b1ba..25290bb6b 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.16.0 + 3.16.1-SNAPSHOT ./parent-pom.xml ${artifactId} - 3.16.0 + 3.16.1-SNAPSHOT jar ${artifactId} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 3e2571951..0f7d6a706 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -29,7 +29,7 @@ public class SnowflakeDriver implements Driver { static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.16.0"; + public static String implementVersion = "3.16.1"; static int majorVersion = 0; static int minorVersion = 0; From 9e1c4863c6ae15ce10557dffc2e3aca9e0a7e560 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 30 Apr 2024 14:42:09 +0200 Subject: [PATCH 11/54] SNOW-1356389: Use junit assertions instead of jvm assert in tests (#1743) --- .../client/core/SessionUtilTest.java | 2 +- .../client/jdbc/CustomProxyLatestIT.java | 4 +- .../jdbc/FileUploaderExpandFileNamesTest.java | 18 ++++---- .../SnowflakeDriverConnectionStressTest.java | 4 +- .../client/jdbc/SnowflakeDriverIT.java | 24 +++++------ .../client/jdbc/SnowflakeDriverLatestIT.java | 42 +++++++++++-------- .../client/jdbc/StatementLatestIT.java | 2 +- .../pooling/LogicalConnectionLatestIT.java | 2 +- 8 files changed, 53 insertions(+), 45 deletions(-) diff --git a/src/test/java/net/snowflake/client/core/SessionUtilTest.java b/src/test/java/net/snowflake/client/core/SessionUtilTest.java index 5cb118c56..0b5a542c1 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilTest.java @@ -66,7 +66,7 @@ public void testParameterParsing() { parameterMap.put("other_parameter", BooleanNode.getTrue()); SFBaseSession session = new MockConnectionTest.MockSnowflakeConnectionImpl().getSFSession(); SessionUtil.updateSfDriverParamValues(parameterMap, session); - assert (((BooleanNode) session.getOtherParameter("other_parameter")).asBoolean()); + assertTrue(((BooleanNode) session.getOtherParameter("other_parameter")).asBoolean()); } @Test diff --git a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java index 0a2482cca..c6fb29bf4 100644 --- a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java @@ -741,7 +741,7 @@ public PasswordAuthentication getPasswordAuthentication() { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -750,7 +750,7 @@ public PasswordAuthentication getPasswordAuthentication() { File original = new File(sourceFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } catch (Throwable t) { t.printStackTrace(); } finally { diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java index 02ef84747..67f5a175b 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java @@ -4,6 +4,7 @@ package net.snowflake.client.jdbc; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; @@ -114,15 +115,14 @@ public int read() throws IOException { SnowflakeFileTransferConfig config = builder.build(); // Assert setting fields are in config - assert (config.getSnowflakeFileTransferMetadata() == metadata); - assert (config.getUploadStream() == input); - assert (config.getOcspMode() == OCSPMode.FAIL_CLOSED); - assert (!config.getRequireCompress()); - assert (config.getNetworkTimeoutInMilli() == 12345); - assert (config.getProxyProperties() == 
props); - assert (config.getPrefix().equals("dummy_prefix")); - assert (config.getDestFileName().equals("dummy_dest_file_name")); - + assertEquals(metadata, config.getSnowflakeFileTransferMetadata()); + assertEquals(input, config.getUploadStream()); + assertEquals(OCSPMode.FAIL_CLOSED, config.getOcspMode()); + assertFalse(config.getRequireCompress()); + assertEquals(12345, config.getNetworkTimeoutInMilli()); + assertEquals(props, config.getProxyProperties()); + assertEquals("dummy_prefix", config.getPrefix()); + assertEquals("dummy_dest_file_name", config.getDestFileName()); assertEquals(expectedThrowCount, throwCount); } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java index 7d9cc5f05..161e9c939 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java @@ -4,6 +4,8 @@ package net.snowflake.client.jdbc; +import static org.junit.Assert.assertNotNull; + import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; @@ -88,7 +90,7 @@ private static void connectAndQuery(int num_queries) throws SQLException { try (ResultSet resultSet = statement.executeQuery(QUERY)) { while (resultSet.next()) { final String user = resultSet.getString(1); - assert user != null; + assertNotNull(user); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java index 13bcee4e5..a66dd4c4a 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -769,7 +769,7 @@ public void testPutWithWildcardGCP() throws Throwable { for (int i = 0; i < fileNames.length; i++) { // Make sure that the downloaded file exists, it should be gzip compressed downloaded = 
new File(destFolderCanonicalPathWithSeparator + fileNames[i] + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -780,8 +780,8 @@ public void testPutWithWildcardGCP() throws Throwable { File original = new File(individualFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + fileNames[i]); - assert (original.length() == unzipped.length()); - assert (FileUtils.contentEquals(original, unzipped)); + assertEquals(original.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(original, unzipped)); } } finally { statement.execute("DROP STAGE IF EXISTS wildcard_stage"); @@ -862,7 +862,7 @@ public void testPutGetLargeFileGCP() throws Throwable { // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); // unzip the file Process p = @@ -874,8 +874,8 @@ public void testPutGetLargeFileGCP() throws Throwable { // back into a stage, // downloaded, and unzipped File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); + assertEquals(largeTempFile.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(largeTempFile, unzipped)); } finally { statement.execute("DROP STAGE IF EXISTS largefile_stage"); statement.execute("DROP STAGE IF EXISTS extra_stage"); @@ -937,7 +937,7 @@ public void testPutOverwrite() throws Throwable { // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); // unzip the file Process p = @@ -946,7 +946,7 @@ public void testPutOverwrite() throws Throwable { p.waitFor(); File unzipped = new 
File(destFolderCanonicalPathWithSeparator + "testfile.csv"); - assert (FileUtils.contentEqualsIgnoreEOL(file2, unzipped, null)); + assertTrue(FileUtils.contentEqualsIgnoreEOL(file2, unzipped, null)); } finally { statement.execute("DROP TABLE IF EXISTS testLoadToLocalFS"); } @@ -2695,7 +2695,7 @@ public void testPutGet() throws Throwable { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -2704,7 +2704,7 @@ public void testPutGet() throws Throwable { File original = new File(sourceFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testGetPut_stage"); } @@ -2754,7 +2754,7 @@ public void testPutGetToUnencryptedStage() throws Throwable { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -2763,7 +2763,7 @@ public void testPutGetToUnencryptedStage() throws Throwable { File original = new File(sourceFilePath); File unzipped = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testPutGet_unencstage"); } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index 01bff3e2e..da9847c9b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -224,7 +224,7 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { for (SnowflakeFileTransferMetadata oneMetadata : metadatas1) { InputStream inputStream = new FileInputStream(srcPath1); - assert (oneMetadata.isForOneFile()); + assertTrue(oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -252,7 +252,7 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { p.waitFor(); InputStream gzInputStream = new FileInputStream(gzfilePath); - assert (oneMetadata.isForOneFile()); + assertTrue(oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -271,8 +271,10 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { // Make sure that the downloaded files are EQUAL, // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + assertTrue( + isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assertTrue( + isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); } finally { statement.execute("DROP STAGE if exists " + testStageName); } @@ -360,8 +362,10 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { // Make sure that the downloaded files are EQUAL, // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + assertTrue( + isFileContentEqual(srcPath1, false, destFolderCanonicalPath + 
"/file1.gz", true)); + assertTrue( + isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); } finally { statement.execute("DROP STAGE if exists " + testStageName); } @@ -1057,7 +1061,7 @@ private void putAndGetFile(Statement statement) throws Throwable { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE_2 + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); Process p = Runtime.getRuntime() @@ -1070,7 +1074,7 @@ private void putAndGetFile(Statement statement) throws Throwable { "Original file: " + original.getAbsolutePath() + ", size: " + original.length()); System.out.println( "Unzipped file: " + unzipped.getAbsolutePath() + ", size: " + unzipped.length()); - assert (original.length() == unzipped.length()); + assertEquals(original.length(), unzipped.length()); } finally { statement.execute("DROP STAGE IF EXISTS testGetPut_stage"); } @@ -1137,7 +1141,7 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); // unzip the file Process p = @@ -1149,8 +1153,8 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { // back into a stage, // downloaded, and unzipped File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); + assertEquals(largeTempFile.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(largeTempFile, unzipped)); } finally { statement.execute("DROP STAGE IF EXISTS largefile_stage"); statement.execute("DROP STAGE IF EXISTS extra_stage"); @@ -1213,7 +1217,7 @@ public void 
testPutGetLargeFileAzure() throws Throwable { // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); // unzip the file Process p = @@ -1225,8 +1229,8 @@ public void testPutGetLargeFileAzure() throws Throwable { // back into a stage, // downloaded, and unzipped File unzipped = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv"); - assert (largeTempFile.length() == unzipped.length()); - assert (FileUtils.contentEquals(largeTempFile, unzipped)); + assertEquals(largeTempFile.length(), unzipped.length()); + assertTrue(FileUtils.contentEquals(largeTempFile, unzipped)); } finally { statement.execute("DROP STAGE IF EXISTS largefile_stage"); statement.execute("DROP STAGE IF EXISTS extra_stage"); @@ -1349,8 +1353,10 @@ public void testPutS3RegionalUrl() throws Throwable { // Make sure that the downloaded files are EQUAL, // they should be gzip compressed - assert (isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); - assert (isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); + assertTrue( + isFileContentEqual(srcPath1, false, destFolderCanonicalPath + "/file1.gz", true)); + assertTrue( + isFileContentEqual(srcPath2, false, destFolderCanonicalPath + "/file2.gz", true)); } finally { statement.execute("DROP STAGE if exists " + testStageName); } @@ -1489,7 +1495,7 @@ public void testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { for (SnowflakeFileTransferMetadata oneMetadata : metadata) { InputStream inputStream = new FileInputStream(srcPath); - assert (oneMetadata.isForOneFile()); + assertTrue(oneMetadata.isForOneFile()); SnowflakeFileTransferAgent.uploadWithoutConnection( SnowflakeFileTransferConfig.Builder.newInstance() .setSnowflakeFileTransferMetadata(oneMetadata) @@ -1504,7 +1510,7 @@ public void 
testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { "Failed to get files", statement.execute( "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); - assert (isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); + assertTrue(isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); } finally { statement.execute("DROP STAGE if exists " + testStageName); } diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java index 56bd318b8..d37d88118 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java @@ -132,7 +132,7 @@ public void testCopyAndUpload() throws Exception { // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(tempFolder.getCanonicalPath() + File.separator + fileName + ".gz"); - assert (downloaded.exists()); + assertTrue(downloaded.exists()); } // unzip the new file Process p = diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java index 627f1db31..bf05325e0 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java @@ -161,7 +161,7 @@ public void testTransactionStatement() throws SQLException { PooledConnection pooledConnection = poolDataSource.getPooledConnection(); try (Connection logicalConnection = pooledConnection.getConnection()) { logicalConnection.setAutoCommit(false); - assert (!logicalConnection.getAutoCommit()); + assertFalse(logicalConnection.getAutoCommit()); logicalConnection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); assertEquals(2, logicalConnection.getTransactionIsolation()); From 
72d97250676d9ff9130bcc837f8d97a2a9093652 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 30 Apr 2024 14:46:04 +0200 Subject: [PATCH 12/54] SNOW-1356393: Fix flaky test EventHandlerTest.testEventFlusher (#1744) --- .../java/net/snowflake/client/core/EventHandlerTest.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/test/java/net/snowflake/client/core/EventHandlerTest.java b/src/test/java/net/snowflake/client/core/EventHandlerTest.java index 48b381330..eb930f7c6 100644 --- a/src/test/java/net/snowflake/client/core/EventHandlerTest.java +++ b/src/test/java/net/snowflake/client/core/EventHandlerTest.java @@ -62,12 +62,11 @@ public void testDumpLogBuffer() throws IOException { @Test public void testEventFlusher() { EventHandler handler = new EventHandler(2, 1000); - handler.startFlusher(); + assertEquals(0, handler.getBufferSize()); handler.triggerBasicEvent(Event.EventType.STATE_TRANSITION, "test event"); - assertEquals(handler.getBufferSize(), 1); + assertEquals(1, handler.getBufferSize()); handler.triggerBasicEvent(Event.EventType.STATE_TRANSITION, "test event 2"); // buffer should flush when max entries is reached - assertEquals(handler.getBufferSize(), 0); - handler.stopFlusher(); + assertEquals(0, handler.getBufferSize()); } } From 18ae23db03e555cbe78dc2ab238c473871b78ab5 Mon Sep 17 00:00:00 2001 From: Dawid Heyman Date: Fri, 10 May 2024 15:14:39 +0200 Subject: [PATCH 13/54] Fix MapVector casting issue (#1750) * Fix MapVector casting issue --- .../java/net/snowflake/client/jdbc/ArrowResultChunk.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java index 103f90555..3516966e6 100644 --- a/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java +++ b/src/main/java/net/snowflake/client/jdbc/ArrowResultChunk.java @@ -212,7 
+212,11 @@ private static List initConverters( break; case MAP: - converters.add(new MapConverter((MapVector) vector, i, context)); + if (vector instanceof MapVector) { + converters.add(new MapConverter((MapVector) vector, i, context)); + } else { + converters.add(new VarCharConverter(vector, i, context)); + } break; case VECTOR: From c60113733044b115969f3fcfe9adf35e3a6597a5 Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Thu, 16 May 2024 01:48:52 +0900 Subject: [PATCH 14/54] SNOW-1226600: Add parameter to disable SAML URL check (#1748) --- .../snowflake/client/core/SFLoginInput.java | 10 ++ .../net/snowflake/client/core/SFSession.java | 7 +- .../client/core/SFSessionProperty.java | 4 +- .../snowflake/client/core/SessionUtil.java | 19 +- .../client/core/SessionUtilLatestIT.java | 167 ++++++++++++++++++ 5 files changed, 198 insertions(+), 9 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/SFLoginInput.java b/src/main/java/net/snowflake/client/core/SFLoginInput.java index 3d53bf104..13a52604c 100644 --- a/src/main/java/net/snowflake/client/core/SFLoginInput.java +++ b/src/main/java/net/snowflake/client/core/SFLoginInput.java @@ -49,6 +49,7 @@ public class SFLoginInput { private String inFlightCtx; // Opaque string sent for Snowsight account activation private boolean disableConsoleLogin = true; + private boolean disableSamlURLCheck = false; // Additional headers to add for Snowsight. 
Map additionalHttpHeadersForSnowsight; @@ -378,6 +379,15 @@ SFLoginInput setInFlightCtx(String inFlightCtx) { return this; } + boolean getDisableSamlURLCheck() { + return disableSamlURLCheck; + } + + SFLoginInput setDisableSamlURLCheck(boolean disableSamlURLCheck) { + this.disableSamlURLCheck = disableSamlURLCheck; + return this; + } + Map getAdditionalHttpHeadersForSnowsight() { return additionalHttpHeadersForSnowsight; } diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index 5f653019d..3d0900940 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -608,7 +608,12 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.DISABLE_CONSOLE_LOGIN) != null ? getBooleanValue( connectionPropertiesMap.get(SFSessionProperty.DISABLE_CONSOLE_LOGIN)) - : true); + : true) + .setDisableSamlURLCheck( + connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK) != null + ? getBooleanValue( + connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK)) + : false); // Enable or disable OOB telemetry based on connection parameter. Default is disabled. // The value may still change later when session parameters from the server are read. 
diff --git a/src/main/java/net/snowflake/client/core/SFSessionProperty.java b/src/main/java/net/snowflake/client/core/SFSessionProperty.java index 0ca91809c..359448d24 100644 --- a/src/main/java/net/snowflake/client/core/SFSessionProperty.java +++ b/src/main/java/net/snowflake/client/core/SFSessionProperty.java @@ -82,7 +82,9 @@ public enum SFSessionProperty { DISABLE_GCS_DEFAULT_CREDENTIALS("disableGcsDefaultCredentials", false, Boolean.class), - JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class); + JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class), + + DISABLE_SAML_URL_CHECK("disableSamlURLCheck", false, Boolean.class); // property key in string private String propertyKey; diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index a3421e841..ec856112d 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -1154,6 +1154,16 @@ private static String federatedFlowStep4( loginInput.getHttpClientSettingsKey()); // step 5 + validateSAML(responseHtml, loginInput); + } catch (IOException | URISyntaxException ex) { + handleFederatedFlowError(loginInput, ex); + } + return responseHtml; + } + + private static void validateSAML(String responseHtml, SFLoginInput loginInput) + throws SnowflakeSQLException, MalformedURLException { + if (!loginInput.getDisableSamlURLCheck()) { String postBackUrl = getPostBackUrlFromHTML(responseHtml); if (!isPrefixEqual(postBackUrl, loginInput.getServerUrl())) { URL idpDestinationUrl = new URL(postBackUrl); @@ -1167,18 +1177,13 @@ private static String federatedFlowStep4( clientDestinationHostName, idpDestinationHostName); - // Session is in process of getting created, so exception constructor takes in null session - // value + // Session is in process of getting created, so exception constructor takes in null throw 
new SnowflakeSQLLoggedException( null, ErrorCode.IDP_INCORRECT_DESTINATION.getMessageCode(), - SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION - /* session = */ ); + SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION); } - } catch (IOException | URISyntaxException ex) { - handleFederatedFlowError(loginInput, ex); } - return responseHtml; } /** diff --git a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java index dd6d5e7bd..f936ee616 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java @@ -465,4 +465,171 @@ public void testOktaAuthRetry() throws Throwable { SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); } } + + /** + * Tests the disableSamlURLCheck. If the disableSamlUrl is provided to the login input with true, + * the driver will skip checking the format of the saml URL response. This latest test will work + * with jdbc > 3.16.0 + * + * @throws Throwable + */ + @Test + public void testOktaDisableSamlUrlCheck() throws Throwable { + SFLoginInput loginInput = createOktaLoginInput(); + loginInput.setDisableSamlURLCheck(true); + Map connectionPropertiesMap = initConnectionPropertiesMap(); + try (MockedStatic mockedHttpUtil = mockStatic(HttpUtil.class)) { + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpPost.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"data\":{\"tokenUrl\":\"https://testauth.okta.com/api/v1/authn\"," + + "\"ssoUrl\":\"https://testauth.okta.com/app/snowflake/abcdefghijklmnopqrstuvwxyz/sso/saml\"," + + "\"proofKey\":null},\"code\":null,\"message\":null,\"success\":true}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeRequestWithoutCookies( + Mockito.any(HttpRequestBase.class), + Mockito.anyInt(), 
+ Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(AtomicBoolean.class), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"expiresAt\":\"2023-10-13T19:18:09.000Z\",\"status\":\"SUCCESS\",\"sessionToken\":\"testsessiontoken\"}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpGet.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn("

"); + + SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); + } + } + + @Test + public void testInvalidOktaSamlFormat() throws Throwable { + SFLoginInput loginInput = createOktaLoginInput(); + Map connectionPropertiesMap = initConnectionPropertiesMap(); + try (MockedStatic mockedHttpUtil = mockStatic(HttpUtil.class)) { + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpPost.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"data\":{\"tokenUrl\":\"https://testauth.okta.com/api/v1/authn\"," + + "\"ssoUrl\":\"https://testauth.okta.com/app/snowflake/abcdefghijklmnopqrstuvwxyz/sso/saml\"," + + "\"proofKey\":null},\"code\":null,\"message\":null,\"success\":true}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeRequestWithoutCookies( + Mockito.any(HttpRequestBase.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(AtomicBoolean.class), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"expiresAt\":\"2023-10-13T19:18:09.000Z\",\"status\":\"SUCCESS\",\"sessionToken\":\"testsessiontoken\"}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpGet.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn("
"); + + SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); + fail("Should be failed because of the invalid form"); + } catch (SnowflakeSQLException ex) { + assertEquals((int) ErrorCode.NETWORK_ERROR.getMessageCode(), ex.getErrorCode()); + } + } + + @Test + public void testOktaWithInvalidHostName() throws Throwable { + SFLoginInput loginInput = createOktaLoginInput(); + Map connectionPropertiesMap = initConnectionPropertiesMap(); + try (MockedStatic mockedHttpUtil = mockStatic(HttpUtil.class)) { + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpPost.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"data\":{\"tokenUrl\":\"https://testauth.okta.com/api/v1/authn\"," + + "\"ssoUrl\":\"https://testauth.okta.com/app/snowflake/abcdefghijklmnopqrstuvwxyz/sso/saml\"," + + "\"proofKey\":null},\"code\":null,\"message\":null,\"success\":true}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeRequestWithoutCookies( + Mockito.any(HttpRequestBase.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(AtomicBoolean.class), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn( + "{\"expiresAt\":\"2023-10-13T19:18:09.000Z\",\"status\":\"SUCCESS\",\"sessionToken\":\"testsessiontoken\"}"); + + mockedHttpUtil + .when( + () -> + HttpUtil.executeGeneralRequest( + Mockito.any(HttpGet.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.nullable(HttpClientSettingsKey.class))) + .thenReturn("
"); + + SessionUtil.openSession(loginInput, connectionPropertiesMap, "ALL"); + fail("Should be failed because of the invalid form"); + } catch (SnowflakeSQLException ex) { + assertEquals((int) ErrorCode.IDP_INCORRECT_DESTINATION.getMessageCode(), ex.getErrorCode()); + } + } } From 23db457bae8d411d361c4f33b70d11957656702c Mon Sep 17 00:00:00 2001 From: Nancy Trinh Date: Mon, 20 May 2024 09:07:24 -0700 Subject: [PATCH 15/54] SNOW-1308317 SNOW-1432770 Update DatabaseMetaDataInternalIT testGetTables to work with adding account_usage_snowhouse_local_setup_import.sql to make setup (#1760) --- .../jdbc/DatabaseMetaDataInternalIT.java | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java index c5c0ec072..d0eed3c5f 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java @@ -476,15 +476,25 @@ public void testGetTables() throws SQLException { assertEquals(0, getSizeOfResultSet(resultSet)); } + // Get the count of tables in the SNOWFLAKE system database, so we can exclude them from + // subsequent assertions + int numSnowflakeTables = 0; + try (ResultSet snowflakeResultSet = + databaseMetaData.getTables("SNOWFLAKE", null, null, null)) { + numSnowflakeTables = getSizeOfResultSet(snowflakeResultSet); + } + try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, null)) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllTable), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllTable), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[] {"VIEW", "SYSTEM_TABLE"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllView), getSizeOfResultSet(resultSet)); + 
getAllObjectCountInDBViaInforSchema(getAllView), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = @@ -497,13 +507,15 @@ public void testGetTables() throws SQLException { databaseMetaData.getTables( null, null, null, new String[] {"TABLE", "VIEW", "SYSTEM_TABLE"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllTable), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllTable), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[] {"TABLE", "VIEW"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllTable), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllTable), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = @@ -515,7 +527,8 @@ public void testGetTables() throws SQLException { try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[] {"VIEW"})) { assertEquals( - getAllObjectCountInDBViaInforSchema(getAllView), getSizeOfResultSet(resultSet)); + getAllObjectCountInDBViaInforSchema(getAllView), + getSizeOfResultSet(resultSet) - numSnowflakeTables); } try (ResultSet resultSet = From ffb87d7b31f34facafcee0f162843554a55d02e7 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 21 May 2024 12:43:48 +0200 Subject: [PATCH 16/54] SNOW-1416051: Fix getObject for array in json (#1761) --- .../snowflake/client/jdbc/SnowflakeUtil.java | 3 ++- .../net/snowflake/client/jdbc/ResultSetIT.java | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index e156e206b..884f832ed 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -311,8 
+311,9 @@ static ColumnTypeInfo getSnowflakeType( break; case ARRAY: + int columnType = isStructuredType ? Types.ARRAY : Types.VARCHAR; columnTypeInfo = - new ColumnTypeInfo(Types.ARRAY, defaultIfNull(extColTypeName, "ARRAY"), baseType); + new ColumnTypeInfo(columnType, defaultIfNull(extColTypeName, "ARRAY"), baseType); break; case MAP: diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java index bce1d97cc..3e5343117 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java @@ -1056,4 +1056,22 @@ public void testNextNegative() throws SQLException { } } } + + /** SNOW-1416051; Added in > 3.16.0 */ + @Test + public void shouldSerializeArrayAndObjectAsStringOnGetObject() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select ARRAY_CONSTRUCT(1,2,3), OBJECT_CONSTRUCT('a', 4, 'b', 'test')")) { + assertTrue(resultSet.next()); + String expectedArrayAsString = "[\n 1,\n 2,\n 3\n]"; + assertEquals(expectedArrayAsString, resultSet.getObject(1)); + assertEquals(expectedArrayAsString, resultSet.getString(1)); + String expectedObjectAsString = "{\n \"a\": 4,\n \"b\": \"test\"\n}"; + assertEquals(expectedObjectAsString, resultSet.getObject(2)); + assertEquals(expectedObjectAsString, resultSet.getString(2)); + } + } } From fb234082b9d4116da40b69ede8f255b6e824660d Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Wed, 22 May 2024 11:18:06 +0200 Subject: [PATCH 17/54] SNOW-1163212: Handle nested path on Windows in client config parser (#1763) --- .../net/snowflake/client/config/SFClientConfigParser.java | 2 ++ .../net/snowflake/client/config/SFClientConfigParserTest.java | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git 
a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java index 2f3ee3b91..c2923af67 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java @@ -110,6 +110,8 @@ static String convertToWindowsPath(String filePath) { filePath = filePath.substring(1); } else if (filePath.startsWith("file:\\")) { filePath = filePath.substring(6); + } else if (filePath.startsWith("nested:\\")) { + filePath = filePath.substring(8); } else if (filePath.startsWith("\\")) { filePath = filePath.substring(2); } else if (matcher.find() && matcher.start() != 0) { diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index 225fff203..48a77625c 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -135,10 +135,10 @@ public void testgetConfigFileNameFromJDBCJarLocation() { } @Test - public void testconvertToWindowsPath() { + public void testConvertToWindowsPath() { String mockWindowsPath = "C:/Program Files/example.txt"; String resultWindowsPath = "C:\\Program Files\\example.txt"; - String[] testCases = new String[] {"", "file:\\", "\\\\", "/"}; + String[] testCases = new String[] {"", "file:\\", "\\\\", "/", "nested:\\"}; String mockCloudPrefix = "cloud://"; for (String testcase : testCases) { From 3c4b5216d3f99ca9922293e8073594e3d8f7a300 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 27 May 2024 07:37:17 +0200 Subject: [PATCH 18/54] SNOW-1446336: Use S3 regional url domain base on region name (#1768) --- .../jdbc/cloud/storage/SnowflakeS3Client.java | 7 ++++++- .../cloud/storage/SnowflakeS3ClientTest.java | 18 ++++++++++++++++++ 2 files 
changed, 24 insertions(+), 1 deletion(-) create mode 100644 src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java index 190493b69..958eaca2e 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java @@ -217,9 +217,10 @@ private void setupSnowflakeS3Client( } else { if (region != null) { if (this.isUseS3RegionalUrl) { + String domainSuffixForRegionalUrl = getDomainSuffixForRegionalUrl(region.getName()); amazonS3Builder.withEndpointConfiguration( new AwsClientBuilder.EndpointConfiguration( - "s3." + region.getName() + ".amazonaws.com", region.getName())); + "s3." + region.getName() + "." + domainSuffixForRegionalUrl, region.getName())); } else { amazonS3Builder.withRegion(region.getName()); } @@ -230,6 +231,10 @@ private void setupSnowflakeS3Client( amazonClient = (AmazonS3) amazonS3Builder.build(); } + static String getDomainSuffixForRegionalUrl(String regionName) { + return regionName.startsWith("cn-") ? "amazonaws.com.cn" : "amazonaws.com"; + } + // Returns the Max number of retry attempts @Override public int getMaxRetries() { diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java new file mode 100644 index 000000000..d9019c8e7 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.jdbc.cloud.storage; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class SnowflakeS3ClientTest { + + @Test + public void shouldDetermineDomainForRegion() { + assertEquals("amazonaws.com", SnowflakeS3Client.getDomainSuffixForRegionalUrl("us-east-1")); + assertEquals( + "amazonaws.com.cn", SnowflakeS3Client.getDomainSuffixForRegionalUrl("cn-northwest-1")); + } +} From 573911d33af9fe0b361dbdb3db3618a5c72224c2 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 27 May 2024 13:37:56 +0200 Subject: [PATCH 19/54] Bump version to 3.16.1 for release (#1769) --- CHANGELOG.rst | 4 ++++ FIPS/pom.xml | 4 ++-- parent-pom.xml | 2 +- pom.xml | 4 ++-- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f2f480238..5cec7897f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,7 @@ +**JDBC Driver 3.16.1** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + **JDBC Driver 3.16.0** - \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 6000c0941..3e6160574 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.16.1-SNAPSHOT + 3.16.1 ../parent-pom.xml snowflake-jdbc-fips - 3.16.1-SNAPSHOT + 3.16.1 jar snowflake-jdbc-fips diff --git a/parent-pom.xml b/parent-pom.xml index ce94fd59f..fad3b666c 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.16.1-SNAPSHOT + 3.16.1 pom diff --git a/pom.xml b/pom.xml index 25290bb6b..704775d1b 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.16.1-SNAPSHOT + 3.16.1 ./parent-pom.xml ${artifactId} - 3.16.1-SNAPSHOT + 3.16.1 jar ${artifactId} From 5242b8329cd9e58293a2cfb26e796b82646f0f86 
Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 28 May 2024 07:23:04 +0200 Subject: [PATCH 20/54] Prepare next development version (#1771) --- FIPS/pom.xml | 4 ++-- parent-pom.xml | 2 +- pom.xml | 4 ++-- src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 3e6160574..a43024c76 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.16.1 + 3.16.2-SNAPSHOT ../parent-pom.xml snowflake-jdbc-fips - 3.16.1 + 3.16.2-SNAPSHOT jar snowflake-jdbc-fips diff --git a/parent-pom.xml b/parent-pom.xml index fad3b666c..1c5ab3c2f 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.16.1 + 3.16.2-SNAPSHOT pom diff --git a/pom.xml b/pom.xml index 704775d1b..b1ea7d047 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.16.1 + 3.16.2-SNAPSHOT ./parent-pom.xml ${artifactId} - 3.16.1 + 3.16.2-SNAPSHOT jar ${artifactId} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 0f7d6a706..6baba4a57 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -29,7 +29,7 @@ public class SnowflakeDriver implements Driver { static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.16.1"; + public static String implementVersion = "3.16.2"; static int majorVersion = 0; static int minorVersion = 0; From d84639c863bb83c7e4fdfbe18cd0d5f7980f1846 Mon Sep 17 00:00:00 2001 From: Piotr Bulawa Date: Tue, 28 May 2024 09:33:08 +0200 Subject: [PATCH 21/54] SNOW-1009500: Update PR template (#1772) --- .github/pull_request_template.md | 1 + 1 file 
changed, 1 insertion(+) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 9ef1e0ed3..d07631635 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -7,6 +7,7 @@ SNOW-XXXXX - [ ] The code is correctly formatted (run `mvn -P check-style validate`) - [ ] New public API is not unnecessary exposed (run `mvn verify` and inspect `target/japicmp/japicmp.html`) - [ ] The pull request name is prefixed with `SNOW-XXXX: ` +- [ ] Code is in compliance with internal logging requirements ## External contributors - please answer these questions before submitting a pull request. Thanks! From ac0e3931c76eebee246fb44aa1d71790ea91ec7a Mon Sep 17 00:00:00 2001 From: Waleed Fateem <72769898+sfc-gh-wfateem@users.noreply.github.com> Date: Fri, 7 Jun 2024 03:49:48 -0500 Subject: [PATCH 22/54] SNOW-1454594 Remove Shading of com.sun.* (#1775) --- FIPS/pom.xml | 7 ------- pom.xml | 7 ------- 2 files changed, 14 deletions(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index a43024c76..e9934e0b5 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -466,13 +466,6 @@ org.objectweb ${shadeBase}.org.objectweb - - com.sun - ${shadeBase}.com.sun - - com.sun.jna.** - - io.netty ${shadeBase}.io.netty diff --git a/pom.xml b/pom.xml index b1ea7d047..9e5f5c744 100644 --- a/pom.xml +++ b/pom.xml @@ -886,13 +886,6 @@ org.objectweb ${shadeBase}.org.objectweb - - com.sun - ${shadeBase}.com.sun - - com.sun.jna.** - - io.netty ${shadeBase}.io.netty From a79f58d1253dec2bf4766760a628664563bb93c7 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Fri, 7 Jun 2024 11:24:09 +0200 Subject: [PATCH 23/54] SNOW-1446174: Allow 513 and 404 in SFTrustManager tests (#1782) --- src/test/java/net/snowflake/client/core/SFTrustManagerIT.java | 2 +- src/test/java/net/snowflake/client/jdbc/RestRequestTest.java | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git 
a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java index 13f3f8f09..0a9d96dd2 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java @@ -182,7 +182,7 @@ private static void accessHost(String host, HttpClient client) throws IOExceptio assertThat( String.format("response code for %s", host), statusCode, - anyOf(equalTo(200), equalTo(403), equalTo(400))); + anyOf(equalTo(200), equalTo(400), equalTo(403), equalTo(404), equalTo(513))); } /** diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java index ed6e165d1..608f69d02 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java @@ -272,6 +272,7 @@ class TestCase { testCases.add(new TestCase(509, false, false)); testCases.add(new TestCase(510, false, false)); testCases.add(new TestCase(511, false, false)); + testCases.add(new TestCase(513, false, false)); // do retry on HTTP 403 option testCases.add(new TestCase(100, true, true)); testCases.add(new TestCase(101, true, true)); @@ -325,6 +326,7 @@ class TestCase { testCases.add(new TestCase(509, true, false)); testCases.add(new TestCase(510, true, false)); testCases.add(new TestCase(511, true, false)); + testCases.add(new TestCase(513, true, false)); for (TestCase t : testCases) { if (t.result) { From e66fae55402a65ff1fb3472c49e01dd0809c8107 Mon Sep 17 00:00:00 2001 From: Piotr Bulawa Date: Mon, 10 Jun 2024 10:38:28 +0200 Subject: [PATCH 24/54] SNOW-1460355: Fix getHostFromServerUrl (#1779) --- .../snowflake/client/core/SFLoginInput.java | 6 ++++- .../client/core/SFLoginInputTest.java | 22 +++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 src/test/java/net/snowflake/client/core/SFLoginInputTest.java diff --git 
a/src/main/java/net/snowflake/client/core/SFLoginInput.java b/src/main/java/net/snowflake/client/core/SFLoginInput.java index 13a52604c..18ebfaa57 100644 --- a/src/main/java/net/snowflake/client/core/SFLoginInput.java +++ b/src/main/java/net/snowflake/client/core/SFLoginInput.java @@ -426,7 +426,11 @@ static boolean getBooleanValue(Object v) { String getHostFromServerUrl() throws SFException { URL url; try { - url = new URL(serverUrl); + if (!serverUrl.startsWith("http")) { + url = new URL("https://" + serverUrl); + } else { + url = new URL(serverUrl); + } } catch (MalformedURLException e) { throw new SFException( e, ErrorCode.INTERNAL_ERROR, "Invalid serverUrl for retrieving host name"); diff --git a/src/test/java/net/snowflake/client/core/SFLoginInputTest.java b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java new file mode 100644 index 000000000..7d8a5b67b --- /dev/null +++ b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java @@ -0,0 +1,22 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class SFLoginInputTest { + + @Test + public void testGetHostFromServerUrlWithoutProtocolShouldNotThrow() throws SFException { + SFLoginInput sfLoginInput = new SFLoginInput(); + sfLoginInput.setServerUrl("host.com:443"); + assertEquals("host.com", sfLoginInput.getHostFromServerUrl()); + } + + @Test + public void testGetHostFromServerUrlWithProtocolShouldNotThrow() throws SFException { + SFLoginInput sfLoginInput = new SFLoginInput(); + sfLoginInput.setServerUrl("https://host.com"); + assertEquals("host.com", sfLoginInput.getHostFromServerUrl()); + } +} From bfe205c1e964c0cc763fa57bdfd2a2a67c0d6dac Mon Sep 17 00:00:00 2001 From: Juan Martinez Ramirez <126511805+sfc-gh-jmartinez@users.noreply.github.com> Date: Mon, 10 Jun 2024 10:57:24 -0600 Subject: [PATCH 25/54] SNOW-1016467: Support test Mac environment in Github actions. 
(#1765) * SNOW-1016467 Support test Mac environment in Github actions. * Changed task name to include OS and java version --- .github/workflows/build-test.yml | 33 ++++++++++++++++++++++++++++++++ ci/container/test_component.sh | 13 +++++++------ ci/log_analyze_setup.sh | 31 +++++++++++++++++++++++------- ci/test_mac.sh | 21 ++++++++++++++++++++ 4 files changed, 85 insertions(+), 13 deletions(-) create mode 100755 ci/test_mac.sh diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index fce668461..6c3022b75 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -36,6 +36,39 @@ jobs: WHITESOURCE_API_KEY: ${{ secrets.WHITESOURCE_API_KEY }} run: ./ci/build.sh + test-mac: + needs: build + name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} / Test on Mac(java ${{ matrix.javaVersion }}, ${{ matrix.cloud }} ) + runs-on: macos-13 + strategy: + fail-fast: false + matrix: + cloud: [ 'AWS' ] + javaVersion: [ '8', '11', '17'] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + additionalMavenProfile: ['', '-Dthin-jar'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + java-version: ${{ matrix.javaVersion }} + distribution: 'temurin' + cache: maven + - uses: actions/setup-python@v4 + with: + python-version: '3.7' + - name: Install Homebrew Bash + shell: bash + run: brew install bash + - name: Tests + shell: bash + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + CLOUD_PROVIDER: ${{ matrix.cloud }} + JDBC_TEST_CATEGORY: ${{ matrix.category }} + ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} + run: /usr/local/bin/bash ./ci/test_mac.sh + test-linux: needs: build name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} on ${{ matrix.image }} diff --git 
a/ci/container/test_component.sh b/ci/container/test_component.sh index 6a3479e7c..da245a627 100755 --- a/ci/container/test_component.sh +++ b/ci/container/test_component.sh @@ -1,11 +1,12 @@ #!/bin/bash -e # -# Test JDBC for Linux +# Test JDBC for Linux/MAC # set -o pipefail THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" export WORKSPACE=${WORKSPACE:-/mnt/workspace} export SOURCE_ROOT=${SOURCE_ROOT:-/mnt/host} +MVNW_EXE=$SOURCE_ROOT/mvnw echo "[INFO] Download JDBC Integration test cases and libraries" source $THIS_DIR/download_artifact.sh @@ -76,15 +77,15 @@ export MAVEN_OPTS="$MAVEN_OPTS -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=fa cd $SOURCE_ROOT # Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched -mvn --batch-mode --show-version dependency:go-offline +$MVNW_EXE --batch-mode --show-version dependency:go-offline for c in "${CATEGORY[@]}"; do c=$(echo $c | sed 's/ *$//g') if [[ "$is_old_driver" == "true" ]]; then pushd TestOnly >& /dev/null - JDBC_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") + JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") echo "[INFO] Run JDBC $JDBC_VERSION tests" - mvn -DjenkinsIT \ + $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ -DtestCategory=net.snowflake.client.category.$c \ @@ -95,7 +96,7 @@ for c in "${CATEGORY[@]}"; do elif [[ "$c" == "TestCategoryFips" ]]; then pushd FIPS >& /dev/null echo "[INFO] Run Fips tests" - mvn -DjenkinsIT \ + $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ @@ -105,7 +106,7 @@ for c in "${CATEGORY[@]}"; do popd >& /dev/null else echo "[INFO] Run $c tests" - mvn -DjenkinsIT \ + $MVNW_EXE 
-DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ -DtestCategory=net.snowflake.client.category.$c \ diff --git a/ci/log_analyze_setup.sh b/ci/log_analyze_setup.sh index b77d473c0..fd573d194 100755 --- a/ci/log_analyze_setup.sh +++ b/ci/log_analyze_setup.sh @@ -31,19 +31,36 @@ fi # The new complex password we use for jenkins test export SNOWFLAKE_TEST_PASSWORD_NEW="ThisIsRandomPassword123!" -LOG_PROPERTY_FILE_DOCKER=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resources/logging.properties +LOG_PROPERTY_FILE=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resources/logging.properties export CLIENT_DRIVER_NAME=JDBC function setup_log_env() { - sed -i "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH_DOCKER|" ${LOG_PROPERTY_FILE_DOCKER} + if ["$WORKSPACE" == "/mnt/workspace"]; then + CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH_DOCKER + CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH_DOCKER + CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER + else + CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH + CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH + CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH + fi + echo "[INFO] CLIENT_LOG_DIR_PATH=$CLIENT_LOG_DIR_PATH" + echo "[INFO] CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH" + echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH" + echo "[INFO] Replace file handler for log file $LOG_PROPERTY_FILE" + + sed -i'' -e "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH|" ${LOG_PROPERTY_FILE} - if [[ ! -d ${LOCAL_CLIENT_LOG_DIR_PATH_DOCKER} ]]; then - mkdir -p ${LOCAL_CLIENT_LOG_DIR_PATH_DOCKER} + if [[ ! 
-d ${CLIENT_LOG_DIR_PATH} ]]; then + echo "[INFO] create clien log directory $CLIENT_LOG_DIR_PATH" + mkdir -p ${CLIENT_LOG_DIR_PATH} fi - if [[ -f $CLIENT_KNOWN_SSM_FILE_PATH_DOCKER ]]; then - rm -f $CLIENT_KNOWN_SSM_FILE_PATH_DOCKER + if [[ -f $CLIENT_KNOWN_SSM_FILE_PATH ]]; then + rm -f $CLIENT_KNOWN_SSM_FILE_PATH fi - touch $CLIENT_KNOWN_SSM_FILE_PATH_DOCKER + + touch $CLIENT_KNOWN_SSM_FILE_PATH + echo "[INFO] finish setup log env" } diff --git a/ci/test_mac.sh b/ci/test_mac.sh new file mode 100755 index 000000000..7f98e9add --- /dev/null +++ b/ci/test_mac.sh @@ -0,0 +1,21 @@ +#!/bin/bash -e +# +# Test JDBC for Mac +# + +echo "DOWNLOADED" +set -o pipefail +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +source $THIS_DIR/_init.sh +source $THIS_DIR/scripts/set_git_info.sh + + +export WORKSPACE=$GITHUB_WORKSPACE +export SOURCE_ROOT=$GITHUB_WORKSPACE + +python3 --version +python3 -m venv venv +source venv/bin/activate +pip3 install -U pip +pip3 install -U snowflake-connector-python +$THIS_DIR/container/test_component.sh From 9b7c9eb3e02c93cf7788c9866e1a483fe978e1be Mon Sep 17 00:00:00 2001 From: Piotr Bulawa Date: Tue, 11 Jun 2024 13:27:48 +0200 Subject: [PATCH 26/54] SNOW-1017605: Improve logging (#1662) --- .../client/config/SFClientConfig.java | 24 ++ .../client/config/SFClientConfigParser.java | 14 +- .../snowflake/client/core/ArrowSqlInput.java | 5 +- .../client/core/CredentialManager.java | 38 +- .../client/core/ExecTimeTelemetryData.java | 192 ++++----- .../client/core/FileCacheManager.java | 56 +-- .../net/snowflake/client/core/FileUtil.java | 78 ++++ .../client/core/HeartbeatBackground.java | 22 +- .../client/core/HttpClientSettingsKey.java | 31 ++ .../net/snowflake/client/core/HttpUtil.java | 93 +++- .../snowflake/client/core/JsonSqlInput.java | 4 +- .../client/core/QueryContextCache.java | 18 +- .../net/snowflake/client/core/ResultUtil.java | 4 +- .../client/core/SFArrowResultSet.java | 8 +- .../client/core/SFBaseResultSet.java | 6 +- 
.../snowflake/client/core/SFBaseSession.java | 51 ++- .../client/core/SFBaseStatement.java | 8 +- .../snowflake/client/core/SFException.java | 2 +- .../client/core/SFFixedViewResultSet.java | 6 +- .../client/core/SFJsonResultSet.java | 28 +- .../client/core/SFOCSPException.java | 2 +- .../snowflake/client/core/SFResultSet.java | 4 +- .../client/core/SFResultSetMetaData.java | 2 +- .../core/SFSSLConnectionSocketFactory.java | 3 +- .../net/snowflake/client/core/SFSession.java | 95 +++-- .../snowflake/client/core/SFStatement.java | 20 +- .../snowflake/client/core/SFTrustManager.java | 105 ++--- .../core/SecureStorageAppleManager.java | 1 + .../core/SecureStorageLinuxManager.java | 2 + .../core/SecureStorageWindowsManager.java | 1 + .../snowflake/client/core/SessionUtil.java | 96 ++++- .../core/SessionUtilExternalBrowser.java | 11 +- .../client/core/SessionUtilKeyPair.java | 13 +- .../net/snowflake/client/core/StmtUtil.java | 12 +- .../net/snowflake/client/core/URLUtil.java | 23 +- .../jdbc/DefaultResultStreamProvider.java | 28 +- .../client/jdbc/FileBackedOutputStream.java | 2 + .../snowflake/client/jdbc/RestRequest.java | 129 ++++-- .../client/jdbc/SFAsyncResultSet.java | 8 +- .../client/jdbc/SnowflakeBaseResultSet.java | 342 +++++++-------- .../client/jdbc/SnowflakeBasicDataSource.java | 2 +- .../jdbc/SnowflakeCallableStatementV1.java | 4 + .../client/jdbc/SnowflakeChunkDownloader.java | 174 +++++--- .../client/jdbc/SnowflakeConnectString.java | 2 +- .../client/jdbc/SnowflakeConnectionV1.java | 116 +++-- .../jdbc/SnowflakeDatabaseMetaData.java | 399 +++++++++--------- .../SnowflakeDatabaseMetaDataResultSet.java | 42 +- .../jdbc/SnowflakeFileTransferAgent.java | 91 ++-- .../jdbc/SnowflakePreparedStatementV1.java | 66 +-- .../client/jdbc/SnowflakeResultChunk.java | 4 + .../jdbc/SnowflakeResultSetMetaDataV1.java | 9 +- .../SnowflakeResultSetSerializableV1.java | 23 +- .../client/jdbc/SnowflakeResultSetV1.java | 7 +- .../client/jdbc/SnowflakeSQLException.java | 10 +- 
.../jdbc/SnowflakeSQLLoggedException.java | 5 +- .../jdbc/SnowflakeSimulatedUploadFailure.java | 3 +- .../client/jdbc/SnowflakeStatementV1.java | 110 ++--- .../client/jdbc/cloud/storage/S3HttpUtil.java | 38 ++ .../cloud/storage/SnowflakeAzureClient.java | 81 +++- .../cloud/storage/SnowflakeGCSClient.java | 116 ++++- .../jdbc/cloud/storage/SnowflakeS3Client.java | 94 ++++- .../cloud/storage/StorageClientFactory.java | 14 +- .../jdbc/cloud/storage/StorageHelper.java | 36 ++ .../jdbc/telemetry/TelemetryClient.java | 23 +- .../jdbc/telemetryOOB/TelemetryService.java | 35 +- .../snowflake/client/loader/BufferStage.java | 12 +- .../snowflake/client/loader/FileUploader.java | 24 +- .../client/loader/LoaderFactory.java | 4 +- .../snowflake/client/loader/LoadingError.java | 4 +- .../snowflake/client/loader/ProcessQueue.java | 34 +- .../net/snowflake/client/loader/PutQueue.java | 8 +- .../snowflake/client/loader/StreamLoader.java | 72 ++-- .../snowflake/client/log/SFLoggerUtil.java | 13 + .../client/pooling/LogicalConnection.java | 7 + .../pooling/SnowflakePooledConnection.java | 17 +- .../snowflake/client/util/SecretDetector.java | 6 - .../net/snowflake/client/util/Stopwatch.java | 94 +++++ .../client/util/TimeMeasurement.java | 48 +++ .../jdbc/jdbc_error_messages.properties | 1 + .../config/SFClientConfigParserTest.java | 111 +++-- .../SnowflakeChunkDownloaderLatestIT.java | 4 +- .../snowflake/client/util/StopwatchTest.java | 103 +++++ 82 files changed, 2444 insertions(+), 1209 deletions(-) create mode 100644 src/main/java/net/snowflake/client/core/FileUtil.java create mode 100644 src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java create mode 100644 src/main/java/net/snowflake/client/util/Stopwatch.java create mode 100644 src/main/java/net/snowflake/client/util/TimeMeasurement.java create mode 100644 src/test/java/net/snowflake/client/util/StopwatchTest.java diff --git a/src/main/java/net/snowflake/client/config/SFClientConfig.java 
b/src/main/java/net/snowflake/client/config/SFClientConfig.java index 1029b1167..a11071b1f 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfig.java +++ b/src/main/java/net/snowflake/client/config/SFClientConfig.java @@ -1,11 +1,20 @@ package net.snowflake.client.config; +import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; /** POJO class for Snowflake's client config. */ public class SFClientConfig { + // Used to keep the unknown properties when deserializing + @JsonIgnore @JsonAnySetter private Map unknownParams = new LinkedHashMap<>(); + @JsonProperty("common") private CommonProps commonProps; @@ -33,6 +42,18 @@ public void setConfigFilePath(String configFilePath) { this.configFilePath = configFilePath; } + Set getUnknownParamKeys() { + Set unknownParamKeys = new LinkedHashSet<>(unknownParams.keySet()); + + if (!commonProps.unknownParams.isEmpty()) { + unknownParamKeys.addAll( + commonProps.unknownParams.keySet().stream() + .map(s -> "common:" + s) + .collect(Collectors.toCollection(LinkedHashSet::new))); + } + return unknownParamKeys; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -51,6 +72,9 @@ public int hashCode() { } public static class CommonProps { + // Used to keep the unknown properties when deserializing + @JsonIgnore @JsonAnySetter Map unknownParams = new LinkedHashMap<>(); + @JsonProperty("log_level") private String logLevel; diff --git a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java index c2923af67..3c960bc98 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java +++ 
b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java @@ -8,6 +8,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import net.snowflake.client.jdbc.SnowflakeDriver; @@ -36,21 +37,26 @@ public static SFClientConfig loadSFClientConfig(String configFilePath) throws IO String derivedConfigFilePath = null; if (configFilePath != null && !configFilePath.isEmpty()) { // 1. Try to read the file at configFilePath. + logger.info("Using config file specified from connection string: {}", configFilePath); derivedConfigFilePath = configFilePath; } else if (System.getenv().containsKey(SF_CLIENT_CONFIG_ENV_NAME)) { // 2. If SF_CLIENT_CONFIG_ENV_NAME is set, read from env. - derivedConfigFilePath = systemGetEnv(SF_CLIENT_CONFIG_ENV_NAME); + String filePath = systemGetEnv(SF_CLIENT_CONFIG_ENV_NAME); + logger.info("Using config file specified from environment variable: {}", filePath); + derivedConfigFilePath = filePath; } else { // 3. Read SF_CLIENT_CONFIG_FILE_NAME from where jdbc jar is loaded. String driverLocation = Paths.get(getConfigFilePathFromJDBCJarLocation(), SF_CLIENT_CONFIG_FILE_NAME).toString(); if (Files.exists(Paths.get(driverLocation))) { + logger.info("Using config file specified from driver directory: {}", driverLocation); derivedConfigFilePath = driverLocation; } else { // 4. Read SF_CLIENT_CONFIG_FILE_NAME if it is present in user home directory. 
String userHomeFilePath = Paths.get(systemGetProperty("user.home"), SF_CLIENT_CONFIG_FILE_NAME).toString(); if (Files.exists(Paths.get(userHomeFilePath))) { + logger.info("Using config file specified from home directory: {}", userHomeFilePath); derivedConfigFilePath = userHomeFilePath; } } @@ -60,6 +66,12 @@ public static SFClientConfig loadSFClientConfig(String configFilePath) throws IO File configFile = new File(derivedConfigFilePath); ObjectMapper objectMapper = new ObjectMapper(); SFClientConfig clientConfig = objectMapper.readValue(configFile, SFClientConfig.class); + Set<String> unknownParams = clientConfig.getUnknownParamKeys(); + if (!unknownParams.isEmpty()) { + for (String unknownParam : unknownParams) { + logger.warn("Unknown field from config: {}", unknownParam); + } + } clientConfig.setConfigFilePath(derivedConfigFilePath); return clientConfig; diff --git a/src/main/java/net/snowflake/client/core/ArrowSqlInput.java b/src/main/java/net/snowflake/client/core/ArrowSqlInput.java index 55e29bc65..f7093504e 100644 --- a/src/main/java/net/snowflake/client/core/ArrowSqlInput.java +++ b/src/main/java/net/snowflake/client/core/ArrowSqlInput.java @@ -4,7 +4,6 @@ package net.snowflake.client.core; -import static net.snowflake.client.core.SFResultSet.logger; import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException; import java.math.BigDecimal; @@ -22,12 +21,16 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.core.structs.SQLDataCreationHelper; import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.ThrowingBiFunction; import org.apache.arrow.vector.util.JsonStringArrayList; import org.apache.arrow.vector.util.JsonStringHashMap; @SnowflakeJdbcInternalApi public class ArrowSqlInput extends BaseSqlInput { + private static final SFLogger logger = SFLoggerFactory.getLogger(ArrowSqlInput.class); + private
final Map<String, Object> input; private int currentIndex = 0; private boolean wasNull = false; diff --git a/src/main/java/net/snowflake/client/core/CredentialManager.java b/src/main/java/net/snowflake/client/core/CredentialManager.java index 0cd91f9ce..a5b919d3d 100644 --- a/src/main/java/net/snowflake/client/core/CredentialManager.java +++ b/src/main/java/net/snowflake/client/core/CredentialManager.java @@ -40,6 +40,7 @@ private void initSecureStorageManager() { /** Helper function for tests to go back to normal settings. */ void resetSecureStorageManager() { + logger.debug("Resetting the secure storage manager"); initSecureStorageManager(); } @@ -49,6 +50,7 @@ void resetSecureStorageManager() { * @param manager */ void injectSecureStorageManager(SecureStorageManager manager) { + logger.debug("Injecting secure storage manager"); secureStorageManager = manager; } @@ -66,6 +68,10 @@ public static CredentialManager getInstance() { * @param loginInput login input to attach id token */ void fillCachedIdToken(SFLoginInput loginInput) throws SFException { + logger.debug( + "Looking for cached id token for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); fillCachedCredential(loginInput, ID_TOKEN); } @@ -75,6 +81,10 @@ void fillCachedIdToken(SFLoginInput loginInput) throws SFException { * @param loginInput login input to attach mfa token */ void fillCachedMfaToken(SFLoginInput loginInput) throws SFException { + logger.debug( + "Looking for cached mfa token for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); fillCachedCredential(loginInput, MFA_TOKEN); } @@ -106,16 +116,26 @@ synchronized void fillCachedCredential(SFLoginInput loginInput, String credType) } if (cred == null) { - logger.debug("retrieved %s is null", credType); + logger.debug("Retrieved {} is null", credType); } // cred can be null if (credType == ID_TOKEN) { + logger.debug( + "Setting {}id token for user: {}, host: {}", + cred == null ?
"null " : "", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); loginInput.setIdToken(cred); } else if (credType == MFA_TOKEN) { + logger.debug( + "Setting {}mfa token for user: {}, host: {}", + cred == null ? "null " : "", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); loginInput.setMfaToken(cred); } else { - logger.debug("unrecognized type %s for local cached credential", credType); + logger.debug("Unrecognized type {} for local cached credential", credType); } return; } @@ -127,6 +147,10 @@ synchronized void fillCachedCredential(SFLoginInput loginInput, String credType) * @param loginOutput loginOutput to denote to the cache */ void writeIdToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SFException { + logger.debug( + "Caching id token in a secure storage for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); writeTemporaryCredential(loginInput, loginOutput.getIdToken(), ID_TOKEN); } @@ -137,6 +161,10 @@ void writeIdToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SFE * @param loginOutput loginOutput to denote to the cache */ void writeMfaToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SFException { + logger.debug( + "Caching mfa token in a secure storage for user: {}, host: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl()); writeTemporaryCredential(loginInput, loginOutput.getMfaToken(), MFA_TOKEN); } @@ -150,7 +178,7 @@ void writeMfaToken(SFLoginInput loginInput, SFLoginOutput loginOutput) throws SF synchronized void writeTemporaryCredential(SFLoginInput loginInput, String cred, String credType) throws SFException { if (Strings.isNullOrEmpty(cred)) { - logger.debug("no %s is given.", credType); + logger.debug("No {} is given.", credType); return; // no credential } @@ -173,11 +201,15 @@ synchronized void writeTemporaryCredential(SFLoginInput loginInput, String cred, /** Delete the id token cache */ void 
deleteIdTokenCache(String host, String user) { + logger.debug( + "Removing cached id token from a secure storage for user: {}, host: {}", user, host); deleteTemporaryCredential(host, user, ID_TOKEN); } /** Delete the mfa token cache */ void deleteMfaTokenCache(String host, String user) { + logger.debug( + "Removing cached mfa token from a secure storage for user: {}, host: {}", user, host); deleteTemporaryCredential(host, user, MFA_TOKEN); } diff --git a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java index d4dd1ecf0..a9d40a054 100644 --- a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java +++ b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java @@ -5,24 +5,18 @@ import com.google.common.base.Strings; import net.minidev.json.JSONObject; -import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.util.TimeMeasurement; public class ExecTimeTelemetryData { - private long queryStart; - private long bindStart; - private long bindEnd; - private long gzipStart; - private long gzipEnd; - private long httpClientStart; - private long httpClientEnd; - private long responseIOStreamStart; - private long responseIOStreamEnd; - private long processResultChunkStart; - private long processResultChunkEnd; - private long createResultSetStart; - private long createResultSetEnd; - private long queryEnd; + private final TimeMeasurement query = new TimeMeasurement(); + private final TimeMeasurement bind = new TimeMeasurement(); + private final TimeMeasurement gzip = new TimeMeasurement(); + private final TimeMeasurement httpClient = new TimeMeasurement(); + private final TimeMeasurement responseIOStream = new TimeMeasurement(); + private final TimeMeasurement processResultChunk = new TimeMeasurement(); + private final TimeMeasurement createResultSet = new TimeMeasurement(); + private 
String batchId; private String queryId; private String queryFunction; @@ -34,11 +28,10 @@ public class ExecTimeTelemetryData { private String requestId; public ExecTimeTelemetryData(String queryFunction, String batchId) { - if (TelemetryService.getInstance().isHTAPEnabled()) { - this.queryStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); - this.queryFunction = queryFunction; - this.batchId = batchId; - } else { + this.query.setStart(); + this.queryFunction = queryFunction; + this.batchId = batchId; + if (!TelemetryService.getInstance().isHTAPEnabled()) { this.sendData = false; } } @@ -48,128 +41,74 @@ public ExecTimeTelemetryData() { } public void setBindStart() { - if (!this.sendData) { - return; - } - this.bindStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + bind.setStart(); } public void setOCSPStatus(Boolean ocspEnabled) { - if (!this.sendData) { - return; - } this.ocspEnabled = ocspEnabled; } public void setBindEnd() { - if (!this.sendData) { - return; - } - this.bindEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + this.bind.setEnd(); } public void setHttpClientStart() { - if (!this.sendData) { - return; - } - this.httpClientStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + httpClient.setStart(); } public void setHttpClientEnd() { - if (!this.sendData) { - return; - } - this.httpClientEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + httpClient.setEnd(); } public void setGzipStart() { - if (!this.sendData) { - return; - } - this.gzipStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + gzip.setStart(); } public void setGzipEnd() { - if (!this.sendData) { - return; - } - this.gzipEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + gzip.setEnd(); } public void setQueryEnd() { - if (!this.sendData) { - return; - } - this.queryEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + query.setEnd(); } public void setQueryId(String queryId) { - if (!this.sendData) { - return; - } this.queryId = queryId; } public void setProcessResultChunkStart() { - if 
(!this.sendData) { - return; - } - this.processResultChunkStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + processResultChunk.setStart(); } public void setProcessResultChunkEnd() { - if (!this.sendData) { - return; - } - this.processResultChunkEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + processResultChunk.setEnd(); } public void setResponseIOStreamStart() { - if (!this.sendData) { - return; - } - this.responseIOStreamStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + responseIOStream.setStart(); } public void setResponseIOStreamEnd() { - if (!this.sendData) { - return; - } - this.responseIOStreamEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + responseIOStream.setEnd(); } public void setCreateResultSetStart() { - if (!this.sendData) { - return; - } - this.createResultSetStart = SnowflakeUtil.getEpochTimeInMicroSeconds(); + createResultSet.setStart(); } public void setCreateResultSetEnd() { - if (!this.sendData) { - return; - } - this.createResultSetEnd = SnowflakeUtil.getEpochTimeInMicroSeconds(); + createResultSet.setEnd(); } public void incrementRetryCount() { - if (!this.sendData) { - return; - } this.retryCount++; } public void setRequestId(String requestId) { - if (!this.sendData) { - return; - } this.requestId = requestId; } public void addRetryLocation(String location) { - if (!this.sendData) { - return; - } if (Strings.isNullOrEmpty(this.retryLocations)) { this.retryLocations = location; } else { @@ -177,26 +116,46 @@ public void addRetryLocation(String location) { } } + long getTotalQueryTime() { + return query.getTime(); + } + + long getResultProcessingTime() { + if (createResultSet.getEnd() == 0 || processResultChunk.getStart() == 0) { + return -1; + } + + return createResultSet.getEnd() - processResultChunk.getStart(); + } + + long getHttpRequestTime() { + return httpClient.getTime(); + } + + long getResultSetCreationTime() { + return createResultSet.getTime(); + } + public String generateTelemetry() { if (this.sendData) { String 
eventType = "ExecutionTimeRecord"; JSONObject value = new JSONObject(); String valueStr; value.put("eventType", eventType); - value.put("QueryStart", this.queryStart); - value.put("BindStart", this.bindStart); - value.put("BindEnd", this.bindEnd); - value.put("GzipStart", this.gzipStart); - value.put("GzipEnd", this.gzipEnd); - value.put("HttpClientStart", this.httpClientStart); - value.put("HttpClientEnd", this.httpClientEnd); - value.put("ResponseIOStreamStart", this.responseIOStreamStart); - value.put("ResponseIOStreamEnd", this.responseIOStreamEnd); - value.put("ProcessResultChunkStart", this.processResultChunkStart); - value.put("ProcessResultChunkEnd", this.processResultChunkEnd); - value.put("CreateResultSetStart", this.createResultSetStart); - value.put("CreatResultSetEnd", this.createResultSetEnd); - value.put("QueryEnd", this.queryEnd); + value.put("QueryStart", this.query.getStart()); + value.put("BindStart", this.bind.getStart()); + value.put("BindEnd", this.bind.getEnd()); + value.put("GzipStart", this.gzip.getStart()); + value.put("GzipEnd", this.gzip.getEnd()); + value.put("HttpClientStart", this.httpClient.getStart()); + value.put("HttpClientEnd", this.httpClient.getEnd()); + value.put("ResponseIOStreamStart", this.responseIOStream.getStart()); + value.put("ResponseIOStreamEnd", this.responseIOStream.getEnd()); + value.put("ProcessResultChunkStart", this.processResultChunk.getStart()); + value.put("ProcessResultChunkEnd", this.processResultChunk.getEnd()); + value.put("CreateResultSetStart", this.createResultSet.getStart()); + value.put("CreatResultSetEnd", this.createResultSet.getEnd()); + value.put("QueryEnd", this.query.getEnd()); value.put("BatchID", this.batchId); value.put("QueryID", this.queryId); value.put("RequestID", this.requestId); @@ -204,9 +163,8 @@ public String generateTelemetry() { value.put("RetryCount", this.retryCount); value.put("RetryLocations", this.retryLocations); value.put("ocspEnabled", this.ocspEnabled); - 
value.put("ElapsedQueryTime", (this.queryEnd - this.queryStart)); - value.put( - "ElapsedResultProcessTime", (this.createResultSetEnd - this.processResultChunkStart)); + value.put("ElapsedQueryTime", getTotalQueryTime()); + value.put("ElapsedResultProcessTime", getResultProcessingTime()); value.put("Urgent", true); valueStr = value.toString(); // Avoid adding exception stacktrace to user logs. TelemetryService.getInstance().logExecutionTimeTelemetryEvent(value, eventType); @@ -214,4 +172,30 @@ public String generateTelemetry() { } return ""; } + + @SnowflakeJdbcInternalApi + public String getLogString() { + return "Query id: " + + this.queryId + + ", query function: " + + this.queryFunction + + ", batch id: " + + this.batchId + + ", request id: " + + this.requestId + + ", total query time: " + + getTotalQueryTime() / 1000 + + " ms" + + ", result processing time: " + + getResultProcessingTime() / 1000 + + " ms" + + ", result set creation time: " + + getResultSetCreationTime() / 1000 + + " ms" + + ", http request time: " + + getHttpRequestTime() / 1000 + + " ms" + + ", retry count: " + + this.retryCount; + } } diff --git a/src/main/java/net/snowflake/client/core/FileCacheManager.java b/src/main/java/net/snowflake/client/core/FileCacheManager.java index e635796c0..328aecc9c 100644 --- a/src/main/java/net/snowflake/client/core/FileCacheManager.java +++ b/src/main/java/net/snowflake/client/core/FileCacheManager.java @@ -28,7 +28,7 @@ import net.snowflake.client.log.SFLoggerFactory; class FileCacheManager { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(FileCacheManager.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(FileCacheManager.class); /** Object mapper for JSON encoding and decoding */ private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); @@ -87,6 +87,7 @@ void overrideCacheFile(File newCacheFile) { this.cacheFile = newCacheFile; this.cacheDir = newCacheFile.getParentFile(); 
this.baseCacheFileName = newCacheFile.getName(); + FileUtil.logFileUsage(cacheFile, "Override cache file", true); } FileCacheManager build() { @@ -102,8 +103,8 @@ FileCacheManager build() { ? systemGetEnv(this.cacheDirectoryEnvironmentVariable) : null; } catch (Throwable ex) { - LOGGER.debug( - "Cannot get environment variable for cache directory, " + "skip using cache", false); + logger.debug( + "Cannot get environment variable for cache directory, skip using cache", false); // In Boomi cloud, System.getenv is not allowed due to policy, // so we catch the exception and skip cache completely return this; @@ -122,7 +123,7 @@ FileCacheManager build() { // Checking if home directory is writable. File homeFile = new File(homeDir); if (!homeFile.canWrite()) { - LOGGER.debug("Home directory not writeable, using tmpdir", false); + logger.debug("Home directory not writeable, using tmpdir", false); homeDir = systemGetProperty("java.io.tmpdir"); } } @@ -142,11 +143,11 @@ FileCacheManager build() { } if (!this.cacheDir.mkdirs() && !this.cacheDir.exists()) { - LOGGER.debug( + logger.debug( "Cannot create the cache directory {}. Giving up.", this.cacheDir.getAbsolutePath()); return this; } - LOGGER.debug("Verified Directory {}", this.cacheDir.getAbsolutePath()); + logger.debug("Verified Directory {}", this.cacheDir.getAbsolutePath()); File cacheFileTmp = new File(this.cacheDir, this.baseCacheFileName).getAbsoluteFile(); try { @@ -155,15 +156,16 @@ FileCacheManager build() { // In this particular case, it doesn't matter as long as the file is // writable. 
if (cacheFileTmp.createNewFile()) { - LOGGER.debug("Successfully created a cache file {}", cacheFileTmp); + logger.debug("Successfully created a cache file {}", cacheFileTmp); } else { - LOGGER.debug("Cache file already exists {}", cacheFileTmp); + logger.debug("Cache file already exists {}", cacheFileTmp); } + FileUtil.logFileUsage(cacheFileTmp, "Cache file creation", false); this.cacheFile = cacheFileTmp.getCanonicalFile(); this.cacheLockFile = new File(this.cacheFile.getParentFile(), this.baseCacheFileName + ".lck"); } catch (IOException | SecurityException ex) { - LOGGER.info("Failed to touch the cache file. Ignored. {}", cacheFileTmp.getAbsoluteFile()); + logger.info("Failed to touch the cache file. Ignored. {}", cacheFileTmp.getAbsoluteFile()); } return this; } @@ -176,25 +178,26 @@ JsonNode readCacheFile() { } try { if (!cacheFile.exists()) { - LOGGER.debug("Cache file doesn't exists. File: {}", cacheFile); + logger.debug("Cache file doesn't exists. File: {}", cacheFile); return null; } try (Reader reader = new InputStreamReader(new FileInputStream(cacheFile), DEFAULT_FILE_ENCODING)) { + FileUtil.logFileUsage(cacheFile, "Read cache", false); return OBJECT_MAPPER.readTree(reader); } } catch (IOException ex) { - LOGGER.debug("Failed to read the cache file. No worry. File: {}, Err: {}", cacheFile, ex); + logger.debug("Failed to read the cache file. No worry. File: {}, Err: {}", cacheFile, ex); } return null; } void writeCacheFile(JsonNode input) { - LOGGER.debug("Writing cache file. File={}", cacheFile); + logger.debug("Writing cache file. File: {}", cacheFile); if (cacheFile == null || !tryLockCacheFile()) { // no cache file or it failed to lock file - LOGGER.debug( + logger.debug( "No cache file exists or failed to lock the file. 
Skipping writing the cache", false); return; } @@ -205,19 +208,20 @@ void writeCacheFile(JsonNode input) { } try (Writer writer = new OutputStreamWriter(new FileOutputStream(cacheFile), DEFAULT_FILE_ENCODING)) { + FileUtil.logFileUsage(cacheFile, "Write to cache", false); writer.write(input.toString()); } } catch (IOException ex) { - LOGGER.debug("Failed to write the cache file. File: {}", cacheFile); + logger.debug("Failed to write the cache file. File: {}", cacheFile); } finally { if (!unlockCacheFile()) { - LOGGER.debug("Failed to unlock cache file", false); + logger.debug("Failed to unlock cache file", false); } } } void deleteCacheFile() { - LOGGER.debug("Deleting cache file. File={}, Lock File={}", cacheFile, cacheLockFile); + logger.debug("Deleting cache file. File: {}, lock file: {}", cacheFile, cacheLockFile); if (cacheFile == null) { return; @@ -225,7 +229,7 @@ void deleteCacheFile() { unlockCacheFile(); if (!cacheFile.delete()) { - LOGGER.debug("Failed to delete the file: {}", cacheFile); + logger.debug("Failed to delete the file: {}", cacheFile); } } @@ -246,7 +250,7 @@ private boolean tryLockCacheFile() { ++cnt; } if (!locked) { - LOGGER.debug("Failed to lock the cache file.", false); + logger.debug("Failed to lock the cache file.", false); } return locked; } @@ -276,7 +280,7 @@ private boolean checkCacheLockFile() { if (!cacheLockFile.exists() && cacheFileTs > 0 && currentTime - this.cacheExpirationInMilliseconds <= cacheFileTs) { - LOGGER.debug("No cache file lock directory exists and cache file is up to date.", false); + logger.debug("No cache file lock directory exists and cache file is up to date.", false); return true; } @@ -288,13 +292,13 @@ private boolean checkCacheLockFile() { if (lockFileTs < currentTime - this.cacheFileLockExpirationInMilliseconds) { // old lock file if (!cacheLockFile.delete()) { - LOGGER.debug("Failed to delete the directory. Dir: {}", cacheLockFile); + logger.debug("Failed to delete the directory. 
Dir: {}", cacheLockFile); return false; } - LOGGER.debug("Deleted the cache lock directory, because it was old.", false); + logger.debug("Deleted the cache lock directory, because it was old.", false); return currentTime - this.cacheExpirationInMilliseconds <= cacheFileTs; } - LOGGER.debug("Failed to lock the file. Ignored.", false); + logger.debug("Failed to lock the file. Ignored.", false); return false; } @@ -305,7 +309,7 @@ private boolean checkCacheLockFile() { */ private static long fileCreationTime(File targetFile) { if (!targetFile.exists()) { - LOGGER.debug("File not exists. File: {}", targetFile); + logger.debug("File not exists. File: {}", targetFile); return -1; } try { @@ -313,8 +317,12 @@ private static long fileCreationTime(File targetFile) { BasicFileAttributes attr = Files.readAttributes(cacheFileLockPath, BasicFileAttributes.class); return attr.creationTime().toMillis(); } catch (IOException ex) { - LOGGER.debug("Failed to get creation time. File/Dir: {}, Err: {}", targetFile, ex); + logger.debug("Failed to get creation time. 
File/Dir: {}, Err: {}", targetFile, ex); } return -1; } + + String getCacheFilePath() { + return cacheFile.getAbsolutePath(); + } } diff --git a/src/main/java/net/snowflake/client/core/FileUtil.java b/src/main/java/net/snowflake/client/core/FileUtil.java new file mode 100644 index 000000000..3ae68909b --- /dev/null +++ b/src/main/java/net/snowflake/client/core/FileUtil.java @@ -0,0 +1,78 @@ +package net.snowflake.client.core; + +import com.google.common.base.Strings; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Arrays; +import java.util.Collection; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class FileUtil { + private static final SFLogger logger = SFLoggerFactory.getLogger(FileUtil.class); + private static final Collection<PosixFilePermission> WRITE_BY_OTHERS = + Arrays.asList(PosixFilePermission.GROUP_WRITE, PosixFilePermission.OTHERS_WRITE); + private static final Collection<PosixFilePermission> READ_BY_OTHERS = + Arrays.asList(PosixFilePermission.GROUP_READ, PosixFilePermission.OTHERS_READ); + + public static void logFileUsage(Path filePath, String context, boolean logReadAccess) { + logger.info("{}Accessing file: {}", getContextStr(context), filePath); + logWarnWhenAccessibleByOthers(filePath, context, logReadAccess); + } + + public static void logFileUsage(File file, String context, boolean logReadAccess) { + logFileUsage(file.toPath(), context, logReadAccess); + } + + public static void logFileUsage(String stringPath, String context, boolean logReadAccess) { + Path path = Paths.get(stringPath); + logFileUsage(path, context, logReadAccess); + } + + private static void logWarnWhenAccessibleByOthers( + Path filePath, String context, boolean logReadAccess) { + // we do not check the permissions for Windows + if (Constants.getOS() == Constants.OS.WINDOWS) { +
return; + } + + try { + Collection<PosixFilePermission> filePermissions = Files.getPosixFilePermissions(filePath); + logger.debug( + "{}File {} access rights: {}", getContextStr(context), filePath, filePermissions); + + boolean isWritableByOthers = isPermPresent(filePermissions, WRITE_BY_OTHERS); + boolean isReadableByOthers = isPermPresent(filePermissions, READ_BY_OTHERS); + + if (isWritableByOthers || (isReadableByOthers && logReadAccess)) { + logger.warn( + "{}File {} is accessible by others to:{}{}", + getContextStr(context), + filePath, + isReadableByOthers && logReadAccess ? " read" : "", + isWritableByOthers ? " write" : ""); + } + } catch (IOException e) { + logger.warn( + "{}Unable to access the file to check the permissions: {}. Error: {}", + getContextStr(context), + filePath, + e); + } + } + + private static boolean isPermPresent( + Collection<PosixFilePermission> filePerms, Collection<PosixFilePermission> permsToCheck) + throws IOException { + return filePerms.stream().anyMatch(permsToCheck::contains); + } + + private static String getContextStr(String context) { + return Strings.isNullOrEmpty(context) ? "" : context + ": "; + } +} diff --git a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java index 152489b91..25ba5f946 100644 --- a/src/main/java/net/snowflake/client/core/HeartbeatBackground.java +++ b/src/main/java/net/snowflake/client/core/HeartbeatBackground.java @@ -23,7 +23,7 @@ public class HeartbeatBackground implements Runnable { private static HeartbeatBackground singleton = new HeartbeatBackground(); /** The logger.
*/ - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(HeartbeatBackground.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(HeartbeatBackground.class); // default master token validity (in seconds) is 4 hours private long masterTokenValidityInSecs = 4 * 3600; @@ -79,7 +79,7 @@ protected synchronized void addSession( this.heartBeatIntervalInSecs = masterTokenValidityInSecs / 4; } - LOGGER.debug( + logger.debug( "update heartbeat interval" + " from {} to {}", oldHeartBeatIntervalInSecs, this.heartBeatIntervalInSecs); @@ -96,7 +96,7 @@ protected synchronized void addSession( * JVM from exiting. */ if (this.scheduler == null) { - LOGGER.debug("create heartbeat thread pool", false); + logger.debug("create heartbeat thread pool", false); this.scheduler = Executors.newScheduledThreadPool( 1, @@ -113,19 +113,19 @@ public Thread newThread(Runnable runnable) { // schedule a heartbeat task if none exists if (heartbeatFuture == null) { - LOGGER.debug("schedule heartbeat task", false); + logger.debug("Schedule heartbeat task", false); this.scheduleHeartbeat(); } // or reschedule if the master token validity has been reduced (rare event) else if (requireReschedule) { - LOGGER.debug("Cancel existing heartbeat task", false); + logger.debug("Cancel existing heartbeat task", false); // Cancel existing task if not started yet and reschedule if (heartbeatFuture.cancel(false)) { - LOGGER.debug("Canceled existing heartbeat task, reschedule", false); + logger.debug("Canceled existing heartbeat task, reschedule", false); this.scheduleHeartbeat(); } else { - LOGGER.debug("Failed to cancel existing heartbeat task", false); + logger.debug("Failed to cancel existing heartbeat task", false); } } } @@ -155,7 +155,7 @@ private void scheduleHeartbeat() { */ long initialDelay = Math.max(heartBeatIntervalInSecs - elapsedSecsSinceLastHeartBeat, 0); - LOGGER.debug("schedule heartbeat task with initial delay of {} seconds", initialDelay); + 
logger.debug("Schedule heartbeat task with initial delay of {} seconds", initialDelay); // Creates and executes a periodic action to send heartbeats this.heartbeatFuture = this.scheduler.schedule(this, initialDelay, TimeUnit.SECONDS); @@ -191,7 +191,7 @@ public void run() { try { session.heartbeat(); } catch (Throwable ex) { - LOGGER.error("heartbeat error - message=" + ex.getMessage(), ex); + logger.error("Heartbeat error - message=" + ex.getMessage(), ex); } } @@ -203,11 +203,11 @@ public void run() { synchronized (this) { // schedule next heartbeat if (sessions.size() > 0) { - LOGGER.debug("schedule next heartbeat run", false); + logger.debug("Schedule next heartbeat run", false); scheduleHeartbeat(); } else { - LOGGER.debug("no need for heartbeat since no more sessions", false); + logger.debug("No need for heartbeat since no more sessions", false); // no need to heartbeat if no more session this.heartbeatFuture = null; diff --git a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java index 0ca3df8b0..f65b9e29d 100644 --- a/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java +++ b/src/main/java/net/snowflake/client/core/HttpClientSettingsKey.java @@ -150,4 +150,35 @@ public HttpProtocol getProxyHttpProtocol() { public Boolean getGzipDisabled() { return gzipDisabled; } + + @Override + public String toString() { + return "HttpClientSettingsKey[" + + "ocspMode=" + + ocspMode + + ", useProxy=" + + useProxy + + ", proxyHost='" + + proxyHost + + '\'' + + ", proxyPort=" + + proxyPort + + ", nonProxyHosts='" + + nonProxyHosts + + '\'' + + ", proxyUser='" + + proxyUser + + '\'' + + ", proxyPassword is " + + (proxyPassword.isEmpty() ? 
"not set" : "set") + + ", proxyProtocol='" + + proxyProtocol + + '\'' + + ", userAgentSuffix='" + + userAgentSuffix + + '\'' + + ", gzipDisabled=" + + gzipDisabled + + ']'; + } } diff --git a/src/main/java/net/snowflake/client/core/HttpUtil.java b/src/main/java/net/snowflake/client/core/HttpUtil.java index 84f31c8e8..f67031102 100644 --- a/src/main/java/net/snowflake/client/core/HttpUtil.java +++ b/src/main/java/net/snowflake/client/core/HttpUtil.java @@ -39,7 +39,9 @@ import net.snowflake.client.log.ArgSupplier; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.log.SFLoggerUtil; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; import org.apache.http.HttpHost; @@ -131,7 +133,7 @@ public static long getDownloadedConditionTimeoutInSeconds() { public static void closeExpiredAndIdleConnections() { if (connectionManager != null) { synchronized (connectionManager) { - logger.debug("connection pool stats: {}", connectionManager.getTotalStats()); + logger.debug("Connection pool stats: {}", connectionManager.getTotalStats()); connectionManager.closeExpiredConnections(); connectionManager.closeIdleConnections(DEFAULT_IDLE_CONNECTION_TIMEOUT, TimeUnit.SECONDS); } @@ -196,8 +198,13 @@ public static void setSessionlessProxyForAzure( ErrorCode.INVALID_PROXY_PROPERTIES, "Could not parse port number"); } Proxy azProxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); + logger.debug("Setting sessionless Azure proxy. 
Host: {}, port: {}", proxyHost, proxyPort); opContext.setProxy(azProxy); + } else { + logger.debug("Omitting sessionless Azure proxy setup as proxy is disabled"); } + } else { + logger.debug("Omitting sessionless Azure proxy setup"); } } @@ -211,7 +218,11 @@ public static void setProxyForAzure(HttpClientSettingsKey key, OperationContext if (key != null && key.usesProxy()) { Proxy azProxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(key.getProxyHost(), key.getProxyPort())); + logger.debug( + "Setting Azure proxy. Host: {}, port: {}", key.getProxyHost(), key.getProxyPort()); opContext.setProxy(azProxy); + } else { + logger.debug("Omitting Azure proxy setup"); } } @@ -262,16 +273,21 @@ static String buildUserAgent(String customSuffix) { */ public static CloseableHttpClient buildHttpClient( @Nullable HttpClientSettingsKey key, File ocspCacheFile, boolean downloadUnCompressed) { + logger.debug( + "Building http client with client settings key: {}, ocsp cache file: {}, download uncompressed: {}", + key != null ? key.toString() : null, + ocspCacheFile, + downloadUnCompressed); // set timeout so that we don't wait forever. // Setup the default configuration for all requests on this client int timeToLive = SystemUtil.convertSystemPropertyToIntValue(JDBC_TTL, DEFAULT_TTL); - logger.debug("time to live in connection pooling manager: {}", timeToLive); long connectTimeout = getConnectionTimeout().toMillis(); long socketTimeout = getSocketTimeout().toMillis(); logger.debug( - "Connect timeout is {} ms and socket timeout is {} for connection pooling manager", + "Connection pooling manager connect timeout: {} ms, socket timeout: {} ms, ttl: {} s", connectTimeout, - socketTimeout); + socketTimeout, + timeToLive); // Set proxy settings for DefaultRequestConfig. If current proxy settings are the same as for // the last request, keep the current DefaultRequestConfig. 
If not, build a new @@ -295,9 +311,26 @@ public static CloseableHttpClient buildHttpClient( // only set the proxy settings if they are not null // but no value has been specified for nonProxyHosts // the route planner will determine whether to use a proxy based on nonProxyHosts value. + String logMessage = + "Rebuilding request config. Connect timeout: " + + connectTimeout + + " ms, connection request " + + "timeout: " + + connectTimeout + + " ms, socket timeout: " + + socketTimeout + + " ms"; if (proxy != null && Strings.isNullOrEmpty(key.getNonProxyHosts())) { builder.setProxy(proxy); + logMessage += + ", host: " + + key.getProxyHost() + + ", port: " + + key.getProxyPort() + + ", scheme: " + + key.getProxyHttpProtocol().getScheme(); } + logger.debug(logMessage); DefaultRequestConfig = builder.build(); } @@ -309,6 +342,11 @@ public static CloseableHttpClient buildHttpClient( // care OCSP checks. // OCSP FailOpen is ON by default try { + if (ocspCacheFile == null) { + logger.debug("Instantiating trust manager with default ocsp cache file"); + } else { + logger.debug("Instantiating trust manager with ocsp cache file: {}", ocspCacheFile); + } TrustManager[] tm = {new SFTrustManager(key, ocspCacheFile)}; trustManagers = tm; } catch (Exception | Error err) { @@ -318,8 +356,18 @@ public static CloseableHttpClient buildHttpClient( logger.error(errors.toString(), true); throw new RuntimeException(err); // rethrow the exception } + } else if (key != null) { + logger.debug( + "Omitting trust manager instantiation as OCSP mode is set to {}", key.getOcspMode()); + } else { + logger.debug("Omitting trust manager instantiation as configuration is not provided"); } try { + logger.debug( + "Registering https connection socket factory with socks proxy disabled: {} and http " + + "connection socket factory", + socksProxyDisabled); + Registry registry = RegistryBuilder.create() .register( @@ -344,6 +392,7 @@ public static CloseableHttpClient buildHttpClient( 
connectionManager.setMaxTotal(maxConnections); connectionManager.setDefaultMaxPerRoute(maxConnectionsPerRoute); + logger.debug("Disabling cookie management for http client"); String userAgentSuffix = key != null ? key.getUserAgentSuffix() : ""; HttpClientBuilder httpClientBuilder = HttpClientBuilder.create() @@ -355,6 +404,8 @@ public static CloseableHttpClient buildHttpClient( .disableCookieManagement(); // SNOW-39748 if (key != null && key.usesProxy()) { + logger.debug( + "Instantiating proxy route planner with non-proxy hosts: {}", key.getNonProxyHosts()); // use the custom proxy properties SnowflakeMutableProxyRoutePlanner sdkProxyRoutePlanner = httpClientRoutePlanner.computeIfAbsent( @@ -372,12 +423,19 @@ public static CloseableHttpClient buildHttpClient( new UsernamePasswordCredentials(key.getProxyUser(), key.getProxyPassword()); AuthScope authScope = new AuthScope(key.getProxyHost(), key.getProxyPort()); CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + logger.debug( + "Using user: {}, password is {} for proxy host: {}, port: {}", + key.getProxyUser(), + SFLoggerUtil.isVariableProvided(key.getProxyPassword()), + key.getProxyHost(), + key.getProxyPort()); credentialsProvider.setCredentials(authScope, credentials); httpClientBuilder = httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); } } httpClientBuilder.setDefaultRequestConfig(DefaultRequestConfig); if (downloadUnCompressed) { + logger.debug("Disabling content compression for http client"); httpClientBuilder = httpClientBuilder.disableContentCompression(); } return httpClientBuilder.build(); @@ -392,6 +450,11 @@ public static void updateRoutePlanner(HttpClientSettingsKey key) { .get(key) .getNonProxyHosts() .equalsIgnoreCase(key.getNonProxyHosts())) { + logger.debug( + "Updating route planner non-proxy hosts for proxy: {}:{} to: {}", + key.getProxyHost(), + key.getProxyPort(), + key.getNonProxyHosts()); 
httpClientRoutePlanner.get(key).setNonProxyHosts(key.getNonProxyHosts()); } } @@ -490,6 +553,7 @@ public static RequestConfig getRequestConfigWithoutCookies() { } public static void setRequestConfig(RequestConfig requestConfig) { + logger.debug("Setting default request config to: {}", requestConfig); DefaultRequestConfig = requestConfig; } @@ -508,6 +572,7 @@ private static String getHttpClientStats() { * @param socksProxyDisabled new value */ public static void setSocksProxyDisabled(boolean socksProxyDisabled) { + logger.debug("Setting socks proxy disabled to {}", socksProxyDisabled); HttpUtil.socksProxyDisabled = socksProxyDisabled; } @@ -545,6 +610,7 @@ static String executeRequestWithoutCookies( AtomicBoolean canceling, HttpClientSettingsKey ocspAndProxyKey) throws SnowflakeSQLException, IOException { + logger.debug("Executing request without cookies"); return executeRequestInternal( httpRequest, retryTimeout, @@ -582,6 +648,7 @@ public static String executeGeneralRequest( int retryCount, HttpClientSettingsKey ocspAndProxyAndGzipKey) throws SnowflakeSQLException, IOException { + logger.debug("Executing general request"); return executeRequest( httpRequest, retryTimeout, @@ -617,6 +684,7 @@ public static String executeGeneralRequest( int retryCount, CloseableHttpClient httpClient) throws SnowflakeSQLException, IOException { + logger.debug("Executing general request"); return executeRequestInternal( httpRequest, retryTimeout, @@ -664,6 +732,7 @@ public static String executeRequest( ExecTimeTelemetryData execTimeData) throws SnowflakeSQLException, IOException { boolean ocspEnabled = !(ocspAndProxyKey.getOcspMode().equals(OCSPMode.INSECURE)); + logger.debug("Executing request with OCSP enabled: {}", ocspEnabled); execTimeData.setOCSPStatus(ocspEnabled); return executeRequestInternal( httpRequest, @@ -729,6 +798,12 @@ private static String executeRequestInternal( String theString; StringWriter writer = null; CloseableHttpResponse response = null; + Stopwatch 
stopwatch = null; + + if (logger.isDebugEnabled()) { + stopwatch = new Stopwatch(); + stopwatch.start(); + } try { response = @@ -746,6 +821,9 @@ private static String executeRequestInternal( includeRequestGuid, retryOnHTTP403, execTimeData); + if (logger.isDebugEnabled() && stopwatch != null) { + stopwatch.stop(); + } if (response == null || response.getStatusLine().getStatusCode() != 200) { logger.error("Error executing request: {}", requestInfoScrubbed); @@ -778,9 +856,10 @@ private static String executeRequestInternal( } logger.debug( - "Pool: {} Request returned for: {}", + "Pool: {} Request returned for: {} took {} ms", (ArgSupplier) HttpUtil::getHttpClientStats, - requestInfoScrubbed); + requestInfoScrubbed, + stopwatch == null ? "n/a" : stopwatch.elapsedMillis()); return theString; } @@ -855,8 +934,10 @@ static final class SFConnectionSocketFactory extends PlainConnectionSocketFactor @Override public Socket createSocket(HttpContext ctx) throws IOException { if (socksProxyDisabled) { + logger.trace("Creating socket with no proxy"); return new Socket(Proxy.NO_PROXY); } + logger.trace("Creating socket with proxy"); return super.createSocket(ctx); } } diff --git a/src/main/java/net/snowflake/client/core/JsonSqlInput.java b/src/main/java/net/snowflake/client/core/JsonSqlInput.java index daff3d9b0..0e6b274bd 100644 --- a/src/main/java/net/snowflake/client/core/JsonSqlInput.java +++ b/src/main/java/net/snowflake/client/core/JsonSqlInput.java @@ -4,7 +4,6 @@ package net.snowflake.client.core; import static net.snowflake.client.core.SFBaseResultSet.OBJECT_MAPPER; -import static net.snowflake.client.core.SFResultSet.logger; import static net.snowflake.client.jdbc.SnowflakeUtil.mapSFExceptionToSQLException; import com.fasterxml.jackson.core.type.TypeReference; @@ -29,12 +28,15 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.core.structs.SQLDataCreationHelper; import net.snowflake.client.jdbc.FieldMetadata; +import 
net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.ThrowingBiFunction; import net.snowflake.common.core.SFTimestamp; import net.snowflake.common.core.SnowflakeDateTimeFormat; @SnowflakeJdbcInternalApi public class JsonSqlInput extends BaseSqlInput { + private static final SFLogger logger = SFLoggerFactory.getLogger(JsonSqlInput.class); private final String text; private final JsonNode input; private final Iterator elements; diff --git a/src/main/java/net/snowflake/client/core/QueryContextCache.java b/src/main/java/net/snowflake/client/core/QueryContextCache.java index ea47e6167..85fde42ac 100644 --- a/src/main/java/net/snowflake/client/core/QueryContextCache.java +++ b/src/main/java/net/snowflake/client/core/QueryContextCache.java @@ -109,7 +109,7 @@ else if (readTimestamp == qce.readTimestamp && qce.priority != priority) { /** Sync the newPriorityMap with the priorityMap at the end of current round of merge */ void syncPriorityMap() { logger.debug( - "syncPriorityMap called priorityMap size = {}, newPrioirtyMap size = {}", + "syncPriorityMap called priorityMap size: {}, newPrioirtyMap size: {}", priorityMap.size(), newPriorityMap.size()); for (Map.Entry entry : newPriorityMap.entrySet()) { @@ -125,7 +125,9 @@ void syncPriorityMap() { */ void checkCacheCapacity() { logger.debug( - "checkCacheCapacity() called. treeSet size {} cache capacity {}", treeSet.size(), capacity); + "checkCacheCapacity() called. treeSet size: {} cache capacity: {}", + treeSet.size(), + capacity); if (treeSet.size() > capacity) { // remove elements based on priority while (treeSet.size() > capacity) { @@ -135,18 +137,18 @@ void checkCacheCapacity() { } logger.debug( - "checkCacheCapacity() returns. treeSet size {} cache capacity {}", + "checkCacheCapacity() returns. treeSet size: {} cache capacity: {}", treeSet.size(), capacity); } /** Clear the cache. 
*/ public void clearCache() { - logger.debug("clearCache() called"); + logger.trace("clearCache() called"); idMap.clear(); priorityMap.clear(); treeSet.clear(); - logger.debug("clearCache() returns. Number of entries in cache now {}", treeSet.size()); + logger.trace("clearCache() returns. Number of entries in cache now: {}", treeSet.size()); } /** @@ -211,7 +213,7 @@ public void deserializeQueryContextJson(String data) { syncPriorityMap(); } } catch (Exception e) { - logger.debug("deserializeQueryContextJson: Exception = {}", e.getMessage()); + logger.debug("deserializeQueryContextJson: Exception: {}", e.getMessage()); // Not rethrowing. clear the cache as incomplete merge can lead to unexpected behavior. clearCache(); } @@ -306,7 +308,7 @@ public void deserializeQueryContextDTO(QueryContextDTO queryContextDTO) { // round of merge. syncPriorityMap(); } catch (Exception e) { - logger.debug("deserializeQueryContextDTO: Exception = {}", e.getMessage()); + logger.debug("deserializeQueryContextDTO: Exception: {}", e.getMessage()); // Not rethrowing. clear the cache as incomplete merge can lead to unexpected behavior. 
clearCache(); } @@ -359,7 +361,7 @@ public QueryContextDTO serializeQueryContextDTO() { return queryContextDTO; } catch (Exception e) { - logger.debug("serializQueryContextDTO(): Exception {}", e.getMessage()); + logger.debug("serializeQueryContextDTO(): Exception: {}", e.getMessage()); return null; } } diff --git a/src/main/java/net/snowflake/client/core/ResultUtil.java b/src/main/java/net/snowflake/client/core/ResultUtil.java index b3e8bf3bb..b894f4259 100644 --- a/src/main/java/net/snowflake/client/core/ResultUtil.java +++ b/src/main/java/net/snowflake/client/core/ResultUtil.java @@ -29,7 +29,7 @@ import net.snowflake.common.util.TimeUtil; public class ResultUtil { - static final SFLogger logger = SFLoggerFactory.getLogger(ResultUtil.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(ResultUtil.class); public static final int MILLIS_IN_ONE_DAY = 86400000; public static final int DEFAULT_SCALE_OF_SFTIME_FRACTION_SECONDS = @@ -184,7 +184,7 @@ public static SFTimestamp getSFTimestamp( TimeZone sessionTZ, SFBaseSession session) throws SFException { - logger.debug("public Timestamp getTimestamp(int columnIndex)", false); + logger.trace("Timestamp getTimestamp(int columnIndex)", false); try { TimeUtil.TimestampType tsType = null; diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index a617bb739..02f16fff1 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -278,7 +278,9 @@ private boolean fetchNextRowUnsorted() throws SnowflakeSQLException { if (currentChunkIterator.next()) { logger.debug( - "Moving to chunk index {}, row count={}", nextChunkIndex, nextChunk.getRowCount()); + "Moving to chunk index: {}, row count: {}", + nextChunkIndex, + nextChunk.getRowCount()); nextChunkIndex++; return true; @@ -435,7 +437,7 @@ public boolean next() throws SFException, 
SnowflakeSQLException { } return true; } else { - logger.debug("end of result", false); + logger.debug("End of result", false); /* * Here we check if the result has been truncated and throw exception if @@ -805,7 +807,7 @@ public static void closeRootAllocator(RootAllocator rootAllocator) { rootAllocator.close(); } } catch (InterruptedException ie) { - logger.debug("interrupted during closing root allocator", false); + logger.debug("Interrupted during closing root allocator", false); } catch (Exception e) { logger.debug("Exception happened when closing rootAllocator: ", e.getLocalizedMessage()); } diff --git a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java index 0a0fffc63..71e56a515 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java @@ -139,12 +139,12 @@ public SFBaseSession getSession() { // default implementation public boolean next() throws SFException, SnowflakeSQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); return false; } public void close() throws SnowflakeSQLException { - logger.debug("public void close()", false); + logger.trace("void close()", false); // no exception even if already closed. 
resultSetMetaData = null; @@ -152,7 +152,7 @@ public void close() throws SnowflakeSQLException { } public boolean wasNull() { - logger.debug("public boolean wasNull() returning {}", wasNull); + logger.trace("boolean wasNull() returning {}", wasNull); return wasNull; } diff --git a/src/main/java/net/snowflake/client/core/SFBaseSession.java b/src/main/java/net/snowflake/client/core/SFBaseSession.java index b0da3e9b4..c5191cb82 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseSession.java +++ b/src/main/java/net/snowflake/client/core/SFBaseSession.java @@ -46,7 +46,7 @@ * which signals whether to enable client telemetry */ public abstract class SFBaseSession { - static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseSession.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseSession.class); private final Properties clientInfo = new Properties(); private final AtomicBoolean autoCommit = new AtomicBoolean(true); // Injected delay for the purpose of connection timeout testing @@ -354,6 +354,8 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { return ocspAndProxyAndGzipKey; } + OCSPMode ocspMode = getOCSPMode(); + Boolean gzipDisabled = false; if (connectionPropertiesMap.containsKey(SFSessionProperty.GZIP_DISABLED)) { gzipDisabled = (Boolean) connectionPropertiesMap.get(SFSessionProperty.GZIP_DISABLED); @@ -387,7 +389,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { String proxyProtocol = (String) connectionPropertiesMap.get(SFSessionProperty.PROXY_PROTOCOL); ocspAndProxyAndGzipKey = new HttpClientSettingsKey( - getOCSPMode(), + ocspMode, proxyHost, proxyPort, nonProxyHosts, @@ -397,6 +399,8 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); + return ocspAndProxyAndGzipKey; } // If JVM proxy parameters are specified, proxies need to go through the 
JDBC driver's @@ -417,7 +421,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { // log the JVM parameters that are being used if (httpUseProxy) { logger.debug( - "Proxy environment settings: http.useProxy={}, http.proxyHost={}, http.proxyPort={}, http.proxyUser={}, " + "Using JVM parameters for proxy setup: http.useProxy={}, http.proxyHost={}, http.proxyPort={}, http.proxyUser={}, " + "http.proxyPassword is {}, https.proxyHost={}, https.proxyPort={}, https.proxyUser={}, " + "https.proxyPassword is {}, http.nonProxyHosts={}, NO_PROXY={}, http.proxyProtocol={}", httpUseProxy, @@ -456,6 +460,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { if (proxyProtocol.equals("https") && !Strings.isNullOrEmpty(httpsProxyHost) && !Strings.isNullOrEmpty(httpsProxyPort)) { + logger.debug("Using https proxy configuration from JVM parameters"); int proxyPort; try { proxyPort = Integer.parseInt(httpsProxyPort); @@ -465,7 +470,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { } ocspAndProxyAndGzipKey = new HttpClientSettingsKey( - getOCSPMode(), + ocspMode, httpsProxyHost, proxyPort, combinedNonProxyHosts, @@ -474,9 +479,11 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { "https", userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } else if (proxyProtocol.equals("http") && !Strings.isNullOrEmpty(httpProxyHost) && !Strings.isNullOrEmpty(httpProxyPort)) { + logger.debug("Using http proxy configuration from JVM parameters"); int proxyPort; try { proxyPort = Integer.parseInt(httpProxyPort); @@ -486,7 +493,7 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { } ocspAndProxyAndGzipKey = new HttpClientSettingsKey( - getOCSPMode(), + ocspMode, httpProxyHost, proxyPort, combinedNonProxyHosts, @@ -495,25 +502,49 @@ public HttpClientSettingsKey getHttpClientKey() throws SnowflakeSQLException { "http", 
userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } else { // Not enough parameters set to use the proxy. - logger.debug( - "http.useProxy={} but valid host and port were not provided. No proxy in use.", + logger.warn( + "Failed parsing the proxy settings from JVM parameters as http.useProxy={}," + + " but valid host and port were not provided.", httpUseProxy); ocspAndProxyAndGzipKey = - new HttpClientSettingsKey(getOCSPMode(), userAgentSuffix, gzipDisabled); + new HttpClientSettingsKey(ocspMode, userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } } else { // If no proxy is used or JVM http proxy is used, no need for setting parameters logger.debug("http.useProxy={}. JVM proxy not used.", httpUseProxy); unsetInvalidProxyHostAndPort(); - ocspAndProxyAndGzipKey = - new HttpClientSettingsKey(getOCSPMode(), userAgentSuffix, gzipDisabled); + ocspAndProxyAndGzipKey = new HttpClientSettingsKey(ocspMode, userAgentSuffix, gzipDisabled); + logHttpClientInitInfo(ocspAndProxyAndGzipKey); } } return ocspAndProxyAndGzipKey; } + private void logHttpClientInitInfo(HttpClientSettingsKey key) { + if (key.usesProxy()) { + logger.info( + "Driver OCSP mode: {}, gzip disabled: {}, proxy protocol: {}," + + " proxy host: {}, proxy port: {}, non proxy hosts: {}, proxy user: {}, proxy password is {}", + key.getOcspMode(), + key.getGzipDisabled(), + key.getProxyHttpProtocol(), + key.getProxyHost(), + key.getProxyPort(), + key.getNonProxyHosts(), + key.getProxyUser(), + key.getProxyPassword().isEmpty() ? "not set" : "set"); + } else { + logger.info( + "Driver OCSP mode: {}, gzip disabled: {} and no proxy", + key.getOcspMode(), + key.getGzipDisabled()); + } + } + public void unsetInvalidProxyHostAndPort() { // If proxyHost and proxyPort are used without http or https unset them, so they are not used // later by the ProxySelector. 
diff --git a/src/main/java/net/snowflake/client/core/SFBaseStatement.java b/src/main/java/net/snowflake/client/core/SFBaseStatement.java index 62d933b2e..17b2fd1b6 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseStatement.java +++ b/src/main/java/net/snowflake/client/core/SFBaseStatement.java @@ -20,7 +20,7 @@ public abstract class SFBaseStatement { // maximum number of parameters for the statement; if this threshold is exceeded, // we throw an exception protected static final int MAX_STATEMENT_PARAMETERS = 1000; - static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseStatement.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseStatement.class); // statement level parameters; just a string-key, object-value map. protected final Map statementParametersMap = new HashMap<>(); // timeout in seconds for queries @@ -125,7 +125,7 @@ public abstract SFBaseResultSet asyncExecute( * @param sql the set property sql */ public void executeSetProperty(final String sql) { - logger.debug("setting property", false); + logger.trace("Setting property", false); // tokenize the sql String[] tokens = sql.split("\\s+"); @@ -136,11 +136,11 @@ public void executeSetProperty(final String sql) { if ("sort".equalsIgnoreCase(tokens[1])) { if (tokens.length >= 3 && "on".equalsIgnoreCase(tokens[2])) { - logger.debug("setting sort on", false); + logger.debug("Setting sort on", false); this.getSFBaseSession().setSessionPropertyByKey("sort", true); } else { - logger.debug("setting sort off", false); + logger.debug("Setting sort off", false); this.getSFBaseSession().setSessionPropertyByKey("sort", false); } } diff --git a/src/main/java/net/snowflake/client/core/SFException.java b/src/main/java/net/snowflake/client/core/SFException.java index 37c47da32..facbf238d 100644 --- a/src/main/java/net/snowflake/client/core/SFException.java +++ b/src/main/java/net/snowflake/client/core/SFException.java @@ -11,7 +11,7 @@ /** Created by jhuang on 1/5/16. 
*/ public class SFException extends Throwable { - static final SFLogger logger = SFLoggerFactory.getLogger(SFException.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFException.class); private static final long serialVersionUID = 1L; diff --git a/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java b/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java index b3f797f6f..06c503ce4 100644 --- a/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFFixedViewResultSet.java @@ -83,7 +83,7 @@ public SFFixedViewResultSet(SnowflakeFixedView fixedView, CommandType commandTyp */ @Override public boolean next() throws SFException { - logger.debug("next called", false); + logger.trace("next called", false); List nextRowList; try { @@ -98,7 +98,7 @@ public boolean next() throws SFException { row++; if (nextRowList == null) { - logger.debug("end of result", false); + logger.debug("End of result", false); return false; } @@ -112,7 +112,7 @@ public boolean next() throws SFException { @Override protected Object getObjectInternal(int columnIndex) throws SFException { - logger.debug("public Object getObjectInternal(int columnIndex)", false); + logger.trace("Object getObjectInternal(int columnIndex)", false); if (nextRow == null) { throw new SFException(ErrorCode.ROW_DOES_NOT_EXIST); diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 3437448fe..2232eea00 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -123,7 +123,7 @@ public Array getArray(int columnIndex) throws SFException { @Override public String getString(int columnIndex) throws SFException { - logger.debug("public String getString(int columnIndex)", false); + logger.trace("String getString(int columnIndex)", false); Object obj = 
getObjectInternal(columnIndex); int columnType = resultSetMetaData.getInternalColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -133,21 +133,21 @@ public String getString(int columnIndex) throws SFException { @Override public boolean getBoolean(int columnIndex) throws SFException { - logger.debug("public boolean getBoolean(int columnIndex)", false); + logger.trace("boolean getBoolean(int columnIndex)", false); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getBooleanConverter().getBoolean(getObjectInternal(columnIndex), columnType); } @Override public byte getByte(int columnIndex) throws SFException { - logger.debug("public short getByte(int columnIndex)", false); + logger.trace("short getByte(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); return converters.getNumberConverter().getByte(obj); } @Override public short getShort(int columnIndex) throws SFException { - logger.debug("public short getShort(int columnIndex)", false); + logger.trace("short getShort(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getShort(obj, columnType); @@ -155,7 +155,7 @@ public short getShort(int columnIndex) throws SFException { @Override public int getInt(int columnIndex) throws SFException { - logger.debug("public int getInt(int columnIndex)", false); + logger.trace("int getInt(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getInt(obj, columnType); @@ -163,7 +163,7 @@ public int getInt(int columnIndex) throws SFException { @Override public long getLong(int columnIndex) throws SFException { - logger.debug("public long getLong(int columnIndex)", false); + logger.trace("long getLong(int columnIndex)", false); Object obj = 
getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getLong(obj, columnType); @@ -171,7 +171,7 @@ public long getLong(int columnIndex) throws SFException { @Override public BigDecimal getBigDecimal(int columnIndex) throws SFException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getBigDecimal(obj, columnType); @@ -179,7 +179,7 @@ public BigDecimal getBigDecimal(int columnIndex) throws SFException { @Override public BigDecimal getBigDecimal(int columnIndex, int scale) throws SFException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getBigDecimal(obj, columnType, scale); @@ -187,7 +187,7 @@ public BigDecimal getBigDecimal(int columnIndex, int scale) throws SFException { @Override public Time getTime(int columnIndex) throws SFException { - logger.debug("public Time getTime(int columnIndex)", false); + logger.trace("Time getTime(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -199,7 +199,7 @@ public Time getTime(int columnIndex) throws SFException { @Override public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { - logger.debug("public Timestamp getTimestamp(int columnIndex)", false); + logger.trace("Timestamp getTimestamp(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = 
resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -211,7 +211,7 @@ public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { @Override public float getFloat(int columnIndex) throws SFException { - logger.debug("public float getFloat(int columnIndex)", false); + logger.trace("float getFloat(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getFloat(obj, columnType); @@ -219,7 +219,7 @@ public float getFloat(int columnIndex) throws SFException { @Override public double getDouble(int columnIndex) throws SFException { - logger.debug("public double getDouble(int columnIndex)", false); + logger.trace("double getDouble(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); return converters.getNumberConverter().getDouble(obj, columnType); @@ -227,7 +227,7 @@ public double getDouble(int columnIndex) throws SFException { @Override public byte[] getBytes(int columnIndex) throws SFException { - logger.debug("public byte[] getBytes(int columnIndex)", false); + logger.trace("byte[] getBytes(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); @@ -241,7 +241,7 @@ public Date getDate(int columnIndex) throws SFException { @Override public Date getDate(int columnIndex, TimeZone tz) throws SFException { - logger.debug("public Date getDate(int columnIndex)", false); + logger.trace("Date getDate(int columnIndex)", false); Object obj = getObjectInternal(columnIndex); int columnType = resultSetMetaData.getColumnType(columnIndex); int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex); diff --git 
a/src/main/java/net/snowflake/client/core/SFOCSPException.java b/src/main/java/net/snowflake/client/core/SFOCSPException.java index 6d96bd366..f4eb4360c 100644 --- a/src/main/java/net/snowflake/client/core/SFOCSPException.java +++ b/src/main/java/net/snowflake/client/core/SFOCSPException.java @@ -9,7 +9,7 @@ import net.snowflake.client.log.SFLoggerFactory; public class SFOCSPException extends Throwable { - static final SFLogger logger = SFLoggerFactory.getLogger(SFOCSPException.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFOCSPException.class); private static final long serialVersionUID = 1L; diff --git a/src/main/java/net/snowflake/client/core/SFResultSet.java b/src/main/java/net/snowflake/client/core/SFResultSet.java index b7698cf5d..2716cc780 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFResultSet.java @@ -35,7 +35,7 @@ * @author jhuang */ public class SFResultSet extends SFJsonResultSet { - static final SFLogger logger = SFLoggerFactory.getLogger(SFResultSet.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFResultSet.class); private int columnCount = 0; @@ -309,7 +309,7 @@ public boolean next() throws SFException, SnowflakeSQLException { } return true; } else { - logger.debug("end of result", false); + logger.debug("End of result", false); /* * Here we check if the result has been truncated and throw exception if diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index 30072926f..30a680030 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -25,7 +25,7 @@ /** Snowflake ResultSetMetaData */ public class SFResultSetMetaData { - static final SFLogger logger = SFLoggerFactory.getLogger(SFResultSetMetaData.class); + private static final SFLogger 
logger = SFLoggerFactory.getLogger(SFResultSetMetaData.class); private int columnCount = 0; diff --git a/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java b/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java index bbd1e1c14..aca26a272 100644 --- a/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java +++ b/src/main/java/net/snowflake/client/core/SFSSLConnectionSocketFactory.java @@ -23,7 +23,8 @@ /** Snowflake custom SSLConnectionSocketFactory */ public class SFSSLConnectionSocketFactory extends SSLConnectionSocketFactory { - static final SFLogger logger = SFLoggerFactory.getLogger(SFSSLConnectionSocketFactory.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SFSSLConnectionSocketFactory.class); private static final String SSL_VERSION = "TLSv1.2"; diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index 3d0900940..eeb1bd479 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -45,6 +45,8 @@ import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.log.SFLoggerUtil; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.SqlState; import org.apache.http.HttpHeaders; @@ -58,7 +60,7 @@ public class SFSession extends SFBaseSession { public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION; public static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake"; public static final String SF_HEADER_TOKEN_TAG = "Token"; - static final SFLogger logger = SFLoggerFactory.getLogger(SFSession.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFSession.class); private static final ObjectMapper 
OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); private static final String SF_PATH_SESSION_HEARTBEAT = "/session/heartbeat"; private static final String SF_PATH_QUERY_MONITOR = "/monitoring/queries/"; @@ -94,6 +96,7 @@ public class SFSession extends SFBaseSession { *

Default:300 seconds */ private int loginTimeout = 300; + /** * Amount of milliseconds a user is willing to tolerate for network related issues (e.g. HTTP * 503/504) or database transient issues (e.g. GS not responding) @@ -224,7 +227,7 @@ private JsonNode getQueryMetadata(String queryID) throws SQLException { // Get response as JSON and parse it to get the query status // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", response); + logger.debug("Response: {}", response); int errorCode = jsonNode.path("code").asInt(); // If the error is due to an expired session token, try renewing the session and trying @@ -511,21 +514,22 @@ public boolean containProperty(String key) { * @throws SnowflakeSQLException exception raised from Snowflake components */ public synchronized void open() throws SFException, SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); performSanityCheckOnProperties(); Map connectionPropertiesMap = getConnectionPropertiesMap(); - logger.debug( - "input: server={}, account={}, user={}, password={}, role={}, database={}, schema={}," - + " warehouse={}, validate_default_parameters={}, authenticator={}, ocsp_mode={}," - + " passcode_in_password={}, passcode={}, private_key={}, disable_socks_proxy={}," - + " application={}, app_id={}, app_version={}, login_timeout={}, retry_timeout={}, network_timeout={}," - + " query_timeout={}, tracing={}, private_key_file={}, private_key_file_pwd={}." 
- + " session_parameters: client_store_temporary_credential={}, gzip_disabled={}", + logger.info( + "Opening session with server: {}, account: {}, user: {}, password is {}, role: {}, database: {}, schema: {}," + + " warehouse: {}, validate default parameters: {}, authenticator: {}, ocsp mode: {}," + + " passcode in password: {}, passcode is {}, private key is {}, disable socks proxy: {}," + + " application: {}, app id: {}, app version: {}, login timeout: {}, retry timeout: {}, network timeout: {}," + + " query timeout: {}, tracing: {}, private key file: {}, private key file pwd is {}," + + " session parameters: client store temporary credential: {}, gzip disabled: {}", connectionPropertiesMap.get(SFSessionProperty.SERVER_URL), connectionPropertiesMap.get(SFSessionProperty.ACCOUNT), connectionPropertiesMap.get(SFSessionProperty.USER), - !Strings.isNullOrEmpty((String) connectionPropertiesMap.get(SFSessionProperty.PASSWORD)) - ? "***" - : "(empty)", + SFLoggerUtil.isVariableProvided( + (String) connectionPropertiesMap.get(SFSessionProperty.PASSWORD)), connectionPropertiesMap.get(SFSessionProperty.ROLE), connectionPropertiesMap.get(SFSessionProperty.DATABASE), connectionPropertiesMap.get(SFSessionProperty.SCHEMA), @@ -534,12 +538,9 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.AUTHENTICATOR), getOCSPMode().name(), connectionPropertiesMap.get(SFSessionProperty.PASSCODE_IN_PASSWORD), - !Strings.isNullOrEmpty((String) connectionPropertiesMap.get(SFSessionProperty.PASSCODE)) - ? "***" - : "(empty)", - connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY) != null - ? 
"(not null)" - : "(null)", + SFLoggerUtil.isVariableProvided( + (String) connectionPropertiesMap.get(SFSessionProperty.PASSCODE)), + SFLoggerUtil.isVariableProvided(connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY)), connectionPropertiesMap.get(SFSessionProperty.DISABLE_SOCKS_PROXY), connectionPropertiesMap.get(SFSessionProperty.APPLICATION), connectionPropertiesMap.get(SFSessionProperty.APP_ID), @@ -550,22 +551,20 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.QUERY_TIMEOUT), connectionPropertiesMap.get(SFSessionProperty.TRACING), connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE), - !Strings.isNullOrEmpty( - (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD)) - ? "***" - : "(empty)", + SFLoggerUtil.isVariableProvided( + (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD)), sessionParametersMap.get(CLIENT_STORE_TEMPORARY_CREDENTIAL), connectionPropertiesMap.get(SFSessionProperty.GZIP_DISABLED)); HttpClientSettingsKey httpClientSettingsKey = getHttpClientKey(); logger.debug( - "connection proxy parameters: use_proxy={}, proxy_host={}, proxy_port={}, proxy_user={}," - + " proxy_password={}, non_proxy_hosts={}, proxy_protocol={}", + "Connection proxy parameters: use proxy: {}, proxy host: {}, proxy port: {}, proxy user: {}," + + " proxy password is {}, non proxy hosts: {}, proxy protocol: {}", httpClientSettingsKey.usesProxy(), httpClientSettingsKey.getProxyHost(), httpClientSettingsKey.getProxyPort(), httpClientSettingsKey.getProxyUser(), - !Strings.isNullOrEmpty(httpClientSettingsKey.getProxyPassword()) ? 
"***" : "(empty)", + SFLoggerUtil.isVariableProvided(httpClientSettingsKey.getProxyPassword()), httpClientSettingsKey.getNonProxyHosts(), httpClientSettingsKey.getProxyHttpProtocol()); @@ -707,6 +706,8 @@ public synchronized void open() throws SFException, SnowflakeSQLException { // start heartbeat for this session so that the master token will not expire startHeartbeatForThisSession(); + stopwatch.stop(); + logger.info("Session {} opened in {} ms.", getSessionId(), stopwatch.elapsedMillis()); } /** @@ -767,10 +768,14 @@ boolean isUsernamePasswordMFAAuthenticator() { synchronized void renewSession(String prevSessionToken) throws SFException, SnowflakeSQLException { if (sessionToken != null && !sessionToken.equals(prevSessionToken)) { - logger.debug("not renew session because session token has not been updated.", false); + logger.debug( + "Not renewing session {} because session token has not been updated.", getSessionId()); return; } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + logger.debug("Renewing session {}", getSessionId()); SFLoginInput loginInput = new SFLoginInput(); loginInput .setServerUrl(getServerUrl()) @@ -791,6 +796,9 @@ synchronized void renewSession(String prevSessionToken) sessionToken = loginOutput.getSessionToken(); masterToken = loginOutput.getMasterToken(); + stopwatch.stop(); + logger.debug( + "Session {} renewed successfully in {} ms", getSessionId(), stopwatch.elapsedMillis()); } /** @@ -810,14 +818,17 @@ public String getSessionToken() { */ @Override public void close() throws SFException, SnowflakeSQLException { - logger.debug(" public void close()", false); + logger.debug("Closing session {}", getSessionId()); // stop heartbeat for this session stopHeartbeatForThisSession(); if (isClosed) { + logger.debug("Session {} is already closed", getSessionId()); return; } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); SFLoginInput loginInput = new SFLoginInput(); loginInput @@ -837,6 +848,11 @@ public void close() 
throws SFException, SnowflakeSQLException { qcc.clearCache(); } + stopwatch.stop(); + logger.info( + "Session {} has been successfully closed in {} ms", + getSessionId(), + stopwatch.elapsedMillis()); isClosed = true; } @@ -892,23 +908,26 @@ public Void call() throws SQLException { /** Start heartbeat for this session */ protected void startHeartbeatForThisSession() { if (getEnableHeartbeat() && !Strings.isNullOrEmpty(masterToken)) { - logger.debug("start heartbeat, master token validity: " + masterTokenValidityInSeconds); + logger.debug( + "Session {} start heartbeat, master token validity: {} s", + getSessionId(), + masterTokenValidityInSeconds); HeartbeatBackground.getInstance() .addSession(this, masterTokenValidityInSeconds, heartbeatFrequency); } else { - logger.debug("heartbeat not enabled for the session", false); + logger.debug("Heartbeat not enabled for the session {}", getSessionId()); } } /** Stop heartbeat for this session */ protected void stopHeartbeatForThisSession() { if (getEnableHeartbeat() && !Strings.isNullOrEmpty(masterToken)) { - logger.debug("stop heartbeat", false); + logger.debug("Session {} stop heartbeat", getSessionId()); HeartbeatBackground.getInstance().removeSession(this); } else { - logger.debug("heartbeat not enabled for the session", false); + logger.debug("Heartbeat not enabled for the session {}", getSessionId()); } } @@ -919,12 +938,15 @@ protected void stopHeartbeatForThisSession() { * @throws SQLException exception raised from SQL generic layers */ protected void heartbeat() throws SFException, SQLException { - logger.debug(" public void heartbeat()", false); + logger.debug("Session {} heartbeat", getSessionId()); if (isClosed) { return; } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + HttpPost postRequest = null; String requestId = UUIDUtils.getUUID().toString(); @@ -974,14 +996,14 @@ protected void heartbeat() throws SFException, SQLException { JsonNode rootNode; - logger.debug("connection heartbeat 
response: {}", theResponse); + logger.debug("Connection heartbeat response: {}", theResponse); rootNode = OBJECT_MAPPER.readTree(theResponse); // check the response to see if it is session expiration response if (rootNode != null && (Constants.SESSION_EXPIRED_GS_CODE == rootNode.path("code").asInt())) { - logger.debug("renew session and retry", false); + logger.debug("Renew session and retry", false); this.renewSession(prevSessionToken); retry = true; continue; @@ -997,12 +1019,15 @@ protected void heartbeat() throws SFException, SQLException { throw (SnowflakeSQLException) ex; } - logger.error("unexpected exception", ex); + logger.error("Unexpected exception", ex); throw new SFException( ErrorCode.INTERNAL_ERROR, IncidentUtil.oneLiner("unexpected exception", ex)); } } while (retry); + stopwatch.stop(); + logger.debug( + "Session {} heartbeat successful in {} ms", getSessionId(), stopwatch.elapsedMillis()); } void injectedDelay() { diff --git a/src/main/java/net/snowflake/client/core/SFStatement.java b/src/main/java/net/snowflake/client/core/SFStatement.java index 1dd555bb5..6142b8eb9 100644 --- a/src/main/java/net/snowflake/client/core/SFStatement.java +++ b/src/main/java/net/snowflake/client/core/SFStatement.java @@ -44,7 +44,7 @@ /** Snowflake statement */ public class SFStatement extends SFBaseStatement { - static final SFLogger logger = SFLoggerFactory.getLogger(SFStatement.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFStatement.class); private SFSession session; @@ -80,7 +80,7 @@ public class SFStatement extends SFBaseStatement { private long conservativeMemoryLimit; // in bytes public SFStatement(SFSession session) { - logger.debug(" public SFStatement(SFSession session)", false); + logger.trace("SFStatement(SFSession session)", false); this.session = session; Integer queryTimeout = session == null ? 
null : session.getQueryTimeout(); @@ -91,7 +91,7 @@ public SFStatement(SFSession session) { private void verifyArrowSupport() { if (SnowflakeDriver.isDisableArrowResultFormat()) { logger.debug( - "disable arrow support: {}", SnowflakeDriver.getDisableArrowResultFormatMessage()); + "Disable arrow support: {}", SnowflakeDriver.getDisableArrowResultFormatMessage()); statementParametersMap.put("JDBC_QUERY_RESULT_FORMAT", "JSON"); } } @@ -205,7 +205,7 @@ SFBaseResultSet executeQueryInternal( throws SQLException, SFException { resetState(); - logger.debug("executeQuery: {}", sql); + logger.debug("ExecuteQuery: {}", sql); if (session == null || session.isClosed()) { throw new SQLException("connection is closed"); @@ -771,9 +771,9 @@ public SFBaseResultSet execute( session.injectedDelay(); if (session.getPreparedStatementLogging()) { - logger.info("execute: {}", sql); + logger.info("Execute: {}", sql); } else { - logger.debug("execute: {}", sql); + logger.debug("Execute: {}", sql); } String trimmedSql = sql.trim(); @@ -798,7 +798,7 @@ private SFBaseResultSet executeFileTransfer(String sql) throws SQLException, SFE try { transferAgent.execute(); - logger.debug("setting result set", false); + logger.debug("Setting result set", false); resultSet = (SFFixedViewResultSet) transferAgent.getResultSet(); childResults = Collections.emptyList(); @@ -814,7 +814,7 @@ private SFBaseResultSet executeFileTransfer(String sql) throws SQLException, SFE @Override public void close() { - logger.debug("public void close()", false); + logger.trace("void close()", false); if (requestId != null) { EventUtil.triggerStateTransition( @@ -827,7 +827,7 @@ public void close() { isClosed = true; if (httpRequest != null) { - logger.debug("releasing connection for the http request", false); + logger.debug("Releasing connection for the http request", false); httpRequest.releaseConnection(); httpRequest = null; @@ -841,7 +841,7 @@ public void close() { @Override public void cancel() throws SFException, 
SQLException { - logger.debug("public void cancel()", false); + logger.trace("void cancel()", false); if (canceling.get()) { logger.debug("Query is already cancelled", false); diff --git a/src/main/java/net/snowflake/client/core/SFTrustManager.java b/src/main/java/net/snowflake/client/core/SFTrustManager.java index bd05729c3..171f69e1b 100644 --- a/src/main/java/net/snowflake/client/core/SFTrustManager.java +++ b/src/main/java/net/snowflake/client/core/SFTrustManager.java @@ -140,7 +140,7 @@ public class SFTrustManager extends X509ExtendedTrustManager { /** OCSP response cache file name. Should be identical to other driver's cache file name. */ static final String CACHE_FILE_NAME = "ocsp_response_cache.json"; - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(SFTrustManager.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManager.class); private static final ASN1ObjectIdentifier OIDocsp = new ASN1ObjectIdentifier("1.3.6.1.5.5.7.48.1").intern(); private static final ASN1ObjectIdentifier SHA1RSA = @@ -283,6 +283,9 @@ public class SFTrustManager extends X509ExtendedTrustManager { JsonNode res = fileCacheManager.readCacheFile(); readJsonStoreCache(res); } + + logger.debug( + "Initializing trust manager with OCSP mode: {}, cache file: {}", ocspMode, cacheFile); } /** Deletes OCSP response cache file from disk. */ @@ -322,6 +325,9 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = String.format("%s://%s/retry/%s", url.getProtocol(), url.getHost(), "%s/%s"); } + logger.debug( + "Reset OCSP response cache server URL to: {}", + SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); } } @@ -336,7 +342,7 @@ private static void setOCSPResponseCacheServerURL() { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheUrl; } } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Failed to get environment variable " + SF_OCSP_RESPONSE_CACHE_SERVER_URL + ". 
Ignored", true); } @@ -344,22 +350,23 @@ private static void setOCSPResponseCacheServerURL() { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = String.format("%s/%s", DEFAULT_OCSP_CACHE_HOST, CACHE_FILE_NAME); } + logger.debug("Set OCSP response cache server to: {}", SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); } private static boolean useOCSPResponseCacheServer() { String ocspCacheServerEnabled = systemGetProperty(SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED); if (Boolean.FALSE.toString().equalsIgnoreCase(ocspCacheServerEnabled)) { - LOGGER.debug("No OCSP Response Cache Server is used.", false); + logger.debug("No OCSP Response Cache Server is used.", false); return false; } try { ocspCacheServerEnabled = systemGetEnv(SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED); if (Boolean.FALSE.toString().equalsIgnoreCase(ocspCacheServerEnabled)) { - LOGGER.debug("No OCSP Response Cache Server is used.", false); + logger.debug("No OCSP Response Cache Server is used.", false); return false; } } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Failed to get environment variable " + SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED + ". Ignored", @@ -383,7 +390,7 @@ private static String encodeCacheKey(OcspResponseCacheKey ocsp_cache_key) { CertID cid = new CertID(algo, nameHash, keyHash, snumber); return Base64.encodeBase64String(cid.toASN1Primitive().getEncoded()); } catch (Exception ex) { - LOGGER.debug("Failed to encode cache key to base64 encoded cert id", false); + logger.debug("Failed to encode cache key to base64 encoded cert id", false); } return null; } @@ -423,7 +430,7 @@ private static SFPair> decodeCacheFro JsonNode ocspRespBase64 = elem.getValue(); if (!ocspRespBase64.isArray() || ocspRespBase64.size() != 2) { - LOGGER.debug("Invalid cache file format. Ignored", false); + logger.debug("Invalid cache file format. 
Ignored", false); return null; } long producedAt = ocspRespBase64.get(0).asLong(); @@ -465,14 +472,14 @@ private static ObjectNode encodeCacheToJSON() { } return out; } catch (IOException ex) { - LOGGER.debug("Failed to encode ASN1 object.", false); + logger.debug("Failed to encode ASN1 object.", false); } return null; } private static synchronized void readJsonStoreCache(JsonNode m) { if (m == null || !m.getNodeType().equals(JsonNodeType.OBJECT)) { - LOGGER.debug("Invalid cache file format.", false); + logger.debug("Invalid cache file format.", false); return; } try { @@ -489,7 +496,7 @@ private static synchronized void readJsonStoreCache(JsonNode m) { } } } catch (IOException ex) { - LOGGER.debug("Failed to decode the cache file", false); + logger.debug("Failed to decode the cache file", false); } } @@ -669,7 +676,7 @@ private void checkNewOCSPEndpointAvailability() { try { new_ocsp_ept = systemGetEnv("SF_OCSP_ACTIVATE_NEW_ENDPOINT"); } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Could not get environment variable to check for New OCSP Endpoint Availability", false); new_ocsp_ept = systemGetProperty("net.snowflake.jdbc.ocsp_activate_new_endpoint"); } @@ -788,18 +795,18 @@ void validateRevocationStatus(X509Certificate[] chain, String peerHost) boolean isCached = isCached(pairIssuerSubjectList); if (useOCSPResponseCacheServer() && !isCached) { if (!ocspCacheServer.new_endpoint_enabled) { - LOGGER.debug( + logger.debug( "Downloading OCSP response cache from the server. URL: {}", SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); } else { - LOGGER.debug( + logger.debug( "Downloading OCSP response cache from the server. URL: {}", ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER); } try { readOcspResponseCacheServer(); } catch (SFOCSPException ex) { - LOGGER.debug( + logger.debug( "Error downloading OCSP Response from cache server : {}." 
+ "OCSP Responses will be fetched directly from the CA OCSP" + "Responder ", @@ -900,7 +907,7 @@ private void executeOneRevocationStatusCheck( telemetryData.setCacheHit(true); } } catch (Throwable ex) { - LOGGER.debug( + logger.debug( "Exception occurred while trying to fetch OCSP Response - {}", ex.getMessage()); throw new SFOCSPException( OCSPErrorCode.OCSP_RESPONSE_FETCH_FAILURE, @@ -908,8 +915,8 @@ private void executeOneRevocationStatusCheck( ex); } - LOGGER.debug( - "validating. {}", CertificateIDToString(req.getRequestList()[0].getCertID())); + logger.debug( + "Validating. {}", CertificateIDToString(req.getRequestList()[0].getCertID())); try { validateRevocationStatusMain(pairIssuerSubject, value0.right); success = true; @@ -930,12 +937,12 @@ private void executeOneRevocationStatusCheck( } catch (CertificateException ex) { WAS_CACHE_UPDATED.set(OCSP_RESPONSE_CACHE.remove(keyOcspResponse) != null); if (WAS_CACHE_UPDATED.get()) { - LOGGER.debug("deleting the invalid OCSP cache.", false); + logger.debug("Deleting the invalid OCSP cache.", false); } cause = ex; - LOGGER.debug( - "Retrying {}/{} after sleeping {}(ms)", retry + 1, maxRetryCounter, sleepTime); + logger.debug( + "Retrying {}/{} after sleeping {} ms", retry + 1, maxRetryCounter, sleepTime); try { if (retry + 1 < maxRetryCounter) { Thread.sleep(sleepTime); @@ -950,7 +957,7 @@ private void executeOneRevocationStatusCheck( error = new CertificateException(ex); ocspLog = telemetryData.generateTelemetry(SF_OCSP_EVENT_TYPE_REVOKED_CERTIFICATE_ERROR, error); - LOGGER.error(ocspLog, false); + logger.error(ocspLog, false); throw error; } @@ -960,21 +967,21 @@ private void executeOneRevocationStatusCheck( error = new CertificateException( "Certificate Revocation check failed. Could not retrieve OCSP Response.", cause); - LOGGER.debug(cause.getMessage(), false); + logger.debug(cause.getMessage(), false); } else { error = new CertificateException( "Certificate Revocation check failed. 
Could not retrieve OCSP Response."); - LOGGER.debug(error.getMessage(), false); + logger.debug(error.getMessage(), false); } ocspLog = telemetryData.generateTelemetry(SF_OCSP_EVENT_TYPE_VALIDATION_ERROR, error); if (isOCSPFailOpen()) { // Log includes fail-open warning. - LOGGER.error(generateFailOpenLog(ocspLog), false); + logger.error(generateFailOpenLog(ocspLog), false); } else { // still not success, raise an error. - LOGGER.debug(ocspLog, false); + logger.debug(ocspLog, false); throw error; } } @@ -993,7 +1000,7 @@ private boolean isCached(List> pairIssuerSubjec for (SFPair pairIssuerSubject : pairIssuerSubjectList) { OCSPReq req = createRequest(pairIssuerSubject); CertificateID certificateId = req.getRequestList()[0].getCertID(); - LOGGER.debug(CertificateIDToString(certificateId), false); + logger.debug(CertificateIDToString(certificateId), false); CertID cid = certificateId.toASN1Primitive(); OcspResponseCacheKey k = new OcspResponseCacheKey( @@ -1003,18 +1010,18 @@ private boolean isCached(List> pairIssuerSubjec SFPair res = OCSP_RESPONSE_CACHE.get(k); if (res == null) { - LOGGER.debug("Not all OCSP responses for the certificate is in the cache.", false); + logger.debug("Not all OCSP responses for the certificate is in the cache.", false); isCached = false; break; } else if (currentTimeSecond - CACHE_EXPIRATION_IN_SECONDS > res.left) { - LOGGER.debug("Cache for CertID expired.", false); + logger.debug("Cache for CertID expired.", false); isCached = false; break; } else { try { validateRevocationStatusMain(pairIssuerSubject, res.right); } catch (SFOCSPException ex) { - LOGGER.debug( + logger.debug( "Cache includes invalid OCSPResponse. 
" + "Will download the OCSP cache from Snowflake OCSP server", false); @@ -1023,7 +1030,7 @@ private boolean isCached(List> pairIssuerSubjec } } } catch (IOException ex) { - LOGGER.debug("Failed to encode CertID.", false); + logger.debug("Failed to encode CertID.", false); } return isCached; } @@ -1059,14 +1066,14 @@ private void readOcspResponseCacheServer() throws SFOCSPException { JsonNode m = OBJECT_MAPPER.readTree(out.toByteArray()); out.close(); readJsonStoreCache(m); - LOGGER.debug("Successfully downloaded OCSP cache from the server.", false); + logger.debug("Successfully downloaded OCSP cache from the server.", false); } catch (IOException ex) { - LOGGER.debug( + logger.debug( "Failed to read the OCSP response cache from the server. " + "Server: {}, Err: {}", ocspCacheServerInUse, ex); } catch (URISyntaxException ex) { - LOGGER.debug("Indicate that a string could not be parsed as a URI reference.", false); + logger.debug("Indicate that a string could not be parsed as a URI reference.", false); throw new SFOCSPException( OCSPErrorCode.INVALID_CACHE_SERVER_URL, "Invalid OCSP Cache Server URL used", ex); } finally { @@ -1141,11 +1148,11 @@ private OCSPResp fetchOcspResponse( } else { url = new URL(String.format("%s/%s", ocspUrlStr, urlEncodedOCSPReq)); } - LOGGER.debug("not hit cache. Fetching OCSP response from CA OCSP server. {}", url); + logger.debug("Not hit cache. Fetching OCSP response from CA OCSP server. {}", url); } else { url = new URL(ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL); - LOGGER.debug( - "not hit cache. Fetching OCSP response from Snowflake OCSP Response Fetcher. {}", url); + logger.debug( + "Not hit cache. Fetching OCSP response from Snowflake OCSP Response Fetcher. 
{}", url); } long sleepTime = INITIAL_SLEEPING_TIME_IN_MILLISECONDS; @@ -1180,12 +1187,12 @@ private OCSPResp fetchOcspResponse( break; } } catch (IOException ex) { - LOGGER.debug("Failed to reach out OCSP responder: {}", ex.getMessage()); + logger.debug("Failed to reach out OCSP responder: {}", ex.getMessage()); savedEx = ex; } IOUtils.closeQuietly(response); - LOGGER.debug("Retrying {}/{} after sleeping {}(ms)", retry + 1, maxRetryCounter, sleepTime); + logger.debug("Retrying {}/{} after sleeping {} ms", retry + 1, maxRetryCounter, sleepTime); try { if (retry + 1 < maxRetryCounter) { Thread.sleep(sleepTime); @@ -1245,8 +1252,10 @@ private int getOCSPResponderConnectionTimeout() { private String overrideOCSPURL(String ocspURL) { String ocspURLInput = systemGetProperty(SF_OCSP_TEST_RESPONDER_URL); if (ocspURLInput != null) { + logger.debug("Overriding OCSP url to: {}", ocspURLInput); return ocspURLInput; } + logger.debug("Overriding OCSP url to: {}", ocspURL); return ocspURL; } @@ -1272,7 +1281,7 @@ private void validateRevocationStatusMain( X509CertificateHolder signVerifyCert; checkInvalidSigningCertTestParameter(); if (attachedCerts.length > 0) { - LOGGER.debug( + logger.debug( "Certificate is attached for verification. 
" + "Verifying it by the issuer certificate.", false); @@ -1296,15 +1305,15 @@ private void validateRevocationStatusMain( CONVERTER_X509.getCertificate(signVerifyCert).getTBSCertificate(), signVerifyCert.getSignatureAlgorithm()); } catch (CertificateException ex) { - LOGGER.debug("OCSP Signing Certificate signature verification failed", false); + logger.debug("OCSP Signing Certificate signature verification failed", false); throw new SFOCSPException( OCSPErrorCode.INVALID_CERTIFICATE_SIGNATURE, "OCSP Signing Certificate signature verification failed", ex); } - LOGGER.debug("Verifying OCSP signature by the attached certificate public key.", false); + logger.debug("Verifying OCSP signature by the attached certificate public key.", false); } else { - LOGGER.debug( + logger.debug( "Certificate is NOT attached for verification. " + "Verifying OCSP signature by the issuer public key.", false); @@ -1317,7 +1326,7 @@ private void validateRevocationStatusMain( basicOcspResp.getTBSResponseData(), basicOcspResp.getSignatureAlgorithmID()); } catch (CertificateException ex) { - LOGGER.debug("OCSP signature verification failed", false); + logger.debug("OCSP signature verification failed", false); throw new SFOCSPException( OCSPErrorCode.INVALID_OCSP_RESPONSE_SIGNATURE, "OCSP signature verification failed", @@ -1376,7 +1385,7 @@ private void validateBasicOcspResponse(Date currentTime, BasicOCSPResp basicOcsp Date thisUpdate = singleResps.getThisUpdate(); Date nextUpdate = singleResps.getNextUpdate(); - LOGGER.debug( + logger.debug( "Current Time: {}, This Update: {}, Next Update: {}", currentTime, thisUpdate, @@ -1392,7 +1401,7 @@ private void validateBasicOcspResponse(Date currentTime, BasicOCSPResp basicOcsp DATE_FORMAT_UTC.format(nextUpdate))); } } - LOGGER.debug("OK. Verified the certificate revocation status.", false); + logger.debug("OK. 
Verified the certificate revocation status.", false); } private void checkCertUnknownTestParameter() throws SFOCSPException { @@ -1516,7 +1525,7 @@ private String ocspResponseToB64(OCSPResp ocspResp) { try { return Base64.encodeBase64String(ocspResp.getEncoded()); } catch (Throwable ex) { - LOGGER.debug("Could not convert OCSP Response to Base64", false); + logger.debug("Could not convert OCSP Response to Base64", false); return null; } } @@ -1525,7 +1534,7 @@ private OCSPResp b64ToOCSPResp(String ocspRespB64) { try { return new OCSPResp(Base64.decodeBase64(ocspRespB64)); } catch (Throwable ex) { - LOGGER.debug("Could not cover OCSP Response from Base64 to OCSPResp object", false); + logger.debug("Could not cover OCSP Response from Base64 to OCSPResp object", false); return null; } } @@ -1624,7 +1633,7 @@ public byte[] getDigest() { String.format( "Failed to instantiate the algorithm: %s. err=%s", ALGORITHM_SHA1_NAME, ex.getMessage()); - LOGGER.error(errMsg, false); + logger.error(errMsg, false); throw new RuntimeException(errMsg); } } diff --git a/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java b/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java index 144caefec..5030e4603 100644 --- a/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java +++ b/src/main/java/net/snowflake/client/core/SecureStorageAppleManager.java @@ -22,6 +22,7 @@ private SecureStorageAppleManager() { } public static SecureStorageAppleManager builder() { + logger.info("Using Apple Keychain as a token cache storage"); return new SecureStorageAppleManager(); } diff --git a/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java b/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java index e1f352187..7663147b3 100644 --- a/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java +++ b/src/main/java/net/snowflake/client/core/SecureStorageLinuxManager.java @@ -41,6 +41,8 @@ private 
SecureStorageLinuxManager() { .setCacheExpirationInSeconds(CACHE_EXPIRATION_IN_SECONDS) .setCacheFileLockExpirationInSeconds(CACHE_FILE_LOCK_EXPIRATION_IN_SECONDS) .build(); + logger.info( + "Using temporary file: {} as a token cache storage", fileCacheManager.getCacheFilePath()); } private static class SecureStorageLinuxManagerHolder { diff --git a/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java b/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java index f43952023..f38c1570b 100644 --- a/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java +++ b/src/main/java/net/snowflake/client/core/SecureStorageWindowsManager.java @@ -33,6 +33,7 @@ private SecureStorageWindowsManager() { } public static SecureStorageWindowsManager builder() { + logger.info("Using Windows Credential Manager as a token cache storage"); return new SecureStorageWindowsManager(); } diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index ec856112d..189b5137f 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -37,6 +37,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.ClientAuthnParameter; import net.snowflake.common.core.SqlState; @@ -343,6 +344,8 @@ private static SFLoginOutput newSession( Map connectionPropertiesMap, String tracingLevel) throws SFException, SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); // build URL for login request URIBuilder uriBuilder; URI loginURI; @@ -369,6 +372,18 @@ private static SFLoginOutput newSession( final ClientAuthnDTO.AuthenticatorType authenticatorType = 
getAuthenticator(loginInput); Map commonParams; + String oktaUsername = loginInput.getOKTAUserName(); + logger.debug( + "Authenticating user: {}, host: {} with authentication method: {}." + + " Login timeout: {} s, auth timeout: {} s, OCSP mode: {}{}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + authenticatorType, + loginInput.getLoginTimeout(), + loginInput.getAuthTimeout(), + loginInput.getOCSPMode(), + Strings.isNullOrEmpty(oktaUsername) ? "" : ", okta username: " + oktaUsername); + try { uriBuilder = new URIBuilder(loginInput.getServerUrl()); @@ -639,6 +654,8 @@ private static SFLoginOutput newSession( int leftsocketTimeout = loginInput.getSocketTimeoutInMillis(); int retryCount = 0; + Exception lastRestException = null; + while (true) { try { theString = @@ -650,6 +667,7 @@ private static SFLoginOutput newSession( retryCount, loginInput.getHttpClientSettingsKey()); } catch (SnowflakeSQLException ex) { + lastRestException = ex; if (ex.getErrorCode() == ErrorCode.AUTHENTICATOR_REQUEST_TIMEOUT.getMessageCode()) { if (authenticatorType == ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT || authenticatorType == ClientAuthnDTO.AuthenticatorType.OKTA) { @@ -714,16 +732,42 @@ private static SFLoginOutput newSession( } else { throw ex; } + } catch (Exception ex) { + lastRestException = ex; } break; } + if (theString == null) { + if (lastRestException != null) { + logger.error( + "Failed to open new session for user: {}, host: {}. Error: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + lastRestException); + throw lastRestException; + } else { + SnowflakeSQLException exception = + new SnowflakeSQLException( + NO_QUERY_ID, + "empty authentication response", + SqlState.CONNECTION_EXCEPTION, + ErrorCode.CONNECTION_ERROR.getMessageCode()); + logger.error( + "Failed to open new session for user: {}, host: {}. 
Error: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + exception); + throw exception; + } + } + // general method, same as with data binding JsonNode jsonNode = mapper.readTree(theString); // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", theString); + logger.debug("Response: {}", theString); int errorCode = jsonNode.path("code").asInt(); if (errorCode == Constants.ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE) { @@ -741,9 +785,16 @@ private static SFLoginOutput newSession( deleteMfaTokenCache(loginInput.getHostFromServerUrl(), loginInput.getUserName()); } + String errorMessage = jsonNode.path("message").asText(); + + logger.error( + "Failed to open new session for user: {}, host: {}. Error: {}", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + errorMessage); throw new SnowflakeSQLException( NO_QUERY_ID, - jsonNode.path("message").asText(), + errorMessage, SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION, errorCode); } @@ -769,7 +820,7 @@ private static SFLoginOutput newSession( commonParams = SessionUtil.getCommonParams(jsonNode.path("data").path("parameters")); if (serverVersion != null) { - logger.debug("server version = {}", serverVersion); + logger.debug("Server version: {}", serverVersion); if (serverVersion.indexOf(" ") > 0) { databaseVersion = serverVersion.substring(0, serverVersion.indexOf(" ")); @@ -777,7 +828,7 @@ private static SFLoginOutput newSession( databaseVersion = serverVersion; } } else { - logger.debug("server version is null", false); + logger.debug("Server version is null", false); } if (databaseVersion != null) { @@ -800,13 +851,13 @@ private static SFLoginOutput newSession( if (!jsonNode.path("data").path("newClientForUpgrade").isNull()) { newClientForUpgrade = jsonNode.path("data").path("newClientForUpgrade").asText(); - logger.debug("new client: {}", newClientForUpgrade); + logger.debug("New client: {}", newClientForUpgrade); } // get 
health check interval and adjust network timeouts if different int healthCheckIntervalFromGS = jsonNode.path("data").path("healthCheckInterval").asInt(); - logger.debug("health check interval = {}", healthCheckIntervalFromGS); + logger.debug("Health check interval: {}", healthCheckIntervalFromGS); if (healthCheckIntervalFromGS > 0 && healthCheckIntervalFromGS != healthCheckInterval) { // add health check interval to socket timeout @@ -821,9 +872,9 @@ private static SFLoginOutput newSession( HttpUtil.setRequestConfig(requestConfig); - logger.debug("adjusted connection timeout to = {}", loginInput.getConnectionTimeout()); + logger.debug("Adjusted connection timeout to: {}", loginInput.getConnectionTimeout()); - logger.debug("adjusted socket timeout to = {}", httpClientSocketTimeout); + logger.debug("Adjusted socket timeout to: {}", httpClientSocketTimeout); } } catch (SnowflakeSQLException ex) { throw ex; // must catch here to avoid Throwable to get the exception @@ -873,6 +924,13 @@ && asBoolean(loginInput.getSessionParameters().get(CLIENT_STORE_TEMPORARY_CREDEN CredentialManager.getInstance().writeMfaToken(loginInput, ret); } + stopwatch.stop(); + logger.debug( + "User: {}, host: {} with authentication method: {} authenticated successfully in {} ms", + loginInput.getUserName(), + loginInput.getHostFromServerUrl(), + authenticatorType, + stopwatch.elapsedMillis()); return ret; } @@ -980,7 +1038,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT setServiceNameHeader(loginInput, postRequest); logger.debug( - "request type: {}, old session token: {}, " + "master token: {}", + "Request type: {}, old session token: {}, " + "master token: {}", requestType.value, (ArgSupplier) () -> loginInput.getSessionToken() != null ? "******" : null, (ArgSupplier) () -> loginInput.getMasterToken() != null ? 
"******" : null); @@ -999,7 +1057,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", theString); + logger.debug("Response: {}", theString); String errorCode = jsonNode.path("code").asText(); String message = jsonNode.path("message").asText(); @@ -1037,7 +1095,7 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT * @throws SFException if failed to close session */ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeSQLException { - logger.debug(" public void close() throws SFException"); + logger.trace("void close() throws SFException"); // assert the following inputs are valid AssertUtil.assertTrue( @@ -1089,15 +1147,15 @@ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeS JsonNode rootNode; - logger.debug("connection close response: {}", theString); + logger.debug("Connection close response: {}", theString); rootNode = mapper.readTree(theString); SnowflakeUtil.checkErrorAndThrowException(rootNode); } catch (URISyntaxException ex) { - throw new RuntimeException("unexpected URI syntax exception", ex); + throw new RuntimeException("Unexpected URI syntax exception", ex); } catch (IOException ex) { - logger.error("unexpected IO exception for: " + postRequest, ex); + logger.error("Unexpected IO exception for: " + postRequest, ex); } catch (SnowflakeSQLException ex) { // ignore exceptions for session expiration exceptions and for // sessions that no longer exist @@ -1234,7 +1292,7 @@ private static String federatedFlowStep3(SFLoginInput loginInput, String tokenUr null, loginInput.getHttpClientSettingsKey()); - logger.debug("user is authenticated against {}.", loginInput.getAuthenticator()); + logger.debug("User is authenticated against {}.", loginInput.getAuthenticator()); // session token is in the data field of the returned json response 
final JsonNode jsonNode = mapper.readTree(idpResponse); @@ -1322,12 +1380,12 @@ private static JsonNode federatedFlowStep1(SFLoginInput loginInput) throws Snowf loginInput.getSocketTimeoutInMillis(), 0, loginInput.getHttpClientSettingsKey()); - logger.debug("authenticator-request response: {}", gsResponse); + logger.debug("Authenticator-request response: {}", gsResponse); JsonNode jsonNode = mapper.readTree(gsResponse); // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", gsResponse); + logger.debug("Response: {}", gsResponse); int errorCode = jsonNode.path("code").asInt(); throw new SnowflakeSQLException( @@ -1465,7 +1523,7 @@ public static Map getCommonParams(JsonNode paramsNode) { // What type of value is it and what's the value? if (!child.hasNonNull("value")) { - logger.debug("No value found for Common Parameter {}", child.path("name").asText()); + logger.debug("No value found for Common Parameter: {}", child.path("name").asText()); continue; } @@ -1500,7 +1558,7 @@ static void updateSfDriverParamValues(Map parameters, SFBaseSess session.setCommonParameters(parameters); } for (Map.Entry entry : parameters.entrySet()) { - logger.debug("processing parameter {}", entry.getKey()); + logger.debug("Processing parameter {}", entry.getKey()); if ("CLIENT_DISABLE_INCIDENTS".equalsIgnoreCase(entry.getKey())) { SnowflakeDriver.setDisableIncidents((Boolean) entry.getValue()); diff --git a/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java b/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java index da7807b69..9db2f0589 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java +++ b/src/main/java/net/snowflake/client/core/SessionUtilExternalBrowser.java @@ -46,7 +46,8 @@ * user can type IdP username and password. 4. Return token and proof key to the GS to gain access. 
*/ public class SessionUtilExternalBrowser { - static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilExternalBrowser.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SessionUtilExternalBrowser.class); public interface AuthExternalBrowserHandlers { // build a HTTP post object @@ -202,14 +203,14 @@ private String getSSOUrl(int port) throws SFException, SnowflakeSQLException { 0, loginInput.getHttpClientSettingsKey()); - logger.debug("authenticator-request response: {}", theString); + logger.debug("Authenticator-request response: {}", theString); // general method, same as with data binding JsonNode jsonNode = mapper.readTree(theString); // check the success field first if (!jsonNode.path("success").asBoolean()) { - logger.debug("response = {}", theString); + logger.debug("Response: {}", theString); String errorCode = jsonNode.path("code").asText(); throw new SnowflakeSQLException( SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION, @@ -240,7 +241,7 @@ private String getConsoleLoginUrl(int port) throws SFException { String consoleLoginUrl = consoleLoginUriBuilder.build().toURL().toString(); - logger.debug("console login url: {}", consoleLoginUrl); + logger.debug("Console login url: {}", consoleLoginUrl); return consoleLoginUrl; } catch (Exception ex) { @@ -266,7 +267,7 @@ void authenticate() throws SFException, SnowflakeSQLException { try { // main procedure int port = this.getLocalPort(ssocket); - logger.debug("Listening localhost:{}", port); + logger.debug("Listening localhost: {}", port); if (loginInput.getDisableConsoleLogin()) { // Access GS to get SSO URL diff --git a/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java b/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java index 2bef91eda..ad63ea603 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java +++ b/src/main/java/net/snowflake/client/core/SessionUtilKeyPair.java @@ -17,6 +17,7 @@ import java.io.IOException; import 
java.io.StringReader; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.security.InvalidKeyException; import java.security.KeyFactory; @@ -52,7 +53,7 @@ /** Class used to compute jwt token for key pair authentication Created by hyu on 1/16/18. */ class SessionUtilKeyPair { - static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilKeyPair.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtilKeyPair.class); // user name in upper case private final String userName; @@ -147,7 +148,6 @@ private SecretKeyFactory getSecretKeyFactory(String algorithm) throws NoSuchAlgo private PrivateKey extractPrivateKeyFromFile(String privateKeyFile, String privateKeyFilePwd) throws SFException { - if (isBouncyCastleProviderEnabled) { try { return extractPrivateKeyWithBouncyCastle(privateKeyFile, privateKeyFilePwd); @@ -234,8 +234,11 @@ public static int getTimeout() { private PrivateKey extractPrivateKeyWithBouncyCastle( String privateKeyFile, String privateKeyFilePwd) throws IOException, PKCSException, OperatorCreationException { + Path privKeyPath = Paths.get(privateKeyFile); + FileUtil.logFileUsage( + privKeyPath, "Extract private key from file using Bouncy Castle provider", true); PrivateKeyInfo privateKeyInfo = null; - PEMParser pemParser = new PEMParser(new FileReader(Paths.get(privateKeyFile).toFile())); + PEMParser pemParser = new PEMParser(new FileReader(privKeyPath.toFile())); Object pemObject = pemParser.readObject(); if (pemObject instanceof PKCS8EncryptedPrivateKeyInfo) { // Handle the case where the private key is encrypted. 
@@ -263,7 +266,9 @@ private PrivateKey extractPrivateKeyWithBouncyCastle( private PrivateKey extractPrivateKeyWithJdk(String privateKeyFile, String privateKeyFilePwd) throws IOException, NoSuchAlgorithmException, InvalidKeySpecException, InvalidKeyException { - String privateKeyContent = new String(Files.readAllBytes(Paths.get(privateKeyFile))); + Path privKeyPath = Paths.get(privateKeyFile); + FileUtil.logFileUsage(privKeyPath, "Extract private key from file using Jdk", true); + String privateKeyContent = new String(Files.readAllBytes(privKeyPath)); if (Strings.isNullOrEmpty(privateKeyFilePwd)) { // unencrypted private key file return generatePrivateKey(false, privateKeyContent, privateKeyFilePwd); diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java index a02fb4d7b..3566ea225 100644 --- a/src/main/java/net/snowflake/client/core/StmtUtil.java +++ b/src/main/java/net/snowflake/client/core/StmtUtil.java @@ -61,7 +61,7 @@ public class StmtUtil { // twice as much as our default socket timeout static final int SF_CANCELING_RETRY_TIMEOUT_IN_MILLIS = 600000; // 10 min - static final SFLogger logger = SFLoggerFactory.getLogger(StmtUtil.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(StmtUtil.class); /** Input for executing a statement on server */ static class StmtInput { @@ -310,7 +310,7 @@ public static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec // don't need to execute the query again if (stmtInput.retry && stmtInput.prevGetResultURL != null) { logger.debug( - "retrying statement execution with get result URL: {}", stmtInput.prevGetResultURL); + "Retrying statement execution with get result URL: {}", stmtInput.prevGetResultURL); } else { URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl); @@ -522,11 +522,11 @@ else if (stmtInput.asyncExec // simulate client pause before trying to fetch result so that // we can test query behavior related to 
disconnected client if (stmtInput.injectClientPause != 0) { - logger.debug("inject client pause for {} seconds", stmtInput.injectClientPause); + logger.debug("Inject client pause for {} seconds", stmtInput.injectClientPause); try { Thread.sleep(stmtInput.injectClientPause * 1000); } catch (InterruptedException ex) { - logger.debug("exception encountered while injecting pause", false); + logger.debug("Exception encountered while injecting pause", false); } } } @@ -606,7 +606,7 @@ protected static String getQueryResult( protected static String getQueryResult(String getResultPath, StmtInput stmtInput) throws SFException, SnowflakeSQLException { HttpGet httpRequest = null; - logger.debug("get query result: {}", getResultPath); + logger.debug("Get query result: {}", getResultPath); try { URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl); @@ -798,7 +798,7 @@ public static SFStatementType checkStageManageCommand(String sql) { // skip commenting prefixed with // while (trimmedSql.startsWith("//")) { if (logger.isDebugEnabled()) { - logger.debug("skipping // comments in: \n{}", trimmedSql); + logger.debug("Skipping // comments in: \n{}", trimmedSql); } if (trimmedSql.indexOf('\n') > 0) { diff --git a/src/main/java/net/snowflake/client/core/URLUtil.java b/src/main/java/net/snowflake/client/core/URLUtil.java index cd4129e4c..56fa0f266 100644 --- a/src/main/java/net/snowflake/client/core/URLUtil.java +++ b/src/main/java/net/snowflake/client/core/URLUtil.java @@ -3,8 +3,11 @@ */ package net.snowflake.client.core; +import static net.snowflake.client.core.SFSession.SF_QUERY_REQUEST_ID; + import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; +import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; @@ -15,10 +18,12 @@ import javax.annotation.Nullable; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import org.apache.http.NameValuePair; +import 
org.apache.http.client.utils.URLEncodedUtils; public class URLUtil { - static final SFLogger logger = SFLoggerFactory.getLogger(URLUtil.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(URLUtil.class); static final String validURLPattern = "^http(s?)\\:\\/\\/[0-9a-zA-Z]([-.\\w]*[0-9a-zA-Z@:])*(:(0-9)*)*(\\/?)([a-zA-Z0-9\\-\\.\\?\\,\\&\\(\\)\\/\\\\\\+&%\\$#_=@]*)?$"; static final Pattern pattern = Pattern.compile(validURLPattern); @@ -53,4 +58,20 @@ public static String urlEncode(String target) throws UnsupportedEncodingExceptio } return encodedTarget; } + + @SnowflakeJdbcInternalApi + public static String getRequestId(URI uri) { + return URLEncodedUtils.parse(uri, StandardCharsets.UTF_8).stream() + .filter(p -> p.getName().equals(SF_QUERY_REQUEST_ID)) + .findFirst() + .map(NameValuePair::getValue) + .orElse(null); + } + + @SnowflakeJdbcInternalApi + public static String getRequestIdLogStr(URI uri) { + String requestId = getRequestId(uri); + + return requestId == null ? 
"" : "[requestId=" + requestId + "] "; + } } diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java index 39f94235b..3ee556bb4 100644 --- a/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java +++ b/src/main/java/net/snowflake/client/jdbc/DefaultResultStreamProvider.java @@ -11,6 +11,8 @@ import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.log.ArgSupplier; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; import net.snowflake.common.core.SqlState; import org.apache.http.Header; @@ -21,6 +23,8 @@ import org.apache.http.impl.client.CloseableHttpClient; public class DefaultResultStreamProvider implements ResultStreamProvider { + private static final SFLogger logger = + SFLoggerFactory.getLogger(DefaultResultStreamProvider.class); // SSE-C algorithm header private static final String SSE_C_ALGORITHM = "x-amz-server-side-encryption-customer-algorithm"; @@ -53,16 +57,15 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception * means failure. */ if (response == null || response.getStatusLine().getStatusCode() != 200) { - SnowflakeResultSetSerializableV1.logger.error( - "Error fetching chunk from: {}", context.getResultChunk().getScrubbedUrl()); + logger.error("Error fetching chunk from: {}", context.getResultChunk().getScrubbedUrl()); - SnowflakeUtil.logResponseDetails(response, SnowflakeResultSetSerializableV1.logger); + SnowflakeUtil.logResponseDetails(response, logger); throw new SnowflakeSQLException( SqlState.IO_ERROR, ErrorCode.NETWORK_ERROR.getMessageCode(), "Error encountered when downloading a result chunk: HTTP " - + "status=" + + "status: " + ((response != null) ? 
response.getStatusLine().getStatusCode() : "null response")); } @@ -72,7 +75,7 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception // read the chunk data inputStream = detectContentEncodingAndGetInputStream(response, entity.getContent()); } catch (Exception ex) { - SnowflakeResultSetSerializableV1.logger.error("Failed to decompress data: {}", response); + logger.error("Failed to decompress data: {}", response); throw new SnowflakeSQLLoggedException( context.getSession(), @@ -82,7 +85,7 @@ public InputStream getInputStream(ChunkDownloadContext context) throws Exception } // trace the response if requested - SnowflakeResultSetSerializableV1.logger.debug("Json response: {}", response); + logger.debug("Json response: {}", response); return inputStream; } @@ -94,8 +97,7 @@ private HttpResponse getResultChunk(ChunkDownloadContext context) throws Excepti if (context.getChunkHeadersMap() != null && context.getChunkHeadersMap().size() != 0) { for (Map.Entry entry : context.getChunkHeadersMap().entrySet()) { - SnowflakeResultSetSerializableV1.logger.debug( - "Adding header key={}, value={}", entry.getKey(), entry.getValue()); + logger.debug("Adding header key: {}", entry.getKey()); httpRequest.addHeader(entry.getKey(), entry.getValue()); } } @@ -103,11 +105,11 @@ private HttpResponse getResultChunk(ChunkDownloadContext context) throws Excepti else if (context.getQrmk() != null) { httpRequest.addHeader(SSE_C_ALGORITHM, SSE_C_AES); httpRequest.addHeader(SSE_C_KEY, context.getQrmk()); - SnowflakeResultSetSerializableV1.logger.debug("Adding SSE-C headers", false); + logger.debug("Adding SSE-C headers", false); } - SnowflakeResultSetSerializableV1.logger.debug( - "Thread {} Fetching result #chunk{}: {}", + logger.debug( + "Thread {} Fetching result chunk#{}: {}", Thread.currentThread().getId(), context.getChunkIndex(), context.getResultChunk().getScrubbedUrl()); @@ -133,8 +135,8 @@ else if (context.getQrmk() != null) { true, // no retry on http 
request new ExecTimeTelemetryData()); - SnowflakeResultSetSerializableV1.logger.debug( - "Thread {} Call #chunk{} returned for URL: {}, response={}", + logger.debug( + "Thread {} Call chunk#{} returned for URL: {}, response: {}", Thread.currentThread().getId(), context.getChunkIndex(), (ArgSupplier) () -> SecretDetector.maskSASToken(context.getResultChunk().getUrl()), diff --git a/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java b/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java index 2930188eb..14fb7dbdc 100644 --- a/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java +++ b/src/main/java/net/snowflake/client/jdbc/FileBackedOutputStream.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import net.snowflake.client.core.FileUtil; /** * An {@link OutputStream} that starts buffering to a byte array, but switches to file buffering @@ -129,6 +130,7 @@ public ByteSource asByteSource() { private synchronized InputStream openInputStream() throws IOException { if (file != null) { + FileUtil.logFileUsage(file, "Data buffering stream", false); return new FileInputStream(file); } else { return new ByteArrayInputStream(memory.getBuffer(), 0, memory.getCount()); diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java index fa7826664..e2d48fa82 100644 --- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java +++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java @@ -6,6 +6,7 @@ import java.io.PrintWriter; import java.io.StringWriter; +import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLKeyException; @@ -17,6 +18,7 @@ import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFOCSPException; import net.snowflake.client.core.SessionUtil; +import net.snowflake.client.core.URLUtil; 
import net.snowflake.client.core.UUIDUtils; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.ArgSupplier; @@ -24,6 +26,7 @@ import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.DecorrelatedJitterBackoff; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.SqlState; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpRequestBase; @@ -123,9 +126,32 @@ public static CloseableHttpResponse execute( boolean noRetry, ExecTimeTelemetryData execTimeData) throws SnowflakeSQLException { - CloseableHttpResponse response = null; + Stopwatch stopwatch = null; + + if (logger.isDebugEnabled()) { + stopwatch = new Stopwatch(); + stopwatch.start(); + } String requestInfoScrubbed = SecretDetector.maskSASToken(httpRequest.toString()); + String requestIdStr = URLUtil.getRequestIdLogStr(httpRequest.getURI()); + logger.debug( + "{}Executing rest request: {}, retry timeout: {}, socket timeout: {}, max retries: {}," + + " inject socket timeout: {}, canceling: {}, without cookies: {}, include retry parameters: {}," + + " include request guid: {}, retry http 403: {}, no retry: {}", + requestIdStr, + requestInfoScrubbed, + retryTimeout, + socketTimeout, + maxRetries, + injectSocketTimeout, + canceling, + withoutCookies, + includeRetryParameters, + includeRequestGuid, + retryHTTP403, + noRetry); + CloseableHttpResponse response = null; // time the client started attempting to submit request final long startTime = System.currentTimeMillis(); @@ -140,6 +166,10 @@ public static CloseableHttpResponse execute( // Used to indicate that this is a login/auth request and will be using the new retry strategy. boolean isLoginRequest = SessionUtil.isNewRetryStrategyRequest(httpRequest); + if (isLoginRequest) { + logger.debug("{}Request is a login/auth request. 
Using new retry strategy", requestIdStr); + } + // total elapsed time due to transient issues. long elapsedMilliForTransientIssues = 0; @@ -168,9 +198,14 @@ public static CloseableHttpResponse execute( // try request till we get a good response or retry timeout while (true) { - logger.debug("Retry count: {}", retryCount); - logger.debug("Attempting request: {}", requestInfoScrubbed); - + logger.debug( + "{}Retry count: {}, max retries: {}, retry timeout: {} s, backoff: {} ms. Attempting request: {}", + requestIdStr, + retryCount, + maxRetries, + retryTimeout, + backoffInMilli, + requestInfoScrubbed); try { // update start time startTimePerRequest = System.currentTimeMillis(); @@ -184,7 +219,8 @@ public static CloseableHttpResponse execute( if (injectSocketTimeout != 0 && retryCount == 0) { // test code path logger.debug( - "Injecting socket timeout by setting " + "socket timeout to {} millisecond ", + "{}Injecting socket timeout by setting socket timeout to {} ms", + requestIdStr, injectSocketTimeout); httpRequest.setConfig( HttpUtil.getDefaultRequestConfigWithSocketTimeout( @@ -203,6 +239,7 @@ public static CloseableHttpResponse execute( // If HTAP if ("true".equalsIgnoreCase(System.getenv("HTAP_SIMULATION")) && builder.getPathSegments().contains("query-request")) { + logger.debug("{}Setting htap simulation", requestIdStr); builder.setParameter("target", "htap_simulation"); } if (includeRetryParameters && retryCount > 0) { @@ -215,14 +252,18 @@ public static CloseableHttpResponse execute( // so that it can be renewed in time and pass it to the http request configuration. 
if (authTimeout > 0) { int requestSocketAndConnectTimeout = (int) authTimeout * 1000; + logger.debug( + "{}Setting auth timeout as the socket timeout: {} s", requestIdStr, authTimeout); httpRequest.setConfig( HttpUtil.getDefaultRequestConfigWithSocketAndConnectTimeout( requestSocketAndConnectTimeout, withoutCookies)); } if (includeRequestGuid) { + UUID guid = UUIDUtils.getUUID(); + logger.debug("{}Request {} guid: {}", requestIdStr, requestInfoScrubbed, guid.toString()); // Add request_guid for better tracing - builder.setParameter(SF_REQUEST_GUID, UUIDUtils.getUUID().toString()); + builder.setParameter(SF_REQUEST_GUID, guid.toString()); } httpRequest.setURI(builder.build()); @@ -247,17 +288,20 @@ public static CloseableHttpResponse execute( } catch (Exception ex) { savedEx = ex; - // if the request took more than 5 min (socket timeout) log an error - if ((System.currentTimeMillis() - startTimePerRequest) - > HttpUtil.getSocketTimeout().toMillis()) { + // if the request took more than socket timeout log an error + long currentMillis = System.currentTimeMillis(); + if ((currentMillis - startTimePerRequest) > HttpUtil.getSocketTimeout().toMillis()) { logger.warn( - "HTTP request took longer than 5 min: {} sec", - (System.currentTimeMillis() - startTimePerRequest) / 1000); + "{}HTTP request took longer than socket timeout {} ms: {} ms", + requestIdStr, + HttpUtil.getSocketTimeout().toMillis(), + (currentMillis - startTimePerRequest)); } StringWriter sw = new StringWriter(); savedEx.printStackTrace(new PrintWriter(sw)); logger.debug( - "Exception encountered for: {}, {}, {}", + "{}Exception encountered for: {}, {}, {}", + requestIdStr, requestInfoScrubbed, ex.getLocalizedMessage(), (ArgSupplier) sw::toString); @@ -281,7 +325,11 @@ public static CloseableHttpResponse execute( || isNonRetryableHTTPCode(response, retryHTTP403)) { String msg = "Unknown cause"; if (response != null) { - logger.debug("HTTP response code: {}", response.getStatusLine().getStatusCode()); + 
logger.debug( + "{}HTTP response code for request {}: {}", + requestIdStr, + requestInfoScrubbed, + response.getStatusLine().getStatusCode()); msg = "StatusCode: " + response.getStatusLine().getStatusCode() @@ -295,13 +343,16 @@ public static CloseableHttpResponse execute( if (response == null || response.getStatusLine().getStatusCode() != 200) { logger.debug( - "Error response not retryable, " + msg + ", request: {}", requestInfoScrubbed); + "{}Error response not retryable, " + msg + ", request: {}", + requestIdStr, + requestInfoScrubbed); EventUtil.triggerBasicEvent( - Event.EventType.NETWORK_ERROR, msg + ", Request: " + httpRequest.toString(), false); + Event.EventType.NETWORK_ERROR, msg + ", Request: " + httpRequest, false); } breakRetryReason = "status code does not need retry"; if (noRetry) { - logger.debug("HTTP retry disabled for this request. noRetry: {}", noRetry); + logger.debug( + "{}HTTP retry disabled for this request. noRetry: {}", requestIdStr, noRetry); breakRetryReason = "retry is disabled"; } @@ -311,16 +362,18 @@ public static CloseableHttpResponse execute( } else { if (response != null) { logger.debug( - "HTTP response not ok: status code: {}, request: {}", + "{}HTTP response not ok: status code: {}, request: {}", + requestIdStr, response.getStatusLine().getStatusCode(), requestInfoScrubbed); } else if (savedEx != null) { logger.debug( - "Null response for cause: {}, request: {}", + "{}Null response for cause: {}, request: {}", + requestIdStr, getRootCause(savedEx).getMessage(), requestInfoScrubbed); } else { - logger.debug("Null response for request: {}", requestInfoScrubbed); + logger.debug("{}Null response for request: {}", requestIdStr, requestInfoScrubbed); } // get the elapsed time for the last request @@ -331,7 +384,7 @@ public static CloseableHttpResponse execute( // check canceling flag if (canceling != null && canceling.get()) { - logger.debug("Stop retrying since canceling is requested", false); + logger.debug("{}Stop retrying since 
canceling is requested", requestIdStr); breakRetryReason = "canceling is requested"; break; } @@ -349,9 +402,10 @@ public static CloseableHttpResponse execute( if (elapsedMilliForTransientIssues > retryTimeoutInMilliseconds && retryCount >= MIN_RETRY_COUNT) { logger.error( - "Stop retrying since elapsed time due to network " + "{}Stop retrying since elapsed time due to network " + "issues has reached timeout. " - + "Elapsed: {}(ms), timeout: {}(ms)", + + "Elapsed: {} ms, timeout: {} ms", + requestIdStr, elapsedMilliForTransientIssues, retryTimeoutInMilliseconds); @@ -362,7 +416,10 @@ public static CloseableHttpResponse execute( if (maxRetries > 0 && retryCount > maxRetries) { // check for max retries. logger.error( - "Stop retrying as max retries have been reached! max retry count: {}", maxRetries); + "{}Stop retrying as max retries have been reached for request: {}! Max retry count: {}", + requestIdStr, + requestInfoScrubbed, + maxRetries); breakRetryReason = "max retries reached"; breakRetryEventName = "HttpRequestRetryLimitExceeded"; } @@ -433,7 +490,11 @@ public static CloseableHttpResponse execute( // sleep for backoff - elapsed amount of time if (backoffInMilli > elapsedMilliForLastCall) { try { - logger.debug("sleeping in {}(ms)", backoffInMilli); + logger.debug( + "{}Retry request {}: sleeping for {} ms", + requestIdStr, + requestInfoScrubbed, + backoffInMilli); Thread.sleep(backoffInMilli); elapsedMilliForTransientIssues += backoffInMilli; if (isLoginRequest) { @@ -455,7 +516,7 @@ public static CloseableHttpResponse execute( backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues); } } catch (InterruptedException ex1) { - logger.debug("Backoff sleep before retrying login got interrupted", false); + logger.debug("{}Backoff sleep before retrying login got interrupted", requestIdStr); } } @@ -504,15 +565,18 @@ public static CloseableHttpResponse execute( if (response == null) { if (savedEx != null) { logger.error( - "Returning null 
response: cause: {}, request: {}", + "{}Returning null response. Cause: {}, request: {}", + requestIdStr, getRootCause(savedEx), requestInfoScrubbed); } else { - logger.error("Returning null response for request: {}", requestInfoScrubbed); + logger.error( + "{}Returning null response for request: {}", requestIdStr, requestInfoScrubbed); } } else if (response.getStatusLine().getStatusCode() != 200) { logger.error( - "Error response: HTTP Response code: {}, request: {}", + "{}Error response: HTTP Response code: {}, request: {}", + requestIdStr, response.getStatusLine().getStatusCode(), requestInfoScrubbed); } @@ -554,6 +618,15 @@ public static CloseableHttpResponse execute( } } + if (logger.isDebugEnabled() && stopwatch != null) { + stopwatch.stop(); + } + logger.debug( + "{}Execution of request {} took {} ms with total of {} retries", + requestIdStr, + requestInfoScrubbed, + stopwatch == null ? "n/a" : stopwatch.elapsedMillis(), + retryCount); return response; } diff --git a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java index c50bf4900..0bafbf12d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java @@ -21,11 +21,15 @@ import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFSession; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.SqlState; /** SFAsyncResultSet implementation. 
Note: For Snowflake internal use */ public class SFAsyncResultSet extends SnowflakeBaseResultSet implements SnowflakeResultSet, ResultSet { + private static final SFLogger logger = SFLoggerFactory.getLogger(SFAsyncResultSet.class); + private ResultSet resultSetForNext = new SnowflakeResultSetV1.EmptyResultSet(); private boolean resultSetForNextInitialized = false; private String queryID; @@ -367,7 +371,7 @@ public boolean isBeforeFirst() throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -375,7 +379,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index 15c819479..d191b646c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -52,7 +52,7 @@ /** Base class for query result set and metadata result set */ public abstract class SnowflakeBaseResultSet implements ResultSet { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBaseResultSet.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBaseResultSet.class); private final int resultSetType; private final int resultSetConcurrency; private final int resultSetHoldability; @@ -150,7 +150,7 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException { @Override public InputStream getAsciiStream(int columnIndex) throws SQLException { - 
logger.debug("public InputStream getAsciiStream(int columnIndex)", false); + logger.trace("InputStream getAsciiStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -160,33 +160,33 @@ public InputStream getAsciiStream(int columnIndex) throws SQLException { @Deprecated @Override public InputStream getUnicodeStream(int columnIndex) throws SQLException { - logger.debug("public InputStream getUnicodeStream(int columnIndex)", false); + logger.trace("InputStream getUnicodeStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public InputStream getBinaryStream(int columnIndex) throws SQLException { - logger.debug("public InputStream getBinaryStream(int columnIndex)", false); + logger.trace("InputStream getBinaryStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public String getString(String columnLabel) throws SQLException { - logger.debug("public String getString(String columnLabel)", false); + logger.trace("String getString(String columnLabel)", false); return getString(findColumn(columnLabel)); } @Override public boolean getBoolean(String columnLabel) throws SQLException { - logger.debug("public boolean getBoolean(String columnLabel)", false); + logger.trace("boolean getBoolean(String columnLabel)", false); return getBoolean(findColumn(columnLabel)); } @Override public byte getByte(String columnLabel) throws SQLException { - logger.debug("public byte getByte(String columnLabel)", false); + logger.trace("byte getByte(String columnLabel)", false); raiseSQLExceptionIfResultSetIsClosed(); return getByte(findColumn(columnLabel)); @@ -194,35 +194,35 @@ public byte getByte(String columnLabel) throws SQLException { @Override public short getShort(String columnLabel) throws SQLException { - logger.debug("public short getShort(String columnLabel)", false); + logger.trace("short getShort(String columnLabel)", false); 
return getShort(findColumn(columnLabel)); } @Override public int getInt(String columnLabel) throws SQLException { - logger.debug("public int getInt(String columnLabel)", false); + logger.trace("int getInt(String columnLabel)", false); return getInt(findColumn(columnLabel)); } @Override public long getLong(String columnLabel) throws SQLException { - logger.debug("public long getLong(String columnLabel)", false); + logger.trace("long getLong(String columnLabel)", false); return getLong(findColumn(columnLabel)); } @Override public float getFloat(String columnLabel) throws SQLException { - logger.debug("public float getFloat(String columnLabel)", false); + logger.trace("float getFloat(String columnLabel)", false); return getFloat(findColumn(columnLabel)); } @Override public double getDouble(String columnLabel) throws SQLException { - logger.debug("public double getDouble(String columnLabel)", false); + logger.trace("double getDouble(String columnLabel)", false); return getDouble(findColumn(columnLabel)); } @@ -233,42 +233,42 @@ public double getDouble(String columnLabel) throws SQLException { @Deprecated @Override public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(String columnLabel, " + "int scale)", false); + logger.trace("BigDecimal getBigDecimal(String columnLabel, " + "int scale)", false); return getBigDecimal(findColumn(columnLabel), scale); } @Override public byte[] getBytes(String columnLabel) throws SQLException { - logger.debug("public byte[] getBytes(String columnLabel)", false); + logger.trace("byte[] getBytes(String columnLabel)", false); return getBytes(findColumn(columnLabel)); } @Override public Date getDate(String columnLabel) throws SQLException { - logger.debug("public Date getDate(String columnLabel)", false); + logger.trace("Date getDate(String columnLabel)", false); return getDate(findColumn(columnLabel)); } @Override public Time getTime(String columnLabel) throws 
SQLException { - logger.debug("public Time getTime(String columnLabel)", false); + logger.trace("Time getTime(String columnLabel)", false); return getTime(findColumn(columnLabel)); } @Override public Timestamp getTimestamp(String columnLabel) throws SQLException { - logger.debug("public Timestamp getTimestamp(String columnLabel)", false); + logger.trace("Timestamp getTimestamp(String columnLabel)", false); return getTimestamp(findColumn(columnLabel)); } @Override public InputStream getAsciiStream(String columnLabel) throws SQLException { - logger.debug("public InputStream getAsciiStream(String columnLabel)", false); + logger.trace("InputStream getAsciiStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -278,55 +278,55 @@ public InputStream getAsciiStream(String columnLabel) throws SQLException { @Deprecated @Override public InputStream getUnicodeStream(String columnLabel) throws SQLException { - logger.debug("public InputStream getUnicodeStream(String columnLabel)", false); + logger.trace("InputStream getUnicodeStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public InputStream getBinaryStream(String columnLabel) throws SQLException { - logger.debug("public InputStream getBinaryStream(String columnLabel)", false); + logger.trace("InputStream getBinaryStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public SQLWarning getWarnings() throws SQLException { - logger.debug("public SQLWarning getWarnings()", false); + logger.trace("SQLWarning getWarnings()", false); raiseSQLExceptionIfResultSetIsClosed(); return null; } @Override public void clearWarnings() throws SQLException { - logger.debug("public void clearWarnings()", false); + logger.trace("void clearWarnings()", false); raiseSQLExceptionIfResultSetIsClosed(); } @Override public String getCursorName() throws SQLException { - 
logger.debug("public String getCursorName()", false); + logger.trace("String getCursorName()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public ResultSetMetaData getMetaData() throws SQLException { - logger.debug("public ResultSetMetaData getMetaData()", false); + logger.trace("ResultSetMetaData getMetaData()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetMetaData; } @Override public Object getObject(String columnLabel) throws SQLException { - logger.debug("public Object getObject(String columnLabel)", false); + logger.trace("Object getObject(String columnLabel)", false); return getObject(findColumn(columnLabel)); } @Override public int findColumn(String columnLabel) throws SQLException { - logger.debug("public int findColumn(String columnLabel)", false); + logger.trace("int findColumn(String columnLabel)", false); raiseSQLExceptionIfResultSetIsClosed(); int columnIndex = resultSetMetaData.getColumnIndex(columnLabel); @@ -340,7 +340,7 @@ public int findColumn(String columnLabel) throws SQLException { @Override public Reader getCharacterStream(int columnIndex) throws SQLException { - logger.debug("public Reader getCharacterStream(int columnIndex)", false); + logger.trace("Reader getCharacterStream(int columnIndex)", false); raiseSQLExceptionIfResultSetIsClosed(); String streamData = getString(columnIndex); return (streamData == null) ? 
null : new StringReader(streamData); @@ -348,76 +348,76 @@ public Reader getCharacterStream(int columnIndex) throws SQLException { @Override public Reader getCharacterStream(String columnLabel) throws SQLException { - logger.debug("public Reader getCharacterStream(String columnLabel)", false); + logger.trace("Reader getCharacterStream(String columnLabel)", false); return getCharacterStream(findColumn(columnLabel)); } @Override public BigDecimal getBigDecimal(String columnLabel) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(String columnLabel)", false); + logger.trace("BigDecimal getBigDecimal(String columnLabel)", false); return getBigDecimal(findColumn(columnLabel)); } @Override public void beforeFirst() throws SQLException { - logger.debug("public void beforeFirst()", false); + logger.trace("void beforeFirst()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void afterLast() throws SQLException { - logger.debug("public void afterLast()", false); + logger.trace("void afterLast()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean first() throws SQLException { - logger.debug("public boolean first()", false); + logger.trace("boolean first()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean last() throws SQLException { - logger.debug("public boolean last()", false); + logger.trace("boolean last()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean absolute(int row) throws SQLException { - logger.debug("public boolean absolute(int row)", false); + logger.trace("boolean absolute(int row)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean relative(int rows) throws SQLException { - logger.debug("public boolean relative(int rows)", false); + logger.trace("boolean relative(int rows)", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean previous() throws SQLException { - logger.debug("public boolean previous()", false); + logger.trace("boolean previous()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public int getFetchDirection() throws SQLException { - logger.debug("public int getFetchDirection()", false); + logger.trace("int getFetchDirection()", false); raiseSQLExceptionIfResultSetIsClosed(); return ResultSet.FETCH_FORWARD; } @Override public void setFetchDirection(int direction) throws SQLException { - logger.debug("public void setFetchDirection(int direction)", false); + logger.trace("void setFetchDirection(int direction)", false); raiseSQLExceptionIfResultSetIsClosed(); if (direction != ResultSet.FETCH_FORWARD) { @@ -427,14 +427,14 @@ public void setFetchDirection(int direction) throws SQLException { @Override public int getFetchSize() throws SQLException { - logger.debug("public int getFetchSize()", false); + logger.trace("int getFetchSize()", false); raiseSQLExceptionIfResultSetIsClosed(); return this.fetchSize; } @Override public void setFetchSize(int rows) throws SQLException { - logger.debug("public void setFetchSize(int rows)", false); + logger.trace("void setFetchSize(int rows)", false); raiseSQLExceptionIfResultSetIsClosed(); this.fetchSize = rows; @@ -442,140 +442,140 @@ public void setFetchSize(int rows) throws SQLException { @Override public int getType() throws SQLException { - logger.debug("public int getType()", false); + logger.trace("int getType()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetType; } @Override public int getConcurrency() throws SQLException { - logger.debug("public int getConcurrency()", false); + logger.trace("int getConcurrency()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetConcurrency; } @Override public boolean rowUpdated() throws SQLException { - logger.debug("public boolean rowUpdated()", 
false); + logger.trace("boolean rowUpdated()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean rowInserted() throws SQLException { - logger.debug("public boolean rowInserted()", false); + logger.trace("boolean rowInserted()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public boolean rowDeleted() throws SQLException { - logger.debug("public boolean rowDeleted()", false); + logger.trace("boolean rowDeleted()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNull(int columnIndex) throws SQLException { - logger.debug("public void updateNull(int columnIndex)", false); + logger.trace("void updateNull(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBoolean(int columnIndex, boolean x) throws SQLException { - logger.debug("public void updateBoolean(int columnIndex, boolean x)", false); + logger.trace("void updateBoolean(int columnIndex, boolean x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateByte(int columnIndex, byte x) throws SQLException { - logger.debug("public void updateByte(int columnIndex, byte x)", false); + logger.trace("void updateByte(int columnIndex, byte x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateShort(int columnIndex, short x) throws SQLException { - logger.debug("public void updateShort(int columnIndex, short x)", false); + logger.trace("void updateShort(int columnIndex, short x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateInt(int columnIndex, int x) throws SQLException { - logger.debug("public void updateInt(int columnIndex, int x)", false); + logger.trace("void updateInt(int columnIndex, int x)", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateLong(int columnIndex, long x) throws SQLException { - logger.debug("public void updateLong(int columnIndex, long x)", false); + logger.trace("void updateLong(int columnIndex, long x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateFloat(int columnIndex, float x) throws SQLException { - logger.debug("public void updateFloat(int columnIndex, float x)", false); + logger.trace("void updateFloat(int columnIndex, float x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateDouble(int columnIndex, double x) throws SQLException { - logger.debug("public void updateDouble(int columnIndex, double x)", false); + logger.trace("void updateDouble(int columnIndex, double x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { - logger.debug("public void updateBigDecimal(int columnIndex, BigDecimal x)", false); + logger.trace("void updateBigDecimal(int columnIndex, BigDecimal x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateString(int columnIndex, String x) throws SQLException { - logger.debug("public void updateString(int columnIndex, String x)", false); + logger.trace("void updateString(int columnIndex, String x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBytes(int columnIndex, byte[] x) throws SQLException { - logger.debug("public void updateBytes(int columnIndex, byte[] x)", false); + logger.trace("void updateBytes(int columnIndex, byte[] x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateDate(int columnIndex, Date x) throws SQLException { - logger.debug("public void updateDate(int columnIndex, Date x)", 
false); + logger.trace("void updateDate(int columnIndex, Date x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTime(int columnIndex, Time x) throws SQLException { - logger.debug("public void updateTime(int columnIndex, Time x)", false); + logger.trace("void updateTime(int columnIndex, Time x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { - logger.debug("public void updateTimestamp(int columnIndex, Timestamp x)", false); + logger.trace("void updateTimestamp(int columnIndex, Timestamp x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(int columnIndex, " + "InputStream x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -583,7 +583,7 @@ public void updateAsciiStream(int columnIndex, InputStream x, int length) throws @Override public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(int columnIndex, " + "InputStream x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -591,7 +591,7 @@ public void updateBinaryStream(int columnIndex, InputStream x, int length) throw @Override public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(int columnIndex, " + "Reader x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -599,7 +599,7 @@ public void updateCharacterStream(int columnIndex, Reader x, int length) throws @Override public void updateObject(int columnIndex, Object x, int 
scaleOrLength) throws SQLException { - logger.debug( + logger.trace( "public void updateObject(int columnIndex, Object x, " + "int scaleOrLength)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -607,112 +607,112 @@ public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQ @Override public void updateObject(int columnIndex, Object x) throws SQLException { - logger.debug("public void updateObject(int columnIndex, Object x)", false); + logger.trace("void updateObject(int columnIndex, Object x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNull(String columnLabel) throws SQLException { - logger.debug("public void updateNull(String columnLabel)", false); + logger.trace("void updateNull(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBoolean(String columnLabel, boolean x) throws SQLException { - logger.debug("public void updateBoolean(String columnLabel, boolean x)", false); + logger.trace("void updateBoolean(String columnLabel, boolean x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateByte(String columnLabel, byte x) throws SQLException { - logger.debug("public void updateByte(String columnLabel, byte x)", false); + logger.trace("void updateByte(String columnLabel, byte x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateShort(String columnLabel, short x) throws SQLException { - logger.debug("public void updateShort(String columnLabel, short x)", false); + logger.trace("void updateShort(String columnLabel, short x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateInt(String columnLabel, int x) throws SQLException { - logger.debug("public void updateInt(String columnLabel, int x)", false); + logger.trace("void 
updateInt(String columnLabel, int x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateLong(String columnLabel, long x) throws SQLException { - logger.debug("public void updateLong(String columnLabel, long x)", false); + logger.trace("void updateLong(String columnLabel, long x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateFloat(String columnLabel, float x) throws SQLException { - logger.debug("public void updateFloat(String columnLabel, float x)", false); + logger.trace("void updateFloat(String columnLabel, float x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateDouble(String columnLabel, double x) throws SQLException { - logger.debug("public void updateDouble(String columnLabel, double x)", false); + logger.trace("void updateDouble(String columnLabel, double x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { - logger.debug("public void updateBigDecimal(String columnLabel, " + "BigDecimal x)", false); + logger.trace("void updateBigDecimal(String columnLabel, " + "BigDecimal x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateString(String columnLabel, String x) throws SQLException { - logger.debug("public void updateString(String columnLabel, String x)", false); + logger.trace("void updateString(String columnLabel, String x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBytes(String columnLabel, byte[] x) throws SQLException { - logger.debug("public void updateBytes(String columnLabel, byte[] x)", false); + logger.trace("void updateBytes(String columnLabel, byte[] x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void 
updateDate(String columnLabel, Date x) throws SQLException { - logger.debug("public void updateDate(String columnLabel, Date x)", false); + logger.trace("void updateDate(String columnLabel, Date x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTime(String columnLabel, Time x) throws SQLException { - logger.debug("public void updateTime(String columnLabel, Time x)", false); + logger.trace("void updateTime(String columnLabel, Time x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { - logger.debug("public void updateTimestamp(String columnLabel, Timestamp x)", false); + logger.trace("void updateTimestamp(String columnLabel, Timestamp x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(String columnLabel, " + "InputStream x, int length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -721,7 +721,7 @@ public void updateAsciiStream(String columnLabel, InputStream x, int length) thr @Override public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(String columnLabel, " + "InputStream x, int length)", false); @@ -731,7 +731,7 @@ public void updateBinaryStream(String columnLabel, InputStream x, int length) @Override public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(String columnLabel, " + "Reader reader,int length)", false); @@ -740,7 +740,7 @@ public void updateCharacterStream(String columnLabel, Reader reader, int length) @Override public void 
updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { - logger.debug( + logger.trace( "public void updateObject(String columnLabel, Object x, " + "int scaleOrLength)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -748,91 +748,91 @@ public void updateObject(String columnLabel, Object x, int scaleOrLength) throws @Override public void updateObject(String columnLabel, Object x) throws SQLException { - logger.debug("public void updateObject(String columnLabel, Object x)", false); + logger.trace("void updateObject(String columnLabel, Object x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void insertRow() throws SQLException { - logger.debug("public void insertRow()", false); + logger.trace("void insertRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRow() throws SQLException { - logger.debug("public void updateRow()", false); + logger.trace("void updateRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void deleteRow() throws SQLException { - logger.debug("public void deleteRow()", false); + logger.trace("void deleteRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void refreshRow() throws SQLException { - logger.debug("public void refreshRow()", false); + logger.trace("void refreshRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void cancelRowUpdates() throws SQLException { - logger.debug("public void cancelRowUpdates()", false); + logger.trace("void cancelRowUpdates()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void moveToInsertRow() throws SQLException { - logger.debug("public void moveToInsertRow()", false); + logger.trace("void moveToInsertRow()", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void moveToCurrentRow() throws SQLException { - logger.debug("public void moveToCurrentRow()", false); + logger.trace("void moveToCurrentRow()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Statement getStatement() throws SQLException { - logger.debug("public Statement getStatement()", false); + logger.trace("Statement getStatement()", false); raiseSQLExceptionIfResultSetIsClosed(); return statement; } @Override public Object getObject(int columnIndex, Map> map) throws SQLException { - logger.debug("public Object getObject(int columnIndex, Map> map)", false); + logger.trace("Object getObject(int columnIndex, Map> map)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Ref getRef(int columnIndex) throws SQLException { - logger.debug("public Ref getRef(int columnIndex)", false); + logger.trace("Ref getRef(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Blob getBlob(int columnIndex) throws SQLException { - logger.debug("public Blob getBlob(int columnIndex)", false); + logger.trace("Blob getBlob(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Clob getClob(int columnIndex) throws SQLException { - logger.debug("public Clob getClob(int columnIndex)", false); + logger.trace("Clob getClob(int columnIndex)", false); String columnValue = getString(columnIndex); return columnValue == null ? 
null : new SnowflakeClob(columnValue); @@ -840,14 +840,14 @@ public Clob getClob(int columnIndex) throws SQLException { @Override public Array getArray(int columnIndex) throws SQLException { - logger.debug("public Array getArray(int columnIndex)", false); + logger.trace("Array getArray(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Object getObject(String columnLabel, Map> map) throws SQLException { - logger.debug( + logger.trace( "public Object getObject(String columnLabel, " + "Map> map)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -855,21 +855,21 @@ public Object getObject(String columnLabel, Map> map) throws SQ @Override public Ref getRef(String columnLabel) throws SQLException { - logger.debug("public Ref getRef(String columnLabel)", false); + logger.trace("Ref getRef(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Blob getBlob(String columnLabel) throws SQLException { - logger.debug("public Blob getBlob(String columnLabel)", false); + logger.trace("Blob getBlob(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Clob getClob(String columnLabel) throws SQLException { - logger.debug("public Clob getClob(String columnLabel)", false); + logger.trace("Clob getClob(String columnLabel)", false); String columnValue = getString(columnLabel); return columnValue == null ? 
null : new SnowflakeClob(columnValue); @@ -877,258 +877,258 @@ public Clob getClob(String columnLabel) throws SQLException { @Override public Array getArray(String columnLabel) throws SQLException { - logger.debug("public Array getArray(String columnLabel)", false); + logger.trace("Array getArray(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { - logger.debug("public Date getDate(int columnIndex, Calendar cal)", false); + logger.trace("Date getDate(int columnIndex, Calendar cal)", false); return getDate(columnIndex, cal.getTimeZone()); } @Override public Date getDate(String columnLabel, Calendar cal) throws SQLException { - logger.debug("public Date getDate(String columnLabel, Calendar cal)", false); + logger.trace("Date getDate(String columnLabel, Calendar cal)", false); return getDate(findColumn(columnLabel), cal.getTimeZone()); } @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { - logger.debug("public Time getTime(int columnIndex, Calendar cal)", false); + logger.trace("Time getTime(int columnIndex, Calendar cal)", false); return getTime(columnIndex); } @Override public Time getTime(String columnLabel, Calendar cal) throws SQLException { - logger.debug("public Time getTime(String columnLabel, Calendar cal)", false); + logger.trace("Time getTime(String columnLabel, Calendar cal)", false); return getTime(columnLabel); } @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - logger.debug("public Timestamp getTimestamp(int columnIndex, Calendar cal)", false); + logger.trace("Timestamp getTimestamp(int columnIndex, Calendar cal)", false); return getTimestamp(columnIndex, cal.getTimeZone()); } @Override public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { - logger.debug("public Timestamp getTimestamp(String columnLabel, " + "Calendar 
cal)", false); + logger.trace("Timestamp getTimestamp(String columnLabel, " + "Calendar cal)", false); return getTimestamp(findColumn(columnLabel), cal.getTimeZone()); } @Override public URL getURL(int columnIndex) throws SQLException { - logger.debug("public URL getURL(int columnIndex)", false); + logger.trace("URL getURL(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public URL getURL(String columnLabel) throws SQLException { - logger.debug("public URL getURL(String columnLabel)", false); + logger.trace("URL getURL(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRef(int columnIndex, Ref x) throws SQLException { - logger.debug("public void updateRef(int columnIndex, Ref x)", false); + logger.trace("void updateRef(int columnIndex, Ref x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRef(String columnLabel, Ref x) throws SQLException { - logger.debug("public void updateRef(String columnLabel, Ref x)", false); + logger.trace("void updateRef(String columnLabel, Ref x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBlob(int columnIndex, Blob x) throws SQLException { - logger.debug("public void updateBlob(int columnIndex, Blob x)", false); + logger.trace("void updateBlob(int columnIndex, Blob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBlob(String columnLabel, Blob x) throws SQLException { - logger.debug("public void updateBlob(String columnLabel, Blob x)", false); + logger.trace("void updateBlob(String columnLabel, Blob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(int columnIndex, Clob x) throws SQLException { - logger.debug("public void updateClob(int columnIndex, Clob x)", false); + 
logger.trace("void updateClob(int columnIndex, Clob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(String columnLabel, Clob x) throws SQLException { - logger.debug("public void updateClob(String columnLabel, Clob x)", false); + logger.trace("void updateClob(String columnLabel, Clob x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateArray(int columnIndex, Array x) throws SQLException { - logger.debug("public void updateArray(int columnIndex, Array x)", false); + logger.trace("void updateArray(int columnIndex, Array x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateArray(String columnLabel, Array x) throws SQLException { - logger.debug("public void updateArray(String columnLabel, Array x)", false); + logger.trace("void updateArray(String columnLabel, Array x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public RowId getRowId(int columnIndex) throws SQLException { - logger.debug("public RowId getRowId(int columnIndex)", false); + logger.trace("RowId getRowId(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public RowId getRowId(String columnLabel) throws SQLException { - logger.debug("public RowId getRowId(String columnLabel)", false); + logger.trace("RowId getRowId(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRowId(int columnIndex, RowId x) throws SQLException { - logger.debug("public void updateRowId(int columnIndex, RowId x)", false); + logger.trace("void updateRowId(int columnIndex, RowId x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateRowId(String columnLabel, RowId x) throws SQLException { - logger.debug("public void updateRowId(String columnLabel, RowId x)", 
false); + logger.trace("void updateRowId(String columnLabel, RowId x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public int getHoldability() throws SQLException { - logger.debug("public int getHoldability()", false); + logger.trace("int getHoldability()", false); raiseSQLExceptionIfResultSetIsClosed(); return resultSetHoldability; } @Override public void updateNString(int columnIndex, String nString) throws SQLException { - logger.debug("public void updateNString(int columnIndex, String nString)", false); + logger.trace("void updateNString(int columnIndex, String nString)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNString(String columnLabel, String nString) throws SQLException { - logger.debug("public void updateNString(String columnLabel, String nString)", false); + logger.trace("void updateNString(String columnLabel, String nString)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(int columnIndex, NClob nClob) throws SQLException { - logger.debug("public void updateNClob(int columnIndex, NClob nClob)", false); + logger.trace("void updateNClob(int columnIndex, NClob nClob)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(String columnLabel, NClob nClob) throws SQLException { - logger.debug("public void updateNClob(String columnLabel, NClob nClob)", false); + logger.trace("void updateNClob(String columnLabel, NClob nClob)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public NClob getNClob(int columnIndex) throws SQLException { - logger.debug("public NClob getNClob(int columnIndex)", false); + logger.trace("NClob getNClob(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public NClob getNClob(String columnLabel) throws SQLException { - 
logger.debug("public NClob getNClob(String columnLabel)", false); + logger.trace("NClob getNClob(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public SQLXML getSQLXML(int columnIndex) throws SQLException { - logger.debug("public SQLXML getSQLXML(int columnIndex)", false); + logger.trace("SQLXML getSQLXML(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public SQLXML getSQLXML(String columnLabel) throws SQLException { - logger.debug("public SQLXML getSQLXML(String columnLabel)", false); + logger.trace("SQLXML getSQLXML(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { - logger.debug("public void updateSQLXML(int columnIndex, SQLXML xmlObject)", false); + logger.trace("void updateSQLXML(int columnIndex, SQLXML xmlObject)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { - logger.debug("public void updateSQLXML(String columnLabel, SQLXML xmlObject)", false); + logger.trace("void updateSQLXML(String columnLabel, SQLXML xmlObject)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public String getNString(int columnIndex) throws SQLException { - logger.debug("public String getNString(int columnIndex)", false); + logger.trace("String getNString(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public String getNString(String columnLabel) throws SQLException { - logger.debug("public String getNString(String columnLabel)", false); + logger.trace("String getNString(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Reader getNCharacterStream(int columnIndex) 
throws SQLException { - logger.debug("public Reader getNCharacterStream(int columnIndex)", false); + logger.trace("Reader getNCharacterStream(int columnIndex)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public Reader getNCharacterStream(String columnLabel) throws SQLException { - logger.debug("public Reader getNCharacterStream(String columnLabel)", false); + logger.trace("Reader getNCharacterStream(String columnLabel)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNCharacterStream(int columnIndex, " + "Reader x, long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1137,7 +1137,7 @@ public void updateNCharacterStream(int columnIndex, Reader x, long length) throw @Override public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNCharacterStream(String columnLabel, " + "Reader reader,long length)", false); @@ -1146,7 +1146,7 @@ public void updateNCharacterStream(String columnLabel, Reader reader, long lengt @Override public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(int columnIndex, " + "InputStream x, long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1154,7 +1154,7 @@ public void updateAsciiStream(int columnIndex, InputStream x, long length) throw @Override public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(int columnIndex, " + "InputStream x, long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1162,7 +1162,7 @@ 
public void updateBinaryStream(int columnIndex, InputStream x, long length) thro @Override public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(int columnIndex, Reader x, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1171,7 +1171,7 @@ public void updateCharacterStream(int columnIndex, Reader x, long length) throws @Override public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateAsciiStream(String columnLabel, " + "InputStream x, long length)", false); @@ -1181,7 +1181,7 @@ public void updateAsciiStream(String columnLabel, InputStream x, long length) @Override public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateBinaryStream(String columnLabel, " + "InputStream x, long length)", false); @@ -1191,7 +1191,7 @@ public void updateBinaryStream(String columnLabel, InputStream x, long length) @Override public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(String columnLabel, " + "Reader reader,long length)", false); @@ -1201,7 +1201,7 @@ public void updateCharacterStream(String columnLabel, Reader reader, long length @Override public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateBlob(int columnIndex, InputStream " + "inputStream, long length)", false); @@ -1211,7 +1211,7 @@ public void updateBlob(int columnIndex, InputStream inputStream, long length) @Override public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { - logger.debug( + logger.trace( "public void 
updateBlob(String columnLabel, " + "InputStream inputStream,long length)", false); @@ -1220,14 +1220,14 @@ public void updateBlob(String columnLabel, InputStream inputStream, long length) @Override public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { - logger.debug("public void updateClob(int columnIndex, Reader reader, " + "long length)", false); + logger.trace("void updateClob(int columnIndex, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateClob(String columnLabel, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1235,7 +1235,7 @@ public void updateClob(String columnLabel, Reader reader, long length) throws SQ @Override public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNClob(int columnIndex, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1243,7 +1243,7 @@ public void updateNClob(int columnIndex, Reader reader, long length) throws SQLE @Override public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { - logger.debug( + logger.trace( "public void updateNClob(String columnLabel, Reader reader, " + "long length)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1251,14 +1251,14 @@ public void updateNClob(String columnLabel, Reader reader, long length) throws S @Override public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { - logger.debug("public void updateNCharacterStream(int columnIndex, Reader x)", false); + logger.trace("void updateNCharacterStream(int columnIndex, Reader x)", false); throw new 
SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { - logger.debug( + logger.trace( "public void updateNCharacterStream(String columnLabel, " + "Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1266,42 +1266,42 @@ public void updateNCharacterStream(String columnLabel, Reader reader) throws SQL @Override public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { - logger.debug("public void updateAsciiStream(int columnIndex, InputStream x)", false); + logger.trace("void updateAsciiStream(int columnIndex, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { - logger.debug("public void updateBinaryStream(int columnIndex, InputStream x)", false); + logger.trace("void updateBinaryStream(int columnIndex, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { - logger.debug("public void updateCharacterStream(int columnIndex, Reader x)", false); + logger.trace("void updateCharacterStream(int columnIndex, Reader x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { - logger.debug("public void updateAsciiStream(String columnLabel, InputStream x)", false); + logger.trace("void updateAsciiStream(String columnLabel, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { - logger.debug("public void updateBinaryStream(String columnLabel, InputStream x)", false); + logger.trace("void 
updateBinaryStream(String columnLabel, InputStream x)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { - logger.debug( + logger.trace( "public void updateCharacterStream(String columnLabel, " + "Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); @@ -1309,49 +1309,49 @@ public void updateCharacterStream(String columnLabel, Reader reader) throws SQLE @Override public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { - logger.debug("public void updateBlob(int columnIndex, InputStream inputStream)", false); + logger.trace("void updateBlob(int columnIndex, InputStream inputStream)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { - logger.debug("public void updateBlob(String columnLabel, InputStream " + "inputStream)", false); + logger.trace("void updateBlob(String columnLabel, InputStream " + "inputStream)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(int columnIndex, Reader reader) throws SQLException { - logger.debug("public void updateClob(int columnIndex, Reader reader)", false); + logger.trace("void updateClob(int columnIndex, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateClob(String columnLabel, Reader reader) throws SQLException { - logger.debug("public void updateClob(String columnLabel, Reader reader)", false); + logger.trace("void updateClob(String columnLabel, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(int columnIndex, Reader reader) throws SQLException { - logger.debug("public void updateNClob(int columnIndex, Reader reader)", 
false); + logger.trace("void updateNClob(int columnIndex, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public void updateNClob(String columnLabel, Reader reader) throws SQLException { - logger.debug("public void updateNClob(String columnLabel, Reader reader)", false); + logger.trace("void updateNClob(String columnLabel, Reader reader)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public T getObject(int columnIndex, Class type) throws SQLException { - logger.debug("public T getObject(int columnIndex,Class type)", false); + logger.trace(" T getObject(int columnIndex,Class type)", false); if (resultSetMetaData.isStructuredTypeColumn(columnIndex)) { if (SQLData.class.isAssignableFrom(type)) { SQLInput sqlInput = @@ -1416,7 +1416,7 @@ public T getObject(int columnIndex, Class type) throws SQLException { } public List getList(int columnIndex, Class type) throws SQLException { - logger.debug("public List getList(int columnIndex, Class type)", false); + logger.trace(" List getList(int columnIndex, Class type)", false); if (!resultSetMetaData.isStructuredTypeColumn(columnIndex)) { throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -1425,7 +1425,7 @@ public List getList(int columnIndex, Class type) throws SQLException { } public T[] getArray(int columnIndex, Class type) throws SQLException { - logger.debug("public T[] getArray(int columnIndex, Class type)", false); + logger.trace(" T[] getArray(int columnIndex, Class type)", false); if (!resultSetMetaData.isStructuredTypeColumn(columnIndex)) { throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -1578,7 +1578,7 @@ public T[] getArray(int columnIndex, Class type) throws SQLException { } public Map getMap(int columnIndex, Class type) throws SQLException { - logger.debug("public Map getMap(int columnIndex, Class type)", false); + logger.trace(" Map getMap(int columnIndex, Class type)", false); if 
(!resultSetMetaData.isStructuredTypeColumn(columnIndex)) { throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -1742,14 +1742,14 @@ public Map getMap(int columnIndex, Class type) throws SQLExcep @Override public T getObject(String columnLabel, Class type) throws SQLException { - logger.debug("public T getObject(String columnLabel,Class type)", false); + logger.trace(" T getObject(String columnLabel,Class type)", false); return getObject(findColumn(columnLabel), type); } @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -1760,7 +1760,7 @@ public T unwrap(Class iface) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java index f1281d593..074e3f878 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java @@ -36,7 +36,7 @@ public class SnowflakeBasicDataSource implements DataSource, Serializable { private Properties properties = new Properties(); - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBasicDataSource.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeBasicDataSource.class); static { try { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java index 3c2386b78..930e70039 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeCallableStatementV1.java @@ -18,9 +18,13 @@ import java.sql.Timestamp; import java.util.Calendar; import java.util.Map; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; final class SnowflakeCallableStatementV1 extends SnowflakePreparedStatementV1 implements CallableStatement, SnowflakeCallableStatement { + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeCallableStatementV1.class); /** * Construct SnowflakePreparedStatementV1 diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java index ff9eb9003..8f29f5702 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeChunkDownloader.java @@ -124,6 +124,10 @@ public class SnowflakeChunkDownloader implements ChunkDownloader { /** Arrow memory allocator for the current resultSet */ private RootAllocator rootAllocator; + private final String queryId; + + private final int firstChunkRowCount; + static long getCurrentMemoryUsage() { synchronized (currentMemoryUsage) { return currentMemoryUsage.longValue(); @@ -191,7 +195,7 @@ public Thread newThread(final Runnable r) { thread.setUncaughtExceptionHandler( new Thread.UncaughtExceptionHandler() { public void uncaughtException(Thread t, Throwable e) { - logger.error("uncaughtException in thread: " + t + " {}", e); + logger.error("Uncaught Exception in thread {}: {}", t, e); } }); @@ -211,6 +215,8 @@ public void uncaughtException(Thread t, Throwable e) { */ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerializable) throws SnowflakeSQLException { + this.queryId = resultSetSerializable.getQueryId(); + this.firstChunkRowCount = resultSetSerializable.getFirstChunkRowCount(); 
this.snowflakeConnectionString = resultSetSerializable.getSnowflakeConnectString(); this.ocspMode = resultSetSerializable.getOCSPMode(); this.ocspModeAndProxyKey = resultSetSerializable.getHttpClientKey(); @@ -221,7 +227,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial this.maxHttpRetries = resultSetSerializable.getMaxHttpRetries(); this.prefetchSlots = resultSetSerializable.getResultPrefetchThreads() * 2; this.queryResultFormat = resultSetSerializable.getQueryResultFormat(); - logger.debug("qrmk = {}", this.qrmk); + logger.debug("qrmk: {}", this.qrmk); this.chunkHeadersMap = resultSetSerializable.getChunkHeadersMap(); // session may be null. Its only use is for in-band telemetry in this class this.session = @@ -288,8 +294,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial } logger.debug( - "add chunk, url={} rowCount={} uncompressedSize={} " - + "neededChunkMemory={}, chunkResultFormat={}", + "Add chunk: url: {} rowCount: {} uncompressedSize: {} neededChunkMemory: {}, chunkResultFormat: {}", chunk.getScrubbedUrl(), chunk.getRowCount(), chunk.getUncompressedSize(), @@ -305,7 +310,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial resultSetSerializable.getChunkFileCount()); logger.debug( - "#chunks: {} #threads:{} #slots:{} -> pool:{}", + "#chunks: {} #threads: {} #slots: {} -> pool: {}", resultSetSerializable.getChunkFileCount(), resultSetSerializable.getResultPrefetchThreads(), prefetchSlots, @@ -396,7 +401,7 @@ private void startNextDownloaders() throws SnowflakeSQLException { neededChunkMemory); logger.debug( - "submit chunk #{} for downloading, url={}", + "Submit chunk #{} for downloading, url: {}", this.nextChunkToDownload, nextChunk.getScrubbedUrl()); @@ -432,7 +437,9 @@ private void startNextDownloaders() throws SnowflakeSQLException { authTimeout, socketTimeout, maxHttpRetries, - this.session)); + this.session, + chunks.size(), + queryId)); 
downloaderFutures.put(nextChunkToDownload, downloaderFuture); // increment next chunk to download nextChunkToDownload++; @@ -442,7 +449,7 @@ private void startNextDownloaders() throws SnowflakeSQLException { continue; } else { // cancel the reserved memory - logger.debug("cancel the reserved memory.", false); + logger.debug("Cancel the reserved memory.", false); curMem = currentMemoryUsage.addAndGet(-neededChunkMemory); if (getPrefetchMemRetry > prefetchMaxRetry) { logger.debug( @@ -468,7 +475,7 @@ private void startNextDownloaders() throws SnowflakeSQLException { getPrefetchMemRetry++; if (logger.isDebugEnabled()) { logger.debug( - "Thread {} waiting for {}s: currentMemoryUsage in MB: {}, neededChunkMemory in MB:" + "Thread {} waiting for {} s: currentMemoryUsage in MB: {}, neededChunkMemory in MB:" + " {}, nextChunkToDownload: {}, nextChunkToConsume: {}, retry: {}", (ArgSupplier) () -> Thread.currentThread().getId(), waitingTime / 1000.0, @@ -507,7 +514,7 @@ private void releaseCurrentMemoryUsage(int chunkId, Optional optionalRelea // has to be before reusing the memory long curMem = currentMemoryUsage.addAndGet(-releaseSize); logger.debug( - "Thread {}: currentMemoryUsage in MB: {}, released in MB: {}, " + "Thread {} - currentMemoryUsage in MB: {}, released in MB: {}, " + "chunk: {}, optionalReleaseSize: {}, JVMFreeMem: {}", (ArgSupplier) () -> Thread.currentThread().getId(), (ArgSupplier) () -> curMem / MB, @@ -549,7 +556,7 @@ public SnowflakeResultChunk getNextChunkToConsume() int prevChunk = this.nextChunkToConsume - 1; // free the chunk data for previous chunk - logger.debug("free chunk data for chunk #{}", prevChunk); + logger.debug("Free chunk data for chunk #{}", prevChunk); long chunkMemUsage = chunks.get(prevChunk).computeNeededChunkMemory(); @@ -573,7 +580,7 @@ public SnowflakeResultChunk getNextChunkToConsume() // if no more chunks, return null if (this.nextChunkToConsume >= this.chunks.size()) { - logger.debug("no more chunk", false); + 
logger.debug("No more chunk", false); return null; } @@ -591,7 +598,7 @@ public SnowflakeResultChunk getNextChunkToConsume() SnowflakeResultChunk currentChunk = this.chunks.get(nextChunkToConsume); if (currentChunk.getDownloadState() == DownloadState.SUCCESS) { - logger.debug("chunk #{} is ready to consume", nextChunkToConsume); + logger.debug("Chunk #{} is ready to consume", nextChunkToConsume); nextChunkToConsume++; if (nextChunkToConsume == this.chunks.size()) { // make sure to release the last chunk @@ -602,15 +609,15 @@ public SnowflakeResultChunk getNextChunkToConsume() // the chunk we want to consume is not ready yet, wait for it currentChunk.getLock().lock(); try { - logger.debug("#chunk{} is not ready to consume", nextChunkToConsume); - logger.debug("consumer get lock to check chunk state", false); + logger.debug("Chunk#{} is not ready to consume", nextChunkToConsume); + logger.debug("Consumer get lock to check chunk state", false); waitForChunkReady(currentChunk); // downloader thread encountered an error if (currentChunk.getDownloadState() == DownloadState.FAILURE) { releaseAllChunkMemoryUsage(); - logger.error("downloader encountered error: {}", currentChunk.getDownloadError()); + logger.error("Downloader encountered error: {}", currentChunk.getDownloadError()); if (currentChunk .getDownloadError() @@ -625,14 +632,14 @@ public SnowflakeResultChunk getNextChunkToConsume() currentChunk.getDownloadError()); } - logger.debug("#chunk{} is ready to consume", nextChunkToConsume); + logger.debug("Chunk#{} is ready to consume", nextChunkToConsume); nextChunkToConsume++; // next chunk to consume is ready for consumption return currentChunk; } finally { - logger.debug("consumer free lock", false); + logger.debug("Consumer free lock", false); boolean terminateDownloader = (currentChunk.getDownloadState() == DownloadState.FAILURE); // release the unlock always @@ -662,7 +669,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru long 
startTime = System.currentTimeMillis(); while (true) { logger.debug( - "Thread {} is waiting for #chunk{} to be ready, current" + "chunk state is: {}, retry={}", + "Thread {} is waiting for chunk#{} to be ready, current chunk state is: {}, retry: {}", Thread.currentThread().getId(), nextChunkToConsume, currentChunk.getDownloadState(), @@ -677,8 +684,8 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru .await(downloadedConditionTimeoutInSeconds, TimeUnit.SECONDS)) { // if the current chunk has not condition change over the timeout (which is rare) logger.debug( - "Thread {} is timeout for waiting #chunk{} to be ready, current" - + " chunk state is: {}, retry={}, scrubbedUrl={}", + "Thread {} is timeout for waiting chunk#{} to be ready, current" + + " chunk state is: {}, retry: {}, scrubbedUrl: {}", Thread.currentThread().getId(), nextChunkToConsume, currentChunk.getDownloadState(), @@ -688,7 +695,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru currentChunk.setDownloadState(DownloadState.FAILURE); currentChunk.setDownloadError( String.format( - "Timeout waiting for the download of #chunk%d(Total chunks: %d) retry=%d scrubbedUrl=%s", + "Timeout waiting for the download of chunk#%d(Total chunks: %d) retry: %d scrubbedUrl: %s", nextChunkToConsume, this.chunks.size(), retry, currentChunk.getScrubbedUrl())); break; } @@ -699,7 +706,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru retry++; // timeout or failed logger.debug( - "Since downloadState is {} Thread {} decides to retry {} time(s) for #chunk{}", + "Since downloadState is {} Thread {} decides to retry {} time(s) for chunk#{}", currentChunk.getDownloadState(), Thread.currentThread().getId(), retry, @@ -733,7 +740,9 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru authTimeout, socketTimeout, maxHttpRetries, - session)); + session, + chunks.size(), + queryId)); 
downloaderFutures.put(nextChunkToConsume, downloaderFuture); // Only when prefetch fails due to internal memory limitation, nextChunkToDownload // equals nextChunkToConsume. In that case we need to increment nextChunkToDownload @@ -750,14 +759,14 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru } } if (currentChunk.getDownloadState() == DownloadState.SUCCESS) { - logger.debug("ready to consume #chunk{}, succeed retry={}", nextChunkToConsume, retry); + logger.debug("Ready to consume chunk#{}, succeed retry={}", nextChunkToConsume, retry); } else if (retry >= maxHttpRetries) { // stop retrying and report failure currentChunk.setDownloadState(DownloadState.FAILURE); currentChunk.setDownloadError( String.format( - "Max retry reached for the download of #chunk%d " - + "(Total chunks: %d) retry=%d, error=%s", + "Max retry reached for the download of chunk#%d " + + "(Total chunks: %d) retry: %d, error: %s", nextChunkToConsume, this.chunks.size(), retry, @@ -814,9 +823,8 @@ public DownloaderMetrics terminate() throws InterruptedException { logger.debug("Executor did not terminate in the specified time.", false); List droppedTasks = executor.shutdownNow(); // optional ** logger.debug( - "Executor was abruptly shut down. " - + droppedTasks.size() - + " tasks will not be executed."); // optional ** + "Executor was abruptly shut down. {} tasks will not be executed.", + droppedTasks.size()); // optional ** } } // Normal flow will never hit here. 
This is only for testing purposes @@ -825,15 +833,32 @@ public DownloaderMetrics terminate() throws InterruptedException { throw (InterruptedException) SnowflakeChunkDownloader.injectedDownloaderException; } } - logger.debug( - "Total milliseconds waiting for chunks: {}, " - + "Total memory used: {}, total download time: {} millisec, " - + "total parsing time: {} milliseconds, total chunks: {}", - numberMillisWaitingForChunks, - Runtime.getRuntime().totalMemory(), + + long totalUncompressedSize = + chunks.stream() + .reduce(0L, (acc, chunk) -> acc + chunk.getUncompressedSize(), Long::sum); + long rowsInChunks = + chunks.stream().reduce(0L, (acc, chunk) -> acc + chunk.getRowCount(), Long::sum); + long chunksSize = chunks.size(); + + logger.info( + "Completed processing {} {} chunks for query {} in {} ms. Download took {} ms (average: {} ms)," + + " parsing took {} ms (average: {} ms). Chunks uncompressed size: {} MB (average: {} MB)," + + " rows in chunks: {} (total: {}, average in chunk: {}), total memory used: {} MB", + chunksSize, + queryResultFormat == QueryResultFormat.ARROW ? 
"ARROW" : "JSON", + queryId, + totalMillisParsingChunks.get() + totalMillisDownloadingChunks.get(), totalMillisDownloadingChunks.get(), - totalMillisParsingChunks.get(), - chunks.size()); + totalMillisDownloadingChunks.get() / chunksSize, + totalMillisParsingChunks, + totalMillisParsingChunks.get() / chunksSize, + totalUncompressedSize / MB, + totalUncompressedSize / MB / chunksSize, + rowsInChunks, + firstChunkRowCount + rowsInChunks, + rowsInChunks / chunksSize, + Runtime.getRuntime().totalMemory() / MB); return new DownloaderMetrics( numberMillisWaitingForChunks, @@ -884,6 +909,8 @@ private void addParsingTime(long parsingTime) { * mainly for logging purpose * @param chunkHeadersMap contains headers needed to be added when downloading from s3 * @param networkTimeoutInMilli network timeout + * @param totalChunks used to log the information of total chunks + * @param queryId used to log the queryId to which the chunk belongs to * @return A callable responsible for downloading chunk */ private static Callable getDownloadChunkCallable( @@ -896,7 +923,9 @@ private static Callable getDownloadChunkCallable( final int authTimeout, final int socketTimeout, final int maxHttpRetries, - final SFBaseSession session) { + final SFBaseSession session, + final int totalChunks, + final String queryId) { ChunkDownloadContext downloadContext = new ChunkDownloadContext( downloader, @@ -934,7 +963,7 @@ private void downloadAndParseChunk(InputStream inputStream) throws SnowflakeSQLE } } catch (Exception ex) { logger.debug( - "Thread {} Exception when parsing result #chunk{}: {}", + "Thread {} Exception when parsing result chunk#{}: {}", Thread.currentThread().getId(), chunkIndex, ex.getLocalizedMessage()); @@ -948,7 +977,7 @@ private void downloadAndParseChunk(InputStream inputStream) throws SnowflakeSQLE } finally { // close the buffer reader will close underlying stream logger.debug( - "Thread {} close input stream for #chunk{}", + "Thread {} close input stream for chunk#{}", 
Thread.currentThread().getId(), chunkIndex); try { @@ -979,7 +1008,7 @@ public Void call() { } logger.debug( - "Downloading #chunk{}, url={}, Thread {}", + "Downloading chunk#{}, url: {}, Thread {}", chunkIndex, resultChunk.getUrl(), Thread.currentThread().getId()); @@ -998,51 +1027,78 @@ public Void call() { InputStream is = downloader.getResultStreamProvider().getInputStream(downloadContext); logger.debug( - "Thread {} start downloading #chunk{}", Thread.currentThread().getId(), chunkIndex); + "Thread {} start downloading chunk#{}", Thread.currentThread().getId(), chunkIndex); downloadAndParseChunk(is); logger.debug( - "Thread {} finish downloading #chunk{}", Thread.currentThread().getId(), chunkIndex); + "Thread {} finish downloading chunk#{}", Thread.currentThread().getId(), chunkIndex); downloader.downloaderFutures.remove(chunkIndex); - logger.debug( - "Finished preparing chunk data for {}, " - + "total download time={}ms, total parse time={}ms", - resultChunk.getScrubbedUrl(), - resultChunk.getDownloadTime(), - resultChunk.getParseTime()); + if (chunkIndex % 5 == 0) { + logger.info( + "Processed {} chunk#{} in {} ms ({} out of {}) for query {}. Download took {} ms, " + + "parsing took {} ms. Chunk uncompressed size: {} kB, cols: {}, rows: {}, scrubbed URL: {}", + downloader.queryResultFormat == QueryResultFormat.ARROW ? "ARROW" : "JSON", + chunkIndex, + resultChunk.getTotalTime(), + chunkIndex + 1, + totalChunks, + queryId, + resultChunk.getDownloadTime(), + resultChunk.getParseTime(), + resultChunk.getUncompressedSize() / 1024, + resultChunk.colCount, + resultChunk.rowCount, + resultChunk.getScrubbedUrl()); + } else { + logger.debug( + "Processed {} chunk#{} in {} ms ({} out of {}) for query {}. Download took {} ms, " + + "parsing took {} ms. Chunk uncompressed size: {} kB, cols: {}, rows: {}, scrubbed URL: {}", + downloader.queryResultFormat == QueryResultFormat.ARROW ? 
"ARROW" : "JSON", + chunkIndex, + resultChunk.getTotalTime(), + chunkIndex + 1, + totalChunks, + queryId, + resultChunk.getDownloadTime(), + resultChunk.getParseTime(), + resultChunk.getUncompressedSize() / 1024, + resultChunk.colCount, + resultChunk.rowCount, + resultChunk.getScrubbedUrl()); + } resultChunk.getLock().lock(); try { - logger.debug("get lock to change the chunk to be ready to consume", false); + logger.debug("Get lock to change the chunk to be ready to consume", false); - logger.debug("wake up consumer if it is waiting for a chunk to be " + "ready", false); + logger.debug("Wake up consumer if it is waiting for a chunk to be ready", false); resultChunk.setDownloadState(DownloadState.SUCCESS); resultChunk.getDownloadCondition().signal(); } finally { - logger.debug("Downloaded #chunk{}, free lock", chunkIndex); + logger.debug("Downloaded chunk#{}, free lock", chunkIndex); resultChunk.getLock().unlock(); } } catch (Throwable th) { resultChunk.getLock().lock(); try { - logger.debug("get lock to set chunk download error", false); + logger.debug("Get lock to set chunk download error", false); resultChunk.setDownloadState(DownloadState.FAILURE); downloader.releaseCurrentMemoryUsage(chunkIndex, Optional.empty()); StringWriter errors = new StringWriter(); th.printStackTrace(new PrintWriter(errors)); resultChunk.setDownloadError(errors.toString()); - logger.debug("wake up consumer if it is waiting for a chunk to be ready", false); + logger.debug("Wake up consumer if it is waiting for a chunk to be ready", false); resultChunk.getDownloadCondition().signal(); } finally { - logger.debug("Failed to download #chunk{}, free lock", chunkIndex); + logger.debug("Failed to download chunk#{}, free lock", chunkIndex); resultChunk.getLock().unlock(); } logger.debug( - "Thread {} Exception encountered ({}:{}) fetching #chunk{} from: {}, Error {}", + "Thread {} Exception encountered ({}:{}) fetching chunk#{} from: {}, Error {}", Thread.currentThread().getId(), 
th.getClass().getName(), th.getLocalizedMessage(), @@ -1078,7 +1134,7 @@ private void parseJsonToChunkV2(InputStream jsonInputStream, SnowflakeResultChun ByteBuffer bBuf = null; int len; logger.debug( - "Thread {} start to read inputstream for #chunk{}", + "Thread {} start to read inputstream for chunk#{}", Thread.currentThread().getId(), chunkIndex); while ((len = jsonInputStream.read(buf)) != -1) { @@ -1103,7 +1159,7 @@ private void parseJsonToChunkV2(InputStream jsonInputStream, SnowflakeResultChun } } logger.debug( - "Thread {} finish reading inputstream for #chunk{}", + "Thread {} finish reading inputstream for chunk#{}", Thread.currentThread().getId(), chunkIndex); if (prevBuffer != null) { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java index 9d7da8071..bc4ecadc4 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java @@ -21,7 +21,7 @@ public class SnowflakeConnectString implements Serializable { private static final long serialVersionUID = 1L; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeConnectString.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeConnectString.class); private final String scheme; private final String host; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java index 9cae2de4b..473aa2041 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectionV1.java @@ -48,6 +48,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.log.SFLoggerUtil; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.SqlState; /** Snowflake 
connection implementation */ @@ -138,11 +139,19 @@ public SnowflakeConnectionV1(String url, Properties info, boolean fakeConnection private void initConnectionWithImpl( SFConnectionHandler sfConnectionHandler, String url, Properties info) throws SQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + logger.info("Initializing new connection"); this.sfConnectionHandler = sfConnectionHandler; sfConnectionHandler.initializeConnection(url, info); this.sfSession = sfConnectionHandler.getSFSession(); missingProperties = sfSession.checkProperties(); this.showStatementParameters = sfSession.getPreparedStatementLogging(); + stopwatch.stop(); + logger.info( + "Connection initialized successfully in {} ms. Session id: {}", + stopwatch.elapsedMillis(), + sfSession.getSessionId()); } public List returnMissingProperties() { @@ -221,9 +230,19 @@ public String[] getChildQueryIds(String queryID) throws SQLException { */ @Override public void close() throws SQLException { - logger.debug(" public void close()", false); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + String sessionId = null; + + if (sfSession != null) { + sessionId = sfSession.getSessionId(); + logger.info("Closing connection with session id: {}", sessionId); + } else { + logger.debug("Closing connection without associated session"); + } if (isClosed) { + logger.debug("Connection is already closed"); // No exception is raised even if the connection is closed. 
return; } @@ -235,6 +254,9 @@ public void close() throws SQLException { sfSession = null; } // make sure to close all created statements + if (!openStatements.isEmpty()) { + logger.debug("Closing {} opened statements", openStatements.size()); + } for (Statement stmt : openStatements) { if (stmt != null && !stmt.isClosed()) { if (stmt.isWrapperFor(SnowflakeStatementV1.class)) { @@ -244,12 +266,20 @@ public void close() throws SQLException { } } } + if (!openStatements.isEmpty()) { + logger.debug("Statements closed successfully"); + } openStatements.clear(); } catch (SFException ex) { throw new SnowflakeSQLLoggedException( sfSession, ex.getSqlState(), ex.getVendorCode(), ex.getCause(), ex.getParams()); } + stopwatch.stop(); + logger.info( + "Connection with session id: {} closed successfully in {} ms", + sessionId, + stopwatch.elapsedMillis()); } public String getSessionID() throws SQLException { @@ -261,7 +291,7 @@ public String getSessionID() throws SQLException { @Override public boolean isClosed() throws SQLException { - logger.debug(" public boolean isClosed()", false); + logger.trace("boolean isClosed()", false); return isClosed; } @@ -274,14 +304,14 @@ public boolean isClosed() throws SQLException { */ @Override public DatabaseMetaData getMetaData() throws SQLException { - logger.debug(" public DatabaseMetaData getMetaData()", false); + logger.trace("DatabaseMetaData getMetaData()", false); raiseSQLExceptionIfConnectionIsClosed(); return new SnowflakeDatabaseMetaData(this); } @Override public CallableStatement prepareCall(String sql) throws SQLException { - logger.debug(" public CallableStatement prepareCall(String sql)", false); + logger.trace("CallableStatement prepareCall(String sql)", false); raiseSQLExceptionIfConnectionIsClosed(); CallableStatement stmt = prepareCall(sql, false); openStatements.add(stmt); @@ -289,7 +319,7 @@ public CallableStatement prepareCall(String sql) throws SQLException { } public CallableStatement prepareCall(String sql, boolean 
skipParsing) throws SQLException { - logger.debug(" public CallableStatement prepareCall(String sql, boolean skipParsing)", false); + logger.trace("CallableStatement prepareCall(String sql, boolean skipParsing)", false); raiseSQLExceptionIfConnectionIsClosed(); CallableStatement stmt = new SnowflakeCallableStatementV1( @@ -306,9 +336,8 @@ public CallableStatement prepareCall(String sql, boolean skipParsing) throws SQL @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - logger.debug( - " public CallableStatement prepareCall(String sql," - + " int resultSetType,int resultSetConcurrency", + logger.trace( + "CallableStatement prepareCall(String sql," + " int resultSetType,int resultSetConcurrency", false); CallableStatement stmt = prepareCall(sql, resultSetType, resultSetConcurrency, ResultSet.CLOSE_CURSORS_AT_COMMIT); @@ -320,8 +349,7 @@ public CallableStatement prepareCall(String sql, int resultSetType, int resultSe public CallableStatement prepareCall( String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug( - " public CallableStatement prepareCall(String sql, int " + "resultSetType,", false); + logger.trace("CallableStatement prepareCall(String sql, int " + "resultSetType,", false); CallableStatement stmt = new SnowflakeCallableStatementV1( this, sql, false, resultSetType, resultSetConcurrency, resultSetHoldability); @@ -331,21 +359,21 @@ public CallableStatement prepareCall( @Override public String nativeSQL(String sql) throws SQLException { - logger.debug("public String nativeSQL(String sql)", false); + logger.trace("String nativeSQL(String sql)", false); raiseSQLExceptionIfConnectionIsClosed(); return sql; } @Override public boolean getAutoCommit() throws SQLException { - logger.debug("boolean getAutoCommit()", false); + logger.trace("boolean getAutoCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return 
sfSession.getAutoCommit(); } @Override public void setAutoCommit(boolean isAutoCommit) throws SQLException { - logger.debug("void setAutoCommit(boolean isAutoCommit)", false); + logger.trace("void setAutoCommit(boolean isAutoCommit)", false); boolean currentAutoCommit = this.getAutoCommit(); if (isAutoCommit != currentAutoCommit) { sfSession.setAutoCommit(isAutoCommit); @@ -357,33 +385,33 @@ public void setAutoCommit(boolean isAutoCommit) throws SQLException { @Override public void commit() throws SQLException { - logger.debug("void commit()", false); + logger.trace("void commit()", false); this.executeImmediate("commit"); } @Override public void rollback() throws SQLException { - logger.debug("void rollback()", false); + logger.trace("void rollback()", false); this.executeImmediate("rollback"); } @Override public void rollback(Savepoint savepoint) throws SQLException { - logger.debug("void rollback(Savepoint savepoint)", false); + logger.trace("void rollback(Savepoint savepoint)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); } @Override public boolean isReadOnly() throws SQLException { - logger.debug("boolean isReadOnly()", false); + logger.trace("boolean isReadOnly()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public void setReadOnly(boolean readOnly) throws SQLException { - logger.debug("void setReadOnly(boolean readOnly)", false); + logger.trace("void setReadOnly(boolean readOnly)", false); raiseSQLExceptionIfConnectionIsClosed(); if (readOnly) { logger.debug("setReadOnly not supported.", false); @@ -398,7 +426,7 @@ public String getCatalog() throws SQLException { @Override public void setCatalog(String catalog) throws SQLException { - logger.debug("void setCatalog(String catalog)", false); + logger.trace("void setCatalog(String catalog)", false); // switch db by running "use db" this.executeImmediate("use database \"" + catalog + "\""); @@ -406,7 +434,7 @@ public void setCatalog(String catalog) 
throws SQLException { @Override public int getTransactionIsolation() throws SQLException { - logger.debug("int getTransactionIsolation()", false); + logger.trace("int getTransactionIsolation()", false); raiseSQLExceptionIfConnectionIsClosed(); return this.transactionIsolation; } @@ -419,7 +447,7 @@ public int getTransactionIsolation() throws SQLException { */ @Override public void setTransactionIsolation(int level) throws SQLException { - logger.debug("void setTransactionIsolation(int level), level = {}", level); + logger.trace("void setTransactionIsolation(int level), level = {}", level); raiseSQLExceptionIfConnectionIsClosed(); if (level == Connection.TRANSACTION_NONE || level == Connection.TRANSACTION_READ_COMMITTED) { this.transactionIsolation = level; @@ -433,14 +461,14 @@ public void setTransactionIsolation(int level) throws SQLException { @Override public SQLWarning getWarnings() throws SQLException { - logger.debug("SQLWarning getWarnings()", false); + logger.trace("SQLWarning getWarnings()", false); raiseSQLExceptionIfConnectionIsClosed(); return sqlWarnings; } @Override public void clearWarnings() throws SQLException { - logger.debug("void clearWarnings()", false); + logger.trace("void clearWarnings()", false); raiseSQLExceptionIfConnectionIsClosed(); sfSession.clearSqlWarnings(); sqlWarnings = null; @@ -449,7 +477,7 @@ public void clearWarnings() throws SQLException { @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - logger.debug( + logger.trace( "Statement createStatement(int resultSetType, " + "int resultSetConcurrency)", false); Statement stmt = @@ -461,7 +489,7 @@ public Statement createStatement(int resultSetType, int resultSetConcurrency) @Override public Statement createStatement( int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug( + logger.trace( "Statement createStatement(int resultSetType, " + "int resultSetConcurrency, int 
resultSetHoldability", false); @@ -474,7 +502,7 @@ public Statement createStatement( @Override public PreparedStatement prepareStatement(String sql) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql)", false); + logger.trace("PreparedStatement prepareStatement(String sql)", false); raiseSQLExceptionIfConnectionIsClosed(); PreparedStatement stmt = prepareStatement(sql, false); openStatements.add(stmt); @@ -483,7 +511,7 @@ public PreparedStatement prepareStatement(String sql) throws SQLException { @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug( + logger.trace( "PreparedStatement prepareStatement(String sql, " + "int autoGeneratedKeys)", false); if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) { @@ -495,14 +523,14 @@ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) thr @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, " + "int[] columnIndexes)", false); + logger.trace("PreparedStatement prepareStatement(String sql, " + "int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); } @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { - logger.debug( + logger.trace( "PreparedStatement prepareStatement(String sql, " + "String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); @@ -511,7 +539,7 @@ public PreparedStatement prepareStatement(String sql, String[] columnNames) thro @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, " + "int resultSetType,", false); + logger.trace("PreparedStatement prepareStatement(String sql, " + "int 
resultSetType,", false); PreparedStatement stmt = prepareStatement( @@ -524,7 +552,7 @@ public PreparedStatement prepareStatement(String sql, int resultSetType, int res public PreparedStatement prepareStatement( String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, " + "int resultSetType,", false); + logger.trace("PreparedStatement prepareStatement(String sql, " + "int resultSetType,", false); PreparedStatement stmt = new SnowflakePreparedStatementV1( @@ -534,7 +562,7 @@ public PreparedStatement prepareStatement( } public PreparedStatement prepareStatement(String sql, boolean skipParsing) throws SQLException { - logger.debug("PreparedStatement prepareStatement(String sql, boolean skipParsing)", false); + logger.trace("PreparedStatement prepareStatement(String sql, boolean skipParsing)", false); raiseSQLExceptionIfConnectionIsClosed(); PreparedStatement stmt = new SnowflakePreparedStatementV1( @@ -656,7 +684,7 @@ private void raiseSetClientInfoException(Map failedPro @Override public Properties getClientInfo() throws SQLException { - logger.debug("Properties getClientInfo()", false); + logger.trace("Properties getClientInfo()", false); raiseSQLExceptionIfConnectionIsClosed(); // sfSession must not be null if the connection is not closed. return sfSession.getClientInfo(); @@ -675,7 +703,7 @@ public void setClientInfo(Properties properties) throws SQLClientInfoException { @Override public String getClientInfo(String name) throws SQLException { - logger.debug("String getClientInfo(String name)", false); + logger.trace("String getClientInfo(String name)", false); raiseSQLExceptionIfConnectionIsClosed(); // sfSession must not be null if the connection is not closed. 
@@ -684,13 +712,13 @@ public String getClientInfo(String name) throws SQLException { @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { - logger.debug("Array createArrayOf(String typeName, Object[] " + "elements)", false); + logger.trace("Array createArrayOf(String typeName, Object[] " + "elements)", false); return new SfSqlArray(JDBCType.valueOf(typeName).getVendorTypeNumber(), elements); } @Override public Struct createStruct(String typeName, Object[] attributes) throws SQLException { - logger.debug("Struct createStruct(String typeName, Object[] " + "attributes)", false); + logger.trace("Struct createStruct(String typeName, Object[] " + "attributes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(sfSession); } @@ -703,7 +731,7 @@ public String getSchema() throws SQLException { @Override public void setSchema(String schema) throws SQLException { - logger.debug("void setSchema(String schema)", false); + logger.trace("void setSchema(String schema)", false); String databaseName = getCatalog(); @@ -717,14 +745,14 @@ public void setSchema(String schema) throws SQLException { @Override public void abort(Executor executor) throws SQLException { - logger.debug("void abort(Executor executor)", false); + logger.trace("void abort(Executor executor)", false); close(); } @Override public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { - logger.debug("void setNetworkTimeout(Executor executor, int " + "milliseconds)", false); + logger.trace("void setNetworkTimeout(Executor executor, int " + "milliseconds)", false); raiseSQLExceptionIfConnectionIsClosed(); networkTimeoutInMilli = milliseconds; @@ -732,14 +760,14 @@ public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLExc @Override public int getNetworkTimeout() throws SQLException { - logger.debug("int getNetworkTimeout()", false); + logger.trace("int getNetworkTimeout()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return networkTimeoutInMilli; } @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -747,7 +775,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug(" T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -873,7 +901,7 @@ private void uploadStreamInternal( boolean compressData) throws SQLException { logger.debug( - "upload data from stream: stageName={}" + ", destPrefix={}, destFileName={}", + "Upload data from stream: stageName={}" + ", destPrefix={}, destFileName={}", stageName, destPrefix, destFileName); @@ -942,7 +970,7 @@ public InputStream downloadStream(String stageName, String sourceFileName, boole throws SQLException { logger.debug( - "download data to stream: stageName={}" + ", sourceFileName={}", stageName, sourceFileName); + "Download data to stream: stageName={}" + ", sourceFileName={}", stageName, sourceFileName); if (Strings.isNullOrEmpty(stageName)) { throw new SnowflakeSQLLoggedException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index d79fd1b7c..ff5e0529f 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -52,7 +52,7 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDatabaseMetaData.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDatabaseMetaData.class); private static final 
ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); @@ -162,7 +162,7 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { private boolean isPatternMatchingEnabled = true; SnowflakeDatabaseMetaData(Connection connection) throws SQLException { - logger.debug("public SnowflakeDatabaseMetaData(SnowflakeConnection connection)", false); + logger.trace("SnowflakeDatabaseMetaData(SnowflakeConnection connection)", false); this.connection = connection; this.session = connection.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); @@ -273,21 +273,21 @@ private boolean isSchemaNameWildcardPattern(String inputString) { @Override public boolean allProceduresAreCallable() throws SQLException { - logger.debug("public boolean allProceduresAreCallable()", false); + logger.trace("boolean allProceduresAreCallable()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean allTablesAreSelectable() throws SQLException { - logger.debug("public boolean allTablesAreSelectable()", false); + logger.trace("boolean allTablesAreSelectable()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getURL() throws SQLException { - logger.debug("public String getURL()", false); + logger.trace("String getURL()", false); raiseSQLExceptionIfConnectionIsClosed(); String url = session.getUrl(); return url.startsWith("http://") @@ -297,14 +297,14 @@ public String getURL() throws SQLException { @Override public String getUserName() throws SQLException { - logger.debug("public String getUserName()", false); + logger.trace("String getUserName()", false); raiseSQLExceptionIfConnectionIsClosed(); return session.getUser(); } @Override public boolean isReadOnly() throws SQLException { - logger.debug("public boolean isReadOnly()", false); + logger.trace("boolean isReadOnly()", false); raiseSQLExceptionIfConnectionIsClosed(); // no read only mode is supported. 
return false; @@ -312,56 +312,56 @@ public boolean isReadOnly() throws SQLException { @Override public boolean nullsAreSortedHigh() throws SQLException { - logger.debug("public boolean nullsAreSortedHigh()", false); + logger.trace("boolean nullsAreSortedHigh()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean nullsAreSortedLow() throws SQLException { - logger.debug("public boolean nullsAreSortedLow()", false); + logger.trace("boolean nullsAreSortedLow()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean nullsAreSortedAtStart() throws SQLException { - logger.debug("public boolean nullsAreSortedAtStart()", false); + logger.trace("boolean nullsAreSortedAtStart()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean nullsAreSortedAtEnd() throws SQLException { - logger.debug("public boolean nullsAreSortedAtEnd()", false); + logger.trace("boolean nullsAreSortedAtEnd()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public String getDatabaseProductName() throws SQLException { - logger.debug("public String getDatabaseProductName()", false); + logger.trace("String getDatabaseProductName()", false); raiseSQLExceptionIfConnectionIsClosed(); return DatabaseProductName; } @Override public String getDatabaseProductVersion() throws SQLException { - logger.debug("public String getDatabaseProductVersion()", false); + logger.trace("String getDatabaseProductVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection.unwrap(SnowflakeConnectionV1.class).getDatabaseVersion(); } @Override public String getDriverName() throws SQLException { - logger.debug("public String getDriverName()", false); + logger.trace("String getDriverName()", false); raiseSQLExceptionIfConnectionIsClosed(); return DriverName; } @Override public String getDriverVersion() throws SQLException { - logger.debug("public String getDriverVersion()", 
false); + logger.trace("String getDriverVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return SnowflakeDriver.majorVersion + "." @@ -372,705 +372,705 @@ public String getDriverVersion() throws SQLException { @Override public int getDriverMajorVersion() { - logger.debug("public int getDriverMajorVersion()", false); + logger.trace("int getDriverMajorVersion()", false); return SnowflakeDriver.majorVersion; } @Override public int getDriverMinorVersion() { - logger.debug("public int getDriverMinorVersion()", false); + logger.trace("int getDriverMinorVersion()", false); return SnowflakeDriver.minorVersion; } @Override public boolean usesLocalFiles() throws SQLException { - logger.debug("public boolean usesLocalFiles()", false); + logger.trace("boolean usesLocalFiles()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean usesLocalFilePerTable() throws SQLException { - logger.debug("public boolean usesLocalFilePerTable()", false); + logger.trace("boolean usesLocalFilePerTable()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMixedCaseIdentifiers() throws SQLException { - logger.debug("public boolean supportsMixedCaseIdentifiers()", false); + logger.trace("boolean supportsMixedCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesUpperCaseIdentifiers() throws SQLException { - logger.debug("public boolean storesUpperCaseIdentifiers()", false); + logger.trace("boolean storesUpperCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean storesLowerCaseIdentifiers() throws SQLException { - logger.debug("public boolean storesLowerCaseIdentifiers()", false); + logger.trace("boolean storesLowerCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesMixedCaseIdentifiers() throws SQLException { - 
logger.debug("public boolean storesMixedCaseIdentifiers()", false); + logger.trace("boolean storesMixedCaseIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean supportsMixedCaseQuotedIdentifiers()", false); + logger.trace("boolean supportsMixedCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean storesUpperCaseQuotedIdentifiers()", false); + logger.trace("boolean storesUpperCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean storesLowerCaseQuotedIdentifiers()", false); + logger.trace("boolean storesLowerCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { - logger.debug("public boolean storesMixedCaseQuotedIdentifiers()", false); + logger.trace("boolean storesMixedCaseQuotedIdentifiers()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getIdentifierQuoteString() throws SQLException { - logger.debug("public String getIdentifierQuoteString()", false); + logger.trace("String getIdentifierQuoteString()", false); raiseSQLExceptionIfConnectionIsClosed(); return "\""; } @Override public String getSQLKeywords() throws SQLException { - logger.debug("public String getSQLKeywords()", false); + logger.trace("String getSQLKeywords()", false); raiseSQLExceptionIfConnectionIsClosed(); return notSQL2003Keywords; } @Override public String getNumericFunctions() throws SQLException { - logger.debug("public String getNumericFunctions()", false); + logger.trace("String 
getNumericFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return NumericFunctionsSupported; } @Override public String getStringFunctions() throws SQLException { - logger.debug("public String getStringFunctions()", false); + logger.trace("String getStringFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return StringFunctionsSupported; } @Override public String getSystemFunctions() throws SQLException { - logger.debug("public String getSystemFunctions()", false); + logger.trace("String getSystemFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return SystemFunctionsSupported; } @Override public String getTimeDateFunctions() throws SQLException { - logger.debug("public String getTimeDateFunctions()", false); + logger.trace("String getTimeDateFunctions()", false); raiseSQLExceptionIfConnectionIsClosed(); return DateAndTimeFunctionsSupported; } @Override public String getSearchStringEscape() throws SQLException { - logger.debug("public String getSearchStringEscape()", false); + logger.trace("String getSearchStringEscape()", false); raiseSQLExceptionIfConnectionIsClosed(); return Character.toString(SEARCH_STRING_ESCAPE); } @Override public String getExtraNameCharacters() throws SQLException { - logger.debug("public String getExtraNameCharacters()", false); + logger.trace("String getExtraNameCharacters()", false); raiseSQLExceptionIfConnectionIsClosed(); return "$"; } @Override public boolean supportsAlterTableWithAddColumn() throws SQLException { - logger.debug("public boolean supportsAlterTableWithAddColumn()", false); + logger.trace("boolean supportsAlterTableWithAddColumn()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsAlterTableWithDropColumn() throws SQLException { - logger.debug("public boolean supportsAlterTableWithDropColumn()", false); + logger.trace("boolean supportsAlterTableWithDropColumn()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override 
public boolean supportsColumnAliasing() throws SQLException { - logger.debug("public boolean supportsColumnAliasing()", false); + logger.trace("boolean supportsColumnAliasing()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean nullPlusNonNullIsNull() throws SQLException { - logger.debug("public boolean nullPlusNonNullIsNull()", false); + logger.trace("boolean nullPlusNonNullIsNull()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsConvert() throws SQLException { - logger.debug("public boolean supportsConvert()", false); + logger.trace("boolean supportsConvert()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsConvert(int fromType, int toType) throws SQLException { - logger.debug("public boolean supportsConvert(int fromType, int toType)", false); + logger.trace("boolean supportsConvert(int fromType, int toType)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsTableCorrelationNames() throws SQLException { - logger.debug("public boolean supportsTableCorrelationNames()", false); + logger.trace("boolean supportsTableCorrelationNames()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsDifferentTableCorrelationNames() throws SQLException { - logger.debug("public boolean supportsDifferentTableCorrelationNames()", false); + logger.trace("boolean supportsDifferentTableCorrelationNames()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsExpressionsInOrderBy() throws SQLException { - logger.debug("public boolean supportsExpressionsInOrderBy()", false); + logger.trace("boolean supportsExpressionsInOrderBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsOrderByUnrelated() throws SQLException { - logger.debug("public boolean 
supportsOrderByUnrelated()", false); + logger.trace("boolean supportsOrderByUnrelated()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsGroupBy() throws SQLException { - logger.debug("public boolean supportsGroupBy()", false); + logger.trace("boolean supportsGroupBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsGroupByUnrelated() throws SQLException { - logger.debug("public boolean supportsGroupByUnrelated()", false); + logger.trace("boolean supportsGroupByUnrelated()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsGroupByBeyondSelect() throws SQLException { - logger.debug("public boolean supportsGroupByBeyondSelect()", false); + logger.trace("boolean supportsGroupByBeyondSelect()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsLikeEscapeClause() throws SQLException { - logger.debug("public boolean supportsLikeEscapeClause()", false); + logger.trace("boolean supportsLikeEscapeClause()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMultipleResultSets() throws SQLException { - logger.debug("public boolean supportsMultipleResultSets()", false); + logger.trace("boolean supportsMultipleResultSets()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMultipleTransactions() throws SQLException { - logger.debug("public boolean supportsMultipleTransactions()", false); + logger.trace("boolean supportsMultipleTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsNonNullableColumns() throws SQLException { - logger.debug("public boolean supportsNonNullableColumns()", false); + logger.trace("boolean supportsNonNullableColumns()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public 
boolean supportsMinimumSQLGrammar() throws SQLException { - logger.debug("public boolean supportsMinimumSQLGrammar()", false); + logger.trace("boolean supportsMinimumSQLGrammar()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCoreSQLGrammar() throws SQLException { - logger.debug("public boolean supportsCoreSQLGrammar()", false); + logger.trace("boolean supportsCoreSQLGrammar()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsExtendedSQLGrammar() throws SQLException { - logger.debug("public boolean supportsExtendedSQLGrammar()", false); + logger.trace("boolean supportsExtendedSQLGrammar()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsANSI92EntryLevelSQL() throws SQLException { - logger.debug("public boolean supportsANSI92EntryLevelSQL()", false); + logger.trace("boolean supportsANSI92EntryLevelSQL()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsANSI92IntermediateSQL() throws SQLException { - logger.debug("public boolean supportsANSI92IntermediateSQL()", false); + logger.trace("boolean supportsANSI92IntermediateSQL()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsANSI92FullSQL() throws SQLException { - logger.debug("public boolean supportsANSI92FullSQL()", false); + logger.trace("boolean supportsANSI92FullSQL()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsIntegrityEnhancementFacility() throws SQLException { - logger.debug("public boolean supportsIntegrityEnhancementFacility()", false); + logger.trace("boolean supportsIntegrityEnhancementFacility()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOuterJoins() throws SQLException { - logger.debug("public boolean supportsOuterJoins()", false); + 
logger.trace("boolean supportsOuterJoins()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsFullOuterJoins() throws SQLException { - logger.debug("public boolean supportsFullOuterJoins()", false); + logger.trace("boolean supportsFullOuterJoins()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsLimitedOuterJoins() throws SQLException { - logger.debug("public boolean supportsLimitedOuterJoins()", false); + logger.trace("boolean supportsLimitedOuterJoins()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getSchemaTerm() throws SQLException { - logger.debug("public String getSchemaTerm()", false); + logger.trace("String getSchemaTerm()", false); raiseSQLExceptionIfConnectionIsClosed(); return "schema"; } @Override public String getProcedureTerm() throws SQLException { - logger.debug("public String getProcedureTerm()", false); + logger.trace("String getProcedureTerm()", false); raiseSQLExceptionIfConnectionIsClosed(); return "procedure"; } @Override public String getCatalogTerm() throws SQLException { - logger.debug("public String getCatalogTerm()", false); + logger.trace("String getCatalogTerm()", false); raiseSQLExceptionIfConnectionIsClosed(); return "database"; } @Override public boolean isCatalogAtStart() throws SQLException { - logger.debug("public boolean isCatalogAtStart()", false); + logger.trace("boolean isCatalogAtStart()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public String getCatalogSeparator() throws SQLException { - logger.debug("public String getCatalogSeparator()", false); + logger.trace("String getCatalogSeparator()", false); raiseSQLExceptionIfConnectionIsClosed(); return "."; } @Override public boolean supportsSchemasInDataManipulation() throws SQLException { - logger.debug("public boolean supportsSchemasInDataManipulation()", false); + logger.trace("boolean 
supportsSchemasInDataManipulation()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSchemasInProcedureCalls() throws SQLException { - logger.debug("public boolean supportsSchemasInProcedureCalls()", false); + logger.trace("boolean supportsSchemasInProcedureCalls()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsSchemasInTableDefinitions() throws SQLException { - logger.debug("public boolean supportsSchemasInTableDefinitions()", false); + logger.trace("boolean supportsSchemasInTableDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSchemasInIndexDefinitions() throws SQLException { - logger.debug("public boolean supportsSchemasInIndexDefinitions()", false); + logger.trace("boolean supportsSchemasInIndexDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { - logger.debug("public boolean supportsSchemasInPrivilegeDefinitions()", false); + logger.trace("boolean supportsSchemasInPrivilegeDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCatalogsInDataManipulation() throws SQLException { - logger.debug("public boolean supportsCatalogsInDataManipulation()", false); + logger.trace("boolean supportsCatalogsInDataManipulation()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsCatalogsInProcedureCalls() throws SQLException { - logger.debug("public boolean supportsCatalogsInProcedureCalls()", false); + logger.trace("boolean supportsCatalogsInProcedureCalls()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCatalogsInTableDefinitions() throws SQLException { - logger.debug("public boolean supportsCatalogsInTableDefinitions()", false); + 
logger.trace("boolean supportsCatalogsInTableDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsCatalogsInIndexDefinitions() throws SQLException { - logger.debug("public boolean supportsCatalogsInIndexDefinitions()", false); + logger.trace("boolean supportsCatalogsInIndexDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { - logger.debug("public boolean supportsCatalogsInPrivilegeDefinitions()", false); + logger.trace("boolean supportsCatalogsInPrivilegeDefinitions()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsPositionedDelete() throws SQLException { - logger.debug("public boolean supportsPositionedDelete()", false); + logger.trace("boolean supportsPositionedDelete()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsPositionedUpdate() throws SQLException { - logger.debug("public boolean supportsPositionedUpdate()", false); + logger.trace("boolean supportsPositionedUpdate()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsSelectForUpdate() throws SQLException { - logger.debug("public boolean supportsSelectForUpdate()", false); + logger.trace("boolean supportsSelectForUpdate()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsStoredProcedures() throws SQLException { - logger.debug("public boolean supportsStoredProcedures()", false); + logger.trace("boolean supportsStoredProcedures()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInComparisons() throws SQLException { - logger.debug("public boolean supportsSubqueriesInComparisons()", false); + logger.trace("boolean supportsSubqueriesInComparisons()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInExists() throws SQLException { - logger.debug("public boolean supportsSubqueriesInExists()", false); + logger.trace("boolean supportsSubqueriesInExists()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInIns() throws SQLException { - logger.debug("public boolean supportsSubqueriesInIns()", false); + logger.trace("boolean supportsSubqueriesInIns()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsSubqueriesInQuantifieds() throws SQLException { - logger.debug("public boolean supportsSubqueriesInQuantifieds()", false); + logger.trace("boolean supportsSubqueriesInQuantifieds()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsCorrelatedSubqueries() throws SQLException { - logger.debug("public boolean supportsCorrelatedSubqueries()", false); + logger.trace("boolean supportsCorrelatedSubqueries()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsUnion() throws SQLException { - logger.debug("public boolean supportsUnion()", false); + logger.trace("boolean supportsUnion()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsUnionAll() throws SQLException { - logger.debug("public boolean supportsUnionAll()", false); + logger.trace("boolean supportsUnionAll()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsOpenCursorsAcrossCommit() throws SQLException { - logger.debug("public boolean supportsOpenCursorsAcrossCommit()", false); + logger.trace("boolean supportsOpenCursorsAcrossCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOpenCursorsAcrossRollback() throws SQLException { - logger.debug("public boolean 
supportsOpenCursorsAcrossRollback()", false); + logger.trace("boolean supportsOpenCursorsAcrossRollback()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOpenStatementsAcrossCommit() throws SQLException { - logger.debug("public boolean supportsOpenStatementsAcrossCommit()", false); + logger.trace("boolean supportsOpenStatementsAcrossCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsOpenStatementsAcrossRollback() throws SQLException { - logger.debug("public boolean supportsOpenStatementsAcrossRollback()", false); + logger.trace("boolean supportsOpenStatementsAcrossRollback()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public int getMaxBinaryLiteralLength() throws SQLException { - logger.debug("public int getMaxBinaryLiteralLength()", false); + logger.trace("int getMaxBinaryLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 8388608; } @Override public int getMaxCharLiteralLength() throws SQLException { - logger.debug("public int getMaxCharLiteralLength()", false); + logger.trace("int getMaxCharLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 16777216; } @Override public int getMaxColumnNameLength() throws SQLException { - logger.debug("public int getMaxColumnNameLength()", false); + logger.trace("int getMaxColumnNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxColumnsInGroupBy() throws SQLException { - logger.debug("public int getMaxColumnsInGroupBy()", false); + logger.trace("int getMaxColumnsInGroupBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInIndex() throws SQLException { - logger.debug("public int getMaxColumnsInIndex()", false); + logger.trace("int getMaxColumnsInIndex()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int 
getMaxColumnsInOrderBy() throws SQLException { - logger.debug("public int getMaxColumnsInOrderBy()", false); + logger.trace("int getMaxColumnsInOrderBy()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInSelect() throws SQLException { - logger.debug("public int getMaxColumnsInSelect()", false); + logger.trace("int getMaxColumnsInSelect()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxColumnsInTable() throws SQLException { - logger.debug("public int getMaxColumnsInTable()", false); + logger.trace("int getMaxColumnsInTable()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxConnections() throws SQLException { - logger.debug("public int getMaxConnections()", false); + logger.trace("int getMaxConnections()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxCursorNameLength() throws SQLException { - logger.debug("public int getMaxCursorNameLength()", false); + logger.trace("int getMaxCursorNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxIndexLength() throws SQLException { - logger.debug("public int getMaxIndexLength()", false); + logger.trace("int getMaxIndexLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxSchemaNameLength() throws SQLException { - logger.debug("public int getMaxSchemaNameLength()", false); + logger.trace("int getMaxSchemaNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxProcedureNameLength() throws SQLException { - logger.debug("public int getMaxProcedureNameLength()", false); + logger.trace("int getMaxProcedureNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxCatalogNameLength() throws SQLException { - logger.debug("public int getMaxCatalogNameLength()", false); + 
logger.trace("int getMaxCatalogNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxRowSize() throws SQLException { - logger.debug("public int getMaxRowSize()", false); + logger.trace("int getMaxRowSize()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { - logger.debug("public boolean doesMaxRowSizeIncludeBlobs()", false); + logger.trace("boolean doesMaxRowSizeIncludeBlobs()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public int getMaxStatementLength() throws SQLException { - logger.debug("public int getMaxStatementLength()", false); + logger.trace("int getMaxStatementLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxStatements() throws SQLException { - logger.debug("public int getMaxStatements()", false); + logger.trace("int getMaxStatements()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxTableNameLength() throws SQLException { - logger.debug("public int getMaxTableNameLength()", false); + logger.trace("int getMaxTableNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getMaxTablesInSelect() throws SQLException { - logger.debug("public int getMaxTablesInSelect()", false); + logger.trace("int getMaxTablesInSelect()", false); raiseSQLExceptionIfConnectionIsClosed(); return 0; } @Override public int getMaxUserNameLength() throws SQLException { - logger.debug("public int getMaxUserNameLength()", false); + logger.trace("int getMaxUserNameLength()", false); raiseSQLExceptionIfConnectionIsClosed(); return 255; } @Override public int getDefaultTransactionIsolation() throws SQLException { - logger.debug("public int getDefaultTransactionIsolation()", false); + logger.trace("int getDefaultTransactionIsolation()", false); raiseSQLExceptionIfConnectionIsClosed(); return 
Connection.TRANSACTION_READ_COMMITTED; } @Override public boolean supportsTransactions() throws SQLException { - logger.debug("public boolean supportsTransactions()", false); + logger.trace("boolean supportsTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsTransactionIsolationLevel(int level) throws SQLException { - logger.debug("public boolean supportsTransactionIsolationLevel(int level)", false); + logger.trace("boolean supportsTransactionIsolationLevel(int level)", false); raiseSQLExceptionIfConnectionIsClosed(); return (level == Connection.TRANSACTION_NONE) || (level == Connection.TRANSACTION_READ_COMMITTED); @@ -1078,29 +1078,28 @@ public boolean supportsTransactionIsolationLevel(int level) throws SQLException @Override public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { - logger.debug( - "public boolean " + "supportsDataDefinitionAndDataManipulationTransactions()", false); + logger.trace("boolean supportsDataDefinitionAndDataManipulationTransactions()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean supportsDataManipulationTransactionsOnly() throws SQLException { - logger.debug("public boolean supportsDataManipulationTransactionsOnly()", false); + logger.trace("boolean supportsDataManipulationTransactionsOnly()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean dataDefinitionCausesTransactionCommit() throws SQLException { - logger.debug("public boolean dataDefinitionCausesTransactionCommit()", false); + logger.trace("boolean dataDefinitionCausesTransactionCommit()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean dataDefinitionIgnoredInTransactions() throws SQLException { - logger.debug("public boolean dataDefinitionIgnoredInTransactions()", false); + logger.trace("boolean dataDefinitionIgnoredInTransactions()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return false; } @@ -1111,7 +1110,7 @@ public ResultSet getProcedures( throws SQLException { raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); - logger.debug( + logger.trace( "public ResultSet getProcedures(String catalog, " + "String schemaPattern,String procedureNamePattern)", false); @@ -1133,7 +1132,7 @@ public ResultSet getProcedures( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_PROCEDURES, resultSet, statement) { public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -1149,7 +1148,7 @@ public boolean next() throws SQLException { || compiledProcedurePattern.matcher(procedureName).matches()) && (compiledSchemaPattern == null || compiledSchemaPattern.matcher(schemaName).matches())) { - logger.debug("Found a matched function:" + schemaName + "." + procedureName); + logger.trace("Found a matched function:" + schemaName + "." 
+ procedureName); nextRow[0] = catalogName; nextRow[1] = schemaName; @@ -1173,7 +1172,7 @@ public ResultSet getProcedureColumns( final String procedureNamePattern, final String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getProcedureColumns(String catalog, " + "String schemaPattern,String procedureNamePattern," + "String columnNamePattern)", @@ -1431,7 +1430,7 @@ private String getFirstResultSetCommand( showProcedureCommand += " in schema \"" + catalogEscaped + "\".\"" + schemaPattern + "\""; } } - logger.debug("sql command to get column metadata: {}", showProcedureCommand); + logger.debug("Sql command to get column metadata: {}", showProcedureCommand); return showProcedureCommand; } @@ -1465,7 +1464,7 @@ public ResultSet getTables( final String tableNamePattern, final String[] types) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getTables(String catalog={}, String " + "schemaPattern={}, String tableNamePattern={}, String[] types={})", originalCatalog, @@ -1549,7 +1548,7 @@ public ResultSet getTables( } } - logger.debug("sql command to get table metadata: {}", showTablesCommand); + logger.debug("Sql command to get table metadata: {}", showTablesCommand); resultSet = executeAndReturnEmptyResultIfNotFound(statement, showTablesCommand, GET_TABLES); sendInBandTelemetryMetadataMetrics( @@ -1563,7 +1562,7 @@ public ResultSet getTables( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_TABLES, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -1613,14 +1612,14 @@ public boolean next() throws SQLException { @Override public ResultSet getSchemas() throws SQLException { - logger.debug("public ResultSet getSchemas()", false); + logger.trace("ResultSet getSchemas()", false); return getSchemas(null, null); 
} @Override public ResultSet getCatalogs() throws SQLException { - logger.debug("public ResultSet getCatalogs()", false); + logger.trace("ResultSet getCatalogs()", false); raiseSQLExceptionIfConnectionIsClosed(); String showDB = "show /* JDBC:DatabaseMetaData.getCatalogs() */ databases in account"; @@ -1630,7 +1629,7 @@ public ResultSet getCatalogs() throws SQLException { GET_CATALOGS, statement.executeQuery(showDB), statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show databases result @@ -1648,7 +1647,7 @@ public boolean next() throws SQLException { @Override public ResultSet getTableTypes() throws SQLException { - logger.debug("public ResultSet getTableTypes()", false); + logger.trace("ResultSet getTableTypes()", false); raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); @@ -1678,7 +1677,7 @@ public ResultSet getColumns( final String columnNamePattern, final boolean extendedSet) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getColumns(String catalog={}, String schemaPattern={}, " + "String tableNamePattern={}, String columnNamePattern={}, boolean extendedSet={}", originalCatalog, @@ -1740,7 +1739,7 @@ public ResultSet getColumns( } } - logger.debug("sql command to get column metadata: {}", showColumnsCommand); + logger.debug("Sql command to get column metadata: {}", showColumnsCommand); ResultSet resultSet = executeAndReturnEmptyResultIfNotFound( @@ -1760,7 +1759,7 @@ public ResultSet getColumns( String currentTableName = null; public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -1813,13 +1812,13 @@ public boolean next() throws SQLException { "error parsing data type: " + dataTypeStr); } - 
logger.debug("data type string: {}", dataTypeStr); + logger.debug("Data type string: {}", dataTypeStr); SnowflakeColumnMetadata columnMetadata = SnowflakeUtil.extractColumnMetadata( jsonNode, session.isJdbcTreatDecimalAsInt(), session); - logger.debug("nullable: {}", columnMetadata.isNullable()); + logger.debug("Nullable: {}", columnMetadata.isNullable()); // SNOW-16881: add catalog name nextRow[0] = catalogName; @@ -1874,7 +1873,7 @@ public boolean next() throws SQLException { nextRow[9] = null; nextRow[10] = (columnMetadata.isNullable() ? columnNullable : columnNoNulls); - logger.debug("returning nullable: {}", nextRow[10]); + logger.debug("Returning nullable: {}", nextRow[10]); nextRow[11] = comment; nextRow[12] = defaultValue; @@ -1910,7 +1909,7 @@ public boolean next() throws SQLException { @Override public ResultSet getColumnPrivileges( String catalog, String schema, String table, String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getColumnPrivileges(String catalog, " + "String schema,String table, String columnNamePattern)", false); @@ -1945,7 +1944,7 @@ public ResultSet getColumnPrivileges( public ResultSet getTablePrivileges( String originalCatalog, String originalSchemaPattern, final String tableNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getTablePrivileges(String catalog, " + "String schemaPattern,String tableNamePattern)", false); @@ -2006,7 +2005,7 @@ public ResultSet getTablePrivileges( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_TABLE_PRIVILEGES, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); while (showObjectResultSet.next()) { @@ -2045,7 +2044,7 @@ public boolean next() throws SQLException { public ResultSet getBestRowIdentifier( String catalog, String schema, String table, int scope, boolean nullable) throws 
SQLException { - logger.debug( + logger.trace( "public ResultSet getBestRowIdentifier(String catalog, " + "String schema,String table, int scope,boolean nullable)", false); @@ -2055,7 +2054,7 @@ public ResultSet getBestRowIdentifier( @Override public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getVersionColumns(String catalog, " + "String schema, String table)", false); @@ -2065,7 +2064,7 @@ public ResultSet getVersionColumns(String catalog, String schema, String table) @Override public ResultSet getPrimaryKeys(String originalCatalog, String originalSchema, final String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getPrimaryKeys(String catalog={}, " + "String schema={}, String table={})", originalCatalog, @@ -2121,7 +2120,7 @@ public ResultSet getPrimaryKeys(String originalCatalog, String originalSchema, f final String schemaIn = schema; final String tableIn = table; - logger.debug("sql command to get primary key metadata: {}", showPKCommand); + logger.debug("Sql command to get primary key metadata: {}", showPKCommand); ResultSet resultSet = executeAndReturnEmptyResultIfNotFound(statement, showPKCommand, GET_PRIMARY_KEYS); sendInBandTelemetryMetadataMetrics( @@ -2130,7 +2129,7 @@ public ResultSet getPrimaryKeys(String originalCatalog, String originalSchema, f return new SnowflakeDatabaseMetaDataQueryResultSet(GET_PRIMARY_KEYS, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); while (showObjectResultSet.next()) { @@ -2316,7 +2315,7 @@ private ResultSet getForeignKeys( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_FOREIGN_KEYS, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); 
incrementRow(); while (showObjectResultSet.next()) { @@ -2562,7 +2561,7 @@ private short getForeignKeyConstraintProperty(String property_name, String prope @Override public ResultSet getImportedKeys(String originalCatalog, String originalSchema, String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getImportedKeys(String catalog={}, " + "String schema={}, String table={})", originalCatalog, @@ -2580,7 +2579,7 @@ public ResultSet getImportedKeys(String originalCatalog, String originalSchema, @Override public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getExportedKeys(String catalog={}, " + "String schema={}, String table={})", catalog, @@ -2603,7 +2602,7 @@ public ResultSet getCrossReference( String foreignSchema, String foreignTable) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getCrossReference(String parentCatalog={}, " + "String parentSchema={}, String parentTable={}, " + "String foreignCatalog={}, String foreignSchema={}, " @@ -2631,7 +2630,7 @@ public ResultSet getCrossReference( @Override public ResultSet getTypeInfo() throws SQLException { - logger.debug("public ResultSet getTypeInfo()", false); + logger.trace("ResultSet getTypeInfo()", false); raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); @@ -2856,7 +2855,7 @@ public ResultSet getTypeInfo() throws SQLException { */ public ResultSet getStreams( String originalCatalog, String originalSchemaPattern, String streamName) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getStreams(String catalog={}, String schemaPattern={}" + "String streamName={}", originalCatalog, @@ -2900,7 +2899,7 @@ public ResultSet getStreams( } } - logger.debug("sql command to get stream metadata: {}", showStreamsCommand); + logger.debug("Sql command to get stream metadata: {}", showStreamsCommand); ResultSet 
resultSet = executeAndReturnEmptyResultIfNotFound(statement, showStreamsCommand, GET_STREAMS); @@ -2910,7 +2909,7 @@ public ResultSet getStreams( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_STREAMS, resultSet, statement) { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()"); + logger.trace("boolean next()"); incrementRow(); // iterate throw the show streams result until we find an entry @@ -2959,7 +2958,7 @@ public boolean next() throws SQLException { public ResultSet getIndexInfo( String catalog, String schema, String table, boolean unique, boolean approximate) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getIndexInfo(String catalog, String schema, " + "String table,boolean unique, boolean approximate)", false); @@ -3006,14 +3005,14 @@ public ResultSet getIndexInfo( @Override public boolean supportsResultSetType(int type) throws SQLException { - logger.debug("public boolean supportsResultSetType(int type)", false); + logger.trace("boolean supportsResultSetType(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return (type == ResultSet.TYPE_FORWARD_ONLY); } @Override public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { - logger.debug( + logger.trace( "public boolean supportsResultSetConcurrency(int type, " + "int concurrency)", false); raiseSQLExceptionIfConnectionIsClosed(); return (type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY); @@ -3021,70 +3020,70 @@ public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQ @Override public boolean ownUpdatesAreVisible(int type) throws SQLException { - logger.debug("public boolean ownUpdatesAreVisible(int type)", false); + logger.trace("boolean ownUpdatesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean ownDeletesAreVisible(int type) throws SQLException { - logger.debug("public 
boolean ownDeletesAreVisible(int type)", false); + logger.trace("boolean ownDeletesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean ownInsertsAreVisible(int type) throws SQLException { - logger.debug("public boolean ownInsertsAreVisible(int type)", false); + logger.trace("boolean ownInsertsAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean othersUpdatesAreVisible(int type) throws SQLException { - logger.debug("public boolean othersUpdatesAreVisible(int type)", false); + logger.trace("boolean othersUpdatesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean othersDeletesAreVisible(int type) throws SQLException { - logger.debug("public boolean othersDeletesAreVisible(int type)", false); + logger.trace("boolean othersDeletesAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean othersInsertsAreVisible(int type) throws SQLException { - logger.debug("public boolean othersInsertsAreVisible(int type)", false); + logger.trace("boolean othersInsertsAreVisible(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean updatesAreDetected(int type) throws SQLException { - logger.debug("public boolean updatesAreDetected(int type)", false); + logger.trace("boolean updatesAreDetected(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean deletesAreDetected(int type) throws SQLException { - logger.debug("public boolean deletesAreDetected(int type)", false); + logger.trace("boolean deletesAreDetected(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean insertsAreDetected(int type) throws SQLException { - logger.debug("public boolean insertsAreDetected(int type)", false); + logger.trace("boolean 
insertsAreDetected(int type)", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsBatchUpdates() throws SQLException { - logger.debug("public boolean supportsBatchUpdates()", false); + logger.trace("boolean supportsBatchUpdates()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @@ -3093,7 +3092,7 @@ public boolean supportsBatchUpdates() throws SQLException { public ResultSet getUDTs( String catalog, String schemaPattern, String typeNamePattern, int[] types) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getUDTs(String catalog, " + "String schemaPattern,String typeNamePattern, int[] types)", false); @@ -3124,35 +3123,35 @@ public ResultSet getUDTs( @Override public Connection getConnection() throws SQLException { - logger.debug("public Connection getConnection()", false); + logger.trace("Connection getConnection()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection; } @Override public boolean supportsSavepoints() throws SQLException { - logger.debug("public boolean supportsSavepoints()", false); + logger.trace("boolean supportsSavepoints()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsNamedParameters() throws SQLException { - logger.debug("public boolean supportsNamedParameters()", false); + logger.trace("boolean supportsNamedParameters()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsMultipleOpenResults() throws SQLException { - logger.debug("public boolean supportsMultipleOpenResults()", false); + logger.trace("boolean supportsMultipleOpenResults()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public boolean supportsGetGeneratedKeys() throws SQLException { - logger.debug("public boolean supportsGetGeneratedKeys()", false); + logger.trace("boolean supportsGetGeneratedKeys()", false); raiseSQLExceptionIfConnectionIsClosed(); 
return false; } @@ -3160,7 +3159,7 @@ public boolean supportsGetGeneratedKeys() throws SQLException { @Override public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getSuperTypes(String catalog, " + "String schemaPattern,String typeNamePattern)", false); @@ -3171,7 +3170,7 @@ public ResultSet getSuperTypes(String catalog, String schemaPattern, String type @Override public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getSuperTables(String catalog, " + "String schemaPattern,String tableNamePattern)", false); @@ -3183,7 +3182,7 @@ public ResultSet getSuperTables(String catalog, String schemaPattern, String tab public ResultSet getAttributes( String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getAttributes(String catalog, String " + "schemaPattern," + "String typeNamePattern,String attributeNamePattern)", @@ -3194,75 +3193,75 @@ public ResultSet getAttributes( @Override public boolean supportsResultSetHoldability(int holdability) throws SQLException { - logger.debug("public boolean supportsResultSetHoldability(int holdability)", false); + logger.trace("boolean supportsResultSetHoldability(int holdability)", false); raiseSQLExceptionIfConnectionIsClosed(); return holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT; } @Override public int getResultSetHoldability() throws SQLException { - logger.debug("public int getResultSetHoldability()", false); + logger.trace("int getResultSetHoldability()", false); return ResultSet.CLOSE_CURSORS_AT_COMMIT; } @Override public int getDatabaseMajorVersion() throws SQLException { - logger.debug("public int getDatabaseMajorVersion()", false); + logger.trace("int getDatabaseMajorVersion()", false); 
raiseSQLExceptionIfConnectionIsClosed(); return connection.unwrap(SnowflakeConnectionV1.class).getDatabaseMajorVersion(); } @Override public int getDatabaseMinorVersion() throws SQLException { - logger.debug("public int getDatabaseMinorVersion()", false); + logger.trace("int getDatabaseMinorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return connection.unwrap(SnowflakeConnectionV1.class).getDatabaseMinorVersion(); } @Override public int getJDBCMajorVersion() throws SQLException { - logger.debug("public int getJDBCMajorVersion()", false); + logger.trace("int getJDBCMajorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return Integer.parseInt(JDBCVersion.split("\\.", 2)[0]); } @Override public int getJDBCMinorVersion() throws SQLException { - logger.debug("public int getJDBCMinorVersion()", false); + logger.trace("int getJDBCMinorVersion()", false); raiseSQLExceptionIfConnectionIsClosed(); return Integer.parseInt(JDBCVersion.split("\\.", 2)[1]); } @Override public int getSQLStateType() throws SQLException { - logger.debug("public int getSQLStateType()", false); + logger.trace("int getSQLStateType()", false); return sqlStateSQL; } @Override public boolean locatorsUpdateCopy() { - logger.debug("public boolean locatorsUpdateCopy()", false); + logger.trace("boolean locatorsUpdateCopy()", false); return false; } @Override public boolean supportsStatementPooling() throws SQLException { - logger.debug("public boolean supportsStatementPooling()", false); + logger.trace("boolean supportsStatementPooling()", false); raiseSQLExceptionIfConnectionIsClosed(); return false; } @Override public RowIdLifetime getRowIdLifetime() throws SQLException { - logger.debug("public RowIdLifetime getRowIdLifetime()", false); + logger.trace("RowIdLifetime getRowIdLifetime()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public ResultSet getSchemas(String originalCatalog, String originalSchema) throws SQLException { - 
logger.debug( + logger.trace( "public ResultSet getSchemas(String catalog={}, String " + "schemaPattern={})", originalCatalog, originalSchema); @@ -3294,7 +3293,7 @@ public ResultSet getSchemas(String originalCatalog, String originalSchema) throw showSchemas += " in database \"" + escapeSqlQuotes(catalog) + "\""; } - logger.debug("sql command to get schemas metadata: {}", showSchemas); + logger.debug("Sql command to get schemas metadata: {}", showSchemas); ResultSet resultSet = executeAndReturnEmptyResultIfNotFound(statement, showSchemas, GET_SCHEMAS); @@ -3302,7 +3301,7 @@ public ResultSet getSchemas(String originalCatalog, String originalSchema) throw resultSet, "getSchemas", originalCatalog, originalSchema, "none", "none"); return new SnowflakeDatabaseMetaDataQueryResultSet(GET_SCHEMAS, resultSet, statement) { public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -3326,21 +3325,21 @@ public boolean next() throws SQLException { @Override public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { - logger.debug("public boolean supportsStoredFunctionsUsingCallSyntax()", false); + logger.trace("boolean supportsStoredFunctionsUsingCallSyntax()", false); raiseSQLExceptionIfConnectionIsClosed(); return true; } @Override public boolean autoCommitFailureClosesAllResultSets() throws SQLException { - logger.debug("public boolean autoCommitFailureClosesAllResultSets()", false); + logger.trace("boolean autoCommitFailureClosesAllResultSets()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @Override public ResultSet getClientInfoProperties() throws SQLException { - logger.debug("public ResultSet getClientInfoProperties()", false); + logger.trace("ResultSet getClientInfoProperties()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -3351,7 
+3350,7 @@ public ResultSet getFunctions( throws SQLException { raiseSQLExceptionIfConnectionIsClosed(); Statement statement = connection.createStatement(); - logger.debug( + logger.trace( "public ResultSet getFunctions(String catalog={}, String schemaPattern={}, " + "String functionNamePattern={}", catalog, @@ -3373,7 +3372,7 @@ public ResultSet getFunctions( return new SnowflakeDatabaseMetaDataQueryResultSet(GET_FUNCTIONS, resultSet, statement) { public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // iterate throw the show table result until we find an entry @@ -3446,7 +3445,7 @@ private List parseColumns(String retType, String args) { public ResultSet getFunctionColumns( String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getFunctionColumns(String catalog, " + "String schemaPattern,String functionNamePattern," + "String columnNamePattern)", @@ -3604,7 +3603,7 @@ public ResultSet getFunctionColumns( public ResultSet getPseudoColumns( String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { - logger.debug( + logger.trace( "public ResultSet getPseudoColumns(String catalog, " + "String schemaPattern,String tableNamePattern," + "String columnNamePattern)", @@ -3615,7 +3614,7 @@ public ResultSet getPseudoColumns( // @Override public boolean generatedKeyAlwaysReturned() throws SQLException { - logger.debug("public boolean generatedKeyAlwaysReturned()", false); + logger.trace("boolean generatedKeyAlwaysReturned()", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } @@ -3623,7 +3622,7 @@ public boolean generatedKeyAlwaysReturned() throws SQLException { // unchecked @Override public T unwrap(Class iface) throws SQLException { - logger.debug(" T unwrap(Class iface)", false); + logger.trace(" 
T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -3634,7 +3633,7 @@ public T unwrap(Class iface) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); throw new SnowflakeLoggedFeatureNotSupportedException(session); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java index 456eed7d7..ce93e49e8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaDataResultSet.java @@ -31,7 +31,7 @@ class SnowflakeDatabaseMetaDataResultSet extends SnowflakeBaseResultSet { private String queryId; - static final SFLogger logger = + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDatabaseMetaDataResultSet.class); /** @@ -132,7 +132,7 @@ public boolean isClosed() throws SQLException { @Override public boolean next() throws SQLException { - logger.debug("public boolean next()", false); + logger.trace("boolean next()", false); incrementRow(); // no exception is raised even after the result set is closed. @@ -158,41 +158,41 @@ public void close() throws SQLException { try { getStatement().close(); // should close both result set and statement. 
} catch (SQLException ex) { - logger.debug("failed to close", ex); + logger.debug("Failed to close", ex); } } @Override public boolean isFirst() throws SQLException { - logger.debug("public boolean isFirst()", false); + logger.trace("boolean isFirst()", false); raiseSQLExceptionIfResultSetIsClosed(); return row == 0; } @Override public boolean isBeforeFirst() throws SQLException { - logger.debug("public boolean isBeforeFirst()", false); + logger.trace("boolean isBeforeFirst()", false); raiseSQLExceptionIfResultSetIsClosed(); return row == -1; } @Override public boolean isLast() throws SQLException { - logger.debug("public boolean isLast()", false); + logger.trace("boolean isLast()", false); raiseSQLExceptionIfResultSetIsClosed(); return !isBeforeFirst() && row == rows.length - 1; } @Override public boolean isAfterLast() throws SQLException { - logger.debug("public boolean isAfterLast()", false); + logger.trace("boolean isAfterLast()", false); raiseSQLExceptionIfResultSetIsClosed(); return row == rows.length; } @Override public int getRow() throws SQLException { - logger.debug("public int getRow()", false); + logger.trace("int getRow()", false); raiseSQLExceptionIfResultSetIsClosed(); return row; } @@ -260,7 +260,7 @@ static ResultSet getEmptyResultSet(DBMetadataResultSetMetadata metadataType, Sta } Object getObjectInternal(int columnIndex) throws SQLException { - logger.debug("public Object getObjectInternal(int columnIndex)", false); + logger.trace("Object getObjectInternal(int columnIndex)", false); raiseSQLExceptionIfResultSetIsClosed(); if (nextRow == null) { @@ -280,14 +280,14 @@ Object getObjectInternal(int columnIndex) throws SQLException { @Override public boolean wasNull() throws SQLException { - logger.debug("public boolean wasNull() returning {}", wasNull); + logger.trace("boolean wasNull() returning {}", wasNull); raiseSQLExceptionIfResultSetIsClosed(); return wasNull; } @Override public String getString(int columnIndex) throws SQLException { - 
logger.debug("public String getString(int columnIndex)", false); + logger.trace("String getString(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -297,7 +297,7 @@ public String getString(int columnIndex) throws SQLException { @Override public boolean getBoolean(int columnIndex) throws SQLException { - logger.debug("public boolean getBoolean(int columnIndex)", false); + logger.trace("boolean getBoolean(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -324,7 +324,7 @@ public boolean getBoolean(int columnIndex) throws SQLException { @Override public byte getByte(int columnIndex) throws SQLException { - logger.debug("public byte getByte(int columnIndex)", false); + logger.trace("byte getByte(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -341,7 +341,7 @@ public byte getByte(int columnIndex) throws SQLException { @Override public short getShort(int columnIndex) throws SQLException { - logger.debug("public short getShort(int columnIndex)", false); + logger.trace("short getShort(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -359,7 +359,7 @@ public short getShort(int columnIndex) throws SQLException { @Override public int getInt(int columnIndex) throws SQLException { - logger.debug("public int getInt(int columnIndex)", false); + logger.trace("int getInt(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -377,7 +377,7 @@ public int getInt(int columnIndex) throws SQLException { @Override public long getLong(int columnIndex) throws SQLException { - logger.debug("public long getLong(int columnIndex)", false); + logger.trace("long getLong(int columnIndex)", false); // Column index starts from 1, not 0. 
Object obj = getObjectInternal(columnIndex); @@ -402,7 +402,7 @@ public long getLong(int columnIndex) throws SQLException { @Override public float getFloat(int columnIndex) throws SQLException { - logger.debug("public float getFloat(int columnIndex)", false); + logger.trace("float getFloat(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -420,7 +420,7 @@ public float getFloat(int columnIndex) throws SQLException { @Override public double getDouble(int columnIndex) throws SQLException { - logger.debug("public double getDouble(int columnIndex)", false); + logger.trace("double getDouble(int columnIndex)", false); // Column index starts from 1, not 0. Object obj = getObjectInternal(columnIndex); @@ -447,7 +447,7 @@ public String getQueryID() { @Deprecated @Override public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex, int scale)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex, int scale)", false); BigDecimal value; @@ -471,7 +471,7 @@ public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException @Override public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - logger.debug("public BigDecimal getBigDecimal(int columnIndex)", false); + logger.trace("BigDecimal getBigDecimal(int columnIndex)", false); BigDecimal value = null; @@ -493,7 +493,7 @@ public BigDecimal getBigDecimal(int columnIndex) throws SQLException { @Override public Object getObject(int columnIndex) throws SQLException { - logger.debug("public Object getObject(int columnIndex)", false); + logger.trace("Object getObject(int columnIndex)", false); int type = resultSetMetaData.getColumnType(columnIndex); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java index 9a0f874bb..0afe353f0 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java @@ -47,6 +47,7 @@ import java.util.concurrent.TimeUnit; import java.util.zip.GZIPOutputStream; import net.snowflake.client.core.ExecTimeTelemetryData; +import net.snowflake.client.core.FileUtil; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.ObjectMapperFactory; @@ -80,7 +81,8 @@ * @author jhuang */ public class SnowflakeFileTransferAgent extends SFBaseFileTransferAgent { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeFileTransferAgent.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeFileTransferAgent.class); static final StorageClientFactory storageFactory = StorageClientFactory.getFactory(); @@ -202,7 +204,7 @@ static List getEncryptionMaterial( List encryptionMaterial = new ArrayList<>(); JsonNode rootNode = jsonNode.path("data").path("encryptionMaterial"); if (commandType == CommandType.UPLOAD) { - logger.debug("initEncryptionMaterial: UPLOAD", false); + logger.debug("InitEncryptionMaterial: UPLOAD", false); RemoteStoreFileEncryptionMaterial encMat = null; if (!rootNode.isMissingNode() && !rootNode.isNull()) { @@ -211,7 +213,7 @@ static List getEncryptionMaterial( encryptionMaterial.add(encMat); } else { - logger.debug("initEncryptionMaterial: DOWNLOAD", false); + logger.debug("InitEncryptionMaterial: DOWNLOAD", false); if (!rootNode.isMissingNode() && !rootNode.isNull()) { encryptionMaterial = @@ -231,7 +233,7 @@ private static List getPresignedUrls(CommandType commandType, JsonNode j List presignedUrls = new ArrayList<>(); JsonNode rootNode = jsonNode.path("data").path("presignedUrls"); if (commandType == CommandType.DOWNLOAD) { - logger.debug("initEncryptionMaterial: DOWNLOAD", false); + logger.debug("InitEncryptionMaterial: DOWNLOAD", false); if 
(!rootNode.isMissingNode() && !rootNode.isNull()) { presignedUrls = Arrays.asList(mapper.readValue(rootNode.toString(), String[].class)); @@ -541,7 +543,7 @@ public static Callable getUploadFileCallable( return new Callable() { public Void call() throws Exception { - logger.debug("Entering getUploadFileCallable...", false); + logger.trace("Entering getUploadFileCallable...", false); // make sure initialize context for the telemetry service for this thread TelemetryService.getInstance().updateContext(session.getSnowflakeConnectionString()); @@ -561,6 +563,7 @@ public Void call() throws Exception { SnowflakeFileTransferAgent.injectedFileTransferException; } + FileUtil.logFileUsage(srcFilePath, "Get file to upload", false); uploadStream = new FileInputStream(srcFilePath); } catch (FileNotFoundException ex) { metadata.resultStatus = ResultStatus.ERROR; @@ -585,7 +588,7 @@ public Void call() throws Exception { String digest = null; - logger.debug("Dest file name={}", false); + logger.debug("Dest file name: {}", false); // Temp file that needs to be cleaned up when upload was successful FileBackedOutputStream fileBackedOutputStream = null; @@ -637,7 +640,7 @@ public Void call() throws Exception { logger.debug( "Started copying file from: {} to {}:{} destName: {} " - + "auto compressed? {} size={}", + + "auto compressed? 
{} size: {}", srcFilePath, stage.getStageType().name(), stage.getLocation(), @@ -705,7 +708,7 @@ public Void call() throws Exception { try { fileBackedOutputStream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } if (inputStream == null) { @@ -713,7 +716,7 @@ public Void call() throws Exception { } } - logger.debug("filePath: {}", srcFilePath); + logger.debug("FilePath: {}", srcFilePath); // set dest size metadata.destFileSize = uploadSize; @@ -863,7 +866,7 @@ public Void call() throws Exception { throw ex; } - logger.debug("filePath: {}", srcFilePath); + logger.debug("FilePath: {}", srcFilePath); File destFile = new File(localLocation + localFSFileSep + destFileName); long downloadSize = destFile.length(); @@ -1053,9 +1056,9 @@ private void parseCommand() throws SnowflakeSQLException { logger.debug("Command type: {}", commandType); if (commandType == CommandType.UPLOAD) { - logger.debug("autoCompress: {}, source compression: {}", autoCompress, sourceCompression); + logger.debug("Auto compress: {}, source compression: {}", autoCompress, sourceCompression); } else { - logger.debug("local download location: {}", localLocation); + logger.debug("Local download location: {}", localLocation); } logger.debug("Source files: {}", String.join(",", sourceFiles)); @@ -1215,7 +1218,7 @@ private void verifyLocalFilePath(String localFilePathFromGS) throws SnowflakeSQL + ", expected: " + localFilePath); } else if (localFilePath.isEmpty()) { - logger.debug("fail to parse local file path from command: {}", command); + logger.debug("Fail to parse local file path from command: {}", command); } else { logger.trace("local file path from GS matches local parsing: {}", localFilePath); } @@ -1306,7 +1309,7 @@ private static JsonNode parseCommandInGS(SFStatement statement, String command) } JsonNode jsonNode = (JsonNode) result; - logger.debug("response: {}", jsonNode.toString()); + 
logger.debug("Response: {}", jsonNode.toString()); SnowflakeUtil.checkErrorAndThrowException(jsonNode); return jsonNode; @@ -1513,7 +1516,7 @@ public boolean execute() throws SQLException { filterExistingFiles(); - logger.debug("filtering done"); + logger.debug("Filtering done"); } synchronized (canceled) { @@ -1531,9 +1534,9 @@ public boolean execute() throws SQLException { boolean created = dir.mkdirs(); if (created) { - logger.debug("directory created: {}", localLocation); + logger.debug("Directory created: {}", localLocation); } else { - logger.debug("directory not created {}", localLocation); + logger.debug("Directory not created {}", localLocation); } } @@ -1544,19 +1547,19 @@ public boolean execute() throws SQLException { // separate files to big files list and small files list // big files will be uploaded in serial, while small files will be // uploaded concurrently. - logger.debug("start segregate files by size"); + logger.debug("Start segregate files by size"); segregateFilesBySize(); if (bigSourceFiles != null) { - logger.debug("start uploading big files"); + logger.debug("Start uploading big files"); uploadFiles(bigSourceFiles, 1); - logger.debug("end uploading big files"); + logger.debug("End uploading big files"); } if (smallSourceFiles != null) { - logger.debug("start uploading small files"); + logger.debug("Start uploading small files"); uploadFiles(smallSourceFiles, parallel); - logger.debug("end uploading small files"); + logger.debug("End uploading small files"); } } @@ -1717,7 +1720,7 @@ private void downloadFiles() throws SnowflakeSQLException { presignedUrl, queryID)); - logger.debug("submitted download job for: {}", srcFile); + logger.debug("Submitted download job for: {}", srcFile); } threadExecutor.shutdown(); @@ -1813,7 +1816,7 @@ private void uploadFiles(Set fileList, int parallel) throws SnowflakeSQL encryptionMaterial.get(0), queryID)); - logger.debug("submitted copy job for: {}", srcFile); + logger.debug("Submitted copy job for: {}", 
srcFile); } // shut down the thread executor @@ -1971,7 +1974,7 @@ static Set expandFileNames(String[] filePathList, String queryId) logger.debug("Expanded file paths: "); for (String filePath : result) { - logger.debug("file: {}", filePath); + logger.debug("File: {}", filePath); } return result; @@ -1991,7 +1994,7 @@ private static boolean pushFileToLocal( stageLocation = stageLocation.replace("~", systemGetProperty("user.home")); try { logger.debug( - "Copy file. srcFile={}, destination={}, destFileName={}", + "Copy file. srcFile: {}, destination: {}, destFileName: {}", filePath, stageLocation, destFileName); @@ -2026,7 +2029,7 @@ private static boolean pullFileFromLocal( throws SQLException { try { logger.debug( - "Copy file. srcFile={}, destination={}, destFileName={}", + "Copy file. srcFile: {}, destination: {}, destFileName: {}", sourceLocation + localFSFileSep + filePath, destLocation, destFileName); @@ -2076,7 +2079,7 @@ private static void pushFileToRemoteStore( } logger.debug( - "upload object. location={}, key={}, srcFile={}, encryption={}", + "Upload object. Location: {}, key: {}, srcFile: {}, encryption: {}", remoteLocation.location, destFileName, srcFile, @@ -2142,7 +2145,7 @@ private static void pushFileToRemoteStore( * @throws Exception if error occurs while data upload. 
*/ public static void uploadWithoutConnection(SnowflakeFileTransferConfig config) throws Exception { - logger.debug("Entering uploadWithoutConnection..."); + logger.trace("Entering uploadWithoutConnection..."); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) config.getSnowflakeFileTransferMetadata(); @@ -2287,7 +2290,7 @@ public static void uploadWithoutConnection(SnowflakeFileTransferConfig config) t try { fileBackedOutputStream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } } @@ -2327,7 +2330,7 @@ private static void pushFileToRemoteStoreWithPresignedUrl( } logger.debug( - "upload object. location={}, key={}, srcFile={}, encryption={}", + "Upload object. Location: {}, key: {}, srcFile: {}, encryption: {}", remoteLocation.location, destFileName, srcFile, @@ -2416,7 +2419,7 @@ private static void pullFileFromRemoteStore( } logger.debug( - "Download object. location={}, key={}, srcFile={}, encryption={}", + "Download object. Location: {}, key: {}, srcFile: {}, encryption: {}", remoteLocation.location, stageFilePath, filePath, @@ -2503,7 +2506,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { if (stageInfo.getStageType() == StageInfo.StageType.S3 || stageInfo.getStageType() == StageInfo.StageType.AZURE || stageInfo.getStageType() == StageInfo.StageType.GCS) { - logger.debug("check existing files on remote storage for the common prefix"); + logger.debug("Check existing files on remote storage for the common prefix"); remoteLocation storeLocation = extractLocationAndPath(stageInfo.getLocation()); @@ -2511,7 +2514,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { int retryCount = 0; - logger.debug("start dragging object summaries from remote storage"); + logger.debug("Start dragging object summaries from remote storage"); do { try { // Normal flow will never hit here. 
This is only for testing purposes @@ -2525,7 +2528,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { storageClient.listObjects( storeLocation.location, SnowflakeUtil.concatFilePathNames(storeLocation.path, greatestCommonPrefix, "/")); - logger.debug("received object summaries from remote storage"); + logger.debug("Received object summaries from remote storage"); } catch (Exception ex) { logger.debug("Listing objects for filtering encountered exception: {}", ex.getMessage()); @@ -2639,7 +2642,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { try { stream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } } @@ -2670,7 +2673,7 @@ private void filterExistingFiles() throws SnowflakeSQLException { fileBackedOutputStream.reset(); } } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } IOUtils.closeQuietly(stageFileStream); } @@ -2678,12 +2681,12 @@ private void filterExistingFiles() throws SnowflakeSQLException { // continue if digest is different so that we will process the file if (!stageFileHashText.equals(localFileHashText)) { logger.debug( - "digest diff between local and stage, will {} {}", + "Digest diff between local and stage, will {} {}", commandType.name().toLowerCase(), mappedSrcFile); continue; } else { - logger.debug("digest matches between local and stage, will skip {}", mappedSrcFile); + logger.debug("Digest matches between local and stage, will skip {}", mappedSrcFile); // skip the file given that the check sum is the same b/w source // and destination @@ -2706,7 +2709,7 @@ private void compareAndSkipRemoteFiles( throws SnowflakeSQLException { for (StorageObjectSummary obj : objectSummaries) { logger.debug( - "Existing object: key={} size={} md5={}", obj.getKey(), obj.getSize(), obj.getMD5()); + "Existing object: key: {} size: {} 
md5: {}", obj.getKey(), obj.getSize(), obj.getMD5()); int idxOfLastFileSep = obj.getKey().lastIndexOf("/"); String objFileName = obj.getKey().substring(idxOfLastFileSep + 1); @@ -2770,7 +2773,7 @@ private void compareAndSkipRemoteFiles( // log it logger.debug( "File returned from listing but found missing {} when getting its" - + " metadata. Location={}, key={}", + + " metadata. Location: {}, key: {}", obj.getLocation(), obj.getKey()); @@ -2831,7 +2834,7 @@ private void compareAndSkipRemoteFiles( try { stream.reset(); } catch (IOException ex) { - logger.debug("failed to clean up temp file: {}", ex); + logger.debug("Failed to clean up temp file: {}", ex); } } } @@ -2845,7 +2848,7 @@ private void compareAndSkipRemoteFiles( (objDigest == null && !hashText.equals(obj.getMD5()))) // ETag/MD5 mismatch { logger.debug( - "digest diff between remote store and local, will {} {}, " + "Digest diff between remote store and local, will {} {}, " + "local digest: {}, remote store md5: {}", commandType.name().toLowerCase(), mappedSrcFile, @@ -2864,7 +2867,7 @@ private void compareAndSkipRemoteFiles( } logger.debug( - "digest same between remote store and local, will not upload {} {}", + "Digest same between remote store and local, will not upload {} {}", commandType.name().toLowerCase(), mappedSrcFile); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java index 9754447de..000d4634d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java @@ -56,7 +56,8 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1 implements PreparedStatement, SnowflakePreparedStatement { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class); + private static final SFLogger logger = + 
SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class); /** Error code returned when describing a statement that is binding table name */ private static final Integer ERROR_CODE_TABLE_BIND_VARIABLE_NOT_SET = 2128; /** Error code when preparing statement with binding object names */ @@ -152,11 +153,12 @@ public ResultSet executeQuery() throws SQLException { if (showStatementParameters) { logger.info("executeQuery()", false); } else { - logger.debug("executeQuery()", false); + logger.trace("executeQuery()", false); } ResultSet rs = executeQueryInternal(sql, false, parameterBindings, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {}", execTimeData.getLogString()); return rs; } @@ -173,11 +175,12 @@ public ResultSet executeAsyncQuery() throws SQLException { if (showStatementParameters) { logger.info("executeAsyncQuery()", false); } else { - logger.debug("executeAsyncQuery()", false); + logger.trace("executeAsyncQuery()", false); } ResultSet rs = executeQueryInternal(sql, true, parameterBindings, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. 
{}", execTimeData.getLogString()); return rs; } @@ -185,20 +188,20 @@ public ResultSet executeAsyncQuery() throws SQLException { public long executeLargeUpdate() throws SQLException { ExecTimeTelemetryData execTimeTelemetryData = new ExecTimeTelemetryData("long PreparedStatement.executeLargeUpdate()", this.batchID); - logger.debug("executeLargeUpdate()", false); + logger.trace("executeLargeUpdate()", false); long updates = executeUpdateInternal(sql, parameterBindings, true, execTimeTelemetryData); return updates; } @Override public int executeUpdate() throws SQLException { - logger.debug("executeUpdate()", false); + logger.trace("executeUpdate()", false); return (int) executeLargeUpdate(); } @Override public void setNull(int parameterIndex, int sqlType) throws SQLException { - logger.debug( + logger.trace( "setNull(parameterIndex: {}, sqlType: {})", parameterIndex, SnowflakeType.JavaSQLType.find(sqlType)); @@ -210,7 +213,7 @@ public void setNull(int parameterIndex, int sqlType) throws SQLException { @Override public void setBoolean(int parameterIndex, boolean x) throws SQLException { - logger.debug("setBoolean(parameterIndex: {}, boolean x)", parameterIndex); + logger.trace("setBoolean(parameterIndex: {}, boolean x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( SnowflakeUtil.javaTypeToSFTypeString(Types.BOOLEAN, connection.getSFBaseSession()), @@ -220,7 +223,7 @@ public void setBoolean(int parameterIndex, boolean x) throws SQLException { @Override public void setByte(int parameterIndex, byte x) throws SQLException { - logger.debug("setByte(parameterIndex: {}, byte x)", parameterIndex); + logger.trace("setByte(parameterIndex: {}, byte x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( SnowflakeUtil.javaTypeToSFTypeString(Types.TINYINT, connection.getSFBaseSession()), @@ -230,7 +233,7 @@ public void setByte(int parameterIndex, byte x) throws SQLException { @Override public void setShort(int parameterIndex, short 
x) throws SQLException { - logger.debug("setShort(parameterIndex: {}, short x)", parameterIndex); + logger.trace("setShort(parameterIndex: {}, short x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -241,7 +244,7 @@ public void setShort(int parameterIndex, short x) throws SQLException { @Override public void setInt(int parameterIndex, int x) throws SQLException { - logger.debug("setInt(parameterIndex: {}, int x)", parameterIndex); + logger.trace("setInt(parameterIndex: {}, int x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -252,7 +255,7 @@ public void setInt(int parameterIndex, int x) throws SQLException { @Override public void setLong(int parameterIndex, long x) throws SQLException { - logger.debug("setLong(parameterIndex: {}, long x)", parameterIndex); + logger.trace("setLong(parameterIndex: {}, long x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -263,7 +266,7 @@ public void setLong(int parameterIndex, long x) throws SQLException { @Override public void setBigInteger(int parameterIndex, BigInteger x) throws SQLException { - logger.debug("setBigInteger(parameterIndex: {}, BigInteger x)", parameterIndex); + logger.trace("setBigInteger(parameterIndex: {}, BigInteger x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -274,7 +277,7 @@ public void setBigInteger(int parameterIndex, BigInteger x) throws SQLException @Override public void setFloat(int parameterIndex, float x) throws SQLException { - logger.debug("setFloat(parameterIndex: {}, float x)", parameterIndex); + logger.trace("setFloat(parameterIndex: {}, float x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -285,7 +288,7 @@ public void setFloat(int parameterIndex, float x) throws SQLException { @Override public void setDouble(int parameterIndex, double x) throws SQLException { - logger.debug("setDouble(parameterIndex: {}, double x)", parameterIndex); + 
logger.trace("setDouble(parameterIndex: {}, double x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -296,7 +299,7 @@ public void setDouble(int parameterIndex, double x) throws SQLException { @Override public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - logger.debug("setBigDecimal(parameterIndex: {}, BigDecimal x)", parameterIndex); + logger.trace("setBigDecimal(parameterIndex: {}, BigDecimal x)", parameterIndex); if (x == null) { setNull(parameterIndex, Types.DECIMAL); @@ -311,7 +314,7 @@ public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException @Override public void setString(int parameterIndex, String x) throws SQLException { - logger.debug("setString(parameterIndex: {}, String x)", parameterIndex); + logger.trace("setString(parameterIndex: {}, String x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -321,7 +324,7 @@ public void setString(int parameterIndex, String x) throws SQLException { @Override public void setBytes(int parameterIndex, byte[] x) throws SQLException { - logger.debug("setBytes(parameterIndex: {}, byte[] x)", parameterIndex); + logger.trace("setBytes(parameterIndex: {}, byte[] x)", parameterIndex); ParameterBindingDTO binding = new ParameterBindingDTO( @@ -346,7 +349,7 @@ private void setObjectInternal(int parameterIndex, SQLData sqlData) throws SQLEx @Override public void setDate(int parameterIndex, Date x) throws SQLException { - logger.debug("setDate(parameterIndex: {}, Date x)", parameterIndex); + logger.trace("setDate(parameterIndex: {}, Date x)", parameterIndex); if (x == null) { setNull(parameterIndex, Types.DATE); @@ -365,7 +368,7 @@ public void setDate(int parameterIndex, Date x) throws SQLException { @Override public void setTime(int parameterIndex, Time x) throws SQLException { - logger.debug("setTime(parameterIndex: {}, Time x)", parameterIndex); + logger.trace("setTime(parameterIndex: {}, Time x)", parameterIndex); if 
(x == null) { setNull(parameterIndex, Types.TIME); @@ -384,7 +387,7 @@ public void setTime(int parameterIndex, Time x) throws SQLException { @Override public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - logger.debug("setTimestamp(parameterIndex: {}, Timestamp x)", parameterIndex); + logger.trace("setTimestamp(parameterIndex: {}, Timestamp x)", parameterIndex); setTimestampWithType(parameterIndex, x, Types.TIMESTAMP); } @@ -451,7 +454,7 @@ public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQ || targetSqlType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ) { setTimestampWithType(parameterIndex, (Timestamp) x, targetSqlType); } else { - logger.debug( + logger.trace( "setObject(parameterIndex: {}, Object x, sqlType: {})", parameterIndex, SnowflakeType.JavaSQLType.find(targetSqlType)); @@ -509,17 +512,18 @@ public void setObject(int parameterIndex, Object x) throws SQLException { public boolean execute() throws SQLException { ExecTimeTelemetryData execTimeData = new ExecTimeTelemetryData("boolean PreparedStatement.execute(String)", this.batchID); - logger.debug("execute: {}", sql); + logger.debug("Execute: {}", sql); boolean success = executeInternal(sql, parameterBindings, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. 
{}", execTimeData.getLogString()); return success; } @Override public void addBatch() throws SQLException { - logger.debug("addBatch()", false); + logger.trace("addBatch()", false); raiseSQLExceptionIfStatementIsClosed(); @@ -675,7 +679,7 @@ public void setMap(int parameterIndex, Map map, int type) throws @Override public ResultSetMetaData getMetaData() throws SQLException { - logger.debug("getMetaData()", false); + logger.trace("getMetaData()", false); raiseSQLExceptionIfStatementIsClosed(); @@ -685,7 +689,7 @@ public ResultSetMetaData getMetaData() throws SQLException { @Override public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - logger.debug("setDate(int parameterIndex, Date x, Calendar cal)", false); + logger.trace("setDate(int parameterIndex, Date x, Calendar cal)", false); raiseSQLExceptionIfStatementIsClosed(); if (x == null) { @@ -708,14 +712,14 @@ public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLExceptio @Override public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - logger.debug("setTime(int parameterIndex, Time x, Calendar cal)", false); + logger.trace("setTime(int parameterIndex, Time x, Calendar cal)", false); raiseSQLExceptionIfStatementIsClosed(); setTime(parameterIndex, x); } @Override public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - logger.debug("setTimestamp(int parameterIndex, Timestamp x, Calendar cal)", false); + logger.trace("setTimestamp(int parameterIndex, Timestamp x, Calendar cal)", false); raiseSQLExceptionIfStatementIsClosed(); // convert the time from being in UTC to be in local time zone @@ -756,7 +760,7 @@ public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws S @Override public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - logger.debug("setNull(int parameterIndex, int sqlType, String typeName)", false); + logger.trace("setNull(int 
parameterIndex, int sqlType, String typeName)", false); setNull(parameterIndex, sqlType); } @@ -817,7 +821,7 @@ public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException @Override public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - logger.debug( + logger.trace( "setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength)", false); raiseSQLExceptionIfStatementIsClosed(); @@ -916,13 +920,13 @@ public void clearBatch() throws SQLException { @Override public int[] executeBatch() throws SQLException { - logger.debug("executeBatch()", false); + logger.trace("executeBatch()", false); return executeBatchInternalWithArrayBind(false).intArr; } @Override public long[] executeLargeBatch() throws SQLException { - logger.debug("executeLargeBatch()", false); + logger.trace("executeLargeBatch()", false); return executeBatchInternalWithArrayBind(true).longArr; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java index 5cf9e1c40..7e91d9bab 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultChunk.java @@ -144,4 +144,8 @@ public DownloadState getDownloadState() { public void setDownloadState(DownloadState downloadState) { this.downloadState = downloadState; } + + long getTotalTime() { + return downloadTime + parseTime; + } } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java index 5c67cde9a..0a88b1ebd 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java @@ -22,7 +22,8 @@ public enum QueryType { SYNC }; - static final SFLogger logger = 
SFLoggerFactory.getLogger(SnowflakeResultSetMetaDataV1.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeResultSetMetaDataV1.class); private SFResultSetMetaData resultSetMetaData; private String queryId; @@ -86,7 +87,7 @@ public List getColumnFields(int column) throws SQLException { @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -97,7 +98,7 @@ public T unwrap(Class iface) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -161,7 +162,7 @@ public boolean isDefinitelyWritable(int column) throws SQLException { @Override public String getColumnClassName(int column) throws SQLException { - logger.debug("public String getColumnClassName(int column)", false); + logger.trace("String getColumnClassName(int column)", false); int type = this.getColumnType(column); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java index 5a7821e0b..f82505665 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableV1.java @@ -74,7 +74,8 @@ public class SnowflakeResultSetSerializableV1 implements SnowflakeResultSetSerializable, Serializable { private static final long serialVersionUID = 1L; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeResultSetSerializableV1.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeResultSetSerializableV1.class); static final ObjectMapper mapper = 
ObjectMapperFactory.getObjectMapper(); private static final long LOW_MAX_MEMORY = GB; @@ -544,7 +545,7 @@ public static SnowflakeResultSetSerializableV1 create( ResultStreamProvider resultStreamProvider) throws SnowflakeSQLException { SnowflakeResultSetSerializableV1 resultSetSerializable = new SnowflakeResultSetSerializableV1(); - logger.debug("Entering create()", false); + logger.trace("Entering create()", false); SnowflakeUtil.checkErrorAndThrowException(rootNode); @@ -581,7 +582,7 @@ public static SnowflakeResultSetSerializableV1 create( resultSetSerializable.possibleSession = Optional.ofNullable(sfSession); - logger.debug("query id: {}", resultSetSerializable.queryId); + logger.debug("Query id: {}", resultSetSerializable.queryId); Optional queryResultFormat = QueryResultFormat.lookupByName(rootNode.path("data").path("queryResultFormat").asText()); @@ -670,7 +671,7 @@ public static SnowflakeResultSetSerializableV1 create( resultSetSerializable.sendResultTime = sendResultTimeNode.longValue(); } - logger.debug("result version={}", resultSetSerializable.resultVersion); + logger.debug("Result version: {}", resultSetSerializable.resultVersion); // Bind parameter metadata JsonNode bindData = rootNode.path("data").path("metaDataOfBinds"); @@ -756,7 +757,7 @@ private void setupFieldsFromParameters() { this.dateFormatter = SnowflakeDateTimeFormat.fromSqlFormat(sqlDateFormat); logger.debug( - "sql date format: {}, java date format: {}", + "Sql date format: {}, java date format: {}", sqlDateFormat, (ArgSupplier) () -> this.dateFormatter.toSimpleDateTimePattern()); @@ -766,7 +767,7 @@ private void setupFieldsFromParameters() { this.timeFormatter = SnowflakeDateTimeFormat.fromSqlFormat(sqlTimeFormat); logger.debug( - "sql time format: {}, java time format: {}", + "Sql time format: {}, java time format: {}", sqlTimeFormat, (ArgSupplier) () -> this.timeFormatter.toSimpleDateTimePattern()); @@ -803,7 +804,7 @@ private void parseChunkFiles(JsonNode rootNode, SFBaseStatement 
sfStatement) { // Determine the prefetch thread count and memoryLimit if (this.chunkFileCount > 0) { - logger.debug("#chunks={}, initialize chunk downloader", this.chunkFileCount); + logger.debug("#chunks: {}, initialize chunk downloader", this.chunkFileCount); adjustMemorySettings(sfStatement); @@ -816,7 +817,7 @@ private void parseChunkFiles(JsonNode rootNode, SFBaseStatement sfStatement) { Map.Entry chunkHeader = chunkHeadersIter.next(); logger.debug( - "add header key={}, value={}", + "Add header key: {}, value: {}", chunkHeader.getKey(), chunkHeader.getValue().asText()); this.chunkHeadersMap.put(chunkHeader.getKey(), chunkHeader.getValue().asText()); @@ -835,7 +836,7 @@ private void parseChunkFiles(JsonNode rootNode, SFBaseStatement sfStatement) { new ChunkFileMetadata(url, rowCount, compressedSize, uncompressedSize)); logger.debug( - "add chunk, url={} rowCount={} " + "compressedSize={} uncompressedSize={}", + "Add chunk, url: {} rowCount: {} " + "compressedSize: {} uncompressedSize: {}", url, rowCount, compressedSize, @@ -855,8 +856,8 @@ private void adjustMemorySettings(SFBaseStatement sfStatement) { this.memoryLimit = sfStatement.getConservativeMemoryLimit(); int chunkSize = (int) this.parameters.get(CLIENT_RESULT_CHUNK_SIZE); logger.debug( - "enable conservative memory usage with prefetchThreads = {} and memoryLimit = {} and " - + "resultChunkSize = {}", + "Enable conservative memory usage with prefetchThreads: {} and memoryLimit: {} and " + + "resultChunkSize: {}", this.resultPrefetchThreads, this.memoryLimit, chunkSize); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java index bc79c5669..49c8c8546 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java @@ -33,10 +33,13 @@ import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFBaseResultSet; 
import net.snowflake.client.core.SFException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; /** Snowflake ResultSet implementation */ public class SnowflakeResultSetV1 extends SnowflakeBaseResultSet implements SnowflakeResultSet, ResultSet { + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeResultSetV1.class); /** * Constructor takes an inputstream from the API response that we get from executing a SQL @@ -359,7 +362,7 @@ public boolean isBeforeFirst() throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("public boolean isWrapperFor(Class iface)", false); + logger.trace("boolean isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -367,7 +370,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("public T unwrap(Class iface)", false); + logger.trace(" T unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java index 00e8a3b64..660e83134 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java @@ -14,7 +14,7 @@ * @author jhuang */ public class SnowflakeSQLException extends SQLException { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeSQLException.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeSQLException.class); private static final long serialVersionUID = 1L; @@ -44,7 +44,7 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState, int // log user error from GS at fine level logger.debug( - "Snowflake exception: {}, sqlState:{}, vendorCode:{}, 
queryId:{}", + "Snowflake exception: {}, sqlState: {}, vendorCode: {}, queryId: {}", reason, sqlState, vendorCode, @@ -54,7 +54,7 @@ public SnowflakeSQLException(String queryId, String reason, String sqlState, int public SnowflakeSQLException(String reason, String sqlState) { super(reason, sqlState); // log user error from GS at fine level - logger.debug("Snowflake exception: {}, sqlState:{}", reason, sqlState); + logger.debug("Snowflake exception: {}, sqlState: {}", reason, sqlState); } /** use {@link SnowflakeSQLException#SnowflakeSQLException(String, String, int)} */ @@ -70,7 +70,7 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode) { vendorCode); this.queryId = queryId; logger.debug( - "Snowflake exception: {}, sqlState:{}, vendorCode:{}", + "Snowflake exception: {}, sqlState: {}, vendorCode: {}", errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode)), sqlState, vendorCode); @@ -89,7 +89,7 @@ public SnowflakeSQLException(String queryId, String sqlState, int vendorCode, Ob vendorCode); this.queryId = queryId; logger.debug( - "Snowflake exception: {}, sqlState:{}, vendorCode:{}", + "Snowflake exception: {}, sqlState: {}, vendorCode: {}", errorResourceBundleManager.getLocalizedMessage(String.valueOf(vendorCode), params), sqlState, vendorCode); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java index d9d741a8c..cdc33322d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLLoggedException.java @@ -25,6 +25,8 @@ import net.snowflake.client.jdbc.telemetry.TelemetryUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryEvent; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import 
net.snowflake.common.core.LoginInfoDTO; import net.snowflake.common.core.SqlState; @@ -36,7 +38,8 @@ * exception with OOB telemetry. */ public class SnowflakeSQLLoggedException extends SnowflakeSQLException { - + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeSQLLoggedException.class); private static final ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); /** diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java index 992cf123d..ede179a9c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSimulatedUploadFailure.java @@ -11,7 +11,8 @@ public class SnowflakeSimulatedUploadFailure extends RuntimeException { private static final long serialVersionUID = 1L; - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeSimulatedUploadFailure.class); + private static final SFLogger logger = + SFLoggerFactory.getLogger(SnowflakeSimulatedUploadFailure.class); public SnowflakeSimulatedUploadFailure() { super(); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java index a4c9100c3..3d8b8c464 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java @@ -35,7 +35,7 @@ /** Snowflake statement */ class SnowflakeStatementV1 implements Statement, SnowflakeStatement { - static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeStatementV1.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeStatementV1.class); private static final String NOOP_MESSAGE = "This is a dummy SnowflakeStatement, " + "no member function should be called for it."; @@ -101,7 +101,7 @@ class SnowflakeStatementV1 implements Statement, 
SnowflakeStatement { int resultSetConcurrency, int resultSetHoldability) throws SQLException { - logger.debug(" public SnowflakeStatement(SnowflakeConnectionV1 conn)", false); + logger.trace("SnowflakeStatement(SnowflakeConnectionV1 conn)", false); this.connection = connection; @@ -155,6 +155,7 @@ public ResultSet executeQuery(String sql) throws SQLException { ResultSet rs = executeQueryInternal(sql, false, null, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {}", execTimeData.getLogString()); return rs; } @@ -172,6 +173,7 @@ public ResultSet executeAsyncQuery(String sql) throws SQLException { ResultSet rs = executeQueryInternal(sql, true, null, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {}", queryID, execTimeData.getLogString()); return rs; } @@ -206,6 +208,7 @@ public long executeLargeUpdate(String sql) throws SQLException { long res = executeUpdateInternal(sql, null, true, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. {}", queryID, execTimeData.getLogString()); return res; } @@ -337,7 +340,7 @@ boolean executeInternal( raiseSQLExceptionIfStatementIsClosed(); connection.injectedDelay(); - logger.debug("execute: {}", sql); + logger.debug("Execute: {}", sql); String trimmedSql = sql.trim(); @@ -428,12 +431,13 @@ public boolean execute(String sql) throws SQLException { boolean res = executeInternal(sql, null, execTimeData); execTimeData.setQueryEnd(); execTimeData.generateTelemetry(); + logger.debug("Query completed. 
{}", queryID, execTimeData.getLogString()); return res; } @Override public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("execute(String sql, int autoGeneratedKeys)", false); + logger.trace("execute(String sql, int autoGeneratedKeys)", false); if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) { return execute(sql); @@ -444,14 +448,14 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { @Override public boolean execute(String sql, int[] columnIndexes) throws SQLException { - logger.debug("execute(String sql, int[] columnIndexes)", false); + logger.trace("execute(String sql, int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public boolean execute(String sql, String[] columnNames) throws SQLException { - logger.debug("execute(String sql, String[] columnNames)", false); + logger.trace("execute(String sql, String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @@ -465,7 +469,7 @@ public boolean execute(String sql, String[] columnNames) throws SQLException { */ @Override public int[] executeBatch() throws SQLException { - logger.debug("int[] executeBatch()", false); + logger.trace("int[] executeBatch()", false); return executeBatchInternal(false).intArr; } @@ -478,7 +482,7 @@ public int[] executeBatch() throws SQLException { */ @Override public long[] executeLargeBatch() throws SQLException { - logger.debug("executeBatch()", false); + logger.trace("executeBatch()", false); return executeBatchInternal(true).longArr; } @@ -560,14 +564,14 @@ VariableTypeArray executeBatchInternal(boolean isLong) throws SQLException { @Override public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("executeUpdate(String sql, int autoGeneratedKeys)", false); + logger.trace("executeUpdate(String sql, int autoGeneratedKeys)", false); 
return (int) this.executeLargeUpdate(sql, autoGeneratedKeys); } @Override public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - logger.debug("executeUpdate(String sql, int autoGeneratedKeys)", false); + logger.trace("executeUpdate(String sql, int autoGeneratedKeys)", false); if (autoGeneratedKeys == Statement.NO_GENERATED_KEYS) { return executeLargeUpdate(sql); @@ -578,84 +582,84 @@ public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLExce @Override public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - logger.debug("executeUpdate(String sql, int[] columnIndexes)", false); + logger.trace("executeUpdate(String sql, int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { - logger.debug("executeLargeUpdate(String sql, int[] columnIndexes)", false); + logger.trace("executeLargeUpdate(String sql, int[] columnIndexes)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public int executeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug("executeUpdate(String sql, String[] columnNames)", false); + logger.trace("executeUpdate(String sql, String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { - logger.debug("executeUpdate(String sql, String[] columnNames)", false); + logger.trace("executeUpdate(String sql, String[] columnNames)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public Connection getConnection() throws SQLException { - logger.debug("getConnection()", false); + logger.trace("getConnection()", false); 
raiseSQLExceptionIfStatementIsClosed(); return connection; } @Override public int getFetchDirection() throws SQLException { - logger.debug("getFetchDirection()", false); + logger.trace("getFetchDirection()", false); raiseSQLExceptionIfStatementIsClosed(); return ResultSet.FETCH_FORWARD; } @Override public int getFetchSize() throws SQLException { - logger.debug("getFetchSize()", false); + logger.trace("getFetchSize()", false); raiseSQLExceptionIfStatementIsClosed(); return fetchSize; } @Override public ResultSet getGeneratedKeys() throws SQLException { - logger.debug("getGeneratedKeys()", false); + logger.trace("getGeneratedKeys()", false); raiseSQLExceptionIfStatementIsClosed(); return new SnowflakeResultSetV1.EmptyResultSet(); } @Override public int getMaxFieldSize() throws SQLException { - logger.debug("getMaxFieldSize()", false); + logger.trace("getMaxFieldSize()", false); raiseSQLExceptionIfStatementIsClosed(); return maxFieldSize; } @Override public int getMaxRows() throws SQLException { - logger.debug("getMaxRows()", false); + logger.trace("getMaxRows()", false); raiseSQLExceptionIfStatementIsClosed(); return maxRows; } @Override public boolean getMoreResults() throws SQLException { - logger.debug("getMoreResults()", false); + logger.trace("getMoreResults()", false); return getMoreResults(Statement.CLOSE_CURRENT_RESULT); } @Override public boolean getMoreResults(int current) throws SQLException { - logger.debug("getMoreResults(int current)", false); + logger.trace("getMoreResults(int current)", false); raiseSQLExceptionIfStatementIsClosed(); // clean up the current result set, if it exists @@ -704,48 +708,48 @@ public boolean getMoreResults(int current) throws SQLException { @Override public int getQueryTimeout() throws SQLException { - logger.debug("getQueryTimeout()", false); + logger.trace("getQueryTimeout()", false); raiseSQLExceptionIfStatementIsClosed(); return this.queryTimeout; } @Override public ResultSet getResultSet() throws SQLException { - 
logger.debug("getResultSet()", false); + logger.trace("getResultSet()", false); raiseSQLExceptionIfStatementIsClosed(); return resultSet; } @Override public int getResultSetConcurrency() throws SQLException { - logger.debug("getResultSetConcurrency()", false); + logger.trace("getResultSetConcurrency()", false); raiseSQLExceptionIfStatementIsClosed(); return resultSetConcurrency; } @Override public int getResultSetHoldability() throws SQLException { - logger.debug("getResultSetHoldability()", false); + logger.trace("getResultSetHoldability()", false); raiseSQLExceptionIfStatementIsClosed(); return resultSetHoldability; } @Override public int getResultSetType() throws SQLException { - logger.debug("getResultSetType()", false); + logger.trace("getResultSetType()", false); raiseSQLExceptionIfStatementIsClosed(); return this.resultSetType; } @Override public int getUpdateCount() throws SQLException { - logger.debug("getUpdateCount()", false); + logger.trace("getUpdateCount()", false); return (int) getUpdateCountIfDML(); } @Override public long getLargeUpdateCount() throws SQLException { - logger.debug("getLargeUpdateCount()", false); + logger.trace("getLargeUpdateCount()", false); return getUpdateCountIfDML(); } @@ -756,34 +760,34 @@ private long getUpdateCountIfDML() throws SQLException { @Override public SQLWarning getWarnings() throws SQLException { - logger.debug("getWarnings()", false); + logger.trace("getWarnings()", false); raiseSQLExceptionIfStatementIsClosed(); return sqlWarnings; } @Override public boolean isClosed() throws SQLException { - logger.debug("isClosed()", false); + logger.trace("isClosed()", false); return isClosed; // no exception } @Override public boolean isPoolable() throws SQLException { - logger.debug("isPoolable()", false); + logger.trace("isPoolable()", false); raiseSQLExceptionIfStatementIsClosed(); return poolable; } @Override public void setCursorName(String name) throws SQLException { - logger.debug("setCursorName(String name)", false); 
+ logger.trace("setCursorName(String name)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public void setEscapeProcessing(boolean enable) throws SQLException { - logger.debug("setEscapeProcessing(boolean enable)", false); + logger.trace("setEscapeProcessing(boolean enable)", false); // NOTE: We could raise an exception here, because not implemented // but it may break the existing applications. For now returning nothing. // we should revisit. @@ -792,7 +796,7 @@ public void setEscapeProcessing(boolean enable) throws SQLException { @Override public void setFetchDirection(int direction) throws SQLException { - logger.debug("setFetchDirection(int direction)", false); + logger.trace("setFetchDirection(int direction)", false); raiseSQLExceptionIfStatementIsClosed(); if (direction != ResultSet.FETCH_FORWARD) { throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); @@ -801,21 +805,21 @@ public void setFetchDirection(int direction) throws SQLException { @Override public void setFetchSize(int rows) throws SQLException { - logger.debug("setFetchSize(int rows), rows={}", rows); + logger.trace("setFetchSize(int rows), rows={}", rows); raiseSQLExceptionIfStatementIsClosed(); fetchSize = rows; } @Override public void setMaxFieldSize(int max) throws SQLException { - logger.debug("setMaxFieldSize(int max)", false); + logger.trace("setMaxFieldSize(int max)", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public void setMaxRows(int max) throws SQLException { - logger.debug("setMaxRows(int max)", false); + logger.trace("setMaxRows(int max)", false); raiseSQLExceptionIfStatementIsClosed(); @@ -832,7 +836,7 @@ public void setMaxRows(int max) throws SQLException { @Override public void setPoolable(boolean poolable) throws SQLException { - logger.debug("setPoolable(boolean poolable)", false); + logger.trace("setPoolable(boolean poolable)", 
false); raiseSQLExceptionIfStatementIsClosed(); if (poolable) { @@ -849,7 +853,7 @@ public void setPoolable(boolean poolable) throws SQLException { * @throws SQLException if any SQL error occurs. */ public void setParameter(String name, Object value) throws SQLException { - logger.debug("setParameter", false); + logger.trace("setParameter", false); try { if (this.sfBaseStatement != null) { @@ -867,7 +871,7 @@ public void setBatchID(String batchID) { @Override public void setQueryTimeout(int seconds) throws SQLException { - logger.debug("setQueryTimeout(int seconds)", false); + logger.trace("setQueryTimeout(int seconds)", false); raiseSQLExceptionIfStatementIsClosed(); this.queryTimeout = seconds; @@ -883,7 +887,7 @@ public void setQueryTimeout(int seconds) throws SQLException { @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("isWrapperFor(Class iface)", false); + logger.trace("isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -891,7 +895,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("unwrap(Class iface)", false); + logger.trace("unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( @@ -902,13 +906,13 @@ public T unwrap(Class iface) throws SQLException { @Override public void closeOnCompletion() throws SQLException { - logger.debug("closeOnCompletion()", false); + logger.trace("closeOnCompletion()", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @Override public boolean isCloseOnCompletion() throws SQLException { - logger.debug("isCloseOnCompletion()", false); + logger.trace("isCloseOnCompletion()", false); throw new SnowflakeLoggedFeatureNotSupportedException(connection.getSFBaseSession()); } @@ -918,7 +922,7 @@ public void close() throws SQLException { } public void close(boolean 
removeClosedStatementFromConnection) throws SQLException { - logger.debug("close()", false); + logger.trace("close()", false); // No exception is raised even if the statement is closed. if (resultSet != null) { @@ -947,7 +951,7 @@ public void close(boolean removeClosedStatementFromConnection) throws SQLExcepti @Override public void cancel() throws SQLException { - logger.debug("cancel()", false); + logger.trace("cancel()", false); raiseSQLExceptionIfStatementIsClosed(); try { @@ -959,14 +963,14 @@ public void cancel() throws SQLException { @Override public void clearWarnings() throws SQLException { - logger.debug("clearWarnings()", false); + logger.trace("clearWarnings()", false); raiseSQLExceptionIfStatementIsClosed(); sqlWarnings = null; } @Override public void addBatch(String sql) throws SQLException { - logger.debug("addBatch(String sql)", false); + logger.trace("addBatch(String sql)", false); raiseSQLExceptionIfStatementIsClosed(); @@ -975,7 +979,7 @@ public void addBatch(String sql) throws SQLException { @Override public void clearBatch() throws SQLException { - logger.debug("clearBatch()", false); + logger.trace("clearBatch()", false); raiseSQLExceptionIfStatementIsClosed(); @@ -983,7 +987,7 @@ public void clearBatch() throws SQLException { } private void executeSetProperty(final String sql) { - logger.debug("setting property", false); + logger.trace("setting property", false); // tokenize the sql String[] tokens = sql.split("\\s+"); @@ -1260,7 +1264,7 @@ public void setQueryTimeout(int seconds) throws SQLException {} @Override public boolean isWrapperFor(Class iface) throws SQLException { - logger.debug("isWrapperFor(Class iface)", false); + logger.trace("isWrapperFor(Class iface)", false); return iface.isInstance(this); } @@ -1268,7 +1272,7 @@ public boolean isWrapperFor(Class iface) throws SQLException { @SuppressWarnings("unchecked") @Override public T unwrap(Class iface) throws SQLException { - logger.debug("unwrap(Class iface)", false); + 
logger.trace("unwrap(Class iface)", false); if (!iface.isInstance(this)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java index ec7f0c7ca..49b3542fd 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/S3HttpUtil.java @@ -10,13 +10,19 @@ import java.util.Properties; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.HttpProtocol; +import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFSessionProperty; import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.log.SFLoggerUtil; @SnowflakeJdbcInternalApi public class S3HttpUtil { + private static final SFLogger logger = SFLoggerFactory.getLogger(HttpUtil.class); + /** * A static function to set S3 proxy params when there is a valid session * @@ -30,11 +36,28 @@ public static void setProxyForS3(HttpClientSettingsKey key, ClientConfiguration clientConfig.setProxyHost(key.getProxyHost()); clientConfig.setProxyPort(key.getProxyPort()); clientConfig.setNonProxyHosts(key.getNonProxyHosts()); + String logMessage = + "Setting S3 proxy. 
Host: " + + key.getProxyHost() + + ", port: " + + key.getProxyPort() + + ", protocol: " + + key.getProxyHttpProtocol() + + ", non-proxy hosts: " + + key.getNonProxyHosts(); if (!Strings.isNullOrEmpty(key.getProxyUser()) && !Strings.isNullOrEmpty(key.getProxyPassword())) { + logMessage += + ", user: " + + key.getProxyUser() + + ", password is " + + SFLoggerUtil.isVariableProvided(key.getProxyPassword()); clientConfig.setProxyUsername(key.getProxyUser()); clientConfig.setProxyPassword(key.getProxyPassword()); } + logger.debug(logMessage); + } else { + logger.debug("Omitting S3 proxy setup"); } } @@ -84,11 +107,26 @@ public static void setSessionlessProxyForS3( clientConfig.setProxyPort(proxyPort); clientConfig.setNonProxyHosts(nonProxyHosts); clientConfig.setProxyProtocol(protocolEnum); + String logMessage = + "Setting sessionless S3 proxy. Host: " + + proxyHost + + ", port: " + + proxyPort + + ", non-proxy hosts: " + + nonProxyHosts + + ", protocol: " + + proxyProtocol; if (!Strings.isNullOrEmpty(proxyUser) && !Strings.isNullOrEmpty(proxyPassword)) { + logMessage += ", user: " + proxyUser + " with password provided"; clientConfig.setProxyUsername(proxyUser); clientConfig.setProxyPassword(proxyPassword); } + logger.debug(logMessage); + } else { + logger.debug("Omitting sessionless S3 proxy setup as proxy is disabled"); } + } else { + logger.debug("Omitting sessionless S3 proxy setup"); } } } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java index 8977d154b..889a1d9e0 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClient.java @@ -56,6 +56,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SFPair; +import net.snowflake.client.util.Stopwatch; import 
net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; @@ -92,6 +93,9 @@ private SnowflakeAzureClient() {} public static SnowflakeAzureClient createSnowflakeAzureClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFBaseSession sfSession) throws SnowflakeSQLException { + logger.info( + "Initializing Snowflake Azure client with encryption: {}", + encMat != null ? "true" : "false"); SnowflakeAzureClient azureClient = new SnowflakeAzureClient(); azureClient.setupAzureClient(stage, encMat, sfSession); @@ -208,6 +212,7 @@ public int getEncryptionKeySize() { */ @Override public void renew(Map stageCredentials) throws SnowflakeSQLException { + logger.debug("Renewing the Azure client"); stageInfo.setCredentials(stageCredentials); setupAzureClient(stageInfo, encMat, session); } @@ -320,10 +325,14 @@ public void download( String presignedUrl, String queryId) throws SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + String localFilePath = localLocation + localFileSep + destFileName; + logger.info( + "Staring download of file from Azure stage path: {} to {}", stageFilePath, localFilePath); int retryCount = 0; do { try { - String localFilePath = localLocation + localFileSep + destFileName; File localFile = new File(localFilePath); CloudBlobContainer container = azStorageClient.getContainerReference(remoteStorageLocation); CloudBlob blob = container.getBlockBlobReference(stageFilePath); @@ -332,6 +341,8 @@ public void download( transferOptions.setConcurrentRequestCount(parallelism); blob.downloadToFile(localFilePath, null, transferOptions, opContext); + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); // Pull object metadata from Azure blob.downloadAttributes(null, transferOptions, opContext); @@ -345,6 +356,7 @@ public void download( String iv = encryptionData.getValue(); if (this.isEncrypting() && 
this.getEncryptionKeySize() <= 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -357,10 +369,27 @@ public void download( // Decrypt file try { EncryptionProvider.decrypt(localFile, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "Azure file {} downloaded to {}. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + remoteStorageLocation, + localFile.getAbsolutePath(), + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); } catch (Exception ex) { logger.error("Error decrypting file", ex); throw ex; } + } else { + logger.info( + "Azure file {} downloaded to {}. It took {} ms with {} retries", + remoteStorageLocation, + localFile.getAbsolutePath(), + downloadMillis, + retryCount); } return; @@ -403,6 +432,10 @@ public InputStream downloadToStream( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + "Staring download of file from Azure stage path: {} to input stream", stageFilePath); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); int retryCount = 0; do { @@ -412,7 +445,8 @@ public InputStream downloadToStream( CloudBlob blob = container.getBlockBlobReference(stageFilePath); InputStream stream = blob.openInputStream(null, null, opContext); - + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); Map userDefinedMetadata = blob.getMetadata(); AbstractMap.SimpleEntry encryptionData = @@ -423,6 +457,7 @@ public InputStream downloadToStream( String iv = encryptionData.getValue(); if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -433,8 +468,18 @@ public InputStream downloadToStream( } try { - - return EncryptionProvider.decryptStream(stream, key, iv, encMat); + InputStream is = EncryptionProvider.decryptStream(stream, key, 
iv, encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "Azure file {} downloaded to input stream. It took {} ms " + + "(download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); + return is; } catch (Exception ex) { logger.error("Error in decrypting file", ex); @@ -442,6 +487,11 @@ public InputStream downloadToStream( } } else { + logger.info( + "Azure file {} downloaded to input stream. Download took {} ms with {} retries", + stageFilePath, + downloadMillis, + retryCount); return stream; } @@ -493,6 +543,9 @@ public void upload( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "Azure", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); final List toClose = new ArrayList<>(); long originalContentLength = meta.getContentLength(); @@ -512,9 +565,10 @@ public void upload( } int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); do { try { - logger.debug("Starting upload", false); InputStream fileInputStream = uploadStreamInfo.left; CloudBlobContainer container = azStorageClient.getContainerReference(remoteStorageLocation); CloudBlockBlob blob = container.getBlockBlobReference(destFileName); @@ -531,7 +585,22 @@ public void upload( null, transferOptions, opContext); - logger.debug("Upload successful", false); + stopwatch.stop(); + + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to Azure location: {}. It took {} ms with {} retries", + remoteStorageLocation, + stopwatch.elapsedMillis(), + retryCount); + } else { + logger.info( + "Uploaded file {} to Azure location: {}. 
It took {} ms with {} retries", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis(), + retryCount); + } blob.uploadMetadata(null, transferOptions, opContext); diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java index 61c31b3ab..506293023 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java @@ -58,6 +58,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SFPair; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; @@ -106,6 +107,8 @@ private SnowflakeGCSClient() {} public static SnowflakeGCSClient createSnowflakeGCSClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFSession session) throws SnowflakeSQLException { + logger.debug( + "Initializing Snowflake GCS client with encryption: {}", encMat != null ? 
"true" : "false"); SnowflakeGCSClient sfGcsClient = new SnowflakeGCSClient(); sfGcsClient.setupGCSClient(stage, encMat, session); @@ -165,6 +168,7 @@ public boolean requirePresignedUrl() { @Override public void renew(Map stageCredentials) throws SnowflakeSQLException { + logger.debug("Renewing the Snowflake GCS client"); stageInfo.setCredentials(stageCredentials); setupGCSClient(stageInfo, encMat, session); } @@ -249,14 +253,18 @@ public void download( String presignedUrl, String queryId) throws SnowflakeSQLException { - int retryCount = 0; String localFilePath = localLocation + localFileSep + destFileName; + logger.info( + "Staring download of file from GCS stage path: {} to {}", stageFilePath, localFilePath); + int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); File localFile = new File(localFilePath); - do { try { String key = null; String iv = null; + long downloadMillis = 0; if (!Strings.isNullOrEmpty(presignedUrl)) { logger.debug("Starting download with presigned URL", false); URIBuilder uriBuilder = new URIBuilder(presignedUrl); @@ -269,7 +277,7 @@ public void download( CloseableHttpClient httpClient = HttpUtil.getHttpClientWithoutDecompression(session.getHttpClientKey()); - // Put the file on storage using the presigned url + // Get the file on storage using the presigned url HttpResponse response = RestRequest.execute( httpClient, @@ -315,6 +323,8 @@ public void download( } } } + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); logger.debug("Download successful", false); } catch (IOException ex) { logger.debug("Download unsuccessful {}", ex); @@ -340,6 +350,8 @@ public void download( logger.debug("Starting download without presigned URL", false); blob.downloadTo( localFile.toPath(), Blob.BlobSourceOption.shouldReturnRawInputStream(true)); + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); logger.debug("Download successful", false); // Get the user-defined BLOB metadata @@ -370,7 +382,18 @@ public 
void download( // Decrypt file try { + stopwatch.start(); EncryptionProvider.decrypt(localFile, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "GCS file {} downloaded to {}. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); } catch (Exception ex) { logger.error("Error decrypting file", ex); throw new SnowflakeSQLLoggedException( @@ -380,6 +403,13 @@ public void download( SqlState.INTERNAL_ERROR, "Cannot decrypt file"); } + } else { + logger.info( + "GCS file {} downloaded to {}. It took {} ms with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis, + retryCount); } return; } catch (Exception ex) { @@ -421,8 +451,12 @@ public InputStream downloadToStream( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info("Staring download of file from GCS stage path: {} to input stream", stageFilePath); int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); InputStream inputStream = null; + long downloadMillis = 0; do { try { String key = null; @@ -478,6 +512,8 @@ public InputStream downloadToStream( } } } + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); logger.debug("Download successful", false); } catch (IOException ex) { logger.debug("Download unsuccessful {}", ex); @@ -509,9 +545,12 @@ public InputStream downloadToStream( key = encryptionData.getKey(); iv = encryptionData.getValue(); } + stopwatch.stop(); + downloadMillis = stopwatch.elapsedMillis(); } if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLException( queryId, @@ -523,7 +562,17 @@ public InputStream downloadToStream( // Decrypt file try { if (inputStream != null) { + inputStream = 
EncryptionProvider.decryptStream(inputStream, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "GCS file {} downloaded to stream. It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); return inputStream; } } catch (Exception ex) { @@ -535,7 +584,15 @@ public InputStream downloadToStream( SqlState.INTERNAL_ERROR, "Cannot decrypt file"); } + } else { + logger.info( + "GCS file {} downloaded to stream. Download took {} ms with {} retries", + stageFilePath, + downloadMillis, + retryCount); } + + return inputStream; } catch (Exception ex) { logger.debug("Download unsuccessful {}", ex); handleStorageException(ex, ++retryCount, "download", session, command, queryId); @@ -584,6 +641,9 @@ public void uploadWithPresignedUrlWithoutConnection( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "GCS", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); final List toClose = new ArrayList<>(); long originalContentLength = meta.getContentLength(); @@ -601,7 +661,8 @@ public void uploadWithPresignedUrlWithoutConnection( if (!(meta instanceof CommonObjectMetadata)) { throw new IllegalArgumentException("Unexpected metadata object type"); } - + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); if (Strings.isNullOrEmpty(presignedUrl) || "null".equalsIgnoreCase(presignedUrl)) { logger.debug("Starting upload with downscoped token"); uploadWithDownScopedToken( @@ -611,7 +672,7 @@ public void uploadWithPresignedUrlWithoutConnection( meta.getUserMetadata(), uploadStreamInfo.left, queryId); - logger.debug("Upload successfully with downscoped token"); + logger.debug("Upload successful with downscoped token"); } else { logger.debug("Starting upload with presigned url"); @@ -627,6 +688,20 @@ public void 
uploadWithPresignedUrlWithoutConnection( queryId); logger.debug("Upload successfully with presigned url"); } + stopwatch.stop(); + + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to GCS location: {}. It took {} ms", + remoteStorageLocation, + stopwatch.elapsedMillis()); + } else { + logger.info( + "Uploaded file {} to GCS location: {}. It took {} ms", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis()); + } // close any open streams in the "toClose" list and return for (FileInputStream is : toClose) { @@ -668,6 +743,9 @@ public void upload( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "GCS", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); final List toClose = new ArrayList<>(); long originalContentLength = meta.getContentLength(); @@ -686,6 +764,8 @@ public void upload( throw new IllegalArgumentException("Unexpected metadata object type"); } + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); if (!Strings.isNullOrEmpty(presignedUrl)) { logger.debug("Starting upload with downscope token", false); uploadWithPresignedUrl( @@ -698,7 +778,20 @@ public void upload( presignedUrl, session.getHttpClientKey(), queryId); + stopwatch.stop(); logger.debug("Upload successful", false); + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to GCS location: {}. It took {} ms", + remoteStorageLocation, + stopwatch.elapsedMillis()); + } else { + logger.info( + "Uploaded file {} to GCS location: {}. 
It took {} ms", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis()); + } // close any open streams in the "toClose" list and return for (FileInputStream is : toClose) { @@ -722,7 +815,20 @@ public void upload( uploadStreamInfo.left, queryId); + stopwatch.stop(); logger.debug("Upload successful", false); + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to GCS location: {}. It took {} ms", + remoteStorageLocation, + stopwatch.elapsedMillis()); + } else { + logger.info( + "Uploaded file {} to GCS location: {}. It took {} ms", + srcFile.getAbsolutePath(), + remoteStorageLocation, + stopwatch.elapsedMillis()); + } // close any open streams in the "toClose" list and return for (FileInputStream is : toClose) { diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java index 958eaca2e..15110bfc8 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java @@ -67,6 +67,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SFPair; +import net.snowflake.client.util.Stopwatch; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import net.snowflake.common.core.SqlState; import org.apache.commons.io.IOUtils; @@ -116,6 +117,10 @@ public SnowflakeS3Client( SFBaseSession session, boolean useS3RegionalUrl) throws SnowflakeSQLException { + logger.debug( + "Initializing Snowflake S3 client with encryption: {}, client side encrypted: {}", + encMat != null, + isClientSideEncrypted); this.session = session; this.isUseS3RegionalUrl = useS3RegionalUrl; setupSnowflakeS3Client( @@ -279,6 +284,7 @@ public int getEncryptionKeySize() { */ @Override public void renew(Map stageCredentials) throws SnowflakeSQLException { + 
logger.debug("Renewing the Snowflake S3 client"); // We renew the client with fresh credentials and with its original parameters setupSnowflakeS3Client( stageCredentials, @@ -293,6 +299,7 @@ public void renew(Map stageCredentials) throws SnowflakeSQLException { @Override public void shutdown() { + logger.debug("Shutting down the Snowflake S3 client"); amazonClient.shutdown(); } @@ -340,14 +347,18 @@ public void download( String presignedUrl, String queryId) throws SnowflakeSQLException { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + String localFilePath = localLocation + localFileSep + destFileName; + logger.info( + "Staring download of file from S3 stage path: {} to {}", stageFilePath, localFilePath); TransferManager tx = null; int retryCount = 0; do { try { - File localFile = new File(localLocation + localFileSep + destFileName); + File localFile = new File(localFilePath); - logger.debug( - "Creating executor service for transfer" + "manager with {} threads", parallelism); + logger.debug("Creating executor service for transfer manager with {} threads", parallelism); // download files from s3 tx = @@ -374,7 +385,11 @@ public ExecutorService newExecutor() { myDownload.waitForCompletion(); + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); + if (this.isEncrypting() && this.getEncryptionKeySize() < 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -387,10 +402,27 @@ public ExecutorService newExecutor() { // Decrypt file try { EncryptionProvider.decrypt(localFile, key, iv, this.encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "S3 file {} downloaded to {}. 
It took {} ms (download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); } catch (Exception ex) { logger.error("Error decrypting file", ex); throw ex; } + } else { + logger.info( + "S3 file {} downloaded to {}. It took {} ms with {} retries", + stageFilePath, + localFile.getAbsolutePath(), + downloadMillis, + retryCount); } return; @@ -438,21 +470,24 @@ public InputStream downloadToStream( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info("Staring download of file from S3 stage path: {} to input stream", stageFilePath); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); int retryCount = 0; do { try { S3Object file = amazonClient.getObject(remoteStorageLocation, stageFilePath); - ObjectMetadata meta = amazonClient.getObjectMetadata(remoteStorageLocation, stageFilePath); - InputStream stream = file.getObjectContent(); - + stopwatch.stop(); + long downloadMillis = stopwatch.elapsedMillis(); Map metaMap = meta.getUserMetadata(); String key = metaMap.get(AMZ_KEY); String iv = metaMap.get(AMZ_IV); if (this.isEncrypting() && this.getEncryptionKeySize() < 256) { + stopwatch.restart(); if (key == null || iv == null) { throw new SnowflakeSQLLoggedException( queryId, @@ -463,16 +498,31 @@ public InputStream downloadToStream( } try { - - return EncryptionProvider.decryptStream(stream, key, iv, encMat); + InputStream is = EncryptionProvider.decryptStream(stream, key, iv, encMat); + stopwatch.stop(); + long decryptMillis = stopwatch.elapsedMillis(); + logger.info( + "S3 file {} downloaded to input stream. 
It took {} ms " + + "(download: {} ms, decryption: {} ms) with {} retries", + stageFilePath, + downloadMillis + decryptMillis, + downloadMillis, + decryptMillis, + retryCount); + return is; } catch (Exception ex) { logger.error("Error in decrypting file", ex); throw ex; } } else { - return stream; + logger.info( + "S3 file {} downloaded to input stream. Download took {} ms with {} retries", + stageFilePath, + downloadMillis, + retryCount); } + return stream; } catch (Exception ex) { handleS3Exception(ex, ++retryCount, "download", session, command, this, queryId); } @@ -520,6 +570,10 @@ public void upload( String presignedUrl, String queryId) throws SnowflakeSQLException { + logger.info( + StorageHelper.getStartUploadLog( + "S3", uploadFromStream, inputStream, fileBackedOutputStream, srcFile, destFileName)); + final long originalContentLength = meta.getContentLength(); final List toClose = new ArrayList<>(); SFPair uploadStreamInfo = @@ -542,9 +596,10 @@ public void upload( TransferManager tx = null; int retryCount = 0; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); do { try { - logger.debug( "Creating executor service for transfer" + "manager with {} threads", parallelism); @@ -581,11 +636,28 @@ public ExecutorService newExecutor() { } myUpload.waitForCompletion(); + stopwatch.stop(); + long uploadMillis = stopwatch.elapsedMillis(); // get out for (FileInputStream is : toClose) { IOUtils.closeQuietly(is); } + + if (uploadFromStream) { + logger.info( + "Uploaded data from input stream to S3 location: {}. It took {} ms with {} retries", + destFileName, + uploadMillis, + retryCount); + } else { + logger.info( + "Uploaded file {} to S3 location: {}. 
It took {} ms with {} retries", + srcFile.getAbsolutePath(), + destFileName, + uploadMillis, + retryCount); + } return; } catch (Exception ex) { @@ -640,7 +712,7 @@ private SFPair createUploadStream( String queryId) throws SnowflakeSQLException { logger.debug( - "createUploadStream({}, {}, {}, {}, {}, {}, {}) " + "keySize={}", + "createUploadStream({}, {}, {}, {}, {}, {}, {}) " + "keySize: {}", this, srcFile, uploadFromStream, diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java index ef97c9508..ac7de73a6 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java @@ -53,7 +53,7 @@ public static StorageClientFactory getFactory() { public SnowflakeStorageClient createClient( StageInfo stage, int parallel, RemoteStoreFileEncryptionMaterial encMat, SFSession session) throws SnowflakeSQLException { - logger.debug("createClient client type={}", stage.getStageType().name()); + logger.debug("Creating storage client. Client type: {}", stage.getStageType().name()); switch (stage.getStageType()) { case S3: @@ -113,7 +113,7 @@ private SnowflakeS3Client createS3Client( throws SnowflakeSQLException { final int S3_TRANSFER_MAX_RETRIES = 3; - logger.debug("createS3Client encryption={}", (encMat == null ? "no" : "yes")); + logger.debug("Creating S3 client with encryption: {}", (encMat == null ? 
"no" : "yes")); SnowflakeS3Client s3Client; @@ -130,8 +130,8 @@ private SnowflakeS3Client createS3Client( clientConfig.setProxyPassword(""); logger.debug( - "s3 client configuration: maxConnection={}, connectionTimeout={}, " - + "socketTimeout={}, maxErrorRetry={}", + "S3 client configuration: maxConnection: {}, connectionTimeout: {}, " + + "socketTimeout: {}, maxErrorRetry: {}", clientConfig.getMaxConnections(), clientConfig.getConnectionTimeout(), clientConfig.getSocketTimeout(), @@ -153,7 +153,7 @@ private SnowflakeS3Client createS3Client( logger.debug("Exception creating s3 client", ex); throw ex; } - logger.debug("s3 client created", false); + logger.debug("S3 Storage client created", false); return s3Client; } @@ -195,7 +195,7 @@ public StorageObjectMetadata createStorageMetadataObj(StageInfo.StageType stageT private SnowflakeAzureClient createAzureClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFBaseSession session) throws SnowflakeSQLException { - logger.debug("createAzureClient encryption={}", (encMat == null ? "no" : "yes")); + logger.debug("Creating Azure client with encryption: {}", (encMat == null ? "no" : "yes")); SnowflakeAzureClient azureClient; @@ -220,7 +220,7 @@ private SnowflakeAzureClient createAzureClient( private SnowflakeGCSClient createGCSClient( StageInfo stage, RemoteStoreFileEncryptionMaterial encMat, SFSession session) throws SnowflakeSQLException { - logger.debug("createGCSClient encryption={}", (encMat == null ? "no" : "yes")); + logger.debug("Creating GCS client with encryption: {}", (encMat == null ? 
"no" : "yes")); SnowflakeGCSClient gcsClient; diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java new file mode 100644 index 000000000..34098d9d0 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageHelper.java @@ -0,0 +1,36 @@ +package net.snowflake.client.jdbc.cloud.storage; + +import java.io.File; +import java.io.InputStream; +import net.snowflake.client.jdbc.FileBackedOutputStream; + +class StorageHelper { + static String getStartUploadLog( + String serviceName, + boolean uploadFromStream, + InputStream inputStream, + FileBackedOutputStream fileBackedOutputStream, + File srcFile, + String destFileName) { + if (uploadFromStream && fileBackedOutputStream != null) { + File file = fileBackedOutputStream.getFile(); + String fileBackedOutputStreamType = + file == null ? "byte stream" : ("file: " + file.getAbsolutePath()); + return "Starting upload from stream (" + + fileBackedOutputStreamType + + ") to " + + serviceName + + " location: " + + destFileName; + } else if (uploadFromStream && inputStream != null) { + return "Starting upload from input stream to " + serviceName + " location: " + destFileName; + } else { + return "Starting upload from file " + + srcFile.getAbsolutePath() + + " to " + + serviceName + + " location: " + + destFileName; + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java b/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java index e92a57524..50efb9234 100644 --- a/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java +++ b/src/main/java/net/snowflake/client/jdbc/telemetry/TelemetryClient.java @@ -22,6 +22,7 @@ import net.snowflake.client.jdbc.telemetryOOB.TelemetryThreadPool; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.util.Stopwatch; import 
org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; @@ -117,6 +118,11 @@ private TelemetryClient( this.logBatch = new LinkedList<>(); this.isClosed = false; this.forceFlushSize = flushSize; + logger.debug( + "Initializing telemetry client with telemetry url: {}, flush size: {}, auth type: {}", + telemetryUrl, + forceFlushSize, + authType); } /** @@ -131,6 +137,7 @@ public boolean isTelemetryEnabled() { /** Disable any use of the client to add/send metrics */ public void disableTelemetry() { + logger.debug("Disabling telemetry"); this.isTelemetryServiceAvailable = false; } @@ -146,7 +153,7 @@ public static Telemetry createTelemetry(Connection conn, int flushSize) { return createTelemetry( (SFSession) conn.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(), flushSize); } catch (SQLException ex) { - logger.debug("input connection is not a SnowflakeConnection", false); + logger.debug("Input connection is not a SnowflakeConnection", false); return null; } } @@ -243,7 +250,9 @@ public void addLogToBatch(TelemetryData log) { this.logBatch.add(log); } - if (this.logBatch.size() >= this.forceFlushSize) { + int logBatchSize = this.logBatch.size(); + if (logBatchSize >= this.forceFlushSize) { + logger.debug("Force flushing telemetry batch of size: {}", logBatchSize); this.sendBatchAsync(); } } @@ -312,7 +321,6 @@ public void postProcess(String queryId, String sqlState, int vendorCode, Throwab * @throws IOException if closed or uploading batch fails */ private boolean sendBatch() throws IOException { - if (isClosed) { throw new IOException("Telemetry connector is closed"); } @@ -331,6 +339,8 @@ private boolean sendBatch() throws IOException { } if (!tmpList.isEmpty()) { + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); // session shared with JDBC String payload = logsToString(tmpList); @@ -369,11 +379,16 @@ private boolean sendBatch() throws IOException { 
this.session.getHttpClientSocketTimeout(), 0, this.session.getHttpClientKey()); + stopwatch.stop(); + logger.debug( + "Sending telemetry took {} ms. Batch size: {}", + stopwatch.elapsedMillis(), + tmpList.size()); } catch (SnowflakeSQLException e) { disableTelemetry(); // when got error like 404 or bad request, disable telemetry in this // telemetry instance logger.error( - "Telemetry request failed, " + "response: {}, exception: {}", response, e.getMessage()); + "Telemetry request failed, response: {}, exception: {}", response, e.getMessage()); return false; } } diff --git a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java index 8b1918bc1..7ddb3c7ce 100644 --- a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java +++ b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java @@ -15,6 +15,7 @@ import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; +import net.snowflake.client.util.Stopwatch; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; @@ -108,24 +109,28 @@ public void setNumOfRetryToTriggerTelemetry(int num) { public static void enable() { synchronized (enableLock) { + logger.debug("Enabling out-of-band telemetry", false); enabled = true; } } public static void disable() { synchronized (enableLock) { + logger.debug("Disabling out-of-band telemetry", false); enabled = false; } } public static void enableHTAP() { synchronized (enableHTAPLock) { + logger.debug("Enabling out-of-band HTAP telemetry"); htapEnabled = true; } } public static void disableHTAP() { synchronized (enableHTAPLock) { + logger.debug("Disabling out-of-band HTAP telemetry"); htapEnabled = false; } } @@ -309,6 +314,7 @@ public void setURL(String url) { } public 
void setDeployment(TELEMETRY_SERVER_DEPLOYMENT deployment) { + logger.debug("Setting out-of-band telemetry sever deployment to {}", deployment); serverDeployment = deployment; } @@ -421,13 +427,13 @@ public void run() { if (!instance.isDeploymentEnabled()) { // skip the disabled deployment - logger.debug("skip the disabled deployment: ", instance.serverDeployment.name); + logger.debug("Skip the disabled deployment: ", instance.serverDeployment.name); return; } if (!instance.serverDeployment.url.matches(TELEMETRY_SERVER_URL_PATTERN)) { // skip the disabled deployment - logger.debug("ignore invalid url: ", instance.serverDeployment.url); + logger.debug("Ignore invalid url: ", instance.serverDeployment.url); return; } @@ -435,7 +441,10 @@ public void run() { } private void uploadPayload() { - logger.debugNoMask("Running telemetry uploader. The payload is: " + payloadLogStr); + Stopwatch stopwatch = new Stopwatch(); + stopwatch.start(); + logger.debugNoMask( + "Running out-of-band telemetry uploader. 
The payload is: " + payloadLogStr); CloseableHttpResponse response = null; boolean success = true; @@ -450,13 +459,14 @@ private void uploadPayload() { int statusCode = response.getStatusLine().getStatusCode(); if (statusCode == 200) { - logger.debug("telemetry server request success: {}", response, true); + logger.debug("Out-of-band telemetry server request success: {}", response, true); instance.count(); } else if (statusCode == 429) { - logger.debug("telemetry server request hit server cap on response: {}", response); + logger.debug( + "Out-of-band telemetry server request hit server cap on response: {}", response); instance.serverFailureCnt.incrementAndGet(); } else { - logger.debug("telemetry server request error: {}", response, true); + logger.debug("Out-of-band telemetry server request error: {}", response, true); instance.lastClientError = response.toString(); instance.clientFailureCnt.incrementAndGet(); success = false; @@ -467,7 +477,7 @@ private void uploadPayload() { } catch (Exception e) { // exception from here is always captured logger.debug( - "Telemetry request failed, Exception" + "response: {}, exception: {}", + "Out-of-band telemetry request failed, Exception response: {}, exception: {}", response, e.getMessage()); String res = "null"; @@ -478,7 +488,16 @@ private void uploadPayload() { instance.clientFailureCnt.incrementAndGet(); success = false; } finally { - logger.debug("Telemetry request success={} " + "and clean the current queue", success); + stopwatch.stop(); + logger.debug( + "Out-of-band telemetry request success: {} and clean the current queue. It took {} ms." 
+ + " Total successful events: {}, total unsuccessful events: {} (client failures: {}, server failures: {})", + success, + stopwatch.elapsedMillis(), + instance.eventCnt, + instance.clientFailureCnt.get() + instance.serverFailureCnt.get(), + instance.clientFailureCnt, + instance.serverFailureCnt); } } } diff --git a/src/main/java/net/snowflake/client/loader/BufferStage.java b/src/main/java/net/snowflake/client/loader/BufferStage.java index 6f70ec050..d7690f532 100644 --- a/src/main/java/net/snowflake/client/loader/BufferStage.java +++ b/src/main/java/net/snowflake/client/loader/BufferStage.java @@ -24,7 +24,7 @@ * single processing stage. */ public class BufferStage { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(BufferStage.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(BufferStage.class); public enum State { CREATED, @@ -87,7 +87,7 @@ public enum State { private ArrayList _uploaders = new ArrayList<>(); BufferStage(StreamLoader loader, Operation op, long csvFileBucketSize, long csvFileSize) { - LOGGER.debug("Operation: {}", op); + logger.debug("Operation: {}", op); _state = State.CREATED; _loader = loader; @@ -145,7 +145,7 @@ private synchronized void openFile() { if (_loader._compressDataBeforePut) { fName += StreamLoader.FILE_SUFFIX; } - LOGGER.debug("openFile: {}", fName); + logger.debug("openFile: {}", fName); OutputStream fileStream = new FileOutputStream(fName); if (_loader._compressDataBeforePut) { @@ -173,7 +173,7 @@ private synchronized void openFile() { // not thread safe boolean stageData(final byte[] line) throws IOException { if (this._rowCount % 10000 == 0) { - LOGGER.debug("rowCount: {}, currentSize: {}", this._rowCount, _currentSize); + logger.debug("rowCount: {}, currentSize: {}", this._rowCount, _currentSize); } _outstream.write(line); _currentSize += line.length; @@ -191,7 +191,7 @@ boolean stageData(final byte[] line) throws IOException { } if (_currentSize >= this._csvFileSize) { - 
LOGGER.debug( + logger.debug( "name: {}, currentSize: {}, Threshold: {}," + " fileCount: {}, fileBucketSize: {}", _file.getAbsolutePath(), _currentSize, @@ -217,7 +217,7 @@ boolean stageData(final byte[] line) throws IOException { * @throws IOException raises an exception if IO error occurs */ void completeUploading() throws IOException { - LOGGER.debug( + logger.debug( "name: {}, currentSize: {}, Threshold: {}," + " fileCount: {}, fileBucketSize: {}", _file.getAbsolutePath(), _currentSize, diff --git a/src/main/java/net/snowflake/client/loader/FileUploader.java b/src/main/java/net/snowflake/client/loader/FileUploader.java index 3862bd0e5..05b6c84f5 100644 --- a/src/main/java/net/snowflake/client/loader/FileUploader.java +++ b/src/main/java/net/snowflake/client/loader/FileUploader.java @@ -14,7 +14,7 @@ /** Class responsible for uploading a single data file. */ public class FileUploader implements Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(PutQueue.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(PutQueue.class); private static final int RETRY = 6; private final Thread _thread; @@ -23,7 +23,7 @@ public class FileUploader implements Runnable { private final File _file; FileUploader(StreamLoader loader, String stage, File file) { - LOGGER.debug("", false); + logger.trace("Creating new FileUploader", false); _loader = loader; _thread = new Thread(this); _thread.setName("FileUploaderThread"); @@ -33,7 +33,7 @@ public class FileUploader implements Runnable { public synchronized void upload() { // throttle up will wait if too many files are uploading - LOGGER.debug("", false); + logger.trace("Creating new FileUploader", false); _loader.throttleUp(); _thread.start(); } @@ -66,7 +66,7 @@ public void run() { } if (attempt > 0) { - LOGGER.debug("Will retry PUT after {} seconds", Math.pow(2, attempt)); + logger.debug("Will retry PUT after {} seconds", Math.pow(2, attempt)); Thread.sleep(1000 * ((int) Math.pow(2, 
attempt))); } @@ -114,9 +114,9 @@ public void run() { Statement statement = _loader.getPutConnection().createStatement(); try { - LOGGER.debug("Put Statement start: {}", putStatement); + logger.debug("Put Statement start: {}", putStatement); statement.execute(putStatement); - LOGGER.debug("Put Statement end: {}", putStatement); + logger.debug("Put Statement end: {}", putStatement); ResultSet putResult = statement.getResultSet(); putResult.next(); @@ -137,13 +137,13 @@ public void run() { } else { // The log level should be WARNING for a single upload failure. if (message.startsWith("Simulated upload failure")) { - LOGGER.debug( + logger.debug( "Failed to upload a file:" + " status={}," + " filename={}," + " message={}", status, file, message); } else { - LOGGER.debug( + logger.debug( "Failed to upload a file:" + " status={}," + " filename={}," + " message={}", status, file, @@ -152,7 +152,7 @@ public void run() { } } catch (Throwable t) { // The log level for unknown error is set to SEVERE - LOGGER.error( + logger.error( String.format( "Failed to PUT on attempt: attempt=[%s], " + "Message=[%s]", attempt, t.getMessage()), @@ -161,7 +161,7 @@ public void run() { } } } catch (Throwable t) { - LOGGER.error("PUT exception", t); + logger.error("PUT exception", t); _loader.abort(new Loader.ConnectionError(t.getMessage(), t.getCause())); } finally { _loader.throttleDown(); @@ -169,11 +169,11 @@ public void run() { } public void join() { - LOGGER.debug("", false); + logger.trace("Joining threads", false); try { _thread.join(0); } catch (InterruptedException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); } } diff --git a/src/main/java/net/snowflake/client/loader/LoaderFactory.java b/src/main/java/net/snowflake/client/loader/LoaderFactory.java index 2d0dc8e99..1bd5ca1b0 100644 --- a/src/main/java/net/snowflake/client/loader/LoaderFactory.java +++ b/src/main/java/net/snowflake/client/loader/LoaderFactory.java @@ -10,13 +10,13 @@ import 
net.snowflake.client.log.SFLoggerFactory; public class LoaderFactory { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(LoaderFactory.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(LoaderFactory.class); public static Loader createLoader( Map properties, Connection uploadConnection, Connection processingConnection) { - LOGGER.debug("", false); + logger.debug("", false); StreamLoader loader = new StreamLoader(properties, uploadConnection, processingConnection); return loader; } diff --git a/src/main/java/net/snowflake/client/loader/LoadingError.java b/src/main/java/net/snowflake/client/loader/LoadingError.java index 9593458dc..e04a1ba29 100644 --- a/src/main/java/net/snowflake/client/loader/LoadingError.java +++ b/src/main/java/net/snowflake/client/loader/LoadingError.java @@ -13,7 +13,7 @@ /** Wrapper for data format errors returned by the COPY/validate command */ public class LoadingError { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(LoadingError.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(LoadingError.class); public enum ErrorProperty { ERROR, @@ -65,7 +65,7 @@ public LoadingError(ResultSet rs, BufferStage bs, StreamLoader loader) { try { _properties.put(p, rs.getString(p.name())); } catch (SQLException ex) { - LOGGER.error("Exception", ex); + logger.error("Exception", ex); } } } diff --git a/src/main/java/net/snowflake/client/loader/ProcessQueue.java b/src/main/java/net/snowflake/client/loader/ProcessQueue.java index 6a20184aa..4a4114661 100644 --- a/src/main/java/net/snowflake/client/loader/ProcessQueue.java +++ b/src/main/java/net/snowflake/client/loader/ProcessQueue.java @@ -20,14 +20,14 @@ * BufferStage class */ public class ProcessQueue implements Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(ProcessQueue.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(ProcessQueue.class); private final Thread _thread; 
private final StreamLoader _loader; public ProcessQueue(StreamLoader loader) { - LOGGER.debug("", false); + logger.debug("", false); _loader = loader; _thread = new Thread(this); @@ -73,10 +73,10 @@ public void run() { if (_loader.isAborted()) { if (!_loader._preserveStageFile) { currentCommand = "RM '" + remoteStage + "'"; - LOGGER.debug(currentCommand, true); + logger.debug(currentCommand, true); conn.createStatement().execute(currentCommand); } else { - LOGGER.debug( + logger.debug( "Error occurred. The remote stage is preserved for " + "further investigation: {}", remoteStage); @@ -97,7 +97,7 @@ public void run() { String lastErrorRow = ""; // Create temp table to load data (may have a subset of columns) - LOGGER.debug("Creating Temporary Table: name={}", stage.getId()); + logger.debug("Creating Temporary Table: name={}", stage.getId()); currentState = State.CREATE_TEMP_TABLE; List allColumns = getAllColumns(conn); @@ -124,7 +124,7 @@ public void run() { } // Load data there - LOGGER.debug( + logger.debug( "COPY data in the stage to table:" + " stage={}," + " name={}", remoteStage, stage.getId()); @@ -152,7 +152,7 @@ public void run() { } int errorRecordCount = toIntExact(parsed - loaded); - LOGGER.debug( + logger.debug( "errorRecordCount=[{}]," + " parsed=[{}]," + " loaded=[{}]", errorRecordCount, parsed, @@ -163,13 +163,13 @@ public void run() { if (loaded == stage.getRowCount()) { // successfully loaded everything - LOGGER.debug( + logger.debug( "COPY command successfully finished:" + " stage={}," + " name={}", remoteStage, stage.getId()); listener.addErrorCount(0); } else { - LOGGER.debug( + logger.debug( "Found errors in COPY command:" + " stage={}," + " name={}", remoteStage, stage.getId()); @@ -204,7 +204,7 @@ public void run() { dataError = loadError.getException(); } } - LOGGER.debug("errorCount: {}", errorCount); + logger.debug("errorCount: {}", errorCount); listener.addErrorCount(errorCount); if (listener.throwOnError()) { @@ -212,10 +212,10 @@ 
public void run() { _loader.abort(dataError); if (!_loader._preserveStageFile) { - LOGGER.debug("RM: {}", remoteStage); + logger.debug("RM: {}", remoteStage); conn.createStatement().execute("RM '" + remoteStage + "'"); } else { - LOGGER.error( + logger.error( "Error occurred. The remote stage is preserved for " + "further investigation: {}", remoteStage); @@ -320,7 +320,7 @@ public void run() { } currentCommand = loadStatement; - LOGGER.debug("Load Statement: {}", loadStatement); + logger.debug("Load Statement: {}", loadStatement); Statement s = conn.createStatement(); s.execute(loadStatement); @@ -357,13 +357,13 @@ public void run() { } } } catch (InterruptedException ex) { - LOGGER.error("Interrupted", ex); + logger.error("Interrupted", ex); break; } catch (Exception ex) { String msg = String.format("State: %s, %s, %s", currentState, currentCommand, ex.getMessage()); _loader.abort(new Loader.ConnectionError(msg, Utils.getCause(ex))); - LOGGER.error(msg, true); + logger.error(msg, true); if (stage == null || stage.isTerminate()) { break; } @@ -406,11 +406,11 @@ private String getOn(List keys, String L, String R) { } public void join() { - LOGGER.debug("", false); + logger.trace("Joining threads", false); try { _thread.join(0); } catch (InterruptedException ex) { - LOGGER.debug("Exception: ", ex); + logger.debug("Exception: ", ex); } } diff --git a/src/main/java/net/snowflake/client/loader/PutQueue.java b/src/main/java/net/snowflake/client/loader/PutQueue.java index 69f2b08e7..d11067016 100644 --- a/src/main/java/net/snowflake/client/loader/PutQueue.java +++ b/src/main/java/net/snowflake/client/loader/PutQueue.java @@ -13,14 +13,14 @@ * ProcessQueue. 
*/ public class PutQueue implements Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(PutQueue.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(PutQueue.class); private final Thread _thread; private final StreamLoader _loader; public PutQueue(StreamLoader loader) { - LOGGER.debug("", false); + logger.trace("Creating new PutQueue", false); _loader = loader; _thread = new Thread(this); _thread.setName("PutQueueThread"); @@ -60,7 +60,7 @@ public void run() { } } catch (InterruptedException | IOException ex) { - LOGGER.error("Exception: ", ex); + logger.error("Exception: ", ex); break; } finally { @@ -72,7 +72,7 @@ public void join() { try { _thread.join(0); } catch (InterruptedException ex) { - LOGGER.error("Exception: ", ex); + logger.error("Exception: ", ex); } } } diff --git a/src/main/java/net/snowflake/client/loader/StreamLoader.java b/src/main/java/net/snowflake/client/loader/StreamLoader.java index af96a8763..d680b0363 100644 --- a/src/main/java/net/snowflake/client/loader/StreamLoader.java +++ b/src/main/java/net/snowflake/client/loader/StreamLoader.java @@ -30,7 +30,7 @@ /** Stream Loader */ public class StreamLoader implements Loader, Runnable { - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(StreamLoader.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(StreamLoader.class); private static final String SYSTEM_PARAMETER_PREFIX = "net.snowflake.client.loader."; @@ -333,7 +333,7 @@ private void initDateFormats() { /** Starts the loader */ @Override public void start() { - LOGGER.debug("Start Loading", false); + logger.debug("Start Loading", false); // validate parameters validateParameters(); @@ -351,10 +351,10 @@ public void start() { try { if (_startTransaction) { - LOGGER.debug("Begin Transaction", false); + logger.debug("Begin Transaction", false); _processConn.createStatement().execute("begin transaction"); } else { - LOGGER.debug("No Transaction started", 
false); + logger.debug("No Transaction started", false); } } catch (SQLException ex) { abort(new Loader.ConnectionError("Failed to start Transaction", Utils.getCause(ex))); @@ -366,7 +366,7 @@ public void start() { try { if (_before != null) { - LOGGER.debug("Running Execute Before SQL", false); + logger.debug("Running Execute Before SQL", false); _processConn.createStatement().execute(_before); } } catch (SQLException ex) { @@ -379,14 +379,14 @@ public void start() { } private void validateParameters() { - LOGGER.debug("Validate Parameters", false); + logger.debug("Validate Parameters", false); if (Operation.INSERT != this._op) { if (this._keys == null || this._keys.isEmpty()) { throw new ConnectionError("Updating operations require keys"); } } setPropertyBySystemProperty(); - LOGGER.debug( + logger.debug( "Database Name: {}, Schema Name: {}, Table Name: {}, " + "Remote Stage: {}, Columns: {}, Keys: {}, Operation: {}, " + "Start Transaction: {}, OneBatch: {}, Truncate Table: {}, " @@ -427,7 +427,7 @@ String getNoise() { public void abort(RuntimeException t) { synchronized (this) { // Abort once, keep first error. - LOGGER.debug("Exception received. Aborting...", t); + logger.debug("Exception received. Aborting...", t); if (_aborted.getAndSet(true)) { return; @@ -451,14 +451,14 @@ boolean isAborted() { @Override public void rollback() { - LOGGER.debug("Rollback", false); + logger.debug("Rollback", false); try { terminate(); - LOGGER.debug("Rollback", false); + logger.debug("Rollback", false); this._processConn.createStatement().execute("rollback"); } catch (SQLException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); } } @@ -478,7 +478,7 @@ public void submitRow(final Object[] row) { byte[] data = null; try { if (!_active.get()) { - LOGGER.debug("Inactive loader. Row ignored", false); + logger.debug("Inactive loader. 
Row ignored", false); return; } @@ -502,7 +502,7 @@ public void submitRow(final Object[] row) { if (_batchRowSize > 0 && _listener.getSubmittedRowCount() > 0 && (_listener.getSubmittedRowCount() % _batchRowSize) == 0) { - LOGGER.debug( + logger.debug( "Flushing Queue: Submitted Row Count: {}, Batch Row Size: {}", _listener.getSubmittedRowCount(), _batchRowSize); @@ -522,7 +522,7 @@ public void submitRow(final Object[] row) { /** Initializes queues */ private void initQueues() { - LOGGER.debug("Init Queues", false); + logger.debug("Init Queues", false); if (_active.getAndSet(true)) { // NOP if the loader is already active return; @@ -546,7 +546,7 @@ private void initQueues() { /** Flushes data by joining PUT and PROCESS queues */ private void flushQueues() { // Terminate data loading thread. - LOGGER.debug("Flush Queues", false); + logger.debug("Flush Queues", false); try { _queueData.put(new byte[0]); _thread.join(10000); @@ -556,7 +556,7 @@ private void flushQueues() { } } catch (Exception ex) { String msg = "Failed to join StreamLoader queue: " + ex.getMessage(); - LOGGER.error(msg, ex); + logger.error(msg, ex); throw new DataError(msg, Utils.getCause(ex)); } // Put last stage on queue @@ -597,7 +597,7 @@ private void truncateTargetTable() { // TODO: could be replaced with TRUNCATE? 
_processConn.createStatement().execute("DELETE FROM " + this.getFullTableName()); } catch (SQLException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); abort(new Loader.ConnectionError(Utils.getCause(ex))); } } @@ -615,7 +615,7 @@ public void run() { this.writeBytes(data); } } catch (Exception ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); abort(new Loader.ConnectionError(Utils.getCause(ex))); } } @@ -642,25 +642,25 @@ private byte[] createCSVRecord(final Object[] data) { */ @Override public void finish() throws Exception { - LOGGER.debug("Finish Loading", false); + logger.debug("Finish Loading", false); flushQueues(); if (_is_last_finish_call) { try { if (_after != null) { - LOGGER.debug("Running Execute After SQL", false); + logger.debug("Running Execute After SQL", false); _processConn.createStatement().execute(_after); } // Loader successfully completed. Commit and return. _processConn.createStatement().execute("commit"); - LOGGER.debug("Committed", false); + logger.debug("Committed", false); } catch (SQLException ex) { try { _processConn.createStatement().execute("rollback"); } catch (SQLException ex0) { - LOGGER.debug("Failed to rollback", false); + logger.debug("Failed to rollback", false); } - LOGGER.debug(String.format("Execute After SQL failed to run: %s", _after), ex); + logger.debug(String.format("Execute After SQL failed to run: %s", _after), ex); throw new Loader.ConnectionError(Utils.getCause(ex)); } } @@ -668,19 +668,19 @@ public void finish() throws Exception { @Override public void close() { - LOGGER.debug("Close Loader", false); + logger.debug("Close Loader", false); try { this._processConn.close(); this._putConn.close(); } catch (SQLException ex) { - LOGGER.error(ex.getMessage(), ex); + logger.error(ex.getMessage(), ex); throw new ConnectionError(Utils.getCause(ex)); } } /** Set active to false (no-op if not active), add a stage with terminate flag onto the queue */ private void 
terminate() { - LOGGER.debug("Terminate Loader", false); + logger.debug("Terminate Loader", false); boolean active = _active.getAndSet(false); @@ -697,10 +697,10 @@ private void terminate() { try { queuePut(_stage); } catch (InterruptedException ex) { - LOGGER.error("Unknown Error", ex); + logger.error("Unknown Error", ex); } - LOGGER.debug("Snowflake loader terminating", false); + logger.debug("Snowflake loader terminating", false); } // If operation changes, existing stage needs to be scheduled for processing. @@ -712,14 +712,14 @@ public void resetOperation(Operation op) { return; } - LOGGER.debug("Operation is changing from {} to {}", _op, op); + logger.debug("Operation is changing from {} to {}", _op, op); _op = op; if (_stage != null) { try { queuePut(_stage); } catch (InterruptedException ex) { - LOGGER.error(_stage.getId(), ex); + logger.error(_stage.getId(), ex); } } @@ -802,27 +802,27 @@ BufferStage takeProcess() throws InterruptedException { void throttleUp() { int open = this._throttleCounter.incrementAndGet(); - LOGGER.debug("PUT Throttle Up: {}", open); + logger.debug("PUT Throttle Up: {}", open); if (open > 8) { - LOGGER.debug( + logger.debug( "Will retry scheduling file for upload after {} seconds", (Math.pow(2, open - 7))); try { Thread.sleep(1000 * ((int) Math.pow(2, open - 7))); } catch (InterruptedException ex) { - LOGGER.error("Exception occurs while waiting", ex); + logger.error("Exception occurs while waiting", ex); } } } void throttleDown() { int throttleLevel = this._throttleCounter.decrementAndGet(); - LOGGER.debug("PUT Throttle Down: {}", throttleLevel); + logger.debug("PUT Throttle Down: {}", throttleLevel); if (throttleLevel < 0) { - LOGGER.debug("Unbalanced throttle", false); + logger.debug("Unbalanced throttle", false); _throttleCounter.set(0); } - LOGGER.debug("Connector throttle {}", throttleLevel); + logger.debug("Connector throttle {}", throttleLevel); } private LoadResultListener _listener = diff --git 
a/src/main/java/net/snowflake/client/log/SFLoggerUtil.java b/src/main/java/net/snowflake/client/log/SFLoggerUtil.java index da42e0a0c..568802f3b 100644 --- a/src/main/java/net/snowflake/client/log/SFLoggerUtil.java +++ b/src/main/java/net/snowflake/client/log/SFLoggerUtil.java @@ -5,9 +5,14 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import com.google.common.base.Strings; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import org.apache.commons.logging.LogFactory; public class SFLoggerUtil { + private static final String NOT_PROVIDED_LOG = "not provided"; + private static final String PROVIDED_LOG = "provided"; + public static void initializeSnowflakeLogger() { String logger = systemGetProperty("net.snowflake.jdbc.loggerImpl"); SFLoggerFactory.LoggerImpl loggerImplementation = SFLoggerFactory.LoggerImpl.fromString(logger); @@ -29,4 +34,12 @@ public static void initializeSnowflakeLogger() { "org.apache.commons.logging.Log", "net.snowflake.client.log.JDK14JCLWrapper"); } } + + @SnowflakeJdbcInternalApi + public static String isVariableProvided(T variable) { + if (variable instanceof String) { + return (Strings.isNullOrEmpty((String) variable)) ? NOT_PROVIDED_LOG : PROVIDED_LOG; + } + return variable == null ? 
NOT_PROVIDED_LOG : PROVIDED_LOG; + } } diff --git a/src/main/java/net/snowflake/client/pooling/LogicalConnection.java b/src/main/java/net/snowflake/client/pooling/LogicalConnection.java index 76c2b328c..623f9bcf6 100644 --- a/src/main/java/net/snowflake/client/pooling/LogicalConnection.java +++ b/src/main/java/net/snowflake/client/pooling/LogicalConnection.java @@ -21,13 +21,18 @@ import java.util.Properties; import java.util.concurrent.Executor; import net.snowflake.client.jdbc.ErrorCode; +import net.snowflake.client.jdbc.SnowflakeConnectionV1; import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; /** * Logical connection is wrapper class on top of SnowflakeConnectionV1 Every method call will be * delegated to SnowflakeConnectionV1 except for close method */ class LogicalConnection implements Connection { + private static final SFLogger logger = SFLoggerFactory.getLogger(LogicalConnection.class); + /** physical connection to snowflake, instance SnowflakeConnectionV1 */ private final Connection physicalConnection; @@ -148,6 +153,8 @@ public void close() throws SQLException { if (isClosed) { return; } + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug("Closing logical connection with session id: {}", sfConnection.getSessionID()); pooledConnection.fireConnectionCloseEvent(); isClosed = true; } diff --git a/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java b/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java index 345274516..4d053c5e5 100644 --- a/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java +++ b/src/main/java/net/snowflake/client/pooling/SnowflakePooledConnection.java @@ -11,22 +11,35 @@ import javax.sql.ConnectionEventListener; import javax.sql.PooledConnection; import javax.sql.StatementEventListener; +import 
net.snowflake.client.jdbc.SnowflakeConnectionV1; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; /** Snowflake implementation of pooled connection */ public class SnowflakePooledConnection implements PooledConnection { + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakePooledConnection.class); + /** physical connection, an instance of SnowflakeConnectionV1 class */ private Connection physicalConnection; /** list of event listener registered to listen for connection event */ private final Set eventListeners; - SnowflakePooledConnection(Connection physicalConnection) { + SnowflakePooledConnection(Connection physicalConnection) throws SQLException { this.physicalConnection = physicalConnection; + + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug("Creating new pooled connection with session id: {}", sfConnection.getSessionID()); + this.eventListeners = new HashSet<>(); } @Override public Connection getConnection() throws SQLException { + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug( + "Creating new Logical Connection based on pooled connection with session id: {}", + sfConnection.getSessionID()); return new LogicalConnection(this); } @@ -55,6 +68,8 @@ public void addConnectionEventListener(ConnectionEventListener eventListener) { @Override public void close() throws SQLException { if (this.physicalConnection != null) { + SnowflakeConnectionV1 sfConnection = physicalConnection.unwrap(SnowflakeConnectionV1.class); + logger.debug("Closing pooled connection with session id: {}", sfConnection.getSessionID()); this.physicalConnection.close(); this.physicalConnection = null; } diff --git a/src/main/java/net/snowflake/client/util/SecretDetector.java b/src/main/java/net/snowflake/client/util/SecretDetector.java index 0d43ec725..454d7b7be 100644 --- 
a/src/main/java/net/snowflake/client/util/SecretDetector.java +++ b/src/main/java/net/snowflake/client/util/SecretDetector.java @@ -19,8 +19,6 @@ import net.minidev.json.JSONArray; import net.minidev.json.JSONObject; import net.minidev.json.JSONStyle; -import net.snowflake.client.log.SFLogger; -import net.snowflake.client.log.SFLoggerFactory; /** Search for credentials in sql and/or other text */ public class SecretDetector { @@ -72,13 +70,9 @@ public class SecretDetector { "(token|assertion content)" + "(['\"\\s:=]+)" + "([a-z0-9=/_\\-+]{8,})", Pattern.CASE_INSENSITIVE); - private static final int LOOK_AHEAD = 10; - // only attempt to find secrets in its leading 100Kb SNOW-30961 private static final int MAX_LENGTH = 100 * 1000; - private static final SFLogger LOGGER = SFLoggerFactory.getLogger(SecretDetector.class); - private static String[] SENSITIVE_NAMES = { "access_key_id", "accesstoken", diff --git a/src/main/java/net/snowflake/client/util/Stopwatch.java b/src/main/java/net/snowflake/client/util/Stopwatch.java new file mode 100644 index 000000000..d891d51d9 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/Stopwatch.java @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +/** Stopwatch class used to calculate the time between start and stop. */ +@SnowflakeJdbcInternalApi +public class Stopwatch { + private boolean isStarted = false; + private long startTime; + private long elapsedTime; + + /** + * Starts the Stopwatch. + * + * @throws IllegalStateException when Stopwatch is already running. + */ + public void start() { + if (isStarted) { + throw new IllegalStateException("Stopwatch is already running"); + } + + isStarted = true; + startTime = System.nanoTime(); + } + + /** + * Stops the Stopwatch. + * + * @throws IllegalStateException when Stopwatch was not yet started or is already stopped. 
+ */ + public void stop() { + if (!isStarted) { + if (startTime == 0) { + throw new IllegalStateException("Stopwatch has not been started"); + } + throw new IllegalStateException("Stopwatch is already stopped"); + } + + isStarted = false; + elapsedTime = System.nanoTime() - startTime; + } + + /** Resets the instance to it's initial state. */ + public void reset() { + isStarted = false; + startTime = 0; + elapsedTime = 0; + } + + /** Restarts the instance. */ + public void restart() { + isStarted = true; + startTime = System.nanoTime(); + elapsedTime = 0; + } + + /** + * Get the elapsed time (in ms) between the stopTime and startTime. + * + * @return elapsed milliseconds between stopTime and startTime + * @throws IllegalStateException when Stopwatch has not been started yet + */ + public long elapsedMillis() { + return elapsedNanos() / 1_000_000; + } + + /** + * Get the elapsed time (in nanoseconds) between the stopTime and startTime. + * + * @return elapsed nanoseconds between stopTime and startTime + * @throws IllegalStateException when Stopwatch has not been started yet + */ + public long elapsedNanos() { + if (isStarted) { + return (System.nanoTime() - startTime); + } + if (startTime == 0) { + throw new IllegalStateException("Stopwatch has not been started"); + } + return elapsedTime; + } + + /** + * Get the instance status. + * + * @return true if the stopwatch is running, false otherwise + */ + public boolean isStarted() { + return isStarted; + } +} diff --git a/src/main/java/net/snowflake/client/util/TimeMeasurement.java b/src/main/java/net/snowflake/client/util/TimeMeasurement.java new file mode 100644 index 000000000..390294236 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/TimeMeasurement.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeUtil; + +/** Class keeping the start and stop time in epoch microseconds. */ +@SnowflakeJdbcInternalApi +public class TimeMeasurement { + private long start; + private long end; + + /** Get the start time as epoch time in microseconds. */ + public long getStart() { + return start; + } + + /** Set the start time as current epoch time in microseconds. */ + public void setStart() { + this.start = SnowflakeUtil.getEpochTimeInMicroSeconds(); + } + + /** Get the stop time as epoch time in microseconds. */ + public long getEnd() { + return end; + } + + /** Set the stop time as current epoch time in microseconds. */ + public void setEnd() { + this.end = SnowflakeUtil.getEpochTimeInMicroSeconds(); + } + + /** + * Get the microseconds between the stop and start time. + * + * @return difference between stop and start in microseconds. If one of the variables is not + * initialized, it returns -1 + */ + public long getTime() { + if (start == 0 || end == 0) { + return -1; + } + + return end - start; + } +} diff --git a/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties b/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties index 5d9973bb1..3b68fbf69 100644 --- a/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties +++ b/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties @@ -80,5 +80,6 @@ Error message={3}, Extended error info={4} 200058=Value is too large to be stored as integer at batch index {0}. Use executeLargeBatch() instead. 200059=Invalid Connect String: {0}. 200061=GCS operation failed: Operation={0}, Error code={1}, Message={2}, Reason={3} +200062=Authentication timed out. 200063=Invalid data - Cannot be parsed and converted to structured type. 
diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index 48a77625c..413b732ff 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -8,6 +8,7 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemSetEnv; import static net.snowflake.client.jdbc.SnowflakeUtil.systemUnsetEnv; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.mockStatic; @@ -17,119 +18,117 @@ import java.nio.file.Path; import java.nio.file.Paths; import net.snowflake.client.jdbc.SnowflakeUtil; +import org.junit.After; import org.junit.Test; import org.mockito.MockedStatic; public class SFClientConfigParserTest { private static final String CONFIG_JSON = "{\"common\":{\"log_level\":\"info\",\"log_path\":\"/jdbc.log\"}}"; + private static final String CONFIG_JSON_WITH_UNKNOWN_PROPS = + "{\"common\":{\"log_level\":\"info\",\"log_path\":\"/jdbc.log\",\"unknown_inside\":\"/unknown\"},\"unknown_outside\":\"/unknown\"}"; - @Test - public void testloadSFClientConfigValidPath() { - Path configFilePath = Paths.get("config.json"); - try { - Files.write(configFilePath, CONFIG_JSON.getBytes()); - SFClientConfig actualConfig = - SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); - assertEquals("info", actualConfig.getCommonProps().getLogLevel()); - assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + private Path configFilePath; - Files.delete(configFilePath); - } catch (IOException e) { - fail("testloadSFClientConfigValidPath failed"); + @After + public void cleanup() throws IOException { + if (configFilePath != null) { + Files.deleteIfExists(configFilePath); } + + 
systemUnsetEnv(SF_CLIENT_CONFIG_ENV_NAME); + } + + @Test + public void testLoadSFClientConfigValidPath() throws IOException { + configFilePath = Paths.get("config.json"); + Files.write(configFilePath, CONFIG_JSON.getBytes()); + SFClientConfig actualConfig = + SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); } @Test - public void testloadSFClientConfigInValidPath() { + public void testLoadSFClientConfigValidPathWithUnknownProperties() throws IOException { + configFilePath = Paths.get("config.json"); + Files.write(configFilePath, CONFIG_JSON_WITH_UNKNOWN_PROPS.getBytes()); + SFClientConfig actualConfig = + SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + } + + @Test + public void testLoadSFClientConfigInValidPath() { String configFilePath = "InvalidPath"; SFClientConfig config = null; try { - SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); - fail("testloadSFClientConfigInValidPath"); // this will not be reached! + SFClientConfigParser.loadSFClientConfig(configFilePath); + fail("testLoadSFClientConfigInValidPath"); // this will not be reached! 
} catch (IOException e) { // do nothing } } @Test - public void testloadSFClientConfigInValidJson() { + public void testLoadSFClientConfigInValidJson() { try { String invalidJson = "invalidJson"; - Path configFilePath = Paths.get("config.json"); + configFilePath = Paths.get("config.json"); Files.write(configFilePath, invalidJson.getBytes()); SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); - fail("testloadSFClientConfigInValidJson"); + fail("testLoadSFClientConfigInValidJson"); } catch (IOException e) { // DO Nothing } } @Test - public void testloadSFClientConfigWithEnvVar() { - Path configFilePath = Paths.get("config.json"); - - try { - Files.write(configFilePath, CONFIG_JSON.getBytes()); - systemSetEnv(SF_CLIENT_CONFIG_ENV_NAME, "config.json"); - SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); - assertEquals("info", actualConfig.getCommonProps().getLogLevel()); - assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); - - Files.delete(configFilePath); - systemUnsetEnv(SF_CLIENT_CONFIG_ENV_NAME); - } catch (IOException e) { - fail("testloadSFClientConfigWithEnvVar failed"); - } + public void testLoadSFClientConfigWithEnvVar() throws IOException { + configFilePath = Paths.get("config.json"); + Files.write(configFilePath, CONFIG_JSON.getBytes()); + systemSetEnv(SF_CLIENT_CONFIG_ENV_NAME, "config.json"); + SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); } @Test - public void testloadSFClientConfigWithDriverLoaction() { + public void testLoadSFClientConfigWithDriverLocation() throws IOException { String configLocation = Paths.get(getConfigFilePathFromJDBCJarLocation(), SF_CLIENT_CONFIG_FILE_NAME).toString(); - Path configFilePath = Paths.get(configLocation); - - try { - Files.write(configFilePath, CONFIG_JSON.getBytes()); - SFClientConfig 
actualConfig = SFClientConfigParser.loadSFClientConfig(null); - assertEquals("info", actualConfig.getCommonProps().getLogLevel()); - assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); - - Files.delete(configFilePath); - } catch (IOException e) { - fail("testloadSFClientConfigWithClasspath failed"); - } + configFilePath = Paths.get(configLocation); + Files.write(configFilePath, CONFIG_JSON.getBytes()); + SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); + assertEquals("info", actualConfig.getCommonProps().getLogLevel()); + assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); } @Test - public void testloadSFClientConfigWithUserHome() { + public void testLoadSFClientConfigWithUserHome() throws IOException { String tmpDirectory = systemGetProperty("java.io.tmpdir"); try (MockedStatic mockedSnowflakeUtil = mockStatic(SnowflakeUtil.class)) { // mocking this as Jenkins/GH Action doesn't have write permissions on user.home directory. 
mockedSnowflakeUtil.when(() -> systemGetProperty("user.home")).thenReturn(tmpDirectory); - Path configFilePath = Paths.get(systemGetProperty("user.home"), SF_CLIENT_CONFIG_FILE_NAME); + configFilePath = Paths.get(systemGetProperty("user.home"), SF_CLIENT_CONFIG_FILE_NAME); Files.write(configFilePath, CONFIG_JSON.getBytes()); SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); assertEquals("info", actualConfig.getCommonProps().getLogLevel()); assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); - - Files.delete(configFilePath); - } catch (IOException e) { - e.printStackTrace(System.err); - fail("testloadSFClientConfigWithUserHome failed: " + e.getMessage()); } } @Test - public void testloadSFClientNoConditionsMatch() throws IOException { + public void testLoadSFClientNoConditionsMatch() throws IOException { SFClientConfig actualConfig = SFClientConfigParser.loadSFClientConfig(null); - assertTrue(actualConfig == null); + assertNull(actualConfig); } @Test - public void testgetConfigFileNameFromJDBCJarLocation() { + public void testGetConfigFileNameFromJDBCJarLocation() { String jdbcDirectoryPath = getConfigFilePathFromJDBCJarLocation(); assertTrue(jdbcDirectoryPath != null && !jdbcDirectoryPath.isEmpty()); } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java index 1af7e1534..b597c4dd0 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java @@ -54,8 +54,8 @@ public void testChunkDownloaderRetry() throws SQLException, InterruptedException } catch (SnowflakeSQLException exception) { // verify that request was retried twice before reaching max retries Mockito.verify(snowflakeChunkDownloaderSpy, Mockito.times(2)).getResultStreamProvider(); - assertTrue(exception.getMessage().contains("Max retry 
reached for the download of #chunk0")); - assertTrue(exception.getMessage().contains("retry=2")); + assertTrue(exception.getMessage().contains("Max retry reached for the download of chunk#0")); + assertTrue(exception.getMessage().contains("retry: 2")); } } } diff --git a/src/test/java/net/snowflake/client/util/StopwatchTest.java b/src/test/java/net/snowflake/client/util/StopwatchTest.java new file mode 100644 index 000000000..12449fd8f --- /dev/null +++ b/src/test/java/net/snowflake/client/util/StopwatchTest.java @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.Test; + +public class StopwatchTest { + Stopwatch stopwatch = new Stopwatch(); + + @Before + public void before() { + stopwatch = new Stopwatch(); + } + + @Test + public void testGetMillisWhenStopped() throws InterruptedException { + stopwatch.start(); + TimeUnit.MILLISECONDS.sleep(20); + stopwatch.stop(); + + assertThat(stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(50L))); + } + + @Test + public void testGetMillisWithoutStopping() throws InterruptedException { + stopwatch.start(); + TimeUnit.MILLISECONDS.sleep(20); + assertThat(stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(50L))); + } + + @Test + public void testShouldBeStarted() { + stopwatch.start(); + assertTrue(stopwatch.isStarted()); + } + + @Test + public void testShouldBeStopped() { + assertFalse(stopwatch.isStarted()); + } + + @Test + public void 
testThrowsExceptionWhenStartedTwice() { + stopwatch.start(); + + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.start()); + + assertTrue(e.getMessage().contains("Stopwatch is already running")); + } + + @Test + public void testThrowsExceptionWhenStoppedTwice() { + stopwatch.start(); + stopwatch.stop(); + + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.stop()); + + assertTrue(e.getMessage().contains("Stopwatch is already stopped")); + } + + @Test + public void testThrowsExceptionWhenStoppedWithoutStarting() { + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.stop()); + + assertTrue(e.getMessage().contains("Stopwatch has not been started")); + } + + @Test + public void testThrowsExceptionWhenElapsedMillisWithoutStarting() { + Exception e = assertThrows(IllegalStateException.class, () -> stopwatch.elapsedMillis()); + + assertTrue(e.getMessage().contains("Stopwatch has not been started")); + } + + @Test + public void testShouldReset() { + stopwatch.start(); + assertTrue(stopwatch.isStarted()); + stopwatch.reset(); + assertFalse(stopwatch.isStarted()); + } + + @Test + public void testShouldRestart() { + stopwatch.start(); + assertTrue(stopwatch.isStarted()); + stopwatch.stop(); + assertFalse(stopwatch.isStarted()); + stopwatch.restart(); + assertTrue(stopwatch.isStarted()); + } +} From e95bba2816769fc59bfb3f2e3424b449c4b9a323 Mon Sep 17 00:00:00 2001 From: Juan Martinez Ramirez <126511805+sfc-gh-jmartinez@users.noreply.github.com> Date: Wed, 12 Jun 2024 06:46:49 -0600 Subject: [PATCH 27/54] SNOW-1016467: Fixed test to support execution on Windows (#1776) * Using file separator according to current system * Fix get absolute path to support for Windows * Ignore testLoginTimeout for Windows, timeout is not thrown as expected on Windows * SNOW-1016467 Use canonical absolute path to avoid issues with absolute paths in Windows using ~ short path. 
* Deprecated methods from TestUtil.systemGetEnv required only for compatibility with old driver. * Fixed method systemSetEnv is used to set environment variables at runtime. The method System.get() and System.get("ENV_VAR_NAME") use two different maps to store their values, in Windows fails when we use System.get("ENV_VAR_NAME"), so it's required to be set in both places. * Increase Stopwatch test upper boundary for MacOS GitHub actions tolerance. --- .../java/net/snowflake/client/TestUtil.java | 3 ++- .../snowflake/client/core/EventHandler.java | 2 +- .../net/snowflake/client/core/EventUtil.java | 3 ++- .../snowflake/client/jdbc/SnowflakeUtil.java | 14 ++++++++++++++ .../snowflake/client/AbstractDriverIT.java | 8 +++++++- .../client/RunningNotOnLinuxMac.java | 13 +++++++++++++ .../java/net/snowflake/client/TestUtil.java | 9 +++++---- .../net/snowflake/client/core/EventTest.java | 2 +- .../jdbc/FileUploaderExpandFileNamesTest.java | 19 ++++++++++--------- .../client/jdbc/FileUploaderLatestIT.java | 10 ++++++++-- .../client/jdbc/RestRequestTest.java | 15 +++++++++++---- .../snowflake/client/jdbc/StatementIT.java | 2 +- .../snowflake/client/util/StopwatchTest.java | 6 ++++-- 13 files changed, 79 insertions(+), 27 deletions(-) create mode 100644 src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java diff --git a/FIPS/src/test/java/net/snowflake/client/TestUtil.java b/FIPS/src/test/java/net/snowflake/client/TestUtil.java index 3c7f04958..703d59953 100644 --- a/FIPS/src/test/java/net/snowflake/client/TestUtil.java +++ b/FIPS/src/test/java/net/snowflake/client/TestUtil.java @@ -37,7 +37,7 @@ public interface TestRunInterface { /** * System.getenv wrapper. If System.getenv raises an SecurityException, it is ignored and returns * null. - * + * @deprecated This method should be replaced by SnowflakeUtil.systemGetEnv. *

This is replicated from SnowflakeUtil.systemGetEnv, because the old driver doesn't have that * function for the tests to use it. Replace this function call with SnowflakeUtil.systemGetEnv * when it is available. @@ -45,6 +45,7 @@ public interface TestRunInterface { * @param env the environment variable name. * @return the environment variable value if set, otherwise null. */ + @Deprecated public static String systemGetEnv(String env) { try { return System.getenv(env); diff --git a/src/main/java/net/snowflake/client/core/EventHandler.java b/src/main/java/net/snowflake/client/core/EventHandler.java index 11acd00fa..e6eccc060 100644 --- a/src/main/java/net/snowflake/client/core/EventHandler.java +++ b/src/main/java/net/snowflake/client/core/EventHandler.java @@ -263,7 +263,7 @@ public void dumpLogBuffer(String identifier) { cleanupSfDumps(true); String logDumpPath = - logDumpPathPrefix + "/" + LOG_DUMP_FILE_NAME + identifier + LOG_DUMP_FILE_EXT; + logDumpPathPrefix + File.separator + LOG_DUMP_FILE_NAME + identifier + LOG_DUMP_FILE_EXT; if (!disableCompression) { logDumpPath += LOG_DUMP_COMP_EXT; diff --git a/src/main/java/net/snowflake/client/core/EventUtil.java b/src/main/java/net/snowflake/client/core/EventUtil.java index e4cde5502..d45cd0676 100644 --- a/src/main/java/net/snowflake/client/core/EventUtil.java +++ b/src/main/java/net/snowflake/client/core/EventUtil.java @@ -6,6 +6,7 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import java.io.File; import java.util.concurrent.atomic.AtomicReference; /** @@ -80,7 +81,7 @@ public static void triggerStateTransition(BasicEvent.QueryState newState, String } public static String getDumpPathPrefix() { - return DUMP_PATH_PREFIX + "/" + DUMP_SUBDIR; + return DUMP_PATH_PREFIX + File.separator + DUMP_SUBDIR; } public static String getDumpFileId() { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 
884f832ed..bdcf2af61 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -17,6 +17,7 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.lang.reflect.Field; +import java.lang.reflect.Method; import java.sql.SQLException; import java.sql.Time; import java.sql.Types; @@ -35,6 +36,7 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import net.snowflake.client.core.Constants; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.ObjectMapperFactory; @@ -706,6 +708,18 @@ public static void systemSetEnv(String key, String value) { field.setAccessible(true); Map writableEnv = (Map) field.get(env); writableEnv.put(key, value); + + // To an environment variable is set on Windows, it uses a different map to store the values + // when the system.getenv(VAR_NAME) is used its required to update in this additional place. 
+ if (Constants.getOS() == Constants.OS.WINDOWS) { + Class pe = Class.forName("java.lang.ProcessEnvironment"); + Method getenv = pe.getDeclaredMethod("getenv", String.class); + getenv.setAccessible(true); + Field props = pe.getDeclaredField("theCaseInsensitiveEnvironment"); + props.setAccessible(true); + Map writableEnvForGet = (Map) props.get(null); + writableEnvForGet.put(key, value); + } } catch (Exception e) { System.out.println("Failed to set value"); logger.error( diff --git a/src/test/java/net/snowflake/client/AbstractDriverIT.java b/src/test/java/net/snowflake/client/AbstractDriverIT.java index b44cc31ef..4a3acea23 100644 --- a/src/test/java/net/snowflake/client/AbstractDriverIT.java +++ b/src/test/java/net/snowflake/client/AbstractDriverIT.java @@ -6,7 +6,9 @@ import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; +import java.net.URISyntaxException; import java.net.URL; +import java.nio.file.Paths; import java.sql.Connection; import java.sql.Date; import java.sql.DriverManager; @@ -385,7 +387,11 @@ public static String getFullPathFileInResource(String fileName) { ClassLoader classLoader = AbstractDriverIT.class.getClassLoader(); URL url = classLoader.getResource(fileName); if (url != null) { - return url.getFile(); + try { + return Paths.get(url.toURI()).toAbsolutePath().toString(); + } catch (URISyntaxException ex) { + throw new RuntimeException("Unable to get absolute path: " + fileName); + } } else { throw new RuntimeException("No file is found: " + fileName); } diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java new file mode 100644 index 000000000..a99eaa3b7 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java @@ -0,0 +1,13 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningNotOnLinuxMac implements ConditionalIgnoreRule.IgnoreCondition { + public 
boolean isSatisfied() { + return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; + } + + public static boolean isNotRunningOnLinuxMac() { + return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; + } +} diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index 1f782ec1f..afed53dd7 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -66,13 +66,14 @@ public interface TestRunInterface { * System.getenv wrapper. If System.getenv raises an SecurityException, it is ignored and returns * null. * - *

This is replicated from SnowflakeUtil.systemGetEnv, because the old driver doesn't have that - * function for the tests to use it. Replace this function call with SnowflakeUtil.systemGetEnv - * when it is available. - * + * @deprecated This method should be replaced by SnowflakeUtil.systemGetEnv. + *

This is replicated from SnowflakeUtil.systemGetEnv, because the old driver doesn't have + * that function for the tests to use it. Replace this function call with + * SnowflakeUtil.systemGetEnv when it is available. * @param env the environment variable name. * @return the environment variable value if set, otherwise null. */ + @Deprecated public static String systemGetEnv(String env) { try { return System.getenv(env); diff --git a/src/test/java/net/snowflake/client/core/EventTest.java b/src/test/java/net/snowflake/client/core/EventTest.java index 441eee25a..e9ee978e5 100644 --- a/src/test/java/net/snowflake/client/core/EventTest.java +++ b/src/test/java/net/snowflake/client/core/EventTest.java @@ -62,7 +62,7 @@ public void testWriteEventDumpLine() throws IOException { File dumpFile = new File( EventUtil.getDumpPathPrefix() - + "/" + + File.separator + "sf_event_" + EventUtil.getDumpFileId() + ".dmp.gz"); diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java index 67f5a175b..5d57d31d4 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java @@ -7,6 +7,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Properties; @@ -31,20 +32,20 @@ public void testProcessFileNames() throws Exception { System.setProperty("user.home", folderName); String[] locations = { - folderName + "/Tes*Fil*A", - folderName + "/TestFil?B", - "~/TestFileC", + folderName + File.separator + "Tes*Fil*A", + folderName + File.separator + "TestFil?B", + "~" + File.separator + "TestFileC", "TestFileD", - folderName + "/TestFileE~" + folderName + File.separator + "TestFileE~" }; Set files = SnowflakeFileTransferAgent.expandFileNames(locations, 
null); - assertTrue(files.contains(folderName + "/TestFileA")); - assertTrue(files.contains(folderName + "/TestFileB")); - assertTrue(files.contains(folderName + "/TestFileC")); - assertTrue(files.contains(folderName + "/TestFileD")); - assertTrue(files.contains(folderName + "/TestFileE~")); + assertTrue(files.contains(folderName + File.separator + "TestFileA")); + assertTrue(files.contains(folderName + File.separator + "TestFileB")); + assertTrue(files.contains(folderName + File.separator + "TestFileC")); + assertTrue(files.contains(folderName + File.separator + "TestFileD")); + assertTrue(files.contains(folderName + File.separator + "TestFileE~")); } @Test diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java index 66b33b9ab..378234715 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java @@ -492,7 +492,10 @@ public void testPopulateStatusRowsWithSortOn() throws Exception { // upload files orders_101.csv and orders_100.csv String command = - "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; + "PUT file://" + + getFullPathFileInResource("") + + File.separator + + "orders_10*.csv @testStage"; SnowflakeFileTransferAgent sfAgent1 = new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); sfAgent1.execute(); // upload files @@ -597,7 +600,10 @@ public void testFileTransferMappingFromSourceFile() throws SQLException { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); String command = - "PUT file://" + getFullPathFileInResource("") + "/orders_10*.csv @testStage"; + "PUT file://" + + getFullPathFileInResource("") + + File.separator + + "orders_10*.csv @testStage"; SnowflakeFileTransferAgent sfAgent1 = new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); sfAgent1.execute(); diff 
--git a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java index 608f69d02..536acbc03 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java @@ -6,8 +6,10 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.junit.Assume.assumeFalse; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -17,6 +19,7 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import net.snowflake.client.RunningNotOnLinuxMac; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; @@ -521,8 +524,9 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl } } - @Test(expected = SnowflakeSQLException.class) - public void testLoginTimeout() throws IOException, SnowflakeSQLException { + @Test + public void testLoginTimeout() throws IOException { + assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -544,8 +548,11 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl try { TelemetryService.disable(); - execute(client, "/session/v1/login-request", 1, 0, 0, true, false, 10); - fail("testMaxRetries"); + assertThrows( + SnowflakeSQLException.class, + () -> { + execute(client, "/session/v1/login-request", 1, 0, 0, true, false, 10); + }); } finally { if (telemetryEnabled) { 
TelemetryService.enable(); diff --git a/src/test/java/net/snowflake/client/jdbc/StatementIT.java b/src/test/java/net/snowflake/client/jdbc/StatementIT.java index bf62a4948..be5f65a56 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementIT.java @@ -372,7 +372,7 @@ public void testExecuteBatch() throws Exception { + getFullPathFileInResource(TEST_DATA_FILE) + " @%test_batch auto_compress=false"); File tempFolder = tmpFolder.newFolder("test_downloads_folder"); - statement.addBatch("get @%test_batch file://" + tempFolder); + statement.addBatch("get @%test_batch file://" + tempFolder.getCanonicalPath()); rowCounts = statement.executeBatch(); assertThat(rowCounts.length, is(2)); diff --git a/src/test/java/net/snowflake/client/util/StopwatchTest.java b/src/test/java/net/snowflake/client/util/StopwatchTest.java index 12449fd8f..9e44ce18a 100644 --- a/src/test/java/net/snowflake/client/util/StopwatchTest.java +++ b/src/test/java/net/snowflake/client/util/StopwatchTest.java @@ -29,14 +29,16 @@ public void testGetMillisWhenStopped() throws InterruptedException { TimeUnit.MILLISECONDS.sleep(20); stopwatch.stop(); - assertThat(stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(50L))); + assertThat( + stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(500L))); } @Test public void testGetMillisWithoutStopping() throws InterruptedException { stopwatch.start(); TimeUnit.MILLISECONDS.sleep(20); - assertThat(stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(50L))); + assertThat( + stopwatch.elapsedMillis(), allOf(greaterThanOrEqualTo(10L), lessThanOrEqualTo(500L))); } @Test From 26fb3c6c36b0b54b87cf58a2746609f76dea946a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 13 Jun 2024 12:26:10 +0200 Subject: [PATCH 28/54] SNOW-1452552: Expose vector dimension in column 
metadata (#1788) --- .../client/core/SFResultSetMetaData.java | 20 +++++++++++ .../client/jdbc/SnowflakeColumnMetadata.java | 13 +++++-- .../jdbc/SnowflakeDatabaseMetaData.java | 3 ++ .../jdbc/SnowflakeResultSetMetaData.java | 18 ++++++++++ .../jdbc/SnowflakeResultSetMetaDataV1.java | 10 ++++++ .../snowflake/client/jdbc/SnowflakeUtil.java | 16 +++++++-- .../client/jdbc/DatabaseMetaDataLatestIT.java | 36 +++++++++++++++++++ .../client/jdbc/SnowflakeUtilTest.java | 3 +- 8 files changed, 114 insertions(+), 5 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index 30a680030..c39b4ec86 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -37,6 +37,8 @@ public class SFResultSetMetaData { private List precisions; + private List dimensions; + private List scales; private List nullables; @@ -143,6 +145,7 @@ public SFResultSetMetaData( this.columnTypeNames = new ArrayList<>(this.columnCount); this.columnTypes = new ArrayList<>(this.columnCount); this.precisions = new ArrayList<>(this.columnCount); + this.dimensions = new ArrayList<>(this.columnCount); this.scales = new ArrayList<>(this.columnCount); this.nullables = new ArrayList<>(this.columnCount); this.columnSrcDatabases = new ArrayList<>(this.columnCount); @@ -156,6 +159,7 @@ public SFResultSetMetaData( columnNames.add(columnMetadata.get(colIdx).getName()); columnTypeNames.add(columnMetadata.get(colIdx).getTypeName()); precisions.add(calculatePrecision(columnMetadata.get(colIdx))); + dimensions.add(calculateDimension(columnMetadata.get(colIdx))); columnTypes.add(columnMetadata.get(colIdx).getType()); scales.add(columnMetadata.get(colIdx).getScale()); nullables.add( @@ -200,6 +204,14 @@ private Integer calculatePrecision(SnowflakeColumnMetadata columnMetadata) { } } + private Integer 
calculateDimension(SnowflakeColumnMetadata columnMetadata) { + int columnType = columnMetadata.getType(); + if (columnType == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + return columnMetadata.getDimension(); + } + return 0; + } + private Integer calculateDisplaySize(SnowflakeColumnMetadata columnMetadata) { int columnType = columnMetadata.getType(); switch (columnType) { @@ -403,6 +415,14 @@ public int getPrecision(int column) { } } + public int getDimension(int column) { + if (dimensions != null && dimensions.size() >= column && column > 0) { + return dimensions.get(column - 1); + } else { + return 0; + } + } + public boolean isSigned(int column) { return (columnTypes.get(column - 1) == Types.INTEGER || columnTypes.get(column - 1) == Types.DECIMAL diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java index 9f182772e..9f1cd272e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java @@ -28,6 +28,7 @@ public class SnowflakeColumnMetadata implements Serializable { private String columnSrcDatabase; private boolean isAutoIncrement; + private int dimension; // vector type contains dimension @SnowflakeJdbcInternalApi public SnowflakeColumnMetadata( @@ -44,7 +45,8 @@ public SnowflakeColumnMetadata( String columnSrcDatabase, String columnSrcSchema, String columnSrcTable, - boolean isAutoIncrement) { + boolean isAutoIncrement, + int dimension) { this.name = name; this.type = type; this.nullable = nullable; @@ -59,11 +61,12 @@ public SnowflakeColumnMetadata( this.columnSrcSchema = columnSrcSchema; this.columnSrcTable = columnSrcTable; this.isAutoIncrement = isAutoIncrement; + this.dimension = dimension; } /** * @deprecated Use {@link SnowflakeColumnMetadata#SnowflakeColumnMetadata(String, int, boolean, - * int, int, int, String, boolean, SnowflakeType, List, String, String, String, 
boolean)} + * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean, int)} * instead */ @Deprecated @@ -194,6 +197,11 @@ public void setAutoIncrement(boolean autoIncrement) { isAutoIncrement = autoIncrement; } + @SnowflakeJdbcInternalApi + public int getDimension() { + return dimension; + } + public String toString() { StringBuilder sBuilder = new StringBuilder(); @@ -209,6 +217,7 @@ public String toString() { sBuilder.append(",schema=").append(columnSrcSchema); sBuilder.append(",table=").append(columnSrcTable); sBuilder.append((",isAutoIncrement=")).append(isAutoIncrement); + sBuilder.append((",dimension=")).append(dimension); return sBuilder.toString(); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index ff5e0529f..b50646ea7 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -1865,6 +1865,9 @@ public boolean next() throws SQLException { || columnMetadata.getType() == Types.TIME || columnMetadata.getType() == Types.TIMESTAMP) { columnSize = columnMetadata.getPrecision(); + } else if (columnMetadata.getType() == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + // For VECTOR Snowflake type we consider dimension as the column size + columnSize = columnMetadata.getDimension(); } nextRow[6] = columnSize; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java index dcc5250b5..7de89e6f5 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java @@ -13,4 +13,22 @@ public interface SnowflakeResultSetMetaData { int getInternalColumnType(int column) throws SQLException; List getColumnFields(int column) throws SQLException; + + 
/** + * Get vector dimension + * + * @param column column index + * @return vector dimension when the column is vector type or 0 when it is not vector type + * @throws SQLException when cannot get column dimension + */ + int getDimension(int column) throws SQLException; + + /** + * Get vector dimension + * + * @param columnName column name + * @return vector dimension when the column is vector type or 0 when it is not vector type + * @throws SQLException when cannot get column dimension + */ + int getDimension(String columnName) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java index 0a88b1ebd..b8cdb236b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java @@ -85,6 +85,16 @@ public List getColumnFields(int column) throws SQLException { () -> resultSetMetaData.getColumnFields(column)); } + @Override + public int getDimension(int column) throws SQLException { + return resultSetMetaData.getDimension(column); + } + + @Override + public int getDimension(String columnName) throws SQLException { + return resultSetMetaData.getDimension(getColumnIndex(columnName) + 1); + } + @Override public T unwrap(Class iface) throws SQLException { logger.trace(" T unwrap(Class iface)", false); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index bdcf2af61..8c848032e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -181,6 +181,15 @@ public static SnowflakeColumnMetadata extractColumnMetadata( int precision = colNode.path("precision").asInt(); int scale = colNode.path("scale").asInt(); int length = colNode.path("length").asInt(); + int dimension = + colNode + 
.path("dimension") + .asInt(); // vector dimension when checking columns via connection.getMetadata + int vectorDimension = + colNode + .path("vectorDimension") + .asInt(); // dimension when checking columns via resultSet.getMetadata + int finalVectorDimension = dimension > 0 ? dimension : vectorDimension; boolean fixed = colNode.path("fixed").asBoolean(); JsonNode udtOutputType = colNode.path("outputType"); JsonNode extColTypeNameNode = colNode.path("extTypeName"); @@ -223,7 +232,8 @@ public static SnowflakeColumnMetadata extractColumnMetadata( colSrcDatabase, colSrcSchema, colSrcTable, - isAutoIncrement); + isAutoIncrement, + finalVectorDimension); } static ColumnTypeInfo getSnowflakeType( @@ -560,7 +570,9 @@ static List describeFixedViewColumns( "", // database "", // schema "", - false)); // isAutoincrement + false, // isAutoincrement + 0 // dimension + )); } return rowType; diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index d3176f8b2..24d3940d7 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -2342,4 +2342,40 @@ public void testKeywordsCount() throws SQLException { assertEquals(43, metaData.getSQLKeywords().split(",").length); } } + /** Added in > 3.16.1 */ + @Test + public void testVectorDimension() throws SQLException { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table JDBC_VECTOR(text_col varchar(32), float_vec VECTOR(FLOAT, 256), int_vec VECTOR(INT, 16))"); + DatabaseMetaData metaData = connection.getMetaData(); + try (ResultSet resultSet = + metaData.getColumns( + connection.getCatalog(), + connection.getSchema().replaceAll("_", "\\\\_"), + "JDBC\\_VECTOR", + null)) { + assertTrue(resultSet.next()); + assertEquals(32, 
resultSet.getObject("COLUMN_SIZE")); + assertTrue(resultSet.next()); + assertEquals(256, resultSet.getObject("COLUMN_SIZE")); + assertTrue(resultSet.next()); + assertEquals(16, resultSet.getObject("COLUMN_SIZE")); + assertFalse(resultSet.next()); + } + + try (ResultSet resultSet = + statement.executeQuery("Select text_col, float_vec, int_vec from JDBC_VECTOR")) { + SnowflakeResultSetMetaData unwrapResultSetMetadata = + resultSet.getMetaData().unwrap(SnowflakeResultSetMetaData.class); + assertEquals(0, unwrapResultSetMetadata.getDimension("TEXT_COL")); + assertEquals(0, unwrapResultSetMetadata.getDimension(1)); + assertEquals(256, unwrapResultSetMetadata.getDimension("FLOAT_VEC")); + assertEquals(256, unwrapResultSetMetadata.getDimension(2)); + assertEquals(16, unwrapResultSetMetadata.getDimension("INT_VEC")); + assertEquals(16, unwrapResultSetMetadata.getDimension(3)); + } + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 23b96dc6c..1110ce4df 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -93,7 +93,8 @@ private static SnowflakeColumnMetadata createExpectedMetadata( rootNode.path("database").asText(), rootNode.path("schema").asText(), rootNode.path("table").asText(), - false); + false, + rootNode.path("dimension").asInt()); return expectedColumnMetadata; } From 6a95dccda713969cc8681f2c3bfa5791b4d8b97e Mon Sep 17 00:00:00 2001 From: Juan Martinez Ramirez <126511805+sfc-gh-jmartinez@users.noreply.github.com> Date: Mon, 17 Jun 2024 23:18:15 -0600 Subject: [PATCH 29/54] SNOW-1016467: Run tests on Windows in Github actions. 
(#1766) --- .github/workflows/build-test.yml | 35 +++- FIPS/pom.xml | 46 +++-- .../net/snowflake/client/RunningOnWinMac.java | 0 ci/test_windows.bat | 160 ++++++++++++++++++ pom.xml | 104 ++++++++---- 5 files changed, 291 insertions(+), 54 deletions(-) create mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java create mode 100644 ci/test_windows.bat diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 6c3022b75..b3c7c8bc2 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -36,9 +36,40 @@ jobs: WHITESOURCE_API_KEY: ${{ secrets.WHITESOURCE_API_KEY }} run: ./ci/build.sh + test-windows: + needs: build + name: ${{ matrix.cloud }} Windows java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + runs-on: windows-latest + strategy: + fail-fast: false + matrix: + cloud: [ 'AWS' ] + javaVersion: [ '8', '11', '17'] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + additionalMavenProfile: ['', '-Dthin-jar'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + java-version: ${{ matrix.javaVersion }} + distribution: 'temurin' + cache: maven + - uses: actions/setup-python@v4 + with: + python-version: '3.7' + architecture: 'x64' + - name: Tests + shell: cmd + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + CLOUD_PROVIDER: ${{ matrix.cloud }} + JDBC_TEST_CATEGORY: ${{ matrix.category }} + ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} + run: ci\\test_windows.bat + test-mac: needs: build - name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} / Test on Mac(java ${{ matrix.javaVersion }}, ${{ matrix.cloud }} ) + name: ${{ matrix.cloud }} Mac java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category 
}} runs-on: macos-13 strategy: fail-fast: false @@ -71,7 +102,7 @@ jobs: test-linux: needs: build - name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} on ${{ matrix.image }} + name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: ubuntu-latest strategy: fail-fast: false diff --git a/FIPS/pom.xml b/FIPS/pom.xml index e9934e0b5..b7ae7edcc 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -323,23 +323,6 @@ - - org.codehaus.mojo - exec-maven-plugin - ${version.plugin.exec} - - - check-shaded-content - verify - - exec - - - ${basedir}/scripts/check_content.sh - - - - @@ -662,6 +645,35 @@ + + check-content + + + !windows + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + verify + + exec + + + ${basedir}/scripts/check_content.sh + + + + + + + java-9 diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java b/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java new file mode 100644 index 000000000..e69de29bb diff --git a/ci/test_windows.bat b/ci/test_windows.bat new file mode 100644 index 000000000..4f32f7564 --- /dev/null +++ b/ci/test_windows.bat @@ -0,0 +1,160 @@ +REM +REM Tests JDBC Driver on Windows +REM +setlocal +setlocal EnableDelayedExpansion +python -m venv venv +call venv\scripts\activate +pip install -U snowflake-connector-python + +cd %GITHUB_WORKSPACE% + +if "%CLOUD_PROVIDER%"=="AZURE" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_azure.json.gpg +) else if "%CLOUD_PROVIDER%"=="GCP" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_gcp.json.gpg +) else if "%CLOUD_PROVIDER%"=="AWS" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_aws.json.gpg +) else ( + echo === unknown cloud provider + exit /b 1 +) + +gpg --quiet --batch --yes --decrypt --passphrase=%PARAMETERS_SECRET% --output parameters.json %ENCODED_PARAMETERS_FILE% + +REM DON'T 
FORGET TO include @echo off here or the password may be leaked! +echo @echo off>parameters.bat +jq -r ".testconnection | to_entries | map(\"set \(.key)=\(.value)\") | .[]" parameters.json >> parameters.bat +call parameters.bat +if %ERRORLEVEL% NEQ 0 ( + echo === failed to set the test parameters + exit /b 1 +) +echo @echo off>parametersorg.bat +jq -r ".orgconnection | to_entries | map(\"set \(.key)=\(.value)\") | .[]" parameters.json >> parametersorg.bat +call parametersorg.bat +if %ERRORLEVEL% NEQ 0 ( + echo === failed to set the org parameters + exit /b 1 +) +set SNOWFLAKE_TEST_SCHEMA=%RUNNER_TRACKING_ID:-=_%_%GITHUB_SHA% +set TARGET_SCHEMA_NAME=%SNOWFLAKE_TEST_SCHEMA% + +echo [INFO] Account: %SNOWFLAKE_TEST_ACCOUNT% +echo [INFO] User : %SNOWFLAKE_TEST_USER% +echo [INFO] Database: %SNOWFLAKE_TEST_DATABASE% +echo [INFO] Schema: %SNOWFLAKE_TEST_SCHEMA% +echo [INFO] Warehouse: %SNOWFLAKE_TEST_WAREHOUSE% +echo [INFO] Role: %SNOWFLAKE_TEST_ROLE% + +echo [INFO] Creating schema %SNOWFLAKE_TEST_SCHEMA% +pushd %GITHUB_WORKSPACE%\ci\container +python create_schema.py +popd + +REM setup log + +set CLIENT_LOG_DIR_PATH=%GITHUB_WORKSPACE%\jenkins_rt_logs +echo "[INFO] CLIENT_LOG_DIR_PATH=%CLIENT_LOG_DIR_PATH%" + +set CLIENT_LOG_FILE_PATH=%CLIENT_LOG_DIR_PATH%\ssnowflake_ssm_rt.log +echo "[INFO] CLIENT_LOG_FILE_PATH=%CLIENT_LOG_FILE_PATH%" + +set CLIENT_KNOWN_SSM_FILE_PATH=%CLIENT_LOG_DIR_PATH%\rt_jenkins_log_known_ssm.txt +echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=%CLIENT_KNOWN_SSM_FILE_PATH%" + +REM To close log analyze, just set ENABLE_CLIENT_LOG_ANALYZE to not "true", e.g. "false". +set ENABLE_CLIENT_LOG_ANALYZE=true + +REM The new complex password we use for jenkins test +set SNOWFLAKE_TEST_PASSWORD_NEW="ThisIsRandomPassword123!" 
+ +set LOG_PROPERTY_FILE=%GITHUB_WORKSPACE%\src\test\resources\logging.properties + +echo "[INFO] LOG_PROPERTY_FILE=%LOG_PROPERTY_FILE%" + +set CLIENT_DRIVER_NAME=JDBC + +powershell -Command "(Get-Content %LOG_PROPERTY_FILE%) | Foreach-Object { $_ -replace '^java.util.logging.FileHandler.pattern.*', 'java.util.logging.FileHandler.pattern = %CLIENT_LOG_FILE_PATH%' } | Set-Content %LOG_PROPERTY_FILE%" + +echo "[INFO] Create log directory" + +IF NOT EXIST %CLIENT_LOG_DIR_PATH% MD %CLIENT_LOG_DIR_PATH% 2>nul + +echo "[INFO] Delete ssm file" +IF EXIST "%CLIENT_KNOWN_SSM_FILE_PATH%" DEL /F /Q "%CLIENT_KNOWN_SSM_FILE_PATH%" + +echo "[INFO] Create ssm file" +echo.>"%CLIENT_KNOWN_SSM_FILE_PATH%" + +echo "[INFO] Finish log setup" +REM end setup log + +for /F "tokens=1,* delims==" %%i in ('set ^| findstr /I /R "^SNOWFLAKE_[^=]*$" ^| findstr /I /V /R "^SNOWFLAKE_PASS_[^=]*$" ^| sort') do ( + echo %%i=%%j +) + +echo [INFO] Starting hang_webserver.py 12345 +pushd %GITHUB_WORKSPACE%\ci\container +start /b python hang_webserver.py 12345 > hang_webserver.out 2>&1 +popd + +echo [INFO] Testing + +set MVNW_EXE=%GITHUB_WORKSPACE%\mvnw.cmd + +REM Avoid connection timeouts +set MAVEN_OPTS="-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120" +echo "MAVEN OPTIONS %MAVEN_OPTS%" + +REM Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched +cmd /c %MVNW_EXE% --batch-mode --show-version dependency:go-offline + +echo list = "%JDBC_TEST_CATEGORY%" +for %%a in ("%JDBC_TEST_CATEGORY:,=" "%") do ( + echo "Current category to execute" %%a + if /i %%a=="TestCategoryFips" ( + pushd FIPS + echo "[INFO] Run Fips tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ 
+ -Dnot-self-contained-jar ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + popd ) else ( + echo "[INFO] Run %%a tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DtestCategory=net.snowflake.client.category.%%a ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + ) +) + +echo [INFO] Dropping schema %SNOWFLAKE_TEST_SCHEMA% +pushd %GITHUB_WORKSPACE%\ci\container +python drop_schema.py +popd diff --git a/pom.xml b/pom.xml index 9e5f5c744..19b5ad10e 100644 --- a/pom.xml +++ b/pom.xml @@ -746,25 +746,6 @@ - - org.codehaus.mojo - exec-maven-plugin - - - check-shaded-content - - exec - - verify - - ${basedir}/ci/scripts/check_content.sh - - -thin - - - - - @@ -1087,22 +1068,6 @@ - - org.codehaus.mojo - exec-maven-plugin - - - check-shaded-content - - exec - - verify - - ${basedir}/ci/scripts/check_content.sh - - - - @@ -1231,6 +1196,75 @@ + + + check-content + + + !windows + + + !thin-jar + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + + exec + + verify + + ${basedir}/ci/scripts/check_content.sh + + + + + + + + + + check-content-thin + + + !windows + + + thin-jar + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + + exec + + verify + + 
${basedir}/ci/scripts/check_content.sh + + -thin + + + + + + + + qa1IT From 9f438a956d58bc94b03140d3c57f8058d5862b56 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 20 Jun 2024 10:46:33 +0200 Subject: [PATCH 30/54] SNOW-1488827: Fix SFTrustManagerIT tests (#1793) --- .../client/core/SFTrustManagerIT.java | 137 +++++++++++------- .../client/jdbc/ConnectionLatestIT.java | 4 +- 2 files changed, 85 insertions(+), 56 deletions(-) diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java index 0a9d96dd2..f30cd88e1 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java @@ -20,9 +20,12 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLHandshakeException; import net.snowflake.client.category.TestCategoryCore; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; @@ -32,22 +35,37 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +@RunWith(Parameterized.class) @Category(TestCategoryCore.class) public class SFTrustManagerIT extends BaseJDBCTest { - private static final String[] TARGET_HOSTS = { - "storage.googleapis.com", - "ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch", - "sfcsupport.snowflakecomputing.com", - "sfcsupport.us-east-1.snowflakecomputing.com", - "sfcsupport.eu-central-1.snowflakecomputing.com", - "sfc-dev1-regression.s3.amazonaws.com", - 
"sfc-ds2-customer-stage.s3.amazonaws.com", - "snowflake.okta.com", - "sfcdev2.blob.core.windows.net" - }; + private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManagerIT.class); + + public SFTrustManagerIT(String host) { + this.host = host; + } + + @Parameterized.Parameters(name = "host={0}") + public static Object[][] data() { + return new Object[][] { + // this host generates many "SSLHandshake Certificate Revocation + // check failed. Could not retrieve OCSP Response." when running in parallel CI builds + // {"storage.googleapis.com"}, + {"ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"}, + {"sfcsupport.snowflakecomputing.com"}, + {"sfcsupport.us-east-1.snowflakecomputing.com"}, + {"sfcsupport.eu-central-1.snowflakecomputing.com"}, + {"sfc-dev1-regression.s3.amazonaws.com"}, + {"sfc-ds2-customer-stage.s3.amazonaws.com"}, + {"snowflake.okta.com"}, + {"sfcdev2.blob.core.windows.net"} + }; + } private boolean defaultState; + private final String host; @Before public void setUp() { @@ -83,15 +101,13 @@ public void tearDown() throws InterruptedException { public void testOcsp() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - null, // default OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + null, // default OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** @@ -104,15 +120,13 @@ public void testOcspWithFileCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - 
HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ @@ -121,15 +135,13 @@ public void testOcspWithServerCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** @@ -141,15 +153,13 @@ public void testOcspWithoutServerCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. 
*/ @@ -159,7 +169,6 @@ public void testInvalidCacheFile() throws Throwable { SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); // a file under never exists. File ocspCacheFile = new File("NEVER_EXISTS", "NEVER_EXISTS"); - String host = TARGET_HOSTS[0]; HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -169,22 +178,40 @@ public void testInvalidCacheFile() throws Throwable { accessHost(host, client); } - private static void accessHost(String host, HttpClient client) throws IOException { - int statusCode = -1; - - HttpGet httpRequest = new HttpGet(String.format("https://%s:443/", host)); - HttpResponse response = client.execute(httpRequest); - statusCode = response.getStatusLine().getStatusCode(); + private static void accessHost(String host, HttpClient client) + throws IOException, InterruptedException { + HttpResponse response = executeWithRetries(host, client); await() .atMost(Duration.ofSeconds(10)) .until(() -> response.getStatusLine().getStatusCode(), not(equalTo(-1))); + assertThat( String.format("response code for %s", host), - statusCode, + response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(400), equalTo(403), equalTo(404), equalTo(513))); } + private static HttpResponse executeWithRetries(String host, HttpClient client) + throws IOException, InterruptedException { + // There is one host that causes SSLHandshakeException very often - let's retry + int maxRetries = host.equals("storage.googleapis.com") ? 
5 : 0; + int retries = 0; + HttpGet httpRequest = new HttpGet(String.format("https://%s:443/", host)); + while (true) { + try { + return client.execute(httpRequest); + } catch (SSLHandshakeException e) { + logger.warn("SSL handshake failed (host = {}, retries={}}", host, retries, e); + ++retries; + if (retries >= maxRetries) { + throw e; + } + Thread.sleep(retries * 1000); + } + } + } + /** * TODO: we should re-enable this https://snowflakecomputing.atlassian.net/browse/SNOW-146911 * Revoked certificate test. @Test public void testRevokedCertificate() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 02ba5a983..0e7ab4648 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -427,8 +427,10 @@ public void testQueryStatusErrorMessageAndErrorCodeChangeOnAsyncQuery() throws S await() .atMost(Duration.ofSeconds(10)) .until(() -> sfResultSet.getStatusV2().getStatus(), equalTo(QueryStatus.RUNNING)); + + // it may take more time to finish the test when running in parallel in CI builds await() - .atMost(Duration.ofSeconds(50)) + .atMost(Duration.ofSeconds(360)) .until(() -> sfResultSet.getStatusV2().getStatus(), equalTo(QueryStatus.SUCCESS)); } } From 184d6ffbc16b9a922537f9a5529ec2f7199e110d Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Fri, 21 Jun 2024 02:35:01 +0900 Subject: [PATCH 31/54] SNOW-1016470: Increase code coverage in JDBC part1 (#1759) --- .../client/core/ExecTimeTelemetryData.java | 2 +- .../java/net/snowflake/client/TestUtil.java | 16 +++ .../config/SFClientConfigParserTest.java | 1 + .../core/ExecTimeTelemetryDataTest.java | 84 ++++++++++++++ .../client/core/QueryContextCacheTest.java | 8 ++ .../client/core/SQLInputOutputTest.java | 42 +++++++ .../client/core/bind/BindExceptionTest.java | 23 
++++ .../snowflake/client/jdbc/BaseJDBCTest.java | 5 + .../client/jdbc/ResultSetAlreadyClosedIT.java | 37 +++++- .../client/jdbc/ResultSetLatestIT.java | 62 +++++++++- .../client/jdbc/SnowflakeTypeTest.java | 108 ++++++++++++++++++ .../storage/SnowflakeAzureClientLatestIT.java | 14 ++- ...ogicalConnectionAlreadyClosedLatestIT.java | 1 + .../pooling/LogicalConnectionLatestIT.java | 77 +++++++++++++ 14 files changed, 466 insertions(+), 14 deletions(-) create mode 100644 src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java create mode 100644 src/test/java/net/snowflake/client/core/SQLInputOutputTest.java create mode 100644 src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java create mode 100644 src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java diff --git a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java index a9d40a054..91d45f29f 100644 --- a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java +++ b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java @@ -154,7 +154,7 @@ public String generateTelemetry() { value.put("ProcessResultChunkStart", this.processResultChunk.getStart()); value.put("ProcessResultChunkEnd", this.processResultChunk.getEnd()); value.put("CreateResultSetStart", this.createResultSet.getStart()); - value.put("CreatResultSetEnd", this.createResultSet.getEnd()); + value.put("CreateResultSetEnd", this.createResultSet.getEnd()); value.put("QueryEnd", this.query.getEnd()); value.put("BatchID", this.batchId); value.put("QueryID", this.queryId); diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index afed53dd7..76487bcb4 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -5,9 +5,12 @@ import static org.hamcrest.CoreMatchers.is; import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.List; @@ -128,4 +131,17 @@ public static void withRandomSchema( statement.execute("DROP SCHEMA " + customSchema); } } + + public interface MethodRaisesSQLException { + void run() throws SQLException; + } + + public static void expectSnowflakeLoggedFeatureNotSupportedException(MethodRaisesSQLException f) { + try { + f.run(); + fail("must raise exception"); + } catch (SQLException ex) { + assertEquals(ex.getClass().getSimpleName(), "SnowflakeLoggedFeatureNotSupportedException"); + } + } } diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index 413b732ff..a00784f68 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -47,6 +47,7 @@ public void testLoadSFClientConfigValidPath() throws IOException { SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); assertEquals("info", actualConfig.getCommonProps().getLogLevel()); assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + assertEquals("config.json", actualConfig.getConfigFilePath()); } @Test diff --git a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java new file mode 100644 index 000000000..f7ad06b46 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java @@ -0,0 +1,84 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static 
org.junit.Assert.assertNull; + +import net.minidev.json.JSONObject; +import net.minidev.json.parser.JSONParser; +import net.minidev.json.parser.ParseException; +import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import org.junit.Test; + +public class ExecTimeTelemetryDataTest { + + @Test + public void testExecTimeTelemetryData() throws ParseException { + ExecTimeTelemetryData execTimeTelemetryData = new ExecTimeTelemetryData(); + execTimeTelemetryData.sendData = true; + execTimeTelemetryData.setBindStart(); + execTimeTelemetryData.setOCSPStatus(true); + execTimeTelemetryData.setBindEnd(); + execTimeTelemetryData.setHttpClientStart(); + execTimeTelemetryData.setHttpClientEnd(); + execTimeTelemetryData.setGzipStart(); + execTimeTelemetryData.setGzipEnd(); + execTimeTelemetryData.setQueryEnd(); + execTimeTelemetryData.setQueryId("queryid"); + execTimeTelemetryData.setProcessResultChunkStart(); + execTimeTelemetryData.setProcessResultChunkEnd(); + execTimeTelemetryData.setResponseIOStreamStart(); + execTimeTelemetryData.setResponseIOStreamEnd(); + execTimeTelemetryData.setCreateResultSetStart(); + execTimeTelemetryData.setCreateResultSetEnd(); + execTimeTelemetryData.incrementRetryCount(); + execTimeTelemetryData.setRequestId("mockId"); + execTimeTelemetryData.addRetryLocation("retry"); + + String telemetry = execTimeTelemetryData.generateTelemetry(); + JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); + JSONObject json = (JSONObject) parser.parse(telemetry); + assertNotNull(json.get("BindStart")); + assertNotNull(json.get("BindEnd")); + assertEquals(json.get("ocspEnabled"), true); + assertNotNull(json.get("HttpClientStart")); + assertNotNull(json.get("HttpClientEnd")); + assertNotNull(json.get("GzipStart")); + assertNotNull(json.get("GzipEnd")); + assertNotNull(json.get("QueryEnd")); + assertEquals(json.get("QueryID"), "queryid"); + assertNotNull(json.get("ProcessResultChunkStart")); + assertNotNull(json.get("ProcessResultChunkEnd")); + 
assertNotNull(json.get("ResponseIOStreamStart")); + assertNotNull(json.get("CreateResultSetStart")); + assertNotNull(json.get("CreateResultSetEnd")); + assertNotNull(json.get("ElapsedQueryTime")); + assertNotNull(json.get("ElapsedResultProcessTime")); + assertNull(json.get("QueryFunction")); + assertNull(json.get("BatchID")); + assertEquals(((Long) json.get("RetryCount")).intValue(), 1); + assertEquals(json.get("RequestID"), "mockId"); + assertEquals(json.get("RetryLocations"), "retry"); + assertEquals(json.get("Urgent"), true); + assertEquals(json.get("eventType"), "ExecutionTimeRecord"); + } + + @Test + public void testRetryLocation() throws ParseException { + TelemetryService.enableHTAP(); + ExecTimeTelemetryData execTimeTelemetryData = + new ExecTimeTelemetryData("queryFunction", "batchId"); + execTimeTelemetryData.addRetryLocation("hello"); + execTimeTelemetryData.addRetryLocation("world"); + execTimeTelemetryData.sendData = true; + String telemetry = execTimeTelemetryData.generateTelemetry(); + + JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); + JSONObject json = (JSONObject) parser.parse(telemetry); + assertEquals(json.get("QueryFunction"), "queryFunction"); + assertEquals(json.get("BatchID"), "batchId"); + assertNotNull(json.get("QueryStart")); + assertEquals(json.get("RetryLocations"), "hello, world"); + TelemetryService.disableHTAP(); + } +} diff --git a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java index cd841b474..862dd1c40 100644 --- a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java +++ b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java @@ -6,6 +6,9 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import org.junit.Test; @@ -217,6 +220,11 
@@ public void testSerializeRequestAndDeserializeResponseDataWithNullContext() thro qcc.deserializeQueryContextDTO(requestData); assertCacheDataWithContext(null); + + QueryContextCache mockQcc = spy(qcc); + mockQcc.deserializeQueryContextDTO(null); + verify(mockQcc).clearCache(); + verify(mockQcc, times(2)).logCacheEntries(); } private void assertCacheData() { diff --git a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java new file mode 100644 index 000000000..346d43c34 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java @@ -0,0 +1,42 @@ +package net.snowflake.client.core; + +import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; +import static org.mockito.Mockito.mock; + +import java.sql.SQLData; +import org.junit.Test; + +public class SQLInputOutputTest { + + @Test + public void testBaseSQLUnSupportedException() { + BaseSqlInput sqlInput = new ArrowSqlInput(null, null, null, null); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readCharacterStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readAsciiStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readBinaryStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readRef); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readBlob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readClob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readArray); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readURL); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readNClob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readNString); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readSQLXML); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readRowId); + } + + @Test + public void 
testJsonSqlOutPutUnSupportedTest() { + JsonSqlOutput sqloutput = new JsonSqlOutput(mock(SQLData.class), mock(SFBaseSession.class)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeRef(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeBlob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeClob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeStruct(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeArray(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeURL(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeNString(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeNClob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeRowId(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeSQLXML(null)); + } +} diff --git a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java new file mode 100644 index 000000000..f3ae88eee --- /dev/null +++ b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java @@ -0,0 +1,23 @@ +package net.snowflake.client.core.bind; + +import static org.junit.Assert.assertEquals; + +import net.snowflake.client.jdbc.telemetry.TelemetryField; +import org.junit.Test; + +public class BindExceptionTest { + + @Test + public void testBindExceptionType() { + assertEquals(BindException.Type.SERIALIZATION.field, TelemetryField.FAILED_BIND_SERIALIZATION); + assertEquals(BindException.Type.UPLOAD.field, TelemetryField.FAILED_BIND_UPLOAD); + assertEquals(BindException.Type.OTHER.field, TelemetryField.FAILED_BIND_OTHER); + } + + @Test + public void testBindExceptionConstructor() { + BindException exception = new BindException("testException", BindException.Type.SERIALIZATION); + 
assertEquals(exception.getMessage(), "testException"); + assertEquals(exception.type.field, TelemetryField.FAILED_BIND_SERIALIZATION); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java index b8bacc82b..a326dea12 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java @@ -35,6 +35,7 @@ import javax.xml.transform.Result; import javax.xml.transform.Source; import net.snowflake.client.AbstractDriverIT; +import net.snowflake.client.core.SFException; public class BaseJDBCTest extends AbstractDriverIT { // Test UUID unique per session @@ -44,6 +45,10 @@ protected interface MethodRaisesSQLException { void run() throws SQLException; } + protected interface MethodRaisesSFException { + void run() throws SFException; + } + protected interface MethodRaisesSQLClientInfoException { void run() throws SQLClientInfoException; } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java index 292d71949..d2939cc8a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java @@ -22,8 +22,9 @@ public class ResultSetAlreadyClosedIT extends BaseJDBCTest { @Test public void testQueryResultSetAlreadyClosed() throws Throwable { try (Connection connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery("select 1")) { + Statement statement = connection.createStatement()) { + ResultSet resultSet = statement.executeQuery("select 1"); + resultSet.close(); checkAlreadyClosed(resultSet); } } @@ -44,9 +45,18 @@ public void testMetadataResultSetAlreadyClosed() throws Throwable { } @Test - public void testEmptyResultSetAlreadyClosed() throws Throwable { - try (ResultSet 
resultSet = new SnowflakeResultSetV1.EmptyResultSet()) { + public void testResultSetAlreadyClosed() throws Throwable { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT 1")) { checkAlreadyClosed(resultSet); + } + } + + @Test + public void testEmptyResultSetAlreadyClosed() throws Throwable { + try (SnowflakeResultSetV1.EmptyResultSet resultSet = + new SnowflakeResultSetV1.EmptyResultSet()) { checkAlreadyClosedEmpty(resultSet); } } @@ -68,7 +78,6 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException(() -> resultSet.getDouble(1)); expectResultSetAlreadyClosedException(() -> resultSet.getBigDecimal(1)); expectResultSetAlreadyClosedException(() -> resultSet.getBytes(1)); - expectResultSetAlreadyClosedException(() -> resultSet.getString(1)); expectResultSetAlreadyClosedException(() -> resultSet.getDate(1)); expectResultSetAlreadyClosedException(() -> resultSet.getTime(1)); expectResultSetAlreadyClosedException(() -> resultSet.getTimestamp(1)); @@ -105,7 +114,13 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException(() -> resultSet.getBigDecimal("col1", 38)); expectResultSetAlreadyClosedException(resultSet::getWarnings); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).getWarnings()); + expectResultSetAlreadyClosedException(resultSet::clearWarnings); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).clearWarnings()); + expectResultSetAlreadyClosedException(resultSet::getMetaData); expectResultSetAlreadyClosedException(() -> resultSet.findColumn("col1")); @@ -119,11 +134,20 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException( () -> resultSet.setFetchDirection(ResultSet.FETCH_FORWARD)); 
expectResultSetAlreadyClosedException(() -> resultSet.setFetchSize(10)); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).setFetchSize(10)); + expectResultSetAlreadyClosedException(resultSet::getFetchDirection); expectResultSetAlreadyClosedException(resultSet::getFetchSize); expectResultSetAlreadyClosedException(resultSet::getType); expectResultSetAlreadyClosedException(resultSet::getConcurrency); + expectResultSetAlreadyClosedException( + resultSet.unwrap(SnowflakeBaseResultSet.class)::getConcurrency); + expectResultSetAlreadyClosedException(resultSet::getHoldability); + expectResultSetAlreadyClosedException( + resultSet.unwrap(SnowflakeBaseResultSet.class)::getHoldability); + expectResultSetAlreadyClosedException(resultSet::getStatement); } @@ -133,7 +157,8 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { * @param resultSet * @throws SQLException */ - private void checkAlreadyClosedEmpty(ResultSet resultSet) throws SQLException { + private void checkAlreadyClosedEmpty(SnowflakeResultSetV1.EmptyResultSet resultSet) + throws SQLException { resultSet.close(); resultSet.close(); // second close won't raise exception assertTrue(resultSet.isClosed()); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index add205145..fb55a9780 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -3,6 +3,7 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertArrayEquals; @@ -27,7 +28,6 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; -import 
java.sql.SQLFeatureNotSupportedException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; @@ -808,14 +808,64 @@ public void testCallStatementType() throws SQLException { * implemented for synchronous queries * */ @Test - public void testNewFeaturesNotSupported() throws SQLException { + public void testNewFeaturesNotSupportedExeceptions() throws SQLException { + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select 1")) { + expectSnowflakeLoggedFeatureNotSupportedException( + rs.unwrap(SnowflakeResultSet.class)::getQueryErrorMessage); + expectSnowflakeLoggedFeatureNotSupportedException( + rs.unwrap(SnowflakeResultSet.class)::getStatus); + expectSnowflakeLoggedFeatureNotSupportedException(() -> rs.getArray(1)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getList(1, String.class)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getArray(1, String.class)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getMap(1, String.class)); + + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getUnicodeStream(1)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getUnicodeStream("column1")); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateAsciiStream("column1", new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateBinaryStream("column1", new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateCharacterStream("column1", new FakeReader(), 5L)); + + 
expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateAsciiStream(1, new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateBinaryStream(1, new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateCharacterStream(1, new FakeReader(), 5L)); + } + } + + @Test + public void testInvalidUnWrap() throws SQLException { try (Connection con = init(); ResultSet rs = con.createStatement().executeQuery("select 1")) { try { - rs.unwrap(SnowflakeResultSet.class).getQueryErrorMessage(); - } catch (SQLFeatureNotSupportedException ex) { - // catch SQLFeatureNotSupportedException - assertEquals("This function is only supported for asynchronous queries.", ex.getMessage()); + rs.unwrap(SnowflakeUtil.class); + } catch (SQLException ex) { + assertEquals( + ex.getMessage(), + "net.snowflake.client.jdbc.SnowflakeResultSetV1 not unwrappable from net.snowflake.client.jdbc.SnowflakeUtil"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java new file mode 100644 index 000000000..29c58b787 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java @@ -0,0 +1,108 @@ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.jdbc.SnowflakeType.convertStringToType; +import static net.snowflake.client.jdbc.SnowflakeType.getJavaType; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import java.math.BigDecimal; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Types; +import org.junit.Test; + +public class SnowflakeTypeTest { + + @Test + public void testSnowflakeType() { + 
assertEquals(getJavaType(SnowflakeType.CHAR, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals(getJavaType(SnowflakeType.INTEGER, false), SnowflakeType.JavaDataType.JAVA_LONG); + assertEquals( + getJavaType(SnowflakeType.FIXED, false), SnowflakeType.JavaDataType.JAVA_BIGDECIMAL); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals(getJavaType(SnowflakeType.TIME, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_LTZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_NTZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_TZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals(getJavaType(SnowflakeType.DATE, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.BOOLEAN, false), SnowflakeType.JavaDataType.JAVA_BOOLEAN); + assertEquals(getJavaType(SnowflakeType.VECTOR, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals(getJavaType(SnowflakeType.BINARY, false), SnowflakeType.JavaDataType.JAVA_BYTES); + assertEquals(getJavaType(SnowflakeType.ANY, false), SnowflakeType.JavaDataType.JAVA_OBJECT); + assertEquals(getJavaType(SnowflakeType.OBJECT, true), SnowflakeType.JavaDataType.JAVA_OBJECT); + assertEquals(getJavaType(SnowflakeType.OBJECT, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals( + getJavaType(SnowflakeType.GEOMETRY, false), SnowflakeType.JavaDataType.JAVA_STRING); + } + + @Test + public void testConvertStringToType() { + assertEquals(convertStringToType(null), Types.NULL); + assertEquals(convertStringToType("decimal"), Types.DECIMAL); + assertEquals(convertStringToType("int"), Types.INTEGER); + assertEquals(convertStringToType("integer"), Types.INTEGER); + assertEquals(convertStringToType("byteint"), Types.INTEGER); + 
assertEquals(convertStringToType("smallint"), Types.SMALLINT); + assertEquals(convertStringToType("bigint"), Types.BIGINT); + assertEquals(convertStringToType("double"), Types.DOUBLE); + assertEquals(convertStringToType("double precision"), Types.DOUBLE); + assertEquals(convertStringToType("real"), Types.REAL); + assertEquals(convertStringToType("char"), Types.CHAR); + assertEquals(convertStringToType("character"), Types.CHAR); + assertEquals(convertStringToType("varbinary"), Types.VARBINARY); + assertEquals(convertStringToType("boolean"), Types.BOOLEAN); + assertEquals(convertStringToType("date"), Types.DATE); + assertEquals(convertStringToType("time"), Types.TIME); + assertEquals(convertStringToType("timestamp"), Types.TIMESTAMP); + assertEquals(convertStringToType("datetime"), Types.TIMESTAMP); + assertEquals(convertStringToType("timestamp_ntz"), Types.TIMESTAMP); + assertEquals(convertStringToType("timestamp_ltz"), Types.TIMESTAMP_WITH_TIMEZONE); + assertEquals(convertStringToType("timestamp_tz"), Types.TIMESTAMP_WITH_TIMEZONE); + assertEquals(convertStringToType("variant"), Types.OTHER); + assertEquals(convertStringToType("object"), Types.JAVA_OBJECT); + assertEquals(convertStringToType("vector"), SnowflakeUtil.EXTRA_TYPES_VECTOR); + assertEquals(convertStringToType("array"), Types.ARRAY); + assertEquals(convertStringToType("default"), Types.OTHER); + } + + @Test + public void testJavaSQLTypeFind() { + assertNull(SnowflakeType.JavaSQLType.find(200000)); + } + + @Test + public void testJavaSQLTypeLexicalValue() { + assertEquals(SnowflakeType.lexicalValue(1.0f, null, null, null, null), "0x1.0p0"); + assertEquals(SnowflakeType.lexicalValue(new BigDecimal(100.0), null, null, null, null), "100"); + assertEquals( + SnowflakeType.lexicalValue("random".getBytes(), null, null, null, null), "72616E646F6D"); + } + + @Test + public void testJavaTypeToSFType() throws SnowflakeSQLException { + assertEquals(SnowflakeType.javaTypeToSFType(0, null), SnowflakeType.ANY); + 
assertThrows( + SnowflakeSQLLoggedException.class, + () -> { + SnowflakeType.javaTypeToSFType(2000000, null); + }); + } + + @Test + public void testJavaTypeToClassName() throws SQLException { + assertEquals(SnowflakeType.javaTypeToClassName(Types.DECIMAL), BigDecimal.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), java.sql.Time.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.BOOLEAN), Boolean.class.getName()); + assertThrows( + SQLFeatureNotSupportedException.class, + () -> { + SnowflakeType.javaTypeToClassName(-2000000); + }); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java index c667b7a3f..93539005a 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java @@ -1,8 +1,11 @@ package net.snowflake.client.jdbc.cloud.storage; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; +import static org.mockito.Mockito.spy; +import com.microsoft.azure.storage.blob.ListBlobItem; import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.ConditionalIgnoreRule; @@ -17,7 +20,6 @@ import org.junit.Test; public class SnowflakeAzureClientLatestIT extends BaseJDBCTest { - @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { @@ -37,4 +39,14 @@ public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { } } } + + @Test + public void testCloudExceptionTest() { + Iterable mockList = null; + AzureObjectSummariesIterator iterator = new AzureObjectSummariesIterator(mockList); + AzureObjectSummariesIterator spyIterator 
= spy(iterator); + UnsupportedOperationException ex = + assertThrows(UnsupportedOperationException.class, () -> spyIterator.remove()); + assertEquals(ex.getMessage(), "remove() method not supported"); + } } diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java index ac50f7608..ce93928ac 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java @@ -49,5 +49,6 @@ public void testLogicalConnectionAlreadyClosed() throws SQLException { expectConnectionAlreadyClosedException(() -> logicalConnection.setSchema("fakedb")); expectConnectionAlreadyClosedException( () -> logicalConnection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED)); + expectConnectionAlreadyClosedException(() -> logicalConnection.createArrayOf("faketype", null)); } } diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java index bf05325e0..d25cdb485 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java @@ -6,8 +6,14 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.sql.CallableStatement; import java.sql.Clob; @@ -370,6 +376,77 @@ public void testDatabaseMetaData() throws SQLException { 
} } + @Test + public void testLogicalConnectionWhenPhysicalConnectionThrowsErrors() throws SQLException { + Connection connection = mock(Connection.class); + SnowflakePooledConnection snowflakePooledConnection = mock(SnowflakePooledConnection.class); + when(snowflakePooledConnection.getPhysicalConnection()).thenReturn(connection); + SQLException sqlException = new SQLException("mocking error"); + when(connection.createStatement()).thenThrow(sqlException); + when(connection.createStatement(1, 2, 3)).thenThrow(sqlException); + + when(connection.prepareStatement("mocksql")).thenThrow(sqlException); + when(connection.prepareCall("mocksql")).thenThrow(sqlException); + when(connection.prepareCall("mocksql", 1, 2, 3)).thenThrow(sqlException); + when(connection.nativeSQL("mocksql")).thenThrow(sqlException); + when(connection.getAutoCommit()).thenThrow(sqlException); + when(connection.getMetaData()).thenThrow(sqlException); + when(connection.isReadOnly()).thenThrow(sqlException); + when(connection.getCatalog()).thenThrow(sqlException); + when(connection.getTransactionIsolation()).thenThrow(sqlException); + when(connection.getWarnings()).thenThrow(sqlException); + when(connection.prepareCall("mocksql", 1, 2)).thenThrow(sqlException); + when(connection.getTypeMap()).thenThrow(sqlException); + when(connection.getHoldability()).thenThrow(sqlException); + when(connection.createClob()).thenThrow(sqlException); + when(connection.getClientInfo("mocksql")).thenThrow(sqlException); + when(connection.getClientInfo()).thenThrow(sqlException); + when(connection.createArrayOf("mock", null)).thenThrow(sqlException); + when(connection.getSchema()).thenThrow(sqlException); + when(connection.getNetworkTimeout()).thenThrow(sqlException); + when(connection.isWrapperFor(Connection.class)).thenThrow(sqlException); + + doThrow(sqlException).when(connection).setAutoCommit(false); + doThrow(sqlException).when(connection).commit(); + doThrow(sqlException).when(connection).rollback(); + 
doThrow(sqlException).when(connection).setReadOnly(false); + doThrow(sqlException).when(connection).clearWarnings(); + doThrow(sqlException).when(connection).setSchema(null); + doThrow(sqlException).when(connection).abort(null); + doThrow(sqlException).when(connection).setNetworkTimeout(null, 1); + + LogicalConnection logicalConnection = new LogicalConnection(snowflakePooledConnection); + + assertThrows(SQLException.class, logicalConnection::createStatement); + assertThrows(SQLException.class, () -> logicalConnection.createStatement(1, 2, 3)); + assertThrows(SQLException.class, () -> logicalConnection.nativeSQL("mocksql")); + assertThrows(SQLException.class, logicalConnection::getAutoCommit); + assertThrows(SQLException.class, logicalConnection::getMetaData); + assertThrows(SQLException.class, logicalConnection::isReadOnly); + assertThrows(SQLException.class, logicalConnection::getCatalog); + assertThrows(SQLException.class, logicalConnection::getTransactionIsolation); + assertThrows(SQLException.class, logicalConnection::getWarnings); + assertThrows(SQLException.class, () -> logicalConnection.prepareCall("mocksql")); + assertThrows(SQLException.class, logicalConnection::getTypeMap); + assertThrows(SQLException.class, logicalConnection::getHoldability); + assertThrows(SQLException.class, logicalConnection::createClob); + assertThrows(SQLException.class, () -> logicalConnection.getClientInfo("mocksql")); + assertThrows(SQLException.class, logicalConnection::getClientInfo); + assertThrows(SQLException.class, () -> logicalConnection.createArrayOf("mock", null)); + assertThrows(SQLException.class, logicalConnection::getSchema); + assertThrows(SQLException.class, logicalConnection::getNetworkTimeout); + assertThrows(SQLException.class, () -> logicalConnection.isWrapperFor(Connection.class)); + assertThrows(SQLException.class, () -> logicalConnection.setAutoCommit(false)); + assertThrows(SQLException.class, logicalConnection::rollback); + assertThrows(SQLException.class, 
() -> logicalConnection.setReadOnly(false)); + assertThrows(SQLException.class, logicalConnection::clearWarnings); + assertThrows(SQLException.class, () -> logicalConnection.setSchema(null)); + assertThrows(SQLException.class, () -> logicalConnection.abort(null)); + assertThrows(SQLException.class, () -> logicalConnection.setNetworkTimeout(null, 1)); + + verify(snowflakePooledConnection, times(26)).fireConnectionErrorEvent(sqlException); + } + private SnowflakeConnectionPoolDataSource setProperties( SnowflakeConnectionPoolDataSource poolDataSource) { poolDataSource.setUrl(properties.get("uri")); From ab4880c4f0e95d4437602a2b98020b03e989d763 Mon Sep 17 00:00:00 2001 From: Juan Martinez Ramirez <126511805+sfc-gh-jmartinez@users.noreply.github.com> Date: Mon, 24 Jun 2024 00:26:48 -0600 Subject: [PATCH 32/54] SNOW-1016467: Enable matrix for Azure and GCP cloud providers. (#1777) --- .github/workflows/build-test.yml | 24 ++++++++---------- .github/workflows/parameters_azure.json.gpg | Bin 0 -> 408 bytes .../net/snowflake/client/RunningOnGCP.java | 12 +++++++++ .../client/jdbc/ConnectionFipsIT.java | 6 +++++ ci/test.sh | 1 + ci/test_windows.bat | 1 + .../net/snowflake/client/RunningNotOnAWS.java | 12 +++++++++ .../snowflake/client/RunningNotOnAzure.java | 12 +++++++++ .../net/snowflake/client/RunningNotOnGCP.java | 12 +++++++++ .../client/jdbc/ConnectionLatestIT.java | 7 +++++ 10 files changed, 74 insertions(+), 13 deletions(-) create mode 100644 .github/workflows/parameters_azure.json.gpg create mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnAWS.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnAzure.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnGCP.java diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index b3c7c8bc2..ad3d196af 100644 --- a/.github/workflows/build-test.yml +++ 
b/.github/workflows/build-test.yml @@ -38,20 +38,19 @@ jobs: test-windows: needs: build - name: ${{ matrix.cloud }} Windows java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: windows-latest strategy: fail-fast: false matrix: - cloud: [ 'AWS' ] - javaVersion: [ '8', '11', '17'] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] - additionalMavenProfile: ['', '-Dthin-jar'] + additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v4 with: - java-version: ${{ matrix.javaVersion }} + java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - uses: actions/setup-python@v4 @@ -62,27 +61,26 @@ jobs: shell: cmd env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} - CLOUD_PROVIDER: ${{ matrix.cloud }} + CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} JDBC_TEST_CATEGORY: ${{ matrix.category }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: ci\\test_windows.bat test-mac: needs: build - name: ${{ matrix.cloud }} Mac java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: macos-13 strategy: fail-fast: false matrix: - cloud: [ 'AWS' ] - javaVersion: [ '8', '11', '17'] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] category: 
['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] - additionalMavenProfile: ['', '-Dthin-jar'] + additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v4 with: - java-version: ${{ matrix.javaVersion }} + java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - uses: actions/setup-python@v4 @@ -95,7 +93,7 @@ jobs: shell: bash env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} - CLOUD_PROVIDER: ${{ matrix.cloud }} + CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} JDBC_TEST_CATEGORY: ${{ matrix.category }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: /usr/local/bin/bash ./ci/test_mac.sh @@ -108,7 +106,7 @@ jobs: fail-fast: false matrix: image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17' ] - cloud: [ 'AWS' ] + cloud: [ 'AWS', 'AZURE', 'GCP' ] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] steps: diff --git a/.github/workflows/parameters_azure.json.gpg b/.github/workflows/parameters_azure.json.gpg new file mode 100644 index 0000000000000000000000000000000000000000..ea6fbdb51a554341c8fd56c7194cf189c785425a GIT binary patch literal 408 zcmV;J0cZY<4Fm}T2zK4?Ssz{?nE%qi#sRsA{{$a3C#~;|2eijdZwWFgnch=ra(Pf5 z@Y?x+8lNiVyXXT;lL)@O0@+kRk$|)A{C|>p;BlXx!E^(?hK1r~Clxm64G%2Q;Jr>j zZVVzaK#q}nXf5W!;vKIcywMkq|Cs_`h%vwQ(&ESG8?0iLtiR_f6;LBc=joyokd2~I zrM}s&j0``@^*wj`(IkZcEZdg^6Rc8^8%wZfyoDTGJR>Q1&laRSM!vZTP)Zyn0;#esG@hq52T~bW)*|80Je?f?be-ucovXwqV;1_)R*o*A7Az$e~T8# zdiT3Eer!fllrK%f!6+-zQIG{VD)Z{e=*KxDe6y~Ms@w7aV-vbMf(G5H75QuD^Rjol ClFH!# literal 0 HcmV?d00001 diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java 
b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java new file mode 100644 index 000000000..c902dc5f9 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningOnGCP implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && cloudProvider.equalsIgnoreCase("GCP"); + } +} diff --git a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java index a10924432..c1509a6a8 100644 --- a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java +++ b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java @@ -21,6 +21,7 @@ import javax.net.ssl.HttpsURLConnection; import net.snowflake.client.AbstractDriverIT; import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGCP; import net.snowflake.client.RunningOnGithubActions; import net.snowflake.client.category.TestCategoryFips; import net.snowflake.client.core.SecurityUtil; @@ -289,7 +290,12 @@ public void testConnectUsingKeyPair() throws Exception { DriverManager.getConnection(uri, properties).close(); } + /** + * Test case for connecting with FIPS and executing a query. 
+ * Currently ignored execution on GCP due to exception thrown "SSlException Could not generate XDH keypair" + */ @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGCP.class) public void connectWithFipsAndQuery() throws SQLException { try (Connection con = getConnection()) { Statement statement = con.createStatement(); diff --git a/ci/test.sh b/ci/test.sh index 49a999d41..03c66c502 100755 --- a/ci/test.sh +++ b/ci/test.sh @@ -58,6 +58,7 @@ for name in "${!TARGET_TEST_IMAGES[@]}"; do -e BUILD_NUMBER \ -e JDBC_TEST_CATEGORY \ -e ADDITIONAL_MAVEN_PROFILE \ + -e CLOUD_PROVIDER \ -e is_old_driver \ --add-host=snowflake.reg.local:${IP_ADDR} \ --add-host=s3testaccount.reg.local:${IP_ADDR} \ diff --git a/ci/test_windows.bat b/ci/test_windows.bat index 4f32f7564..4a5a8ebe3 100644 --- a/ci/test_windows.bat +++ b/ci/test_windows.bat @@ -46,6 +46,7 @@ echo [INFO] Database: %SNOWFLAKE_TEST_DATABASE% echo [INFO] Schema: %SNOWFLAKE_TEST_SCHEMA% echo [INFO] Warehouse: %SNOWFLAKE_TEST_WAREHOUSE% echo [INFO] Role: %SNOWFLAKE_TEST_ROLE% +echo [INFO] PROVIDER: %CLOUD_PROVIDER% echo [INFO] Creating schema %SNOWFLAKE_TEST_SCHEMA% pushd %GITHUB_WORKSPACE%\ci\container diff --git a/src/test/java/net/snowflake/client/RunningNotOnAWS.java b/src/test/java/net/snowflake/client/RunningNotOnAWS.java new file mode 100644 index 000000000..70f54ab8f --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnAWS.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnAWS implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("AWS"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAzure.java b/src/test/java/net/snowflake/client/RunningNotOnAzure.java new file mode 100644 index 000000000..e2a00966c --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnAzure.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnAzure implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("Azure"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGCP.java b/src/test/java/net/snowflake/client/RunningNotOnGCP.java new file mode 100644 index 000000000..7a5c7aafb --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnGCP.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnGCP implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("GCP"); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 0e7ab4648..a76f7fdf2 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -47,6 +47,7 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningNotOnAWS; import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; import net.snowflake.client.category.TestCategoryConnection; @@ -1169,7 +1170,13 @@ public void testReadOnly() throws Throwable { } } + /** + * Test case for the method testDownloadStreamWithFileNotFoundException. This test verifies that a + * SQLException is thrown when attempting to download a file that does not exist. It verifies that + * the error code is ErrorCode.S3_OPERATION_ERROR so only runs on AWS. 
+ */ @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnAWS.class) public void testDownloadStreamWithFileNotFoundException() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { From 66279cad1dbf232d51640c581540f4df22f527ea Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 24 Jun 2024 11:02:25 +0200 Subject: [PATCH 33/54] SNOW-1446174: Accept 513 next to 403 for OCSP tests (#1801) --- .../client/jdbc/ConnectionWithOCSPModeIT.java | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java index 00978b0d5..04c9c9311 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java @@ -24,6 +24,7 @@ import net.snowflake.client.category.TestCategoryConnection; import net.snowflake.client.core.SFOCSPException; import net.snowflake.client.core.SFTrustManager; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import org.junit.Ignore; @@ -108,7 +109,7 @@ public void testValidityExpiredOCSPResponseFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -146,7 +147,7 @@ public void testNoOCSPResponderURLFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), 
httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -183,7 +184,7 @@ public void testValidityExpiredOCSPResponseInsecure() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -198,7 +199,7 @@ public void testCertAttachedInvalidFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -234,7 +235,7 @@ public void testUnknownOCSPCertFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -293,7 +294,7 @@ public void testOCSPCacheServerTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -332,7 +333,7 @@ public void testOCSPResponderTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -368,7 +369,7 @@ public void testOCSPResponder403FailOpen() 
{ } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -429,4 +430,8 @@ public void testWrongHost() { instanceOf(SSLHandshakeException.class))); } } + + private static Matcher httpStatus403Or513() { + return anyOf(containsString("HTTP status=403"), containsString("HTTP status=513")); + } } From 6d11e4ffb9536b1e4eee3d50d1904c3d0b642bf4 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 24 Jun 2024 15:16:46 +0200 Subject: [PATCH 34/54] SNOW-1495591: Support getObject on vector column (#1799) --- .../client/core/SFArrowResultSet.java | 6 ++- .../client/core/SFJsonResultSet.java | 2 + .../core/arrow/VectorTypeConverter.java | 7 ++- .../client/jdbc/ResultSetVectorLatestIT.java | 49 +++++++++++++++++++ 4 files changed, 62 insertions(+), 2 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index 02f16fff1..74e4c41db 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -37,6 +37,7 @@ import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; +import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.client.jdbc.telemetry.TelemetryField; @@ -559,6 +560,10 @@ public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { @Override public Object getObject(int columnIndex) throws SFException { + int type = 
resultSetMetaData.getColumnType(columnIndex); + if (type == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + return getString(columnIndex); + } ArrowVectorConverter converter = currentChunkIterator.getCurrentConverter(columnIndex - 1); int index = currentChunkIterator.getCurrentRowInRecordBatch(); wasNull = converter.isNull(index); @@ -566,7 +571,6 @@ public Object getObject(int columnIndex) throws SFException { converter.setUseSessionTimezone(useSessionTimezone); converter.setSessionTimeZone(sessionTimeZone); Object obj = converter.toObject(index); - int type = resultSetMetaData.getColumnType(columnIndex); boolean isStructuredType = resultSetMetaData.isStructuredTypeColumn(columnIndex); if (type == Types.STRUCT && isStructuredType) { if (converter instanceof VarCharConverter) { diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 2232eea00..1011870df 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -18,6 +18,7 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -53,6 +54,7 @@ public Object getObject(int columnIndex) throws SFException { switch (type) { case Types.VARCHAR: case Types.CHAR: + case SnowflakeUtil.EXTRA_TYPES_VECTOR: return getString(columnIndex); case Types.BINARY: diff --git a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java index 2e9dbd82d..ae7a492a0 100644 --- a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java @@ -1,5 +1,6 @@ package 
net.snowflake.client.core.arrow; +import java.util.List; import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeType; @@ -22,6 +23,10 @@ public Object toObject(int index) throws SFException { @Override public String toString(int index) throws SFException { - return vector.getObject(index).toString(); + List object = vector.getObject(index); + if (object == null) { + return null; + } + return object.toString(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java index 5af26db35..bbc145516 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java @@ -151,10 +151,59 @@ public void testGetFloatVectorFromTable() throws SQLException { } } + /** Added in > 3.16.1 */ + @Test + public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + enforceQueryResultFormat(stmt); + Integer[] intVector = {-1, 5}; + Float[] floatVector = {-1.2f, 5.1f, 15.87f}; + try (ResultSet resultSet = + stmt.executeQuery( + "select " + + vectorToString(intVector, "int") + + ", " + + vectorToString(floatVector, "float") + + ", " + + nullVectorToString("int") + + ", " + + nullVectorToString("float"))) { + + assertTrue(resultSet.next()); + assertGetObjectAndGetStringBeTheSame(resultSet, "[-1,5]", 1); + String floatArrayRepresentation = + "json".equals(queryResultFormat) + // in json we have slightly different format that we accept in the result + ? 
"[-1.200000,5.100000,15.870000]" + : "[-1.2,5.1,15.87]"; + assertGetObjectAndGetStringBeTheSame(resultSet, floatArrayRepresentation, 2); + assertGetObjectAndGetStringAreNull(resultSet, 3); + assertGetObjectAndGetStringAreNull(resultSet, 4); + } + } + } + + private static void assertGetObjectAndGetStringBeTheSame( + ResultSet resultSet, String intArrayRepresentation, int columnIndex) throws SQLException { + assertEquals(intArrayRepresentation, resultSet.getString(columnIndex)); + assertEquals(intArrayRepresentation, resultSet.getObject(columnIndex)); + } + + private static void assertGetObjectAndGetStringAreNull(ResultSet resultSet, int columnIndex) + throws SQLException { + assertNull(resultSet.getString(columnIndex)); + assertNull(resultSet.getObject(columnIndex)); + } + private String vectorToString(T[] vector, String vectorType) { return Arrays.toString(vector) + "::vector(" + vectorType + ", " + vector.length + ")"; } + private String nullVectorToString(String vectorType) { + return "null::vector(" + vectorType + ", 2)"; + } + private void enforceQueryResultFormat(Statement stmt) throws SQLException { String sql = String.format( From ccee1b1dead6d2cb39a270b1cd5bfea874c2b8cc Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Mon, 24 Jun 2024 16:33:46 +0200 Subject: [PATCH 35/54] SNOW-1454054 - Read connection configuration from file. (#1780) * SNOW-1454054 - Read connection configuration from file. 
--- parent-pom.xml | 4 + .../client/config/ConnectionParameters.java | 26 +++ .../config/SFConnectionConfigParser.java | 149 ++++++++++++++++++ .../client/jdbc/SnowflakeDriver.java | 49 +++++- .../client/jdbc/SnowflakeSQLException.java | 4 + .../net/snowflake/client/RunningNotOnWin.java | 9 ++ .../config/SFConnectionConfigParserTest.java | 133 ++++++++++++++++ .../FileConnectionConfigurationLatestIT.java | 52 ++++++ thin_public_pom.xml | 4 + 9 files changed, 426 insertions(+), 4 deletions(-) create mode 100644 src/main/java/net/snowflake/client/config/ConnectionParameters.java create mode 100644 src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnWin.java create mode 100644 src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java create mode 100644 src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java diff --git a/parent-pom.xml b/parent-pom.xml index 1c5ab3c2f..8642fe429 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -528,6 +528,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.dataformat + jackson-dataformat-toml + com.google.api gax diff --git a/src/main/java/net/snowflake/client/config/ConnectionParameters.java b/src/main/java/net/snowflake/client/config/ConnectionParameters.java new file mode 100644 index 000000000..5fa97ac91 --- /dev/null +++ b/src/main/java/net/snowflake/client/config/ConnectionParameters.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.config; + +import java.util.Properties; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +public class ConnectionParameters { + private final String url; + private final Properties params; + + public ConnectionParameters(String uri, Properties params) { + this.url = uri; + this.params = params; + } + + public String getUrl() { + return url; + } + + public Properties getParams() { + return params; + } +} diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java new file mode 100644 index 000000000..9040fa392 --- /dev/null +++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java @@ -0,0 +1,149 @@ +package net.snowflake.client.config; + +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetEnv; + +import com.fasterxml.jackson.dataformat.toml.TomlMapper; +import com.google.common.base.Strings; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import net.snowflake.client.core.Constants; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class SFConnectionConfigParser { + + private static final SFLogger logger = SFLoggerFactory.getLogger(SFConnectionConfigParser.class); + private static final TomlMapper mapper = new TomlMapper(); + public static final String SNOWFLAKE_HOME_KEY = "SNOWFLAKE_HOME"; + public static final 
String SNOWFLAKE_DIR = ".snowflake"; + public static final String SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY = + "SNOWFLAKE_DEFAULT_CONNECTION_NAME"; + public static final String DEFAULT = "default"; + public static final String SNOWFLAKE_TOKEN_FILE_PATH = "/snowflake/session/token"; + + private static Map loadDefaultConnectionConfiguration( + String defaultConnectionName) throws SnowflakeSQLException { + String configDirectory = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_HOME_KEY)) + .orElse(Paths.get(System.getProperty("user.home"), SNOWFLAKE_DIR).toString()); + Path configFilePath = Paths.get(configDirectory, "connections.toml"); + + if (Files.exists(configFilePath)) { + logger.debug( + "Reading connection parameters from file using key: {} []", + configFilePath, + defaultConnectionName); + Map parametersMap = readParametersMap(configFilePath); + Map defaultConnectionParametersMap = parametersMap.get(defaultConnectionName); + return defaultConnectionParametersMap; + } else { + logger.debug("Connection configuration file does not exist"); + return new HashMap<>(); + } + } + + private static Map readParametersMap(Path configFilePath) + throws SnowflakeSQLException { + try { + File file = new File(configFilePath.toUri()); + varifyFilePermissionSecure(configFilePath); + return mapper.readValue(file, Map.class); + } catch (IOException ex) { + throw new SnowflakeSQLException(ex, "Problem during reading a configuration file."); + } + } + + private static void varifyFilePermissionSecure(Path configFilePath) + throws IOException, SnowflakeSQLException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + PosixFileAttributeView posixFileAttributeView = + Files.getFileAttributeView(configFilePath, PosixFileAttributeView.class); + if (!posixFileAttributeView.readAttributes().permissions().stream() + .allMatch( + o -> + Arrays.asList(PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_READ) + .contains(o))) { + logger.error( + "Reading from file {} is not safe because 
of insufficient permissions", configFilePath); + throw new SnowflakeSQLException( + String.format( + "Reading from file %s is not safe because of insufficient permissions", + configFilePath)); + } + } + } + + public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { + String defaultConnectionName = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); + Map fileConnectionConfiguration = + loadDefaultConnectionConfiguration(defaultConnectionName); + + if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { + Properties conectionProperties = new Properties(); + conectionProperties.putAll(fileConnectionConfiguration); + + String url = + Optional.ofNullable(fileConnectionConfiguration.get("account")) + .map(ac -> createUrl(ac, fileConnectionConfiguration)) + .orElse(null); + logger.debug("Url created using parameters from connection configuration file: {}", url); + + if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) + && fileConnectionConfiguration.get("token") == null) { + Path path = + Paths.get( + Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) + .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); + logger.debug("Token used in connect is read from file: {}", path); + try { + String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); + if (!token.isEmpty()) { + putPropertyIfNotNull(conectionProperties, "token", token.trim()); + } else { + logger.warn("The token has empty value"); + } + } catch (IOException ex) { + throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); + } + } + return new ConnectionParameters(url, conectionProperties); + } else { + return null; + } + } + + private static String createUrl(String account, Map fileConnectionConfiguration) { + String host = String.format("%s.snowflakecomputing.com", account); + String port = fileConnectionConfiguration.get("port"); + 
String protocol = fileConnectionConfiguration.get("protocol"); + if (Strings.isNullOrEmpty(port)) { + if ("https".equals(protocol)) { + port = "443"; + } else { + port = "80"; + } + } + return String.format("jdbc:snowflake://%s:%s", host, port); + } + + private static void putPropertyIfNotNull(Properties props, Object key, Object value) { + if (key != null && value != null) { + props.put(key, value); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 6baba4a57..73f201ac2 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -14,7 +14,12 @@ import java.sql.SQLFeatureNotSupportedException; import java.util.List; import java.util.Properties; +import net.snowflake.client.config.ConnectionParameters; +import net.snowflake.client.config.SFConnectionConfigParser; import net.snowflake.client.core.SecurityUtil; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ResourceBundleManager; import net.snowflake.common.core.SqlState; @@ -26,6 +31,8 @@ * loading */ public class SnowflakeDriver implements Driver { + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDriver.class); + public static final String AUTO_CONNECTION_STRING_PREFIX = "jdbc:snowflake:auto"; static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); @@ -200,18 +207,52 @@ public boolean acceptsURL(String url) { */ @Override public Connection connect(String url, Properties info) throws SQLException { - if (url == null) { + ConnectionParameters connectionParameters = + overrideByFileConnectionParametersIfAutoConfiguration(url, info); + + if (connectionParameters.getUrl() == null) { // expected return format per the JDBC spec for 
java.sql.Driver#connect() throw new SnowflakeSQLException("Unable to connect to url of 'null'."); } - if (!SnowflakeConnectString.hasSupportedPrefix(url)) { + if (!SnowflakeConnectString.hasSupportedPrefix(connectionParameters.getUrl())) { return null; // expected return format per the JDBC spec for java.sql.Driver#connect() } - SnowflakeConnectString conStr = SnowflakeConnectString.parse(url, info); + SnowflakeConnectString conStr = + SnowflakeConnectString.parse( + connectionParameters.getUrl(), connectionParameters.getParams()); if (!conStr.isValid()) { throw new SnowflakeSQLException("Connection string is invalid. Unable to parse."); } - return new SnowflakeConnectionV1(url, info); + return new SnowflakeConnectionV1( + connectionParameters.getUrl(), connectionParameters.getParams()); + } + + private static ConnectionParameters overrideByFileConnectionParametersIfAutoConfiguration( + String url, Properties info) throws SnowflakeSQLException { + if (url != null && url.contains(AUTO_CONNECTION_STRING_PREFIX)) { + // Connect using connection configuration file + ConnectionParameters connectionParameters = + SFConnectionConfigParser.buildConnectionParameters(); + if (connectionParameters == null) { + throw new SnowflakeSQLException( + "Unavailable connection configuration parameters expected for auto configuration using file"); + } + return connectionParameters; + } else { + return new ConnectionParameters(url, info); + } + } + + /** + * Connect method using connection configuration file + * + * @return connection + * @throws SQLException if failed to create a snowflake connection + */ + @SnowflakeJdbcInternalApi + public Connection connect() throws SQLException { + logger.debug("Execute internal method connect() without parameters"); + return connect(AUTO_CONNECTION_STRING_PREFIX, null); } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java index 
660e83134..a88829ec6 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java @@ -172,6 +172,10 @@ public SnowflakeSQLException(String reason) { super(reason); } + public SnowflakeSQLException(Throwable ex, String message) { + super(message, ex); + } + public String getQueryId() { return queryId; } diff --git a/src/test/java/net/snowflake/client/RunningNotOnWin.java b/src/test/java/net/snowflake/client/RunningNotOnWin.java new file mode 100644 index 000000000..ce5cdf7d1 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnWin.java @@ -0,0 +1,9 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningNotOnWin implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + return Constants.getOS() != Constants.OS.WINDOWS; + } +} diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java new file mode 100644 index 000000000..e68e68fa0 --- /dev/null +++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -0,0 +1,133 @@ +package net.snowflake.client.config; + +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assume.assumeFalse; + +import com.fasterxml.jackson.dataformat.toml.TomlMapper; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.FileAttribute; +import 
java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import net.snowflake.client.RunningNotOnLinuxMac; +import net.snowflake.client.core.Constants; +import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.jdbc.SnowflakeUtil; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class SFConnectionConfigParserTest { + + private Path tempPath = null; + private TomlMapper tomlMapper = new TomlMapper(); + + @Before + public void setUp() throws IOException { + tempPath = Files.createTempDirectory(".snowflake"); + } + + @After + public void close() throws IOException { + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY); + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + Files.walk(tempPath).map(Path::toFile).forEach(File::delete); + Files.delete(tempPath); + } + + @Test + public void testLoadSFConnectionConfigWrongConfigurationName() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "unknown"); + prepareConnectionConfigurationTomlFile(null, true); + ConnectionParameters connectionParameters = + SFConnectionConfigParser.buildConnectionParameters(); + assertNull(connectionParameters); + } + + @Test + public void testLoadSFConnectionConfigInValidPath() throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, Paths.get("unknownPath").toString()); + prepareConnectionConfigurationTomlFile(null, true); + assertNull(SFConnectionConfigParser.buildConnectionParameters()); + } + + @Test + public void testLoadSFConnectionConfigWithTokenFromFile() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + 
SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true); + + ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals(tokenFile.toString(), data.getParams().get("token_file_path")); + assertEquals("testToken", data.getParams().get("token")); + } + + @Test + public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), false); + assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } + + private void prepareConnectionConfigurationTomlFile( + Map moreParameters, boolean onlyUserPermission) throws IOException { + Path path = Paths.get(tempPath.toString(), "connections.toml"); + Path filePath = createFilePathWithPermission(path, onlyUserPermission); + File file = filePath.toFile(); + + Map configuration = new HashMap(); + Map configurationParams = new HashMap(); + configurationParams.put("account", "snowaccount.us-west-2.aws"); + configurationParams.put("user", "user1"); + configurationParams.put("token", "testToken"); + configurationParams.put("port", "443"); + + if (moreParameters != null) { + moreParameters.forEach((k, v) -> configurationParams.put(k, v)); + } + configuration.put("default", configurationParams); + tomlMapper.writeValue(file, configuration); + + if (configurationParams.containsKey("token_file_path")) { + Path tokenFilePath = + createFilePathWithPermission( + 
Paths.get(configurationParams.get("token_file_path").toString()), onlyUserPermission); + Files.write(tokenFilePath, "token_from_file".getBytes()); + } + } + + private Path createFilePathWithPermission(Path path, boolean onlyUserPermission) + throws IOException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + FileAttribute> fileAttribute = + onlyUserPermission + ? PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-------")) + : PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrw----")); + return Files.createFile(path, fileAttribute); + } else { + return Files.createFile(path); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java new file mode 100644 index 000000000..734446c92 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.After; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +/** This test could be run only on environment where file connection.toml is configured */ +@Ignore +public class FileConnectionConfigurationLatestIT { + + @After + public void cleanUp() { + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + } + + @Test + public void testThrowExceptionIfConfigurationDoesNotExist() { + SnowflakeUtil.systemSetEnv("SNOWFLAKE_DEFAULT_CONNECTION_NAME", "non-existent"); + Assert.assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); + } + + @Test + public void testSimpleConnectionUsingFileConfigurationToken() throws SQLException { + verifyConnetionToSnowflake("aws-oauth"); + } + + @Test + public void testSimpleConnectionUsingFileConfigurationTokenFromFile() throws SQLException { + verifyConnetionToSnowflake("aws-oauth-file"); + } + + private static void verifyConnetionToSnowflake(String connectionName) throws SQLException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, connectionName); + try (Connection con = + DriverManager.getConnection(SnowflakeDriver.AUTO_CONNECTION_STRING_PREFIX, null); + Statement statement = con.createStatement(); + ResultSet resultSet = statement.executeQuery("show parameters")) { + Assert.assertTrue(resultSet.next()); + } + } +} diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 239e31e34..e15a4e3c4 100644 --- a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -140,6 +140,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.dataformat + jackson-dataformat-toml + com.google.api gax From 
a4db3096c3282eb0c8aa7b86229d063a4f9ae694 Mon Sep 17 00:00:00 2001 From: Piotr Bulawa Date: Tue, 25 Jun 2024 09:57:34 +0200 Subject: [PATCH 36/54] SNOW-1490931: Preparation for Java 21 (#1796) --- .github/workflows/build-test.yml | 6 +++--- Jenkinsfile | 2 +- ci/_init.sh | 5 ++++- ci/image/Dockerfile.jdbc-centos7-openjdk-test | 20 ++++++++++++++++++- parent-pom.xml | 7 +++++++ 5 files changed, 34 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index ad3d196af..90b03180f 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -43,7 +43,7 @@ jobs: strategy: fail-fast: false matrix: - runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: [''] steps: @@ -73,7 +73,7 @@ jobs: strategy: fail-fast: false matrix: - runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: [''] steps: @@ -105,7 +105,7 @@ jobs: strategy: fail-fast: false matrix: - image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17' ] + image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ] cloud: [ 
'AWS', 'AZURE', 'GCP' ] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] diff --git a/Jenkinsfile b/Jenkinsfile index 5e62aab1b..8e5925b8c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -56,7 +56,7 @@ timestamps { e.printStackTrace() } - jdkToParams = ['openjdk8': 'jdbc-centos7-openjdk8', 'openjdk11': 'jdbc-centos7-openjdk11', 'openjdk17': 'jdbc-centos7-openjdk17'].collectEntries { jdk, image -> + jdkToParams = ['openjdk8': 'jdbc-centos7-openjdk8', 'openjdk11': 'jdbc-centos7-openjdk11', 'openjdk17': 'jdbc-centos7-openjdk17', 'openjdk21': 'jdbc-centos7-openjdk21'].collectEntries { jdk, image -> return [(jdk): [ string(name: 'client_git_branch', value: scmInfo.GIT_BRANCH), string(name: 'client_git_commit', value: scmInfo.GIT_COMMIT), diff --git a/ci/_init.sh b/ci/_init.sh index c91f03c31..5df299949 100755 --- a/ci/_init.sh +++ b/ci/_init.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/usr/local/bin/env bash set -e export PLATFORM=$(echo $(uname) | tr '[:upper:]' '[:lower:]') @@ -23,6 +23,7 @@ declare -A TEST_IMAGE_NAMES=( [$DRIVER_NAME-centos7-openjdk8]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk8-test:$TEST_IMAGE_VERSION [$DRIVER_NAME-centos7-openjdk11]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk11-test:$TEST_IMAGE_VERSION [$DRIVER_NAME-centos7-openjdk17]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk17-test:$TEST_IMAGE_VERSION + [$DRIVER_NAME-centos7-openjdk21]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk21-test:$TEST_IMAGE_VERSION ) export TEST_IMAGE_NAMES @@ -30,11 +31,13 @@ declare -A TEST_IMAGE_DOCKERFILES=( [$DRIVER_NAME-centos7-openjdk8]=jdbc-centos7-openjdk-test [$DRIVER_NAME-centos7-openjdk11]=jdbc-centos7-openjdk-test [$DRIVER_NAME-centos7-openjdk17]=jdbc-centos7-openjdk-test + [$DRIVER_NAME-centos7-openjdk21]=jdbc-centos7-openjdk-test ) 
declare -A TEST_IMAGE_BUILD_ARGS=( [$DRIVER_NAME-centos7-openjdk8]="--target jdbc-centos7-openjdk-yum --build-arg=JDK_PACKAGE=java-1.8.0-openjdk-devel" [$DRIVER_NAME-centos7-openjdk11]="--target jdbc-centos7-openjdk-yum --build-arg=JDK_PACKAGE=java-11-openjdk-devel" # pragma: allowlist secret [$DRIVER_NAME-centos7-openjdk17]="--target jdbc-centos7-openjdk17" + [$DRIVER_NAME-centos7-openjdk21]="--target jdbc-centos7-openjdk21" ) diff --git a/ci/image/Dockerfile.jdbc-centos7-openjdk-test b/ci/image/Dockerfile.jdbc-centos7-openjdk-test index 15e351530..e6adfb975 100644 --- a/ci/image/Dockerfile.jdbc-centos7-openjdk-test +++ b/ci/image/Dockerfile.jdbc-centos7-openjdk-test @@ -88,4 +88,22 @@ RUN export JAVA_HOME=/opt/jdk-17 && \ -Dnot-self-contained-jar \ --batch-mode --fail-never compile && \ mv $HOME/.m2 /home/user && \ - chmod -R 777 /home/user/.m2 \ No newline at end of file + chmod -R 777 /home/user/.m2 + +###### OpenJDK 21 from archive (not available in yum) +FROM jdbc-centos7-openjdk-base AS jdbc-centos7-openjdk21 + +# Java +RUN curl -o - https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz | tar xfz - -C /opt && \ + ln -s /opt/jdk-21.0.2 /opt/jdk-21 + +RUN sed -i /usr/local/bin/entrypoint.sh -e '/^exec/i export JAVA_HOME=/opt/jdk-21' +RUN sed -i /usr/local/bin/entrypoint.sh -e '/^exec/i export PATH=$JAVA_HOME/bin:$PATH' + +RUN export JAVA_HOME=/opt/jdk-21 && \ + cd /root && \ + mvn -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar \ + --batch-mode --fail-never compile && \ + mv $HOME/.m2 /home/user && \ + chmod -R 777 /home/user/.m2 diff --git a/parent-pom.xml b/parent-pom.xml index 8642fe429..db166fa9d 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -28,6 +28,7 @@ 1.74 1.0.2.4 1.0.5 + 1.14.17 1.1 3.33.0 1.2 @@ -482,6 +483,12 @@ ${mockito.version} test + + net.bytebuddy + byte-buddy + ${bytebuddy.version} + test + 
org.awaitility awaitility From d4504f8b7780a2504712b21569bb29ccaf755073 Mon Sep 17 00:00:00 2001 From: Dawid Heyman Date: Fri, 28 Jun 2024 13:30:37 +0200 Subject: [PATCH 37/54] SNOW-1163203: Increased Max LOB size in metadata (#1806) --- .../client/core/ObjectMapperFactory.java | 5 +- .../jdbc/SnowflakeDatabaseMetaData.java | 22 +- .../client/jdbc/SnowflakeStatementV1.java | 5 +- .../client/jdbc/DatabaseMetaDataIT.java | 8 +- .../jdbc/DatabaseMetaDataInternalIT.java | 9 +- .../DatabaseMetaDataInternalLatestIT.java | 8 +- .../client/jdbc/DatabaseMetaDataLatestIT.java | 20 +- .../client/jdbc/LobSizeLatestIT.java | 253 +++++++++++++++++ .../client/jdbc/MaxLobSizeLatestIT.java | 262 ++---------------- .../client/jdbc/ResultSetLatestIT.java | 9 +- 10 files changed, 336 insertions(+), 265 deletions(-) create mode 100644 src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java diff --git a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java index e7b1056ba..0f9a7b01f 100644 --- a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java +++ b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java @@ -11,8 +11,9 @@ */ public class ObjectMapperFactory { @SnowflakeJdbcInternalApi - // Snowflake allows up to 16M string size and returns base64 encoded value that makes it up to 23M - public static final int DEFAULT_MAX_JSON_STRING_LEN = 23_000_000; + // Snowflake allows up to 128M (after updating Max LOB size) string size and returns base64 + // encoded value that makes it up to 180M + public static final int DEFAULT_MAX_JSON_STRING_LEN = 180_000_000; @SnowflakeJdbcInternalApi public static final String MAX_JSON_STRING_LENGTH_JVM = diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index b50646ea7..acfb3e4f7 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -35,6 +35,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; import net.snowflake.client.core.ObjectMapperFactory; @@ -140,6 +141,12 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { "VECTOR", "VIEW"); + private static final String MAX_VARCHAR_BINARY_SIZE_PARAM_NAME = + "VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"; + + // Defaults to 16MB + private static final int DEFAULT_MAX_LOB_SIZE = 16777216; + private final Connection connection; private final SFBaseSession session; @@ -911,14 +918,17 @@ public boolean supportsOpenStatementsAcrossRollback() throws SQLException { public int getMaxBinaryLiteralLength() throws SQLException { logger.trace("int getMaxBinaryLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); - return 8388608; + return getMaxCharLiteralLength() / 2; // hex instead of octal, thus divided by 2 } @Override public int getMaxCharLiteralLength() throws SQLException { logger.trace("int getMaxCharLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); - return 16777216; + Optional maxLiteralLengthFromSession = + Optional.ofNullable( + (Integer) session.getOtherParameter(MAX_VARCHAR_BINARY_SIZE_PARAM_NAME)); + return maxLiteralLengthFromSession.orElse(DEFAULT_MAX_LOB_SIZE); } @Override @@ -1348,9 +1358,9 @@ else if (i == 0) { typeName.substring(typeName.indexOf('(') + 1, typeName.indexOf(')'))); nextRow[16] = char_octet_len; } else if (type == Types.CHAR || type == Types.VARCHAR) { - nextRow[16] = 16777216; + nextRow[16] = getMaxCharLiteralLength(); } else if (type == Types.BINARY || type == Types.VARBINARY) { - nextRow[16] = 8388608; + nextRow[16] = getMaxBinaryLiteralLength(); } } else { nextRow[16] = null; @@ -3570,9 +3580,9 @@ public ResultSet 
getFunctionColumns( typeName.substring(typeName.indexOf('(') + 1, typeName.indexOf(')'))); nextRow[13] = char_octet_len; } else if (type == Types.CHAR || type == Types.VARCHAR) { - nextRow[13] = 16777216; + nextRow[13] = getMaxCharLiteralLength(); } else if (type == Types.BINARY || type == Types.VARBINARY) { - nextRow[13] = 8388608; + nextRow[13] = getMaxBinaryLiteralLength(); } } else { nextRow[13] = null; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java index 3d8b8c464..5016c175b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java @@ -68,9 +68,6 @@ class SnowflakeStatementV1 implements Statement, SnowflakeStatement { // timeout in seconds private int queryTimeout = 0; - // max field size limited to 16MB - private final int maxFieldSize = 16777216; - SFBaseStatement sfBaseStatement; private boolean poolable; @@ -640,7 +637,7 @@ public ResultSet getGeneratedKeys() throws SQLException { public int getMaxFieldSize() throws SQLException { logger.trace("getMaxFieldSize()", false); raiseSQLExceptionIfStatementIsClosed(); - return maxFieldSize; + return connection.getMetaData().getMaxCharLiteralLength(); } @Override diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java index 0a52b3df1..2ea144f3c 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java @@ -59,6 +59,10 @@ public class DatabaseMetaDataIT extends BaseJDBCTest { + " $$\n" + " ;"; + public static final int EXPECTED_MAX_CHAR_LENGTH = 16777216; + + public static final int EXPECTED_MAX_BINARY_LENGTH = 8388608; + @Test public void testGetConnection() throws SQLException { try (Connection connection = getConnection()) { @@ -698,9 +702,9 @@ public 
void testDatabaseMetadata() throws SQLException { assertEquals("$", metaData.getExtraNameCharacters()); assertEquals("\"", metaData.getIdentifierQuoteString()); assertEquals(0, getSizeOfResultSet(metaData.getIndexInfo(null, null, null, true, true))); - assertEquals(8388608, metaData.getMaxBinaryLiteralLength()); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, metaData.getMaxBinaryLiteralLength()); assertEquals(255, metaData.getMaxCatalogNameLength()); - assertEquals(16777216, metaData.getMaxCharLiteralLength()); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, metaData.getMaxCharLiteralLength()); assertEquals(255, metaData.getMaxColumnNameLength()); assertEquals(0, metaData.getMaxColumnsInGroupBy()); assertEquals(0, metaData.getMaxColumnsInIndex()); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java index d0eed3c5f..ec590b066 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -59,7 +61,7 @@ static void initMetaData(Connection con) throws SQLException { st.execute("create or replace database JDBC_DB2"); st.execute("create or replace schema JDBC_SCHEMA21"); st.execute("create or replace table JDBC_TBL211(colA string)"); - st.execute("create or replace table JDBC_BIN(bin1 binary, bin2 binary(100))"); + st.execute("create or replace table JDBC_BIN(bin1 binary(8388608), bin2 binary(100))"); // st.execute("create or replace table JDBC_TBL211(colA string(25) NOT NULL DEFAULT // 'defstring')"); @@ -111,7 
+113,7 @@ public void testGetColumn() throws SQLException { resultSet = databaseMetaData.getColumns(null, "JDBC_SCHEMA21", "JDBC_BIN", "BIN1"); resultSet.next(); - assertEquals(8388608, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(1, getSizeOfResultSet(resultSet) + 1); resultSet = databaseMetaData.getColumns(null, "JDBC_SCHEMA21", "JDBC_BIN", "BIN2"); @@ -187,8 +189,7 @@ public void testGetFunctions() throws SQLException { // test each column return the right value resultSet = databaseMetaData.getFunctions("JDBC_DB1", "JDBC_SCHEMA11", "JDBCFUNCTEST111"); - DatabaseMetaDataIT.verifyResultSetMetaDataColumns( - resultSet, DBMetadataResultSetMetadata.GET_FUNCTIONS); + verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_FUNCTIONS); resultSet.next(); assertEquals("JDBC_DB1", resultSet.getString("FUNCTION_CAT")); assertEquals("JDBC_SCHEMA11", resultSet.getString("FUNCTION_SCHEM")); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java index 97e67683a..15701ca17 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java @@ -91,7 +91,7 @@ public void testGetFunctionColumns() throws SQLException { + "sharedCol decimal)"); statement.execute( "create or replace function JDBC_DB1.JDBC_SCHEMA11.FUNC112 " - + "() RETURNS TABLE(colA string, colB decimal, bin2 binary, sharedCol decimal) COMMENT= 'returns " + + "() RETURNS TABLE(colA string(16777216), colB decimal, bin2 binary(8388608), sharedCol decimal) COMMENT= 'returns " + "table of 4 columns'" + " as 'select JDBC_DB1.JDBC_SCHEMA11.JDBC_TBL111.colA, JDBC_DB1.JDBC_SCHEMA11.JDBC_TBL111.colB, " + "JDBC_DB1.JDBC_SCHEMA11.BIN_TABLE.bin2, JDBC_DB1.JDBC_SCHEMA11.BIN_TABLE.sharedCol from JDBC_DB1" @@ -173,7 
+173,8 @@ public void testGetFunctionColumns() throws SQLException { assertEquals(10, resultSet.getInt("RADIX")); assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals( + databaseMetaData.getMaxCharLiteralLength(), resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); assertEquals("", resultSet.getString("IS_NULLABLE")); assertEquals( @@ -213,7 +214,8 @@ public void testGetFunctionColumns() throws SQLException { assertEquals(10, resultSet.getInt("RADIX")); assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); - assertEquals(8388608, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals( + databaseMetaData.getMaxBinaryLiteralLength(), resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); assertEquals("", resultSet.getString("IS_NULLABLE")); assertEquals( diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index 24d3940d7..bebe3d8f4 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_CHAR_LENGTH; import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.NumericFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.StringFunctionsSupported; @@ -772,8 
+774,8 @@ public void testGetFunctionColumns() throws Exception { "create or replace table JDBC_TBL111(colA string, colB decimal, colC " + "timestamp)"); /* Create a UDF that returns a table made up of 4 columns from 2 different tables, joined together */ statement.execute( - "create or replace function FUNC112 () RETURNS TABLE(colA string, colB decimal, bin2" - + " binary, sharedCol decimal) COMMENT= 'returns table of 4 columns' as 'select" + "create or replace function FUNC112 () RETURNS TABLE(colA string(16777216), colB decimal, bin2 " + + "binary(8388608) , sharedCol decimal) COMMENT= 'returns table of 4 columns' as 'select" + " JDBC_TBL111.colA, JDBC_TBL111.colB, BIN_TABLE.bin2, BIN_TABLE.sharedCol from" + " JDBC_TBL111 inner join BIN_TABLE on JDBC_TBL111.colB =BIN_TABLE.sharedCol'"); DatabaseMetaData metaData = connection.getMetaData(); @@ -877,7 +879,7 @@ public void testGetFunctionColumns() throws Exception { assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); // char octet length column is not supported and always returns 0 - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); // is_nullable column is not supported and always returns empty string assertEquals("", resultSet.getString("IS_NULLABLE")); @@ -927,7 +929,7 @@ public void testGetFunctionColumns() throws Exception { assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); // char octet length column is not supported and always returns 0 - assertEquals(8388608, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); // 
is_nullable column is not supported and always returns empty string assertEquals("", resultSet.getString("IS_NULLABLE")); @@ -1222,8 +1224,8 @@ public void testGetColumns() throws Throwable { statement.execute( "create or replace table " + targetTable - + "(C1 int, C2 varchar(100), C3 string default '', C4 number(18,4), C5 double," - + " C6 boolean, C7 date not null, C8 time, C9 timestamp_ntz(7), C10 binary,C11" + + "(C1 int, C2 varchar(100), C3 string(16777216) default '', C4 number(18,4), C5 double," + + " C6 boolean, C7 date not null, C8 time, C9 timestamp_ntz(7), C10 binary(8388608),C11" + " variant, C12 timestamp_ltz(8), C13 timestamp_tz(3))"); DatabaseMetaData metaData = connection.getMetaData(); @@ -1290,14 +1292,14 @@ public void testGetColumns() throws Throwable { assertEquals("C3", resultSet.getString("COLUMN_NAME")); assertEquals(Types.VARCHAR, resultSet.getInt("DATA_TYPE")); assertEquals("VARCHAR", resultSet.getString("TYPE_NAME")); - assertEquals(16777216, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); assertEquals(0, resultSet.getInt("NUM_PREC_RADIX")); assertEquals(ResultSetMetaData.columnNullable, resultSet.getInt("NULLABLE")); assertEquals("", resultSet.getString("REMARKS")); assertEquals("", resultSet.getString("COLUMN_DEF")); - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); assertEquals("YES", resultSet.getString("IS_NULLABLE")); assertNull(resultSet.getString("SCOPE_CATALOG")); @@ -1465,7 +1467,7 @@ public void testGetColumns() throws Throwable { assertEquals("C10", resultSet.getString("COLUMN_NAME")); assertEquals(Types.BINARY, resultSet.getInt("DATA_TYPE")); assertEquals("BINARY", resultSet.getString("TYPE_NAME")); - assertEquals(8388608, resultSet.getInt("COLUMN_SIZE")); + 
assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); assertEquals(0, resultSet.getInt("NUM_PREC_RADIX")); assertEquals(ResultSetMetaData.columnNullable, resultSet.getInt("NULLABLE")); diff --git a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java new file mode 100644 index 000000000..33ab5e772 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.core.ObjectMapperFactory; +import net.snowflake.client.core.UUIDUtils; +import org.apache.commons.text.RandomStringGenerator; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +@Category(TestCategoryStatement.class) +public class LobSizeLatestIT extends BaseJDBCTest { + + // Max LOB size is testable from version 3.15.0 and above. 
+ private static int maxLobSize = 16 * 1024 * 1024; + private static int largeLobSize = maxLobSize / 2; + private static int mediumLobSize = largeLobSize / 2; + private static int originLobSize = mediumLobSize / 2; + private static int smallLobSize = 16; + + private static Map LobSizeStringValues = + new HashMap() { + { + put(smallLobSize, generateRandomString(smallLobSize)); + put(originLobSize, generateRandomString(originLobSize)); + put(mediumLobSize, generateRandomString(mediumLobSize)); + put(largeLobSize, generateRandomString(largeLobSize)); + put(maxLobSize, generateRandomString(maxLobSize)); + } + }; + + @BeforeClass + public static void setUp() { + System.setProperty( + // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); + } + + @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") + public static Collection data() { + int[] lobSizes = + new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; + String[] resultFormats = new String[] {"Arrow", "JSON"}; + List ret = new ArrayList<>(); + for (int i = 0; i < lobSizes.length; i++) { + for (int j = 0; j < resultFormats.length; j++) { + ret.add(new Object[] {lobSizes[i], resultFormats[j]}); + } + } + return ret; + } + + private final int lobSize; + + private final String resultFormat; + + public LobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { + this.lobSize = lobSize; + this.resultFormat = resultFormat; + + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); + } + } + + private static String tableName = "my_lob_test"; + private static String executeInsert = "insert into " + tableName + " (c1, c2, c3) values ("; + private static String executePreparedStatementInsert = executeInsert + "?, ?, ?)"; + private static String selectQuery = "select * from " + 
tableName + " where c3="; + + private static String generateRandomString(int stringSize) { + RandomStringGenerator randomStringGenerator = + new RandomStringGenerator.Builder().withinRange('a', 'z').build(); + return randomStringGenerator.generate(stringSize); + } + + private static void setResultFormat(Statement stmt, String format) throws SQLException { + stmt.execute("alter session set jdbc_query_result_format = '" + format + "'"); + } + + private void createTable(int lobSize, Statement stmt) throws SQLException { + String createTableQuery = + "create or replace table " + + tableName + + " (c1 varchar, c2 varchar(" + + lobSize + + "), c3 varchar)"; + stmt.execute(createTableQuery); + } + + private void insertQuery(String varCharValue, String uuidValue, Statement stmt) + throws SQLException { + stmt.executeUpdate(executeInsert + "'abc', '" + varCharValue + "', '" + uuidValue + "')"); + } + + private void preparedInsertQuery(String varCharValue, String uuidValue, Connection con) + throws SQLException { + try (PreparedStatement pstmt = con.prepareStatement(executePreparedStatementInsert)) { + pstmt.setString(1, "abc"); + pstmt.setString(2, varCharValue); + pstmt.setString(3, uuidValue); + + pstmt.execute(); + } + } + + @AfterClass + public static void tearDown() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + stmt.execute("Drop table if exists " + tableName); + } + } + + @Test + public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + String varCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + insertQuery(varCharValue, uuidValue, stmt); + + try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rs.next()); + assertEquals("abc", 
rs.getString(1)); + assertEquals(varCharValue, rs.getString(2)); + assertEquals(uuidValue, rs.getString(3)); + } + } + } + + @Test + public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + String maxVarCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + preparedInsertQuery(maxVarCharValue, uuidValue, con); + + try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rs.next()); + assertEquals("abc", rs.getString(1)); + assertEquals(maxVarCharValue, rs.getString(2)); + assertEquals(uuidValue, rs.getString(3)); + } + } + } + + @Test + public void testPutAndGet() throws IOException, SQLException { + File tempFile = File.createTempFile("LobSizeTest", ".csv"); + // Delete file when JVM shuts down + tempFile.deleteOnExit(); + + String filePath = tempFile.getPath(); + String filePathEscaped = filePath.replace("\\", "\\\\"); + String fileName = tempFile.getName(); + + String varCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + String fileInput = "abc," + varCharValue + "," + uuidValue; + + // Print data to new temporary file + try (PrintWriter out = new PrintWriter(filePath)) { + out.println(fileInput); + } + + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + // Test PUT + String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; + + stmt.execute(sqlPut); + + try (ResultSet rsPut = stmt.getResultSet()) { + assertTrue(rsPut.next()); + assertEquals(fileName, rsPut.getString(1)); + assertEquals(fileName + ".gz", rsPut.getString(2)); + assertEquals("GZIP", rsPut.getString(6)); + assertEquals("UPLOADED", rsPut.getString(7)); + } + + try (ResultSet rsFiles = stmt.executeQuery("ls @%" + 
tableName)) { + // ResultSet should return a row with the zipped file name + assertTrue(rsFiles.next()); + assertEquals(fileName + ".gz", rsFiles.getString(1)); + } + + String copyInto = + "copy into " + + tableName + + " from @%" + + tableName + + " file_format=(type=csv compression='gzip')"; + stmt.execute(copyInto); + + // Check that results are copied into table correctly + try (ResultSet rsCopy = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rsCopy.next()); + assertEquals("abc", rsCopy.getString(1)); + assertEquals(varCharValue, rsCopy.getString(2)); + assertEquals(uuidValue, rsCopy.getString(3)); + } + + // Test Get + Path tempDir = Files.createTempDirectory("MaxLobTest"); + // Delete tempDir when JVM shuts down + tempDir.toFile().deleteOnExit(); + String pathToTempDir = tempDir.toString().replace("\\", "\\\\"); + + String getSql = "get @%" + tableName + " 'file://" + pathToTempDir + "'"; + stmt.execute(getSql); + + try (ResultSet rsGet = stmt.getResultSet()) { + assertTrue(rsGet.next()); + assertEquals(fileName + ".gz", rsGet.getString(1)); + assertEquals("DOWNLOADED", rsGet.getString(3)); + } + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java index 8fd874a9c..8962b8141 100644 --- a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java @@ -1,252 +1,48 @@ -/* - * Copyright (c) 2024 Snowflake Computing Inc. All right reserved. 
- */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.text.IsEmptyString.emptyOrNullString; -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.file.Files; -import java.nio.file.Path; import java.sql.Connection; -import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import net.snowflake.client.category.TestCategoryStatement; -import net.snowflake.client.core.ObjectMapperFactory; -import net.snowflake.client.core.UUIDUtils; -import org.apache.commons.text.RandomStringGenerator; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGithubAction; +import org.hamcrest.CoreMatchers; +import org.junit.Assert; import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) public class MaxLobSizeLatestIT extends BaseJDBCTest { - // Max LOB size is testable from version 3.15.0 and above. 
- private static int maxLobSize = 16 * 1024 * 1024; - private static int largeLobSize = maxLobSize / 2; - private static int mediumLobSize = largeLobSize / 2; - private static int originLobSize = mediumLobSize / 2; - private static int smallLobSize = 16; - - private static Map LobSizeStringValues = - new HashMap() { - { - put(smallLobSize, generateRandomString(smallLobSize)); - put(originLobSize, generateRandomString(originLobSize)); - put(mediumLobSize, generateRandomString(mediumLobSize)); - put(largeLobSize, generateRandomString(largeLobSize)); - put(maxLobSize, generateRandomString(maxLobSize)); - } - }; - - @BeforeClass - public static void setUp() { - System.setProperty( - // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure - ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); - } - - @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") - public static Collection data() { - int[] lobSizes = - new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; - String[] resultFormats = new String[] {"Arrow", "JSON"}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizes.length; i++) { - for (int j = 0; j < resultFormats.length; j++) { - ret.add(new Object[] {lobSizes[i], resultFormats[j]}); - } - } - return ret; - } - - private final int lobSize; - - private final String resultFormat; - - public MaxLobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { - this.lobSize = lobSize; - this.resultFormat = resultFormat; - - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - createTable(lobSize, stmt); - } - } - - private static String tableName = "my_lob_test"; - private static String executeInsert = "insert into " + tableName + " (c1, c2, c3) values ("; - private static String executePreparedStatementInsert = executeInsert + "?, ?, ?)"; - private static String selectQuery = "select * from " + 
tableName + " where c3="; - - private static String generateRandomString(int stringSize) { - RandomStringGenerator randomStringGenerator = - new RandomStringGenerator.Builder().withinRange('a', 'z').build(); - return randomStringGenerator.generate(stringSize); - } - - private static void setResultFormat(Statement stmt, String format) throws SQLException { - stmt.execute("alter session set jdbc_query_result_format = '" + format + "'"); - } - - private void createTable(int lobSize, Statement stmt) throws SQLException { - String createTableQuery = - "create or replace table " - + tableName - + " (c1 varchar, c2 varchar(" - + lobSize - + "), c3 varchar)"; - stmt.execute(createTableQuery); - } - - private void insertQuery(String varCharValue, String uuidValue, Statement stmt) - throws SQLException { - stmt.executeUpdate(executeInsert + "'abc', '" + varCharValue + "', '" + uuidValue + "')"); - } - - private void preparedInsertQuery(String varCharValue, String uuidValue, Connection con) - throws SQLException { - try (PreparedStatement pstmt = con.prepareStatement(executePreparedStatementInsert)) { - pstmt.setString(1, "abc"); - pstmt.setString(2, varCharValue); - pstmt.setString(3, uuidValue); - - pstmt.execute(); - } - } - - @AfterClass - public static void tearDown() throws SQLException { - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - stmt.execute("Drop table if exists " + tableName); - } - } - + /** + * Available since 3.17.0 + * + * @throws SQLException + */ @Test - public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testIncreasedMaxLobSize() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - String varCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = 
UUIDUtils.getUUID().toString(); - insertQuery(varCharValue, uuidValue, stmt); - - try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rs.next()); - assertEquals("abc", rs.getString(1)); - assertEquals(varCharValue, rs.getString(2)); - assertEquals(uuidValue, rs.getString(3)); + stmt.execute("alter session set FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY='ENABLED'"); + stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=false"); + try { + stmt.execute("select randstr(20000000, random()) as large_str"); + } catch (SnowflakeSQLException e) { + assertThat(e.getMessage(), CoreMatchers.containsString("exceeds supported length")); } - } - } - - @Test - public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - String maxVarCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = UUIDUtils.getUUID().toString(); - preparedInsertQuery(maxVarCharValue, uuidValue, con); - - try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rs.next()); - assertEquals("abc", rs.getString(1)); - assertEquals(maxVarCharValue, rs.getString(2)); - assertEquals(uuidValue, rs.getString(3)); - } - } - } - - @Test - public void testPutAndGet() throws IOException, SQLException { - File tempFile = File.createTempFile("LobSizeTest", ".csv"); - // Delete file when JVM shuts down - tempFile.deleteOnExit(); - - String filePath = tempFile.getPath(); - String filePathEscaped = filePath.replace("\\", "\\\\"); - String fileName = tempFile.getName(); - - String varCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = UUIDUtils.getUUID().toString(); - String fileInput = "abc," + varCharValue + "," + uuidValue; - - // Print data to new temporary file - try (PrintWriter out = new PrintWriter(filePath)) { - out.println(fileInput); - } - - try 
(Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - // Test PUT - String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; - - stmt.execute(sqlPut); - - try (ResultSet rsPut = stmt.getResultSet()) { - assertTrue(rsPut.next()); - assertEquals(fileName, rsPut.getString(1)); - assertEquals(fileName + ".gz", rsPut.getString(2)); - assertEquals("GZIP", rsPut.getString(6)); - assertEquals("UPLOADED", rsPut.getString(7)); - } - - try (ResultSet rsFiles = stmt.executeQuery("ls @%" + tableName)) { - // ResultSet should return a row with the zipped file name - assertTrue(rsFiles.next()); - assertEquals(fileName + ".gz", rsFiles.getString(1)); - } - - String copyInto = - "copy into " - + tableName - + " from @%" - + tableName - + " file_format=(type=csv compression='gzip')"; - stmt.execute(copyInto); - - // Check that results are copied into table correctly - try (ResultSet rsCopy = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rsCopy.next()); - assertEquals("abc", rsCopy.getString(1)); - assertEquals(varCharValue, rsCopy.getString(2)); - assertEquals(uuidValue, rsCopy.getString(3)); - } - - // Test Get - Path tempDir = Files.createTempDirectory("MaxLobTest"); - // Delete tempDir when JVM shuts down - tempDir.toFile().deleteOnExit(); - String pathToTempDir = tempDir.toString().replace("\\", "\\\\"); - - String getSql = "get @%" + tableName + " 'file://" + pathToTempDir + "'"; - stmt.execute(getSql); - try (ResultSet rsGet = stmt.getResultSet()) { - assertTrue(rsGet.next()); - assertEquals(fileName + ".gz", rsGet.getString(1)); - assertEquals("DOWNLOADED", rsGet.getString(3)); + stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=true"); + try (ResultSet resultSet = + stmt.executeQuery("select randstr(20000000, random()) as large_str")) { + Assert.assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), 
is(not(emptyOrNullString()))); + } finally { + stmt.execute("alter session unset ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT"); + stmt.execute("alter session unset FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index fb55a9780..efd185926 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -744,7 +744,7 @@ public void testCallStatementType() throws SQLException { try { String sp = "CREATE OR REPLACE PROCEDURE \"SP_ZSDLEADTIME_ARCHIVE_DAILY\"()\n" - + "RETURNS VARCHAR(16777216)\n" + + "RETURNS VARCHAR\n" + "LANGUAGE SQL\n" + "EXECUTE AS CALLER\n" + "AS \n" @@ -793,7 +793,6 @@ public void testCallStatementType() throws SQLException { assertEquals("SP_ZSDLEADTIME_ARCHIVE_DAILY", resultSetMetaData.getColumnName(1)); assertEquals("VARCHAR", resultSetMetaData.getColumnTypeName(1)); assertEquals(0, resultSetMetaData.getScale(1)); - assertEquals(16777216, resultSetMetaData.getPrecision(1)); } } finally { statement.execute("drop procedure if exists SP_ZSDLEADTIME_ARCHIVE_DAILY()"); @@ -1048,6 +1047,12 @@ public void testLargeStringRetrieval() throws SQLException { int colLength = 16777216; try (Connection con = getConnection(); Statement statement = con.createStatement()) { + SFBaseSession session = con.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); + Integer maxVarcharSize = + (Integer) session.getOtherParameter("VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"); + if (maxVarcharSize != null) { + colLength = maxVarcharSize; + } statement.execute("create or replace table " + tableName + " (c1 string(" + colLength + "))"); statement.execute( "insert into " + tableName + " select randstr(" + colLength + ", random())"); From f39eff42290bfda27edb46d2cef0181f12ee52c3 Mon Sep 17 00:00:00 2001 From: sfc-gh-ext-simba-nl 
<143542970+sfc-gh-ext-simba-nl@users.noreply.github.com> Date: Tue, 2 Jul 2024 00:00:12 -0700 Subject: [PATCH 38/54] SNOW-957747: Easy logging improvements (#1730) --- .../client/config/SFClientConfigParser.java | 40 ++++++- .../jdbc/DefaultSFConnectionHandler.java | 110 +++++++++++++++--- .../net/snowflake/client/RunningOnWin.java | 9 ++ .../client/config/SFPermissionsTest.java | 86 ++++++++++++++ .../log/JDK14LoggerWithClientLatestIT.java | 49 ++++++++ 5 files changed, 276 insertions(+), 18 deletions(-) create mode 100644 src/test/java/net/snowflake/client/RunningOnWin.java create mode 100644 src/test/java/net/snowflake/client/config/SFPermissionsTest.java diff --git a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java index 3c960bc98..a0ca0fa11 100644 --- a/src/main/java/net/snowflake/client/config/SFClientConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFClientConfigParser.java @@ -8,9 +8,11 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import net.snowflake.client.core.Constants; import net.snowflake.client.jdbc.SnowflakeDriver; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -26,14 +28,16 @@ public class SFClientConfigParser { * connection property. 2. Environment variable: SF_CLIENT_CONFIG_FILE containing full path to * sf_client_config file. 3. Searches for default config file name(sf_client_config.json under the * driver directory from where the driver gets loaded. 4. Searches for default config file - * name(sf_client_config.json) under user home directory 5. Searches for default config file - * name(sf_client_config.json) under tmp directory. + * name(sf_client_config.json) under user home directory. 
* * @param configFilePath SF_CLIENT_CONFIG_FILE parameter read from connection URL or connection * properties * @return SFClientConfig */ public static SFClientConfig loadSFClientConfig(String configFilePath) throws IOException { + if (configFilePath != null) { + logger.info("Attempting to enable easy logging with file path {}", configFilePath); + } String derivedConfigFilePath = null; if (configFilePath != null && !configFilePath.isEmpty()) { // 1. Try to read the file at configFilePath. @@ -63,9 +67,16 @@ public static SFClientConfig loadSFClientConfig(String configFilePath) throws IO } if (derivedConfigFilePath != null) { try { + checkConfigFilePermissions(derivedConfigFilePath); + File configFile = new File(derivedConfigFilePath); ObjectMapper objectMapper = new ObjectMapper(); SFClientConfig clientConfig = objectMapper.readValue(configFile, SFClientConfig.class); + logger.info( + "Reading values logLevel {} and logPath {} from client configuration", + clientConfig.getCommonProps().getLogLevel(), + clientConfig.getCommonProps().getLogPath()); + Set unknownParams = clientConfig.getUnknownParamKeys(); if (!unknownParams.isEmpty()) { for (String unknownParam : unknownParams) { @@ -111,6 +122,31 @@ && systemGetProperty("os.name").toLowerCase().startsWith("windows")) { } } + private static void checkConfigFilePermissions(String derivedConfigFilePath) throws IOException { + try { + if (Constants.getOS() != Constants.OS.WINDOWS) { + // Check permissions of config file + if (checkGroupOthersWritePermissions(derivedConfigFilePath)) { + String error = + String.format( + "Error due to other users having permission to modify the config file: %s", + derivedConfigFilePath); + // TODO: SNOW-1503722 to change warning log to throw an error instead + logger.warn(error); + } + } + } catch (IOException e) { + throw e; + } + } + + static Boolean checkGroupOthersWritePermissions(String configFilePath) throws IOException { + Set folderPermissions = + 
Files.getPosixFilePermissions(Paths.get(configFilePath)); + return folderPermissions.contains(PosixFilePermission.GROUP_WRITE) + || folderPermissions.contains(PosixFilePermission.OTHERS_WRITE); + } + static String convertToWindowsPath(String filePath) { // Find the Windows file path pattern: ex) C:\ or D:\ Pattern windowsFilePattern = Pattern.compile("[C-Z]:[\\\\/]"); diff --git a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java index 7ada3a803..6bb62c82f 100644 --- a/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java +++ b/src/main/java/net/snowflake/client/jdbc/DefaultSFConnectionHandler.java @@ -8,15 +8,19 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLNonTransientConnectionException; import java.sql.Statement; import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.logging.Level; import net.snowflake.client.config.SFClientConfig; import net.snowflake.client.config.SFClientConfigParser; +import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFBaseStatement; @@ -136,13 +140,16 @@ private void setClientConfig() throws SnowflakeSQLLoggedException { String clientConfigFilePath = (String) connectionPropertiesMap.getOrDefault(SFSessionProperty.CLIENT_CONFIG_FILE, null); - SFClientConfig sfClientConfig; - try { - sfClientConfig = SFClientConfigParser.loadSFClientConfig(clientConfigFilePath); - } catch (IOException e) { - throw new SnowflakeSQLLoggedException(sfSession, ErrorCode.INTERNAL_ERROR, e.getMessage()); + SFClientConfig sfClientConfig = sfSession.getSfClientConfig(); + if 
(sfClientConfig == null) { + try { + sfClientConfig = SFClientConfigParser.loadSFClientConfig(clientConfigFilePath); + } catch (IOException e) { + throw new SnowflakeSQLLoggedException( + sfSession, ErrorCode.INTERNAL_ERROR, e.getMessage(), e.getCause()); + } + sfSession.setSfClientConfig(sfClientConfig); } - sfSession.setSfClientConfig(sfClientConfig); } /** @@ -181,6 +188,7 @@ && systemGetProperty("java.util.logging.config.file") == null) { if (logLevel != null && logPattern != null) { try { + logger.info("Setting logger with log level {} and log pattern {}", logLevel, logPattern); JDK14Logger.instantiateLogger(logLevel, logPattern); } catch (IOException ex) { throw new SnowflakeSQLLoggedException( @@ -188,13 +196,10 @@ && systemGetProperty("java.util.logging.config.file") == null) { } if (sfClientConfig != null) { logger.debug( - String.format( - "SF Client config found at location: %s.", sfClientConfig.getConfigFilePath())); + "SF Client config found at location: {}.", sfClientConfig.getConfigFilePath()); } logger.debug( - String.format( - "Instantiating JDK14Logger with level: %s , output path: %s", - logLevel, logPattern)); + "Instantiating JDK14Logger with level: {}, output path: {}", logLevel, logPattern); } } } @@ -206,25 +211,98 @@ private String constructLogPattern(String logPathFromConfig) throws SnowflakeSQL String logPattern = "%t/snowflake_jdbc%u.log"; // java.tmpdir + Path logPath; if (logPathFromConfig != null && !logPathFromConfig.isEmpty()) { - Path path = Paths.get(logPathFromConfig, "jdbc"); - if (!Files.exists(path)) { + // Get log path from configuration + logPath = Paths.get(logPathFromConfig); + if (!Files.exists(logPath)) { try { - Files.createDirectories(path); + Files.createDirectories(logPath); } catch (IOException ex) { throw new SnowflakeSQLLoggedException( sfSession, ErrorCode.INTERNAL_ERROR, String.format( - "Un-able to create log path mentioned in configfile %s ,%s", + "Unable to create log path mentioned in configfile %s ,%s", 
logPathFromConfig, ex.getMessage())); } } - logPattern = Paths.get(path.toString(), "snowflake_jdbc%u.log").toString(); + } else { + // Get log path from home directory + String homePath = systemGetProperty("user.home"); + if (homePath == null || homePath.isEmpty()) { + throw new SnowflakeSQLLoggedException( + sfSession, + ErrorCode.INTERNAL_ERROR, + String.format( + "Log path not set in configfile %s and home directory not set.", + logPathFromConfig)); + } + logPath = Paths.get(homePath); } + + Path path = createLogPathSubDirectory(logPath); + + logPattern = Paths.get(path.toString(), "snowflake_jdbc%u.log").toString(); return logPattern; } + private Path createLogPathSubDirectory(Path logPath) throws SnowflakeSQLLoggedException { + Path path = Paths.get(logPath.toString(), "jdbc"); + if (!Files.exists(path)) { + createLogFolder(path); + } else { + checkLogFolderPermissions(path); + } + return path; + } + + private void createLogFolder(Path path) throws SnowflakeSQLLoggedException { + try { + if (Constants.getOS() == Constants.OS.WINDOWS) { + Files.createDirectories(path); + } else { + Files.createDirectories( + path, + PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwx------"))); + } + } catch (IOException ex) { + throw new SnowflakeSQLLoggedException( + sfSession, + ErrorCode.INTERNAL_ERROR, + String.format( + "Unable to create jdbc subfolder in configfile %s ,%s", + path.toString(), ex.getMessage(), ex.getCause())); + } + } + + private void checkLogFolderPermissions(Path path) throws SnowflakeSQLLoggedException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + try { + Set folderPermissions = Files.getPosixFilePermissions(path); + if (folderPermissions.contains(PosixFilePermission.GROUP_WRITE) + || folderPermissions.contains(PosixFilePermission.GROUP_READ) + || folderPermissions.contains(PosixFilePermission.GROUP_EXECUTE) + || folderPermissions.contains(PosixFilePermission.OTHERS_WRITE) + || 
folderPermissions.contains(PosixFilePermission.OTHERS_READ) + || folderPermissions.contains(PosixFilePermission.OTHERS_EXECUTE)) { + logger.warn( + "Access permission for the logs directory '{}' is currently {} and is potentially " + + "accessible to users other than the owner of the logs directory.", + path.toString(), + folderPermissions.toString()); + } + } catch (IOException ex) { + throw new SnowflakeSQLLoggedException( + sfSession, + ErrorCode.INTERNAL_ERROR, + String.format( + "Unable to get permissions of log directory %s ,%s", + path.toString(), ex.getMessage(), ex.getCause())); + } + } + } + private void initSessionProperties(SnowflakeConnectString conStr, String appID, String appVersion) throws SFException { Map properties = mergeProperties(conStr); diff --git a/src/test/java/net/snowflake/client/RunningOnWin.java b/src/test/java/net/snowflake/client/RunningOnWin.java new file mode 100644 index 000000000..025ab1e04 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningOnWin.java @@ -0,0 +1,9 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningOnWin implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + return Constants.getOS() == Constants.OS.WINDOWS; + } +} diff --git a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java new file mode 100644 index 000000000..92ec8a624 --- /dev/null +++ b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java @@ -0,0 +1,86 @@ +package net.snowflake.client.config; + +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnWin; +import 
org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class SFPermissionsTest { + @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); + + @Parameterized.Parameters(name = "permission={0}") + public static Set> data() { + Map testConfigFilePermissions = + new HashMap() { + { + put("rwx------", false); + put("rw-------", false); + put("r-x------", false); + put("r--------", false); + put("rwxrwx---", true); + put("rwxrw----", true); + put("rwxr-x---", false); + put("rwxr-----", false); + put("rwx-wx---", true); + put("rwx-w----", true); + put("rwx--x---", false); + put("rwx---rwx", true); + put("rwx---rw-", true); + put("rwx---r-x", false); + put("rwx---r--", false); + put("rwx----wx", true); + put("rwx----w-", true); + put("rwx-----x", false); + } + }; + return testConfigFilePermissions.entrySet(); + } + + Path configFilePath = Paths.get("config.json"); + String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}"; + String permission; + Boolean isSucceed; + + public SFPermissionsTest(Map.Entry permission) { + this.permission = permission.getKey(); + this.isSucceed = permission.getValue(); + } + + @Before + public void createConfigFile() throws IOException { + Files.write(configFilePath, configJson.getBytes()); + } + + @After + public void cleanupConfigFile() throws IOException { + Files.deleteIfExists(configFilePath); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnWin.class) + public void testLogDirectoryPermissions() throws IOException { + // TODO: SNOW-1503722 Change to check for thrown exceptions + // Don't run on Windows + Files.setPosixFilePermissions(configFilePath, PosixFilePermissions.fromString(permission)); + Boolean result = + SFClientConfigParser.checkGroupOthersWritePermissions(configFilePath.toString()); + if (isSucceed != 
result) { + fail("testLogDirectoryPermissions failed. Expected " + isSucceed); + } + } +} diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java index 84b876147..232da8451 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java @@ -1,5 +1,6 @@ package net.snowflake.client.log; +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -17,11 +18,14 @@ import java.util.Properties; import java.util.logging.Level; import net.snowflake.client.AbstractDriverIT; +import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import org.apache.commons.io.FileUtils; import org.junit.Test; public class JDK14LoggerWithClientLatestIT extends AbstractDriverIT { + String homePath = systemGetProperty("user.home"); + @Test public void testJDK14LoggingWithClientConfig() { Path configFilePath = Paths.get("config.json"); @@ -93,4 +97,49 @@ public void testJDK14LoggerWithQuotesInMessage() { logger.debug("Returning column: 12: a: Group b) Hi {Hello 'World' War} cant wait"); JDK14Logger.setLevel(Level.OFF); } + + @Test + public void testJDK14LoggingWithMissingLogPathClientConfig() throws Exception { + Path configFilePath = Paths.get("config.json"); + String configJson = "{\"common\":{\"log_level\":\"debug\"}}"; + + Path homeLogPath = Paths.get(homePath, "jdbc"); + Files.write(configFilePath, configJson.getBytes()); + Properties properties = new Properties(); + properties.put("client_config_file", configFilePath.toString()); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { + try { + statement.executeQuery("select 1"); + + File file = new File(homeLogPath.toString()); + 
assertTrue(file.exists()); + + } finally { + Files.deleteIfExists(configFilePath); + FileUtils.deleteDirectory(new File(homeLogPath.toString())); + } + } + } + + @Test + public void testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig() throws Exception { + System.clearProperty("user.home"); + + Path configFilePath = Paths.get("config.json"); + String configJson = "{\"common\":{\"log_level\":\"debug\"}}"; + Files.write(configFilePath, configJson.getBytes()); + Properties properties = new Properties(); + properties.put("client_config_file", configFilePath.toString()); + try (Connection connection = getConnection(properties); + Statement statement = connection.createStatement()) { + + fail("testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig failed"); + } catch (SnowflakeSQLLoggedException e) { + // Succeed + } finally { + System.setProperty("user.home", homePath); + Files.deleteIfExists(configFilePath); + } + } } From 3fee6f9240599049c0d179606236666b79f3bd10 Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Tue, 2 Jul 2024 16:02:32 +0900 Subject: [PATCH 39/54] SNOW-1196082: FIx inserting and reading timestamps not symetric if too much columns inserted with batch (#1794) --- .../client/core/bind/BindUploader.java | 6 +- .../client/jdbc/BindingDataLatestIT.java | 361 ++++++++++++------ 2 files changed, 249 insertions(+), 118 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/bind/BindUploader.java b/src/main/java/net/snowflake/client/core/bind/BindUploader.java index 2332f0150..6b901da44 100644 --- a/src/main/java/net/snowflake/client/core/bind/BindUploader.java +++ b/src/main/java/net/snowflake/client/core/bind/BindUploader.java @@ -159,6 +159,7 @@ private synchronized String synchronizedTimestampFormat(String o, String type) { int nano = times.right; Timestamp v1 = new Timestamp(sec * 1000); + ZoneOffset offsetId; // For timestamp_ntz, use UTC timezone. 
For timestamp_ltz, use the local timezone to minimise // the gap. if ("TIMESTAMP_LTZ".equals(type)) { @@ -166,10 +167,11 @@ private synchronized String synchronizedTimestampFormat(String o, String type) { cal.setTimeZone(tz); cal.clear(); timestampFormat.setCalendar(cal); + offsetId = ZoneId.systemDefault().getRules().getOffset(Instant.ofEpochMilli(v1.getTime())); + } else { + offsetId = ZoneOffset.UTC; } - ZoneOffset offsetId = ZoneId.systemDefault().getRules().getOffset(Instant.now()); - return timestampFormat.format(v1) + String.format("%09d", nano) + " " + offsetId; } diff --git a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java index 257759120..71c556686 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java @@ -6,6 +6,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.PreparedStatement; @@ -30,26 +31,34 @@ */ @Category(TestCategoryOthers.class) public class BindingDataLatestIT extends AbstractDriverIT { + TimeZone origTz = TimeZone.getDefault(); + TimeZone tokyoTz = TimeZone.getTimeZone("Asia/Tokyo"); + TimeZone australiaTz = TimeZone.getTimeZone("Australia/Sydney"); + Calendar tokyo = Calendar.getInstance(tokyoTz); + @Test public void testBindTimestampTZ() throws SQLException { - Connection connection = getConnection(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table testBindTimestampTZ(" + "cola int, colb timestamp_tz)"); - statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_TZ"); - - long millSeconds = System.currentTimeMillis(); - Timestamp ts = new Timestamp(millSeconds); - PreparedStatement prepStatement = - 
connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)"); - prepStatement.setInt(1, 123); - prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("EST"))); - prepStatement.execute(); - - ResultSet resultSet = statement.executeQuery("select cola, colb from testBindTimestampTz"); - resultSet.next(); - assertThat("integer", resultSet.getInt(1), equalTo(123)); - assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute("create or replace table testBindTimestampTZ(cola int, colb timestamp_tz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_TZ"); + + long milliSeconds = System.currentTimeMillis(); + Timestamp ts = new Timestamp(milliSeconds); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into testBindTimestampTZ values (?, ?)")) { + prepStatement.setInt(1, 123); + prepStatement.setTimestamp(2, ts, Calendar.getInstance(TimeZone.getTimeZone("EST"))); + prepStatement.execute(); + } + + try (ResultSet resultSet = + statement.executeQuery("select cola, colb from testBindTimestampTz")) { + assertTrue(resultSet.next()); + assertThat("integer", resultSet.getInt(1), equalTo(123)); + assertThat("timestamp_tz", resultSet.getTimestamp(2), equalTo(ts)); + } + } } /** @@ -60,57 +69,52 @@ public void testBindTimestampTZ() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testTimestampBindingWithNTZType() throws SQLException { - try (Connection connection = getConnection()) { - TimeZone origTz = TimeZone.getDefault(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute( - "create or replace table regularinsert(ind int, ltz0 
timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); - statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); - TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo")); - Timestamp currT = new Timestamp(System.currentTimeMillis()); - - // insert using stage binding - PreparedStatement prepStatement = - connection.prepareStatement("insert into stageinsert values (?,?,?,?)"); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); - prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); - prepStatement.addBatch(); - prepStatement.executeBatch(); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); - - // insert using regular binging - prepStatement = connection.prepareStatement("insert into regularinsert values (?,?,?,?)"); - for (int i = 1; i <= 6; i++) { - prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); - prepStatement.addBatch(); - } - prepStatement.executeBatch(); + TimeZone.setDefault(tokyoTz); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute( + "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); + statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); + Timestamp currT = new Timestamp(System.currentTimeMillis()); + + // insert using regular binging + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into regularinsert values (?,?,?,?)")) { 
+ prepStatement.setInt(1, 1); + prepStatement.setTimestamp(2, currT, tokyo); + prepStatement.setTimestamp(3, currT, tokyo); + prepStatement.setTimestamp(4, currT); + prepStatement.addBatch(); + prepStatement.executeBatch(); + } + // insert using stage binding + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + executePsStatementForTimestampTest(connection, "stageinsert", currT); + + // Compare the results + try (ResultSet rs1 = statement.executeQuery("select * from stageinsert"); + ResultSet rs2 = statement.executeQuery("select * from regularinsert")) { + assertTrue(rs1.next()); + assertTrue(rs2.next()); + + assertEquals(rs1.getInt(1), rs2.getInt(1)); - // Compare the results - ResultSet rs1 = statement.executeQuery("select * from stageinsert"); - ResultSet rs2 = statement.executeQuery("select * from regularinsert"); - rs1.next(); - rs2.next(); - - assertEquals(rs1.getInt(1), rs2.getInt(1)); - assertEquals(rs1.getString(2), rs2.getString(2)); - assertEquals(rs1.getString(3), rs2.getString(3)); - assertEquals(rs1.getString(4), rs2.getString(4)); - - statement.execute("drop table if exists stageinsert"); - statement.execute("drop table if exists regularinsert"); - TimeZone.setDefault(origTz); - statement.close(); - prepStatement.close(); + // Check tz type and ltz type columns have the same value. 
+ assertEquals(rs1.getTimestamp(2), rs1.getTimestamp(3)); + + assertEquals(rs1.getTimestamp(2), rs2.getTimestamp(2)); + assertEquals(rs1.getTimestamp(3), rs2.getTimestamp(3)); + assertEquals(rs1.getTimestamp(4), rs2.getTimestamp(4)); + } + } finally { + statement.execute("drop table if exists stageinsert"); + statement.execute("drop table if exists regularinsert"); + TimeZone.setDefault(origTz); + } } } @@ -122,57 +126,182 @@ public void testTimestampBindingWithNTZType() throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testTimestampBindingWithLTZType() throws SQLException { - try (Connection connection = getConnection()) { - TimeZone origTz = TimeZone.getDefault(); - Statement statement = connection.createStatement(); - statement.execute( - "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute( - "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); - statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_LTZ"); - statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); - TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo")); - Timestamp currT = new Timestamp(System.currentTimeMillis()); - - // insert using stage binding - PreparedStatement prepStatement = - connection.prepareStatement("insert into stageinsert values (?,?,?,?)"); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + TimeZone.setDefault(tokyoTz); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute( + "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, tz0 timestamp_tz, ntz0 timestamp_ntz)"); + statement.execute("alter 
session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_LTZ"); + statement.execute("alter session set TIMEZONE='Asia/Tokyo'"); + Timestamp currT = new Timestamp(System.currentTimeMillis()); + + // insert using regular binging + executePsStatementForTimestampTest(connection, "regularinsert", currT); + + // insert using stage binding + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + executePsStatementForTimestampTest(connection, "stageinsert", currT); + + // Compare the results + try (ResultSet rs1 = statement.executeQuery("select * from stageinsert"); + ResultSet rs2 = statement.executeQuery("select * from regularinsert")) { + assertTrue(rs1.next()); + assertTrue(rs2.next()); + + assertEquals(rs1.getInt(1), rs2.getInt(1)); + + // Check that all the values are the same. + assertEquals(rs1.getTimestamp(2), rs1.getTimestamp(3)); + assertEquals(rs1.getTimestamp(3), rs1.getTimestamp(4)); + + assertEquals(rs1.getTimestamp(2), rs2.getTimestamp(2)); + assertEquals(rs1.getTimestamp(3), rs2.getTimestamp(3)); + assertEquals(rs1.getTimestamp(4), rs2.getTimestamp(4)); + } + } finally { + statement.execute("drop table if exists stageinsert"); + statement.execute("drop table if exists regularinsert"); + TimeZone.setDefault(origTz); + } + } + } + + /** + * Test that stage binding and regular binding insert and return the same value for timestamp_ltz + * when the local timezone has the daylight saving. This test is added in version > 3.16.1 + * + *

When CLIENT_TIMESTAMP_TYPE_MAPPING setting is mismatched with target data type (e.g + * MAPPING=LTZ and insert to NTZ or MAPPING=NTZ and insert to TZ/LTZ there could be different + * result as the timezone offset is applied on client side and removed on server side. This only + * occurs around the boundary of daylight-savings and the difference from the source data would be + * one hour. Both regular binding and stage binding have such issue but they also behave + * diffently, for some data only regular binding gets the extra hour while sometime only stage + * binding does. The workaround is to use CLIENT_TIMESTAMP_TYPE_MAPPING=LTZ to insert LTZ/TZ data + * and use CLIENT_TIMESTAMP_TYPE_MAPPING=NTZ to insert NTZ data. + * + *

This test cannot run on the GitHub testing because of the "ALTER SESSION SET + * CLIENT_STAGE_ARRAY_BINDING_THRESHOLD" This command should be executed with the system admin. + * + * @throws SQLException + */ + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testTimestampBindingWithLTZTypeForDayLightSavingTimeZone() throws SQLException { + Calendar australia = Calendar.getInstance(australiaTz); + TimeZone.setDefault(australiaTz); + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + try { + statement.execute( + "create or replace table stageinsert(ind int, ltz0 timestamp_ltz, ltz1 timestamp_ltz, ltz2 timestamp_ltz, tz0 timestamp_tz, tz1 timestamp_tz, tz2 timestamp_tz, ntz0 timestamp_ntz, ntz1 timestamp_ntz, ntz2 timestamp_ntz)"); + statement.execute( + "create or replace table regularinsert(ind int, ltz0 timestamp_ltz, ltz1 timestamp_ltz, ltz2 timestamp_ltz, tz0 timestamp_tz, tz1 timestamp_tz, tz2 timestamp_tz, ntz0 timestamp_ntz, ntz1 timestamp_ntz, ntz2 timestamp_ntz)"); + statement.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_LTZ"); + statement.execute("alter session set TIMEZONE='UTC'"); + + Timestamp ts1 = new Timestamp(1403049600000L); + Timestamp ts2 = new Timestamp(1388016000000L); + Timestamp ts3 = new Timestamp(System.currentTimeMillis()); + + // insert using regular binging + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into regularinsert values (?,?,?,?,?,?,?,?,?,?)")) { + prepStatement.setInt(1, 1); + prepStatement.setTimestamp(2, ts1); + prepStatement.setTimestamp(3, ts2); + prepStatement.setTimestamp(4, ts3); + + prepStatement.setTimestamp(5, ts1); + prepStatement.setTimestamp(6, ts2); + prepStatement.setTimestamp(7, ts3); + + prepStatement.setTimestamp(8, ts1, australia); + prepStatement.setTimestamp(9, ts2, australia); + prepStatement.setTimestamp(10, ts3, australia); + + 
prepStatement.addBatch(); + prepStatement.executeBatch(); + } + + // insert using stage binding + statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into stageinsert values (?,?,?,?,?,?,?,?,?,?)")) { + prepStatement.setInt(1, 1); + prepStatement.setTimestamp(2, ts1); + prepStatement.setTimestamp(3, ts2); + prepStatement.setTimestamp(4, ts3); + + prepStatement.setTimestamp(5, ts1); + prepStatement.setTimestamp(6, ts2); + prepStatement.setTimestamp(7, ts3); + + prepStatement.setTimestamp(8, ts1); + prepStatement.setTimestamp(9, ts2); + prepStatement.setTimestamp(10, ts3); + + prepStatement.addBatch(); + prepStatement.executeBatch(); + } + + // Compare the results + try (ResultSet rs1 = statement.executeQuery("select * from stageinsert"); + ResultSet rs2 = statement.executeQuery("select * from regularinsert")) { + assertTrue(rs1.next()); + assertTrue(rs2.next()); + + assertEquals(rs1.getInt(1), rs2.getInt(1)); + assertEquals(rs1.getTimestamp(2), rs2.getTimestamp(2)); + assertEquals(rs1.getTimestamp(3), rs2.getTimestamp(3)); + assertEquals(rs1.getTimestamp(4), rs2.getTimestamp(4)); + assertEquals(rs1.getTimestamp(5), rs2.getTimestamp(5)); + assertEquals(rs1.getTimestamp(6), rs2.getTimestamp(6)); + assertEquals(rs1.getTimestamp(7), rs2.getTimestamp(7)); + assertEquals(rs1.getTimestamp(8), rs2.getTimestamp(8)); + assertEquals(rs1.getTimestamp(9), rs2.getTimestamp(9)); + assertEquals(rs1.getTimestamp(10), rs2.getTimestamp(10)); + + assertEquals(ts1.getTime(), rs1.getTimestamp(2).getTime()); + assertEquals(ts2.getTime(), rs1.getTimestamp(3).getTime()); + assertEquals(ts3.getTime(), rs1.getTimestamp(4).getTime()); + assertEquals(ts1.getTime(), rs1.getTimestamp(5).getTime()); + assertEquals(ts2.getTime(), rs1.getTimestamp(6).getTime()); + assertEquals(ts3.getTime(), rs1.getTimestamp(7).getTime()); + assertEquals(ts1.getTime(), rs1.getTimestamp(8).getTime()); + 
assertEquals(ts2.getTime(), rs1.getTimestamp(9).getTime()); + assertEquals(ts3.getTime(), rs1.getTimestamp(10).getTime()); + + assertEquals(ts1.getTime(), rs2.getTimestamp(2).getTime()); + assertEquals(ts2.getTime(), rs2.getTimestamp(3).getTime()); + assertEquals(ts3.getTime(), rs2.getTimestamp(4).getTime()); + assertEquals(ts1.getTime(), rs2.getTimestamp(5).getTime()); + assertEquals(ts2.getTime(), rs2.getTimestamp(6).getTime()); + assertEquals(ts3.getTime(), rs2.getTimestamp(7).getTime()); + assertEquals(ts1.getTime(), rs2.getTimestamp(8).getTime()); + assertEquals(ts2.getTime(), rs2.getTimestamp(9).getTime()); + assertEquals(ts3.getTime(), rs2.getTimestamp(10).getTime()); + } + } finally { + statement.execute("drop table if exists stageinsert"); + statement.execute("drop table if exists regularinsert"); + TimeZone.setDefault(origTz); + } + } + } + + public void executePsStatementForTimestampTest( + Connection connection, String tableName, Timestamp timestamp) throws SQLException { + try (PreparedStatement prepStatement = + connection.prepareStatement("insert into " + tableName + " values (?,?,?,?)")) { prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); + prepStatement.setTimestamp(2, timestamp); + prepStatement.setTimestamp(3, timestamp); + prepStatement.setTimestamp(4, timestamp); prepStatement.addBatch(); prepStatement.executeBatch(); - statement.execute("ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 0"); - - // insert using regular binging - prepStatement = connection.prepareStatement("insert into regularinsert values (?,?,?,?)"); - for (int i = 1; i <= 6; i++) { - prepStatement.setInt(1, 1); - prepStatement.setTimestamp(2, currT); - prepStatement.setTimestamp(3, currT); - prepStatement.setTimestamp(4, currT); - prepStatement.addBatch(); - } - prepStatement.executeBatch(); - - // Compare the results - ResultSet rs1 = statement.executeQuery("select * 
from stageinsert"); - ResultSet rs2 = statement.executeQuery("select * from regularinsert"); - rs1.next(); - rs2.next(); - - assertEquals(rs1.getInt(1), rs2.getInt(1)); - assertEquals(rs1.getString(2), rs2.getString(2)); - assertEquals(rs1.getString(3), rs2.getString(3)); - assertEquals(rs1.getString(4), rs2.getString(4)); - - statement.execute("drop table if exists stageinsert"); - statement.execute("drop table if exists regularinsert"); - TimeZone.setDefault(origTz); - statement.close(); - prepStatement.close(); } } } From 8e916ae62af6fae3c6bd7fabdca609129fbbe18a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 2 Jul 2024 14:11:00 +0200 Subject: [PATCH 40/54] SNOW-1465374: Return consistent timestamps_ltz between JSON and ARROW result sets (#1792) --- .../client/core/arrow/ArrowResultUtil.java | 16 ++- .../arrow/BigIntToTimestampLTZConverter.java | 20 +++- .../StructuredTypeDateTimeConverter.java | 3 +- .../client/jdbc/ConnectionLatestIT.java | 98 +++++++++++++++++++ 4 files changed, 134 insertions(+), 3 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java index 8eaaadc94..2ad5c3ef2 100644 --- a/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java +++ b/src/main/java/net/snowflake/client/core/arrow/ArrowResultUtil.java @@ -11,6 +11,7 @@ import java.util.TimeZone; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeTimestampWithTimezone; import net.snowflake.client.log.ArgSupplier; @@ -151,6 +152,19 @@ public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone ne * @return */ public static Timestamp toJavaTimestamp(long epoch, int scale) { + return toJavaTimestamp(epoch, scale, 
TimeZone.getDefault(), false); + } + + /** + * generate Java Timestamp object + * + * @param epoch the value since epoch time + * @param scale the scale of the value + * @return + */ + @SnowflakeJdbcInternalApi + public static Timestamp toJavaTimestamp( + long epoch, int scale, TimeZone sessionTimezone, boolean useSessionTimezone) { long seconds = epoch / powerOfTen(scale); int fraction = (int) ((epoch % powerOfTen(scale)) * powerOfTen(9 - scale)); if (fraction < 0) { @@ -158,7 +172,7 @@ public static Timestamp toJavaTimestamp(long epoch, int scale) { seconds--; fraction += 1000000000; } - return createTimestamp(seconds, fraction, TimeZone.getDefault(), false); + return createTimestamp(seconds, fraction, sessionTimezone, useSessionTimezone); } /** diff --git a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java index 236abe553..e2bba45ab 100644 --- a/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverter.java @@ -11,6 +11,7 @@ import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeType; import net.snowflake.client.jdbc.SnowflakeUtil; @@ -65,7 +66,7 @@ public Timestamp toTimestamp(int index, TimeZone tz) throws SFException { private Timestamp getTimestamp(int index, TimeZone tz) throws SFException { long val = bigIntVector.getDataBuffer().getLong(index * BigIntVector.TYPE_WIDTH); int scale = context.getScale(columnIndex); - return getTimestamp(val, scale); + return getTimestamp(val, scale, sessionTimeZone, useSessionTimezone); } @Override @@ -90,8 +91,25 @@ public boolean toBoolean(int index) throws SFException { 
SnowflakeUtil.BOOLEAN_STR, val); } + /** + * Use {@link #getTimestamp(long, int, TimeZone, boolean)} + * + * @param val epoch + * @param scale scale + * @return Timestamp value without timezone take into account + * @throws SFException + */ + @Deprecated public static Timestamp getTimestamp(long val, int scale) throws SFException { Timestamp ts = ArrowResultUtil.toJavaTimestamp(val, scale); return ResultUtil.adjustTimestamp(ts); } + + @SnowflakeJdbcInternalApi + public static Timestamp getTimestamp( + long epoch, int scale, TimeZone sessionTimeZone, boolean useSessionTimezone) + throws SFException { + return ResultUtil.adjustTimestamp( + ArrowResultUtil.toJavaTimestamp(epoch, scale, sessionTimeZone, useSessionTimezone)); + } } diff --git a/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java b/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java index a07e583ea..14bea858a 100644 --- a/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/StructuredTypeDateTimeConverter.java @@ -92,7 +92,8 @@ private Timestamp convertTimestampLtz(Object obj, int scale) throws SFException false); } } else if (obj instanceof Long) { - return BigIntToTimestampLTZConverter.getTimestamp((long) obj, scale); + return BigIntToTimestampLTZConverter.getTimestamp( + (long) obj, scale, sessionTimeZone, useSessionTimezone); } throw new SFException( ErrorCode.INVALID_VALUE_CONVERT, diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index a76f7fdf2..f4a19bd43 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -40,6 +40,7 @@ import java.sql.Statement; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import 
java.util.Enumeration; import java.util.List; @@ -59,6 +60,8 @@ import net.snowflake.client.core.SecurityUtil; import net.snowflake.client.core.SessionUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.ClientAuthnParameter; import net.snowflake.common.core.SqlState; @@ -82,6 +85,7 @@ @Category(TestCategoryConnection.class) public class ConnectionLatestIT extends BaseJDBCTest { @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + private static final SFLogger logger = SFLoggerFactory.getLogger(ConnectionLatestIT.class); private boolean defaultState; @@ -1374,4 +1378,98 @@ public void testDataSourceOktaGenerates429StatusCode() throws Exception { thread.join(); } } + + /** + * SNOW-1465374: For TIMESTAMP_LTZ we were returning timestamps without timezone when scale was + * set e.g. to 6 in Arrow format The problem wasn't visible when calling getString, but was + * visible when we called toString on passed getTimestamp since we returned {@link + * java.sql.Timestamp}, not {@link SnowflakeTimestampWithTimezone} + * + *

Timestamps before 1582-10-05 are always returned as {@link java.sql.Timestamp}, not {@link + * SnowflakeTimestampWithTimezone} {SnowflakeTimestampWithTimezone} + * + *

Added in > 3.16.1 + */ + @Test + public void shouldGetDifferentTimestampLtzConsistentBetweenFormats() throws Exception { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.executeUpdate( + "create or replace table DATETIMETZ_TYPE(timestamp_tzcol timestamp_ltz, timestamp_tzpcol timestamp_ltz(6), timestamptzcol timestampltz, timestampwtzcol timestamp with local time zone);"); + Arrays.asList( + "insert into DATETIMETZ_TYPE values('9999-12-31 23:59:59.999999999','9999-12-31 23:59:59.999999','9999-12-31 23:59:59.999999999','9999-12-31 23:59:59.999999999');", + "insert into DATETIMETZ_TYPE values('1582-01-01 00:00:00.000000001','1582-01-01 00:00:00.000001','1582-01-01 00:00:00.000000001','1582-01-01 00:00:00.000000001');", + "insert into DATETIMETZ_TYPE values('2000-06-18 18:29:30.123456789 +0100','2000-06-18 18:29:30.123456 +0100','2000-06-18 18:29:30.123456789 +0100','2000-06-18 18:29:30.123456789 +0100');", + "insert into DATETIMETZ_TYPE values(current_timestamp(),current_timestamp(),current_timestamp(),current_timestamp());", + "insert into DATETIMETZ_TYPE values('2000-06-18 18:29:30.12345 -0530','2000-06-18 18:29:30.123 -0530','2000-06-18 18:29:30.123456 -0530','2000-06-18 18:29:30.123 -0530');", + "insert into DATETIMETZ_TYPE values('2000-06-18 18:29:30','2000-06-18 18:29:30','2000-06-18 18:29:30','2000-06-18 18:29:30');", + "insert into DATETIMETZ_TYPE values('1582-10-04 00:00:00.000000001','1582-10-04 00:00:00.000001','1582-10-04 00:00:00.000000001','1582-10-04 00:00:00.000000001');", + "insert into DATETIMETZ_TYPE values('1582-10-05 00:00:00.000000001','1582-10-05 00:00:00.000001','1582-10-05 00:00:00.000000001','1582-10-05 00:00:00.000000001');", + "insert into DATETIMETZ_TYPE values('1583-10-05 00:00:00.000000001','1583-10-05 00:00:00.000001','1583-10-05 00:00:00.000000001','1583-10-05 00:00:00.000000001');") + .forEach( + insert -> { + try { + statement.executeUpdate(insert); + } catch 
(SQLException e) { + throw new RuntimeException(e); + } + }); + try (ResultSet arrowResultSet = statement.executeQuery("select * from DATETIMETZ_TYPE")) { + try (Connection jsonConnection = getConnection(); + Statement jsonStatement = jsonConnection.createStatement()) { + jsonStatement.execute("alter session set JDBC_QUERY_RESULT_FORMAT=JSON"); + try (ResultSet jsonResultSet = + jsonStatement.executeQuery("select * from DATETIMETZ_TYPE")) { + int rowIdx = 0; + while (arrowResultSet.next()) { + logger.debug("Checking row " + rowIdx); + assertTrue(jsonResultSet.next()); + for (int column = 1; column <= 4; ++column) { + logger.trace( + "JSON row[{}],column[{}] as string '{}', timestamp string '{}', as timestamp numeric '{}', tz offset={}, timestamp class {}", + rowIdx, + column, + jsonResultSet.getString(column), + jsonResultSet.getTimestamp(column), + jsonResultSet.getTimestamp(column).getTime(), + jsonResultSet.getTimestamp(column).getTimezoneOffset(), + jsonResultSet.getTimestamp(column).getClass()); + logger.trace( + "ARROW row[{}],column[{}] as string '{}', timestamp string '{}', as timestamp numeric '{}', tz offset={}, timestamp class {}", + rowIdx, + column, + arrowResultSet.getString(column), + arrowResultSet.getTimestamp(column), + arrowResultSet.getTimestamp(column).getTime(), + arrowResultSet.getTimestamp(column).getTimezoneOffset(), + arrowResultSet.getTimestamp(column).getClass()); + assertEquals( + "Expecting that string representation are the same for row " + + rowIdx + + " and column " + + column, + jsonResultSet.getString(column), + arrowResultSet.getString(column)); + assertEquals( + "Expecting that string representation (via toString) are the same for row " + + rowIdx + + " and column " + + column, + jsonResultSet.getTimestamp(column).toString(), + arrowResultSet.getTimestamp(column).toString()); + assertEquals( + "Expecting that timestamps are the same for row " + + rowIdx + + " and column " + + column, + jsonResultSet.getTimestamp(column), + 
arrowResultSet.getTimestamp(column)); + } + rowIdx++; + } + } + } + } + } + } } From 997002b86897944fbcf529c6b6a4a0a71eaeb078 Mon Sep 17 00:00:00 2001 From: Waleed Fateem <72769898+sfc-gh-wfateem@users.noreply.github.com> Date: Wed, 3 Jul 2024 00:13:41 -0500 Subject: [PATCH 41/54] SNOW-731500: Add JDBC Connectivity Diagnostics mode (#1789) --- .github/workflows/build-test.yml | 6 +- .../net/snowflake/client/core/SFSession.java | 49 +++ .../client/core/SFSessionProperty.java | 3 +- .../client/jdbc/SnowflakeConnectString.java | 1 - .../CertificateDiagnosticCheck.java | 61 +++ .../jdbc/diagnostic/DiagnosticCheck.java | 22 ++ .../jdbc/diagnostic/DiagnosticContext.java | 190 ++++++++++ .../diagnostic/DiagnosticTrustManager.java | 78 ++++ .../jdbc/diagnostic/DnsDiagnosticCheck.java | 83 ++++ .../HttpAndHttpsDiagnosticCheck.java | 57 +++ .../client/jdbc/diagnostic/ProxyConfig.java | 214 +++++++++++ .../jdbc/diagnostic/SnowflakeEndpoint.java | 74 ++++ .../jdbc/diagnostic/TcpDiagnosticCheck.java | 45 +++ .../category/TestCategoryDiagnostic.java | 3 + .../diagnostic/DiagnosticContextLatestIT.java | 356 ++++++++++++++++++ src/test/resources/allowlist.json | 18 + 16 files changed, 1255 insertions(+), 5 deletions(-) create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java create mode 100644 src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java create mode 
100644 src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java create mode 100644 src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java create mode 100644 src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java create mode 100644 src/test/resources/allowlist.json diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 90b03180f..190425de4 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -44,7 +44,7 @@ jobs: fail-fast: false matrix: runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] - category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 @@ -74,7 +74,7 @@ jobs: fail-fast: false matrix: runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] - category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 @@ -107,7 +107,7 @@ jobs: matrix: image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ] cloud: [ 'AWS', 'AZURE', 
'GCP' ] - category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader,TestCategoryDiagnostic', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] steps: - uses: actions/checkout@v1 diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index eeb1bd479..e79d23b28 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -21,6 +21,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; @@ -40,6 +41,7 @@ import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.jdbc.diagnostic.DiagnosticContext; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryClient; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; @@ -524,6 +526,7 @@ public synchronized void open() throws SFException, SnowflakeSQLException { + " passcode in password: {}, passcode is {}, private key is {}, disable socks proxy: {}," + " application: {}, app id: {}, app version: {}, login timeout: {}, retry timeout: {}, network timeout: {}," + " query timeout: {}, tracing: {}, private key file: {}, private key file pwd is {}," + + " enable_diagnostics: {}, diagnostics_allowlist_path: {}," + " session parameters: client store temporary credential: {}, gzip disabled: {}", connectionPropertiesMap.get(SFSessionProperty.SERVER_URL), 
connectionPropertiesMap.get(SFSessionProperty.ACCOUNT), @@ -553,6 +556,8 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE), SFLoggerUtil.isVariableProvided( (String) connectionPropertiesMap.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD)), + connectionPropertiesMap.get(SFSessionProperty.ENABLE_DIAGNOSTICS), + connectionPropertiesMap.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE), sessionParametersMap.get(CLIENT_STORE_TEMPORARY_CREDENTIAL), connectionPropertiesMap.get(SFSessionProperty.GZIP_DISABLED)); @@ -625,6 +630,9 @@ public synchronized void open() throws SFException, SnowflakeSQLException { // propagate OCSP mode to SFTrustManager. Note OCSP setting is global on JVM. HttpUtil.initHttpClient(httpClientSettingsKey, null); + + runDiagnosticsIfEnabled(); + SFLoginOutput loginOutput = SessionUtil.openSession(loginInput, connectionPropertiesMap, tracingLevel.toString()); isClosed = false; @@ -1278,4 +1286,45 @@ public SFClientConfig getSfClientConfig() { public void setSfClientConfig(SFClientConfig sfClientConfig) { this.sfClientConfig = sfClientConfig; } + + /** + * If the JDBC driver starts in diagnostics mode then the method prints results of the + * connectivity tests it performs in the logs. A SQLException is thrown with a message indicating + * that the driver is in diagnostics mode, and that a connection was not created. 
+ */ + private void runDiagnosticsIfEnabled() throws SnowflakeSQLException { + Map connectionPropertiesMap = getConnectionPropertiesMap(); + boolean isDiagnosticsEnabled = + Optional.ofNullable(connectionPropertiesMap.get(SFSessionProperty.ENABLE_DIAGNOSTICS)) + .map(b -> (Boolean) b) + .orElse(false); + + if (!isDiagnosticsEnabled) { + return; + } + logger.info("Running diagnostics tests"); + String allowListFile = + (String) connectionPropertiesMap.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE); + + if (allowListFile == null || allowListFile.isEmpty()) { + logger.error( + "Diagnostics was enabled but an allowlist file was not provided." + + " Please provide an allowlist JSON file using the connection parameter {}", + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE); + throw new SnowflakeSQLException( + "Diagnostics was enabled but an allowlist file was not provided. " + + "Please provide an allowlist JSON file using the connection parameter " + + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE); + } else { + DiagnosticContext diagnosticContext = + new DiagnosticContext(allowListFile, connectionPropertiesMap); + diagnosticContext.runDiagnostics(); + } + + throw new SnowflakeSQLException( + "A connection was not created because the driver is running in diagnostics mode." 
+ + " If this is unintended then disable diagnostics check by removing the " + + SFSessionProperty.ENABLE_DIAGNOSTICS + + " connection parameter"); + } } diff --git a/src/main/java/net/snowflake/client/core/SFSessionProperty.java b/src/main/java/net/snowflake/client/core/SFSessionProperty.java index 359448d24..3dcb09602 100644 --- a/src/main/java/net/snowflake/client/core/SFSessionProperty.java +++ b/src/main/java/net/snowflake/client/core/SFSessionProperty.java @@ -77,13 +77,14 @@ public enum SFSessionProperty { PUT_GET_MAX_RETRIES("putGetMaxRetries", false, Integer.class), RETRY_TIMEOUT("retryTimeout", false, Integer.class), + ENABLE_DIAGNOSTICS("ENABLE_DIAGNOSTICS", false, Boolean.class), + DIAGNOSTICS_ALLOWLIST_FILE("DIAGNOSTICS_ALLOWLIST_FILE", false, String.class), ENABLE_PATTERN_SEARCH("enablePatternSearch", false, Boolean.class), DISABLE_GCS_DEFAULT_CREDENTIALS("disableGcsDefaultCredentials", false, Boolean.class), JDBC_ARROW_TREAT_DECIMAL_AS_INT("JDBC_ARROW_TREAT_DECIMAL_AS_INT", false, Boolean.class), - DISABLE_SAML_URL_CHECK("disableSamlURLCheck", false, Boolean.class); // property key in string diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java index bc4ecadc4..ea456be6c 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeConnectString.java @@ -28,7 +28,6 @@ public class SnowflakeConnectString implements Serializable { private final int port; private final Map parameters; private final String account; - private static SnowflakeConnectString INVALID_CONNECT_STRING = new SnowflakeConnectString("", "", -1, Collections.emptyMap(), ""); diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java new file mode 100644 index 000000000..1fcbb1fb2 --- /dev/null 
+++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/CertificateDiagnosticCheck.java @@ -0,0 +1,61 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.Proxy; +import java.net.URL; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.TrustManager; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class CertificateDiagnosticCheck extends DiagnosticCheck { + + private static final String SECURE_SOCKET_PROTOCOL = "TLS"; + + private static final SFLogger logger = + SFLoggerFactory.getLogger(CertificateDiagnosticCheck.class); + + public CertificateDiagnosticCheck(ProxyConfig proxyConfig) { + super("SSL/TLS Certificate Test", proxyConfig); + } + + @Override + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + String port = Integer.toString(snowflakeEndpoint.getPort()); + if (snowflakeEndpoint.isSslEnabled()) { + String urlString = "https://" + hostname + ":" + port; + try { + SSLContext sslContext = SSLContext.getInstance(SECURE_SOCKET_PROTOCOL); + sslContext.init(null, new TrustManager[] {new DiagnosticTrustManager()}, null); + HttpsURLConnection.setDefaultSSLSocketFactory(sslContext.getSocketFactory()); + Proxy proxy = this.proxyConf.getProxy(snowflakeEndpoint); + new URL(urlString).openConnection(proxy).connect(); + } catch (NoSuchAlgorithmException e) { + logger.error( + "None of the security provider's implementation of SSLContextSpi supports " + + SECURE_SOCKET_PROTOCOL, + e); + } catch (KeyManagementException e) { + logger.error("Failed to initialize SSLContext", e); + } catch (MalformedURLException e) { + logger.error("Failed to create new URL object: " + urlString, e); + } catch (IOException 
e) { + logger.error("Failed to open a connection to: " + urlString, e); + } catch (Exception e) { + logger.error( + "Unexpected error occurred when trying to retrieve certificate from: " + hostname, e); + } finally { + HttpsURLConnection.setDefaultSSLSocketFactory( + (SSLSocketFactory) SSLSocketFactory.getDefault()); + } + } else { + logger.info("Host " + hostname + ":" + port + " is not secure. Skipping certificate check."); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java new file mode 100644 index 000000000..14aa2ff0a --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticCheck.java @@ -0,0 +1,22 @@ +package net.snowflake.client.jdbc.diagnostic; + +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +abstract class DiagnosticCheck { + protected final String name; + protected final ProxyConfig proxyConf; + private static final SFLogger logger = SFLoggerFactory.getLogger(DiagnosticCheck.class); + + abstract void doCheck(SnowflakeEndpoint snowflakeEndpoint); + + final void run(SnowflakeEndpoint snowflakeEndpoint) { + logger.info("JDBC Diagnostics - {}: hostname: {}", this.name, snowflakeEndpoint.getHost()); + doCheck(snowflakeEndpoint); + } + + protected DiagnosticCheck(String name, ProxyConfig proxyConf) { + this.name = name; + this.proxyConf = proxyConf; + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java new file mode 100644 index 000000000..7f146ac1b --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContext.java @@ -0,0 +1,190 @@ +package net.snowflake.client.jdbc.diagnostic; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.File; +import java.io.IOException; 
+import java.net.Proxy; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import net.snowflake.client.core.SFSessionProperty; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class DiagnosticContext { + + private static final SFLogger logger = SFLoggerFactory.getLogger(DiagnosticContext.class); + private static final String JAVAX_NET_DEBUG = "javax.net.debug"; + + private static final String JAVAX_TRUSTSTORE = "javax.net.ssl.trustStore"; + private ProxyConfig proxyConf; + + private List endpoints = new ArrayList<>(); + + private final DiagnosticCheck[] tests; + + public DiagnosticContext( + String allowListFile, Map connectionPropertiesMap) { + + createProxyConfiguration(connectionPropertiesMap); + + try { + JsonNode jsonNode = readAllowListJsonFile(allowListFile); + for (JsonNode objectNode : jsonNode) { + String type = objectNode.get("type").asText(); + String host = objectNode.get("host").asText(); + int port = objectNode.get("port").asInt(); + SnowflakeEndpoint e = new SnowflakeEndpoint(type, host, port); + endpoints.add(e); + } + + } catch (IOException e) { + logger.error("Failed to read allowlist file: ", e); + } catch (Exception e) { + logger.error("Failed to parse data in allowlist file: " + allowListFile, e); + } + + tests = + new DiagnosticCheck[] { + new DnsDiagnosticCheck(proxyConf), + new TcpDiagnosticCheck(proxyConf), + new CertificateDiagnosticCheck(proxyConf), + new HttpAndHttpsDiagnosticCheck(proxyConf) + }; + } + + /** This constructor is only used for testing */ + DiagnosticContext(Map connectionPropertiesMap) { + createProxyConfiguration(connectionPropertiesMap); + tests = null; + } + + private void createProxyConfiguration(Map connectionPropertiesMap) { + String proxyHost = (String) 
connectionPropertiesMap.get(SFSessionProperty.PROXY_HOST); + int proxyPort = + (connectionPropertiesMap.get(SFSessionProperty.PROXY_PORT) == null) + ? -1 + : Integer.parseInt((String) connectionPropertiesMap.get(SFSessionProperty.PROXY_PORT)); + String nonProxyHosts = (String) connectionPropertiesMap.get(SFSessionProperty.NON_PROXY_HOSTS); + proxyConf = new ProxyConfig(proxyHost, proxyPort, nonProxyHosts); + } + + public void runDiagnostics() { + + logEnvironmentInfo(); + + // Loop through endpoints and run diagnostic test on each one of them + for (DiagnosticCheck test : tests) { + for (SnowflakeEndpoint endpoint : endpoints) { + test.run(endpoint); + } + } + } + + private JsonNode readAllowListJsonFile(String jsonFilePath) throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + File allowListFile = new File(jsonFilePath); + + return objectMapper.readTree(allowListFile); + } + + public void logEnvironmentInfo() { + logger.info("Getting environment information"); + logger.info("Current truststore used: " + getTrustStoreLocation()); + logger.info("-Dnetworkaddress.cache.ttl: " + System.getProperty("networkaddress.cache.ttl")); + logger.info( + "-Dnetworkaddress.cache.negative.ttl: " + + System.getProperty("networkaddress.cache.negative.ttl")); + logger.info("-Djavax.net.debug: " + System.getProperty(JAVAX_NET_DEBUG)); + } + + private boolean isNullOrEmpty(String a) { + return a == null || a.isEmpty(); + } + + /** + * We determine the truststore in use based on the JSSE documentation: + * + *

1.) If the javax.net.ssl.trustStore property is defined, then the TrustManagerFactory + * attempts to find a file using the file name specified by that system property, and uses that + * file for the KeyStore parameter. If the javax.net.ssl.trustStorePassword system property is + * also defined, then its value is used to check the integrity of the data in the truststore + * before opening it. + * + *

If the javax.net.ssl.trustStore property is defined but the specified file does not exist, + * then a default TrustManager using an empty keystore is created. + * + *

2.) If the javax.net.ssl.trustStore system property was not specified, then: - if the file + * java-home/lib/security/jssecacerts exists, that file is used; - if the file + * java-home/lib/security/cacerts exists, that file is used; - if neither of these files exists, + * then the SSL cipher suite is anonymous, does not perform any authentication, and thus does not + * need a truststore. + */ + private String getTrustStoreLocation() { + String trustStore = System.getProperty(JAVAX_TRUSTSTORE); + String javaHome = System.getProperty("java.home"); + Path javaSecurityPath = FileSystems.getDefault().getPath(javaHome, "/lib/security"); + logger.info("JAVA_HOME: " + javaHome); + + if (isNullOrEmpty(trustStore)) { + logger.info("-D{} is null", JAVAX_TRUSTSTORE); + Path jssecacertsPath = + FileSystems.getDefault().getPath(javaSecurityPath.toString(), "jssecacerts"); + Path cacertsPath = FileSystems.getDefault().getPath(javaSecurityPath.toString(), "cacerts"); + + logger.info("Checking if jssecacerts or cacerts exist"); + if (Files.exists(jssecacertsPath)) { + logger.info(jssecacertsPath.toString() + " exists"); + trustStore = jssecacertsPath.toString(); + } else if (Files.exists(cacertsPath)) { + logger.info(cacertsPath.toString() + " exists"); + trustStore = cacertsPath.toString(); + } + } else { + logger.info("-D{} is set by user: {}", JAVAX_TRUSTSTORE, trustStore); + } + return trustStore; + } + + String getHttpProxyHost() { + return proxyConf.getHttpProxyHost(); + } + + int getHttpProxyPort() { + return proxyConf.getHttpProxyPort(); + } + + String getHttpsProxyHost() { + return proxyConf.getHttpsProxyHost(); + } + + int getHttpsProxyPort() { + return proxyConf.getHttpsProxyPort(); + } + + String getHttpNonProxyHosts() { + return proxyConf.getNonProxyHosts(); + } + + List getEndpoints() { + return endpoints; + } + + Proxy getProxy(SnowflakeEndpoint snowflakeEndpoint) { + return this.proxyConf.getProxy(snowflakeEndpoint); + } + + boolean isProxyEnabled() { + return 
proxyConf.isProxyEnabled(); + } + + boolean isProxyEnabledOnJvm() { + return proxyConf.isProxyEnabledOnJvm(); + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java new file mode 100644 index 000000000..cfd316f40 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DiagnosticTrustManager.java @@ -0,0 +1,78 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.net.Socket; +import java.security.cert.CertificateParsingException; +import java.security.cert.X509Certificate; +import javax.net.ssl.SSLEngine; +import javax.net.ssl.X509ExtendedTrustManager; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class DiagnosticTrustManager extends X509ExtendedTrustManager { + + private static final SFLogger logger = SFLoggerFactory.getLogger(DiagnosticTrustManager.class); + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType) { + printCertificates(certs); + } + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType, SSLEngine engine) { + printCertificates(certs); + } + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType, Socket sc) { + printCertificates(certs); + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) { + // do nothing + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType, Socket sc) { + // do nothing + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType, SSLEngine engine) { + // do nothing + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + // This implementation is not needed, so we're returning an empty array + return new X509Certificate[0]; + } + + private void printCertificates(X509Certificate[] chainCerts) { + logger.info("Printing 
certificate chain"); + StringBuilder sb = new StringBuilder(); + int i = 0; + for (X509Certificate x509Cert : chainCerts) { + try { + sb.append("\nCertificate[").append(i).append("]:").append("\n"); + sb.append("Subject: ").append(x509Cert.getSubjectDN()).append("\n"); + sb.append("Issuer: ").append(x509Cert.getIssuerDN()).append("\n"); + sb.append("Valid from: ").append(x509Cert.getNotBefore()).append("\n"); + sb.append("Not Valid After: ").append(x509Cert.getNotAfter()).append("\n"); + sb.append("Subject Alternative Names: ") + .append(x509Cert.getSubjectAlternativeNames()) + .append("\n"); + sb.append("Issuer Alternative Names: ") + .append(x509Cert.getIssuerAlternativeNames()) + .append("\n"); + sb.append("Serial: ").append(x509Cert.getSerialNumber().toString(16)).append("\n"); + logger.info(sb.toString()); + i++; + } catch (CertificateParsingException e) { + logger.error("Error parsing certificate", e); + } catch (Exception e) { + logger.error("Unexpected error occurred when parsing certificate", e); + } + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java new file mode 100644 index 000000000..259015283 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/DnsDiagnosticCheck.java @@ -0,0 +1,83 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.net.Inet4Address; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Hashtable; +import javax.naming.Context; +import javax.naming.NamingEnumeration; +import javax.naming.NamingException; +import javax.naming.directory.Attribute; +import javax.naming.directory.Attributes; +import javax.naming.directory.DirContext; +import javax.naming.spi.NamingManager; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class DnsDiagnosticCheck extends DiagnosticCheck { + + private static final SFLogger 
logger = SFLoggerFactory.getLogger(DnsDiagnosticCheck.class); + + private final String INITIAL_DNS_CONTEXT = "com.sun.jndi.dns.DnsContextFactory"; + + DnsDiagnosticCheck(ProxyConfig proxyConfig) { + super("DNS Lookup Test", proxyConfig); + } + + @Override + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + getCnameRecords(snowflakeEndpoint); + getArecords(snowflakeEndpoint); + } + + private void getCnameRecords(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + try { + Hashtable env = new Hashtable<>(); + env.put(Context.INITIAL_CONTEXT_FACTORY, INITIAL_DNS_CONTEXT); + DirContext dirCtx = (DirContext) NamingManager.getInitialContext(env); + Attributes attrs1 = dirCtx.getAttributes(snowflakeEndpoint.getHost(), new String[] {"CNAME"}); + NamingEnumeration attrs = attrs1.getAll(); + StringBuilder sb = new StringBuilder(); + sb.append("\nCNAME:\n"); + while (attrs.hasMore()) { + Attribute a = attrs.next(); + NamingEnumeration values = a.getAll(); + while (values.hasMore()) { + sb.append(values.next()); + sb.append("\n"); + } + } + logger.info(sb.toString()); + } catch (NamingException e) { + logger.error("Error occurred when getting CNAME record for host " + hostname, e); + } catch (Exception e) { + logger.error("Unexpected error occurred when getting CNAME record for host " + hostname, e); + } + } + + private void getArecords(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + try { + InetAddress[] addresses = InetAddress.getAllByName(hostname); + StringBuilder sb = new StringBuilder(); + sb.append("\nA Records:\n"); + for (InetAddress ip : addresses) { + if (ip instanceof Inet4Address) { + sb.append(ip.getHostAddress()); + sb.append("\n"); + } + // Check if this is a private link endpoint and if the ip address + // returned by the DNS query is a private IP address as expected. 
+ if (snowflakeEndpoint.isPrivateLink() && !ip.isSiteLocalAddress()) { + logger.error( + "Public IP address was returned for {}. Please review your DNS configurations.", + hostname); + } + } + logger.info(sb.toString()); + } catch (UnknownHostException e) { + logger.error("DNS query failed for host: " + snowflakeEndpoint.getHost(), e); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java new file mode 100644 index 000000000..90b499435 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/HttpAndHttpsDiagnosticCheck.java @@ -0,0 +1,57 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.Proxy; +import java.net.URL; +import java.util.List; +import java.util.Map; +import javax.net.ssl.HttpsURLConnection; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class HttpAndHttpsDiagnosticCheck extends DiagnosticCheck { + + private static final SFLogger logger = + SFLoggerFactory.getLogger(HttpAndHttpsDiagnosticCheck.class); + private final String HTTP_SCHEMA = "http://"; + private final String HTTPS_SCHEMA = "https://"; + + HttpAndHttpsDiagnosticCheck(ProxyConfig proxyConfig) { + super("HTTP/HTTPS Connection Test", proxyConfig); + } + + @Override + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + // We have to replace underscores with hyphens because the JDK doesn't allow underscores in the + // hostname + String hostname = snowflakeEndpoint.getHost().replace('_', '-'); + try { + Proxy proxy = this.proxyConf.getProxy(snowflakeEndpoint); + StringBuilder sb = new StringBuilder(); + String urlString = + (snowflakeEndpoint.isSslEnabled()) ? 
HTTPS_SCHEMA + hostname : HTTP_SCHEMA + hostname; + URL url = new URL(urlString); + HttpURLConnection con = + (snowflakeEndpoint.isSslEnabled()) + ? (HttpsURLConnection) url.openConnection(proxy) + : (HttpURLConnection) url.openConnection(proxy); + logger.info("Response from server: {} {}", con.getResponseCode(), con.getResponseMessage()); + sb.append("Headers:\n"); + + Map> headerFields = con.getHeaderFields(); + for (Map.Entry> header : headerFields.entrySet()) { + sb.append(header.getKey()).append(": ").append(header.getValue()).append("\n"); + } + + logger.info(sb.toString()); + + } catch (MalformedURLException e) { + logger.error( + "The URL format is incorrect, please check your allowlist JSON file for errors.", e); + } catch (IOException e) { + logger.error("Could not send an HTTP/HTTPS request to host " + hostname, e); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java new file mode 100644 index 000000000..7003bae46 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/ProxyConfig.java @@ -0,0 +1,214 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.regex.Pattern; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +/** + * This class is used to represent the proxy configurations passed to the JDBC driver either as JVM + * arguments or connection parameters. The class determines which proxy settings take precedence and + * should be used by the diagnostic tests. We normalize configurations where empty strings for + * hostnames and -1 for ports represent the absence of a configuration. + * + *

The order of precedence is: + * + *

1.) Connection parameters (proxy configurations passed to the constructor) 2.) JVM arguments + * + *

The useProxy parameter is ignored. If the proxy is configured using the JVM and someone wants + * to bypass that at the connection-level then they would need to set the following connection + * parameters: proxyHost=127.0.0.1 proxyPort=8080 nonProxyHosts=* + * + *

i.e. bypass the proxy host when connecting to any host. + */ +class ProxyConfig { + private String proxyHost; + private int proxyPort; + private String nonProxyHosts; + private String jvmHttpProxyHost; + private String jvmHttpsProxyHost; + private int jvmHttpProxyPort; + private int jvmHttpsProxyPort; + private String jvmNonProxyHosts; + private String finalHttpProxyHost = ""; + private String finalHttpsProxyHost = ""; + private int finalHttpProxyPort = -1; + private int finalHttpsProxyPort = -1; + private String finalNonProxyHosts = ""; + private boolean isProxyEnabled = false; + + private boolean isProxyEnabledOnJvm = false; + + private final String JVM_HTTP_PROXY_HOST = "http.proxyHost"; + private final String JVM_HTTPS_PROXY_HOST = "https.proxyHost"; + private final String JVM_HTTP_PROXY_PORT = "http.proxyPort"; + private final String JVM_HTTPS_PROXY_PORT = "https.proxyPort"; + private final String JVM_HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts"; + + private static final SFLogger logger = SFLoggerFactory.getLogger(ProxyConfig.class); + + public String getHttpProxyHost() { + return finalHttpProxyHost; + } + + public String getHttpsProxyHost() { + return finalHttpsProxyHost; + } + + public int getHttpProxyPort() { + return finalHttpProxyPort; + } + + public int getHttpsProxyPort() { + return finalHttpsProxyPort; + } + + public String getNonProxyHosts() { + return finalNonProxyHosts; + } + + public void setProxyHost(String proxyHost) { + this.proxyHost = proxyHost; + } + + public void setProxyPort(int proxyPort) { + this.proxyPort = proxyPort; + } + + public void setNonProxyHosts(String nonProxyHosts) { + this.nonProxyHosts = nonProxyHosts; + } + + public ProxyConfig(String proxyHost, int proxyPort, String nonProxyHosts) { + jvmHttpProxyHost = + (System.getProperty(JVM_HTTP_PROXY_HOST) == null) + ? "" + : System.getProperty(JVM_HTTP_PROXY_HOST); + + jvmHttpsProxyHost = + (System.getProperty(JVM_HTTPS_PROXY_HOST) == null) + ? 
"" + : System.getProperty(JVM_HTTPS_PROXY_HOST); + + jvmHttpProxyPort = + (System.getProperty(JVM_HTTP_PROXY_PORT) == null) + ? -1 + : Integer.parseInt(System.getProperty(JVM_HTTP_PROXY_PORT)); + + jvmHttpsProxyPort = + (System.getProperty(JVM_HTTPS_PROXY_PORT) == null) + ? -1 + : Integer.parseInt(System.getProperty(JVM_HTTPS_PROXY_PORT)); + + jvmNonProxyHosts = + (System.getProperty(JVM_HTTP_NON_PROXY_HOSTS) == null) + ? "" + : System.getProperty(JVM_HTTP_NON_PROXY_HOSTS); + this.proxyHost = (proxyHost == null) ? "" : proxyHost; + this.proxyPort = proxyPort; + this.nonProxyHosts = (nonProxyHosts == null) ? "" : nonProxyHosts; + resolveProxyConfigurations(); + } + + public ProxyConfig() { + this(null, -1, null); + } + + public boolean isProxyEnabled() { + return isProxyEnabled; + } + + public boolean isProxyEnabledOnJvm() { + return isProxyEnabledOnJvm; + } + + /** + * This method reviews both the JVM and connection parameter configurations then concludes which + * settings to use 1.) Check if proxy settings were passed in the connection parameters, if so, + * then we use that right away. 2.) If connection parameters were not passed, then review JVM + * arguments and use those. 3.) If neither were set, then don't use any proxy settings (default). + */ + private void resolveProxyConfigurations() { + // Both proxyHost and proxyPort connection parameters must be present. 
+ StringBuilder sb = new StringBuilder(); + logger.info("Resolving proxy configurations"); + sb.append("Proxy Configurations picked up from "); + if (!proxyHost.isEmpty() && proxyPort != -1) { + finalHttpProxyHost = proxyHost; + finalHttpsProxyHost = proxyHost; + finalHttpProxyPort = proxyPort; + finalHttpsProxyPort = proxyPort; + finalNonProxyHosts = nonProxyHosts; + isProxyEnabled = true; + sb.append("connection parameters:\n"); + sb.append("proxyHost: ").append(proxyHost).append("\n"); + sb.append("proxyPort: ").append(proxyPort).append("\n"); + sb.append("nonProxyHosts: ").append(nonProxyHosts); + } else if ((!jvmHttpProxyHost.isEmpty() && jvmHttpProxyPort != -1) + || (!jvmHttpsProxyHost.isEmpty() && jvmHttpsProxyPort != -1)) { + finalHttpProxyHost = jvmHttpProxyHost; + finalHttpProxyPort = jvmHttpProxyPort; + finalHttpsProxyHost = jvmHttpsProxyHost; + finalHttpsProxyPort = jvmHttpsProxyPort; + finalNonProxyHosts = jvmNonProxyHosts; + isProxyEnabled = true; + isProxyEnabledOnJvm = true; + sb.append("JVM arguments:\n"); + sb.append("-D").append(JVM_HTTP_PROXY_HOST).append("=").append(jvmHttpProxyHost).append("\n"); + sb.append("-D").append(JVM_HTTP_PROXY_PORT).append("=").append(jvmHttpProxyPort).append("\n"); + sb.append("-D") + .append(JVM_HTTPS_PROXY_HOST) + .append("=") + .append(jvmHttpsProxyHost) + .append("\n"); + sb.append("-D") + .append(JVM_HTTPS_PROXY_PORT) + .append("=") + .append(jvmHttpsProxyPort) + .append("\n"); + } + logger.info(sb.toString()); + } + + protected boolean isBypassProxy(String hostname) { + String nonProxyHosts = getNonProxyHosts().replace(".", "\\.").replace("*", ".*"); + String[] nonProxyHostsArray = nonProxyHosts.split("\\|"); + for (String i : nonProxyHostsArray) { + if (Pattern.compile(i).matcher(hostname).matches()) { + return true; + } + } + return false; + } + + public Proxy getProxy(SnowflakeEndpoint endpoint) { + if (!isProxyEnabled || isBypassProxy(endpoint.getHost())) { + return Proxy.NO_PROXY; + } else if 
(endpoint.isSslEnabled()) { + return (isHttpsProxyEnabled()) + ? new Proxy( + Proxy.Type.HTTP, new InetSocketAddress(finalHttpsProxyHost, finalHttpsProxyPort)) + : Proxy.NO_PROXY; + } + return (isHttpProxyEnabled()) + ? new Proxy(Proxy.Type.HTTP, new InetSocketAddress(finalHttpProxyHost, finalHttpProxyPort)) + : Proxy.NO_PROXY; + } + + /* + Check that both http proxy host and http proxy port are set, + only then do we consider that http proxy is enabled. + */ + private boolean isHttpProxyEnabled() { + return (!finalHttpProxyHost.isEmpty() || finalHttpProxyPort != -1); + } + + /* + Check that both https proxy host and http proxy port are set, + only then do we consider that http proxy is enabled. + */ + private boolean isHttpsProxyEnabled() { + return (!finalHttpsProxyHost.isEmpty() || finalHttpsProxyPort != -1); + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java new file mode 100644 index 000000000..6cecb71d9 --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java @@ -0,0 +1,74 @@ +package net.snowflake.client.jdbc.diagnostic; + +/* +The SnowflakeEndpoint class represents an endpoint as returned by the System$allowlist() SQL +function. 
Example: + +[{"type":"SNOWFLAKE_DEPLOYMENT","host":"snowhouse.snowflakecomputing.com","port":443},{"type":"SNOWFLAKE_DEPLOYMENT_REGIONLESS","host":"sfcogsops-snowhouse_aws_us_west_2.snowflakecomputing.com","port":443},{"type":"STAGE","host":"sfc-ds2-customer-stage.s3.amazonaws.com","port":443},{"type":"STAGE","host":"sfc-ds2-customer-stage.s3.us-west-2.amazonaws.com","port":443},{"type":"STAGE","host":"sfc-ds2-customer-stage.s3-us-west-2.amazonaws.com","port":443},{"type":"SNOWSQL_REPO","host":"sfc-repo.snowflakecomputing.com","port":443},{"type":"OUT_OF_BAND_TELEMETRY","host":"client-telemetry.snowflakecomputing.com","port":443},{"type":"OCSP_CACHE","host":"ocsp.snowflakecomputing.com","port":80},{"type":"DUO_SECURITY","host":"api-35a58de5.duosecurity.com","port":443},{"type":"CLIENT_FAILOVER","host":"sfcogsops-snowhouseprimary.snowflakecomputing.com","port":443},{"type":"OCSP_RESPONDER","host":"o.ss2.us","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.r2m02.amazontrust.com","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.sca1b.amazontrust.com","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.rootg2.amazontrust.com","port":80},{"type":"OCSP_RESPONDER","host":"ocsp.rootca1.amazontrust.com","port":80},{"type":"SNOWSIGHT_DEPLOYMENT","host":"app.snowflake.com","port":443},{"type":"SNOWSIGHT_DEPLOYMENT","host":"apps-api.c1.us-west-2.aws.app.snowflake.com","port":443}] + + */ +class SnowflakeEndpoint { + private final String type; + private final String host; + private final int port; + private final boolean isSecure; + + public SnowflakeEndpoint(String type, String host, int port) { + this.type = type; + this.host = host; + this.port = port; + this.isSecure = (this.port == 443); + } + + public SnowflakeEndpoint() { + this(null, null, -1); + } + + public String getType() { + return this.type; + } + + public String getHost() { + return this.host; + } + + public boolean isSslEnabled() { + return this.isSecure; + } + + public int getPort() { + return this.port; + } + + 
// We can only tell if private link is enabled for certain hosts when the hostname contains + // the word 'privatelink' but we don't have a good way of telling if a private link connection + // is expected for internal stages for example. + public boolean isPrivateLink() { + return (host.contains("privatelink.snowflakecomputing.com")); + } + + @Override + public String toString() { + return this.host + ":" + this.port; + } + + @Override + public boolean equals(Object o) { + boolean isSnowflakeEndpoint = o instanceof SnowflakeEndpoint; + if (!isSnowflakeEndpoint) { + return false; + } + if (!((SnowflakeEndpoint) o).getHost().equals(this.host)) { + return false; + } + if (((SnowflakeEndpoint) o).getPort() != this.port) { + return false; + } + + if (!((SnowflakeEndpoint) o).getType().equals(this.type)) { + return false; + } + + return true; + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java new file mode 100644 index 000000000..c1538de7d --- /dev/null +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/TcpDiagnosticCheck.java @@ -0,0 +1,45 @@ +package net.snowflake.client.jdbc.diagnostic; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.Socket; +import java.net.SocketTimeoutException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +class TcpDiagnosticCheck extends DiagnosticCheck { + + private static final SFLogger logger = SFLoggerFactory.getLogger(TcpDiagnosticCheck.class); + + TcpDiagnosticCheck(ProxyConfig proxyConfig) { + super("TCP Connection Test", proxyConfig); + } + + protected void doCheck(SnowflakeEndpoint snowflakeEndpoint) { + String hostname = snowflakeEndpoint.getHost(); + int connectTimeoutMillis = 60000; + int port = snowflakeEndpoint.getPort(); + Proxy proxy = proxyConf.getProxy(snowflakeEndpoint); + try (Socket socket = 
new Socket(proxy)) { + socket.bind(null); + logger.info( + "Establishing TCP connection: {} -> {}:{}", + socket.getLocalSocketAddress(), + snowflakeEndpoint.getHost(), + snowflakeEndpoint.getPort()); + socket.connect(new InetSocketAddress(hostname, port), connectTimeoutMillis); + logger.info( + "Established a TCP connection successfully: {} -> {}", + socket.getLocalSocketAddress(), + socket.getRemoteSocketAddress()); + } catch (SocketTimeoutException e) { + logger.error( + "Could not establish TCP connection within timeout of " + connectTimeoutMillis + "ms", e); + } catch (IOException e) { + logger.error("Error connecting to host " + hostname + ":" + port, e); + } catch (Exception e) { + logger.error("Unexpected error occurred when connecting to host " + hostname + ":" + port, e); + } + } +} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java new file mode 100644 index 000000000..ecb5c0509 --- /dev/null +++ b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java @@ -0,0 +1,3 @@ +package net.snowflake.client.category; + +public interface TestCategoryDiagnostic {} diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java new file mode 100644 index 000000000..042c6b0f4 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java @@ -0,0 +1,356 @@ +package net.snowflake.client.jdbc.diagnostic; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import net.snowflake.client.category.TestCategoryDiagnostic; +import 
net.snowflake.client.core.SFSessionProperty; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(TestCategoryDiagnostic.class) +public class DiagnosticContextLatestIT { + + private static final String HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts"; + private static final String HTTP_PROXY_HOST = "http.proxyHost"; + private static final String HTTP_PROXY_PORT = "http.proxyPort"; + private static final String HTTPS_PROXY_HOST = "https.proxyHost"; + private static final String HTTPS_PROXY_PORT = "https.proxyPort"; + + private static String oldJvmNonProxyHosts; + private static String oldJvmHttpProxyHost; + private static String oldJvmHttpProxyPort; + private static String oldJvmHttpsProxyHost; + private static String oldJvmHttpsProxyPort; + + @BeforeClass + public static void init() { + oldJvmNonProxyHosts = System.getProperty(HTTP_NON_PROXY_HOSTS); + oldJvmHttpProxyHost = System.getProperty(HTTP_PROXY_HOST); + oldJvmHttpProxyPort = System.getProperty(HTTP_PROXY_PORT); + oldJvmHttpsProxyHost = System.getProperty(HTTPS_PROXY_HOST); + oldJvmHttpsProxyPort = System.getProperty(HTTPS_PROXY_PORT); + } + + @Before + public void clearJvmProperties() { + System.clearProperty(HTTP_NON_PROXY_HOSTS); + System.clearProperty(HTTP_PROXY_HOST); + System.clearProperty(HTTP_PROXY_PORT); + System.clearProperty(HTTPS_PROXY_HOST); + System.clearProperty(HTTPS_PROXY_PORT); + } + /** + * Check that all the mock Snowflake Endpoints we manually created exist in the array returned to + * us by the DiagnosticContext class which it generated after it parsed the allowlist.json file + * during initialization. + * + *

Test added in version > 3.16.1 + */ + @Test + public void parseAllowListFileTest() { + Map connectionPropertiesMap = new HashMap<>(); + File allowlistFile = new File("src/test/resources/allowlist.json"); + + DiagnosticContext diagnosticContext = + new DiagnosticContext(allowlistFile.getAbsolutePath(), connectionPropertiesMap); + List endpointsFromTestFile = diagnosticContext.getEndpoints(); + List mockEndpoints = new ArrayList<>(); + + mockEndpoints.add( + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint( + "SNOWFLAKE_DEPLOYMENT_REGIONLESS", "org-account_name.snowflakecomputing.com", 443)); + mockEndpoints.add(new SnowflakeEndpoint("STAGE", "stage-bucket.s3.amazonaws.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint("STAGE", "stage-bucket.s3.us-west-2.amazonaws.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint("STAGE", "stage-bucket.s3-us-west-2.amazonaws.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint("SNOWSQL_REPO", "snowsql_repo.snowflakecomputing.com", 443)); + mockEndpoints.add( + new SnowflakeEndpoint( + "OUT_OF_BAND_TELEMETRY", "out_of_band_telemetry.snowflakecomputing.com", 443)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("DUO_SECURITY", "duo_security.duosecurity.com", 443)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.rootg2.amazontrust.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "o.ss2.us", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.sca1b.amazontrust.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.r2m01.amazontrust.com", 80)); + mockEndpoints.add(new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.rootca1.amazontrust.com", 80)); + mockEndpoints.add( + new SnowflakeEndpoint("SNOWSIGHT_DEPLOYMENT", "snowsight_deployment.snowflake.com", 443)); + 
mockEndpoints.add( + new SnowflakeEndpoint("SNOWSIGHT_DEPLOYMENT", "snowsight_deployment_2.snowflake.com", 443)); + + String testFailedMessage = + "The lists of SnowflakeEndpoints in mockEndpoints and endpointsFromTestFile should be identical"; + assertTrue(testFailedMessage, endpointsFromTestFile.containsAll(mockEndpoints)); + } + + /** + * Test that we correctly determine that proxy settings are absent from both the JVM and the + * connections parameters (i.e. empty strings for hostnames, or -1 for ports). + * + *

Test added in version > 3.16.1 + */ + @Test + public void testEmptyProxyConfig() { + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + assertFalse("Proxy configurations should be empty", diagnosticContext.isProxyEnabled()); + assertTrue( + "getHttpProxyHost() must return an empty string in the absence of proxy configuration", + diagnosticContext.getHttpProxyHost().isEmpty()); + assertEquals( + "getHttpProxyPort() must return -1 in the absence of proxy configuration", + -1, + diagnosticContext.getHttpProxyPort()); + assertTrue( + "getHttpsProxyHost() must return an empty string in the absence of proxy configuration", + diagnosticContext.getHttpsProxyHost().isEmpty()); + assertEquals( + "getHttpsProxyPort() must return -1 in the absence of proxy configuration", + -1, + diagnosticContext.getHttpsProxyPort()); + assertTrue( + "getHttpNonProxyHosts() must return an empty string in the absence of proxy configuration", + diagnosticContext.getHttpNonProxyHosts().isEmpty()); + } + + /** Test added in version > 3.16.1 */ + @Test + public void testProxyConfigSetOnJvm() { + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_NON_PROXY_HOSTS, "*.domain.com|localhost"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + assertTrue(diagnosticContext.isProxyEnabled()); + assertTrue(diagnosticContext.isProxyEnabledOnJvm()); + assertEquals(diagnosticContext.getHttpProxyHost(), "http.proxyHost.com"); + assertEquals(diagnosticContext.getHttpProxyPort(), 8080); + assertEquals(diagnosticContext.getHttpsProxyHost(), "https.proxyHost.com"); + assertEquals(diagnosticContext.getHttpsProxyPort(), 8083); + 
assertEquals(diagnosticContext.getHttpNonProxyHosts(), "*.domain.com|localhost"); + } + + /** + * If Proxy settings are passed using JVM arguments and connection parameters then the connection + * parameters take precedence. + * + *

Test added in version > 3.16.1 + */ + @Test + public void testProxyOverrideWithConnectionParameter() { + + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_NON_PROXY_HOSTS, "*.domain.com|localhost"); + + Map connectionPropertiesMap = new HashMap<>(); + + connectionPropertiesMap.put(SFSessionProperty.PROXY_HOST, "override.proxyHost.com"); + connectionPropertiesMap.put(SFSessionProperty.PROXY_PORT, "80"); + connectionPropertiesMap.put(SFSessionProperty.NON_PROXY_HOSTS, "*.new_domain.com|localhost"); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + assertTrue(diagnosticContext.isProxyEnabled()); + assertFalse(diagnosticContext.isProxyEnabledOnJvm()); + assertEquals(diagnosticContext.getHttpProxyHost(), "override.proxyHost.com"); + assertEquals(diagnosticContext.getHttpProxyPort(), 80); + assertEquals(diagnosticContext.getHttpsProxyHost(), "override.proxyHost.com"); + assertEquals(diagnosticContext.getHttpsProxyPort(), 80); + assertEquals(diagnosticContext.getHttpNonProxyHosts(), "*.new_domain.com|localhost"); + } + + /** Test added in version > 3.16.1 */ + @Test + public void testGetProxy() { + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_NON_PROXY_HOSTS, "*.domain.com|localhost|*.snowflakecomputing.com"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + String httpProxyHost = diagnosticContext.getHttpProxyHost(); + int httpProxyPort = diagnosticContext.getHttpProxyPort(); + String httpsProxyHost = diagnosticContext.getHttpsProxyHost(); + int 
httpsProxyPort = diagnosticContext.getHttpsProxyPort(); + + SnowflakeEndpoint httpsHostBypassingProxy = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint httpHostBypassingProxy = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + SnowflakeEndpoint hostWithHttpProxy = + new SnowflakeEndpoint("OCSP_RESPONDER", "ocsp.rootg2.amazontrust.com", 80); + SnowflakeEndpoint hostWithHttpsProxy = + new SnowflakeEndpoint("STAGE", "stage-bucket.s3-us-west-2.amazonaws.com", 443); + + Proxy byPassProxy = Proxy.NO_PROXY; + Proxy httpProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpProxyHost, httpProxyPort)); + Proxy httpsProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpsProxyHost, httpsProxyPort)); + + assertEquals(byPassProxy, diagnosticContext.getProxy(httpsHostBypassingProxy)); + assertEquals(byPassProxy, diagnosticContext.getProxy(httpHostBypassingProxy)); + assertEquals(httpProxy, diagnosticContext.getProxy(hostWithHttpProxy)); + assertEquals(httpsProxy, diagnosticContext.getProxy(hostWithHttpsProxy)); + } + + /** + * Test that we correctly create direct HTTPS connections and only route HTTP requests through a + * proxy server when we set only the -Dhttp.proxyHost and -Dhttp.proxyPort arguments + * + *

Test added in version > 3.16.1 + */ + @Test + public void testGetHttpProxyOnly() { + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + System.clearProperty(HTTP_PROXY_HOST); + System.clearProperty(HTTP_PROXY_PORT); + + String httpProxyHost = diagnosticContext.getHttpProxyHost(); + int httpProxyPort = diagnosticContext.getHttpProxyPort(); + + Proxy noProxy = Proxy.NO_PROXY; + Proxy httpProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpProxyHost, httpProxyPort)); + + SnowflakeEndpoint httpsHostDirectConnection = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint httpHostProxy = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + + assertEquals(noProxy, diagnosticContext.getProxy(httpsHostDirectConnection)); + assertEquals(httpProxy, diagnosticContext.getProxy(httpHostProxy)); + } + + /** + * Test that we correctly create direct HTTP connections and only route HTTPS through a proxy + * server when we set only the -Dhttps.proxyHost and -Dhttps.proxyPort parameters + * + *

Test added in version > 3.16.1 + */ + @Test + public void testGetHttpsProxyOnly() { + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + + Map connectionPropertiesMap = new HashMap<>(); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + String httpsProxyHost = diagnosticContext.getHttpsProxyHost(); + int httpsProxyPort = diagnosticContext.getHttpsProxyPort(); + + Proxy noProxy = Proxy.NO_PROXY; + Proxy httpsProxy = + new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpsProxyHost, httpsProxyPort)); + + SnowflakeEndpoint httpsHostProxy = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint httpHostDirectConnection = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + + assertEquals(noProxy, diagnosticContext.getProxy(httpHostDirectConnection)); + assertEquals(httpsProxy, diagnosticContext.getProxy(httpsHostProxy)); + } + + /** + * Test that we create a direct connection to every host even though the JVM arguments are set. We + * override the JVM arguments with the nonProxyHosts connection parameter. + * + *

Test added in version > 3.16.1 + */ + @Test + public void testgetNoProxyAfterOverridingJvm() { + System.setProperty(HTTPS_PROXY_HOST, "https.proxyHost.com"); + System.setProperty(HTTPS_PROXY_PORT, "8083"); + System.setProperty(HTTP_PROXY_HOST, "http.proxyHost.com"); + System.setProperty(HTTP_PROXY_PORT, "8080"); + + Map connectionPropertiesMap = new HashMap<>(); + + connectionPropertiesMap.put(SFSessionProperty.PROXY_HOST, "override.proxyHost.com"); + connectionPropertiesMap.put(SFSessionProperty.PROXY_PORT, "80"); + connectionPropertiesMap.put(SFSessionProperty.NON_PROXY_HOSTS, "*"); + + DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); + + Proxy noProxy = Proxy.NO_PROXY; + + SnowflakeEndpoint host1 = + new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", "account_name.snowflakecomputing.com", 443); + SnowflakeEndpoint host2 = + new SnowflakeEndpoint("OCSP_CACHE", "ocsp_cache.snowflakecomputing.com", 80); + SnowflakeEndpoint host3 = + new SnowflakeEndpoint( + "SNOWFLAKE_DEPLOYMENT", "account_name.privatelink.snowflakecomputing.com", 443); + SnowflakeEndpoint host4 = + new SnowflakeEndpoint("STAGE", "stage-bucket.s3-us-west-2.amazonaws.com", 443); + + assertEquals(noProxy, diagnosticContext.getProxy(host1)); + assertEquals(noProxy, diagnosticContext.getProxy(host2)); + assertEquals(noProxy, diagnosticContext.getProxy(host3)); + assertEquals(noProxy, diagnosticContext.getProxy(host4)); + } + + @After + public void restoreJvmArguments() { + System.clearProperty(HTTP_NON_PROXY_HOSTS); + System.clearProperty(HTTP_PROXY_HOST); + System.clearProperty(HTTP_PROXY_PORT); + System.clearProperty(HTTPS_PROXY_HOST); + System.clearProperty(HTTPS_PROXY_PORT); + + if (oldJvmNonProxyHosts != null) { + System.setProperty(HTTP_NON_PROXY_HOSTS, oldJvmNonProxyHosts); + } + if (oldJvmHttpProxyHost != null) { + System.setProperty(HTTP_PROXY_HOST, oldJvmHttpProxyHost); + } + if (oldJvmHttpProxyPort != null) { + System.setProperty(HTTP_PROXY_PORT, 
oldJvmHttpProxyPort); + } + if (oldJvmHttpsProxyHost != null) { + System.setProperty(HTTPS_PROXY_HOST, oldJvmHttpsProxyHost); + } + if (oldJvmHttpsProxyPort != null) { + System.getProperty(HTTPS_PROXY_PORT, oldJvmHttpsProxyPort); + } + } +} diff --git a/src/test/resources/allowlist.json b/src/test/resources/allowlist.json new file mode 100644 index 000000000..12ae61965 --- /dev/null +++ b/src/test/resources/allowlist.json @@ -0,0 +1,18 @@ +[ + {"host":"account_name.snowflakecomputing.com","port":443,"type":"SNOWFLAKE_DEPLOYMENT"}, + {"host":"org-account_name.snowflakecomputing.com","port":443,"type":"SNOWFLAKE_DEPLOYMENT_REGIONLESS"}, + {"host":"stage-bucket.s3.amazonaws.com","port":443,"type":"STAGE"}, + {"host":"stage-bucket.s3.us-west-2.amazonaws.com","port":443,"type":"STAGE"}, + {"host":"stage-bucket.s3-us-west-2.amazonaws.com","port":443,"type":"STAGE"}, + {"host":"snowsql_repo.snowflakecomputing.com","port":443,"type":"SNOWSQL_REPO"}, + {"host":"out_of_band_telemetry.snowflakecomputing.com","port":443,"type":"OUT_OF_BAND_TELEMETRY"}, + {"host":"ocsp_cache.snowflakecomputing.com","port":80,"type":"OCSP_CACHE"}, + {"host":"duo_security.duosecurity.com","port":443,"type":"DUO_SECURITY"}, + {"host":"ocsp.rootg2.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"o.ss2.us","port":80,"type":"OCSP_RESPONDER"}, + {"host":"ocsp.sca1b.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"ocsp.r2m01.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"ocsp.rootca1.amazontrust.com","port":80,"type":"OCSP_RESPONDER"}, + {"host":"snowsight_deployment.snowflake.com","port":443,"type":"SNOWSIGHT_DEPLOYMENT"}, + {"host":"snowsight_deployment_2.snowflake.com","port":443,"type":"SNOWSIGHT_DEPLOYMENT"} +] From 5f840231e128945105d8e1c02df3b7e20947080c Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Wed, 3 Jul 2024 10:50:51 +0200 Subject: [PATCH 42/54] SNOW-1514498 - support for host when use file configuration (#1809) --- 
.../config/SFConnectionConfigParser.java | 31 +++++++++++++++---- .../config/SFConnectionConfigParserTest.java | 28 +++++++++++++++++ 2 files changed, 53 insertions(+), 6 deletions(-) diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java index 9040fa392..405dd09db 100644 --- a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java @@ -97,10 +97,7 @@ public static ConnectionParameters buildConnectionParameters() throws SnowflakeS Properties conectionProperties = new Properties(); conectionProperties.putAll(fileConnectionConfiguration); - String url = - Optional.ofNullable(fileConnectionConfiguration.get("account")) - .map(ac -> createUrl(ac, fileConnectionConfiguration)) - .orElse(null); + String url = createUrl(fileConnectionConfiguration); logger.debug("Url created using parameters from connection configuration file: {}", url); if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) @@ -127,8 +124,30 @@ public static ConnectionParameters buildConnectionParameters() throws SnowflakeS } } - private static String createUrl(String account, Map fileConnectionConfiguration) { - String host = String.format("%s.snowflakecomputing.com", account); + private static String createUrl(Map fileConnectionConfiguration) + throws SnowflakeSQLException { + Optional maybeAccount = Optional.ofNullable(fileConnectionConfiguration.get("account")); + Optional maybeHost = Optional.ofNullable(fileConnectionConfiguration.get("host")); + if (maybeAccount.isPresent() + && maybeHost.isPresent() + && !maybeHost.get().contains(maybeAccount.get())) { + logger.warn( + String.format( + "Inconsistent host and account values in file configuration. ACCOUNT: {} , HOST: {}. 
The host value will be used.", + maybeAccount.get(), + maybeHost.get())); + } + String host = + maybeHost.orElse( + maybeAccount + .map(acnt -> String.format("%s.snowflakecomputing.com", acnt)) + .orElse(null)); + if (host == null || host.isEmpty()) { + logger.warn("Neither host nor account is specified in connection parameters"); + throw new SnowflakeSQLException( + "Unable to connect because neither host nor account is specified in connection parameters"); + } + logger.debug("Host created using parameters from connection configuration file: {}", host); String port = fileConnectionConfiguration.get("port"); String protocol = fileConnectionConfiguration.get("protocol"); if (Strings.isNullOrEmpty(port)) { diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java index e68e68fa0..07882fcb7 100644 --- a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -26,6 +26,7 @@ import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeUtil; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -130,4 +131,31 @@ private Path createFilePathWithPermission(Path path, boolean onlyUserPermission) return Files.createFile(path); } } + + @Test + public void testLoadSFConnectionConfigWithHostConfigured() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + Map extraparams = new HashMap(); + extraparams.put("host", "snowflake.reg.local"); + extraparams.put("account", null); + extraparams.put("port", "8082"); + prepareConnectionConfigurationTomlFile(extraparams, true); + ConnectionParameters data = 
SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals("jdbc:snowflake://snowflake.reg.local:8082", data.getUrl()); + } + + @Test + public void shouldThrowExceptionIfNoneOfHostAndAccountIsSet() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + Map extraparams = new HashMap(); + extraparams.put("host", null); + extraparams.put("account", null); + prepareConnectionConfigurationTomlFile(extraparams, true); + Assert.assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } } From f6548d570dfe5b9736c309c319ccb054a92e05c4 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Wed, 3 Jul 2024 15:04:14 +0200 Subject: [PATCH 43/54] SNOW-1369651: Do not fail file pattern expanding on file not found in different pattern (#1811) --- .../jdbc/SnowflakeFileTransferAgent.java | 18 +++- .../jdbc/FileUploaderExpandFileNamesTest.java | 96 +++++++++++++++++++ 2 files changed, 109 insertions(+), 5 deletions(-) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java index 0afe353f0..751b47d19 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java @@ -15,6 +15,7 @@ import com.google.common.io.ByteStreams; import com.google.common.io.CountingOutputStream; import java.io.File; +import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; @@ -74,6 +75,8 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.filefilter.WildcardFileFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * 
Class for uploading/downloading files @@ -97,6 +100,7 @@ public class SnowflakeFileTransferAgent extends SFBaseFileTransferAgent { private static final String localFSFileSep = systemGetProperty("file.separator"); private static final int DEFAULT_PARALLEL = 10; + private static final Logger log = LoggerFactory.getLogger(SnowflakeFileTransferAgent.class); private final String command; @@ -1938,7 +1942,7 @@ static Set expandFileNames(String[] filePathList, String queryId) // For each location, list files and match against the patterns for (Map.Entry> entry : locationToFilePatterns.entrySet()) { try { - java.io.File dir = new java.io.File(entry.getKey()); + File dir = new File(entry.getKey()); logger.debug( "Listing files under: {} with patterns: {}", @@ -1950,11 +1954,15 @@ static Set expandFileNames(String[] filePathList, String queryId) && injectedFileTransferException instanceof Exception) { throw (Exception) SnowflakeFileTransferAgent.injectedFileTransferException; } - // The following currently ignore sub directories - for (Object file : - FileUtils.listFiles(dir, new WildcardFileFilter(entry.getValue()), null)) { - result.add(((java.io.File) file).getCanonicalPath()); + File[] filesMatchingPattern = + dir.listFiles((FileFilter) new WildcardFileFilter(entry.getValue())); + if (filesMatchingPattern != null) { + for (File file : filesMatchingPattern) { + result.add(file.getCanonicalPath()); + } + } else { + logger.debug("No files under {} matching pattern {}", entry.getKey(), entry.getValue()); } } catch (Exception ex) { throw new SnowflakeSQLException( diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java index 5d57d31d4..a4426d449 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java @@ -3,6 +3,7 @@ */ package 
net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -10,8 +11,17 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; import java.util.Properties; import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.stream.IntStream; import net.snowflake.client.core.OCSPMode; import org.junit.Assert; import org.junit.Rule; @@ -21,6 +31,8 @@ /** Tests for SnowflakeFileTransferAgent.expandFileNames */ public class FileUploaderExpandFileNamesTest { @Rule public TemporaryFolder folder = new TemporaryFolder(); + @Rule public TemporaryFolder secondFolder = new TemporaryFolder(); + private String localFSFileSep = systemGetProperty("file.separator"); @Test public void testProcessFileNames() throws Exception { @@ -126,4 +138,88 @@ public int read() throws IOException { assertEquals("dummy_dest_file_name", config.getDestFileName()); assertEquals(expectedThrowCount, throwCount); } + + /** + * We have N jobs expanding files with exclusive pattern, processing them and deleting. Expanding + * the list should not cause the error when file of another pattern is deleted which may happen + * when FileUtils.listFiles is used. + * + *

Fix available after version 3.16.1. + * + * @throws Exception + */ + @Test + public void testFileListingDoesNotFailOnMissingFilesOfAnotherPattern() throws Exception { + folder.newFolder("TestFiles"); + String folderName = folder.getRoot().getCanonicalPath(); + + int filePatterns = 10; + int filesPerPattern = 100; + IntStream.range(0, filesPerPattern * filePatterns) + .forEach( + id -> { + try { + File file = + new File( + folderName + + localFSFileSep + + "foo" + + id % filePatterns + + "-" + + UUID.randomUUID()); + assertTrue(file.createNewFile()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + + ExecutorService executorService = Executors.newFixedThreadPool(filePatterns / 3); + List>> futures = new ArrayList<>(); + for (int i = 0; i < filePatterns; ++i) { + String[] locations = { + folderName + localFSFileSep + "foo" + i + "*", + }; + Future> future = + executorService.submit( + () -> { + try { + Set strings = SnowflakeFileTransferAgent.expandFileNames(locations, null); + strings.forEach( + fileName -> { + try { + File file = new File(fileName); + Files.delete(file.toPath()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + return strings; + } catch (SnowflakeSQLException e) { + throw new RuntimeException(e); + } + }); + futures.add(future); + } + executorService.shutdown(); + assertTrue(executorService.awaitTermination(60, TimeUnit.SECONDS)); + assertEquals(filePatterns, futures.size()); + for (Future> future : futures) { + assertTrue(future.isDone()); + assertEquals(filesPerPattern, future.get().size()); + } + } + + @Test + public void testFileListingDoesNotFailOnNotExistingDirectory() throws Exception { + folder.newFolder("TestFiles"); + String folderName = folder.getRoot().getCanonicalPath(); + String[] locations = { + folderName + localFSFileSep + "foo*", + }; + folder.delete(); + + Set files = SnowflakeFileTransferAgent.expandFileNames(locations, null); + + assertTrue(files.isEmpty()); + } } From 
356852c65c31c92f31a875b2f185095564a227b9 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 8 Jul 2024 09:26:23 +0200 Subject: [PATCH 44/54] Bump version to 3.17.0 for release (#1812) --- CHANGELOG.rst | 4 ++++ FIPS/pom.xml | 4 ++-- parent-pom.xml | 2 +- pom.xml | 4 ++-- src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java | 2 +- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5cec7897f..d8ea55f6d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,7 @@ +**JDBC Driver 3.17.0** + +- \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc + **JDBC Driver 3.16.1** - \||Please Refer to Release Notes at https://docs.snowflake.com/en/release-notes/clients-drivers/jdbc diff --git a/FIPS/pom.xml b/FIPS/pom.xml index b7ae7edcc..a51d0cd33 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.16.2-SNAPSHOT + 3.17.0 ../parent-pom.xml snowflake-jdbc-fips - 3.16.2-SNAPSHOT + 3.17.0 jar snowflake-jdbc-fips diff --git a/parent-pom.xml b/parent-pom.xml index db166fa9d..8f7db4b38 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.16.2-SNAPSHOT + 3.17.0 pom diff --git a/pom.xml b/pom.xml index 19b5ad10e..994e3537d 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.16.2-SNAPSHOT + 3.17.0 ./parent-pom.xml ${artifactId} - 3.16.2-SNAPSHOT + 3.17.0 jar ${artifactId} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 73f201ac2..d03689c09 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -36,7 +36,7 @@ public class SnowflakeDriver implements Driver { static SnowflakeDriver INSTANCE; public static 
final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.16.2"; + public static String implementVersion = "3.17.0"; static int majorVersion = 0; static int minorVersion = 0; From f19866e1edc27098678c9d979bc03691578e544d Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 8 Jul 2024 13:57:25 +0200 Subject: [PATCH 45/54] Prepare next development version (#1814) --- FIPS/pom.xml | 4 ++-- parent-pom.xml | 2 +- pom.xml | 4 ++-- src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index a51d0cd33..0ba788c0a 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -5,12 +5,12 @@ net.snowflake snowflake-jdbc-parent - 3.17.0 + 3.17.1-SNAPSHOT ../parent-pom.xml snowflake-jdbc-fips - 3.17.0 + 3.17.1-SNAPSHOT jar snowflake-jdbc-fips diff --git a/parent-pom.xml b/parent-pom.xml index 8f7db4b38..ea1bb19f0 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -5,7 +5,7 @@ net.snowflake snowflake-jdbc-parent - 3.17.0 + 3.17.1-SNAPSHOT pom diff --git a/pom.xml b/pom.xml index 994e3537d..d71b46639 100644 --- a/pom.xml +++ b/pom.xml @@ -6,13 +6,13 @@ net.snowflake snowflake-jdbc-parent - 3.17.0 + 3.17.1-SNAPSHOT ./parent-pom.xml ${artifactId} - 3.17.0 + 3.17.1-SNAPSHOT jar ${artifactId} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index d03689c09..56da5b258 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -36,7 +36,7 @@ public class SnowflakeDriver implements Driver { static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); - public static String implementVersion = "3.17.0"; + public static String implementVersion = "3.17.1"; static int majorVersion = 0; static 
int minorVersion = 0; From 823cecc40d7578b2aa14bb3418cf55c1339e491b Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 9 Jul 2024 15:01:57 +0200 Subject: [PATCH 46/54] SNOW-1523813: Bump netty to the newest version (#1813) --- dependencies/arrow-format-10.0.1.jar | Bin 109998 -> 109982 bytes dependencies/arrow-memory-core-10.0.1.jar | Bin 118420 -> 118404 bytes dependencies/arrow-memory-netty-10.0.1.jar | Bin 39257 -> 39278 bytes dependencies/arrow-memory-unsafe-10.0.1.jar | Bin 10798 -> 10787 bytes dependencies/arrow-vector-10.0.1.jar | Bin 1875561 -> 1875125 bytes parent-pom.xml | 2 +- thin_public_pom.xml | 2 +- 7 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dependencies/arrow-format-10.0.1.jar b/dependencies/arrow-format-10.0.1.jar index 5822d5a9fb6629a00f55093fbf034e5e0a575927..26b30d66bf0aff800cb8fe576144672697065d05 100644 GIT binary patch delta 6738 zcmZu$2|QI>7eD*DmusGBAY`hDMl!2N#)PO3MR}T@xkqV0gUr*xMy7;JA@yEISi&`A0oz70)w%hrX5%x08E%`_8q^=g*fn0&w8>vnoKIi+Cc#Yfj~ z-#F6d_jmGd1Dbm}Z>gt^WRovAaj#hbN7bla@e*hX;NAfdc-f05$xr2b%c%4_L%Q@hLIp;IAFS0Dt2c zD=B)dt9m~JFI00{d8q|wcqOQ0h@jP4)6zoMzmOarS+bg~kgf*!Y8R&turb&68K9Gi z5AxbSrWZ+-4|y=n&l{vN_^BHPC1d79zl-Y|gap;3$)aKys(483SPoz{I!y zp?i^OPszI{=7sfIY0kN>eb2U4`?{@W`{G5n7T#Le(Q2jH`|c0nfQ}EtK?fEN2T8U% zyuWqk%V+cc&}dflwW+0{9whtPE9LGMRw~PLZQwAsxKV>&Jv2_N$*i%n3VXeM;Bux&R-GB7%v#PuMB#)%pJ9bnqlFOc5x z*_%o)^f-?&iL)sX^kFD!%PdHm7GO)C=csrhc<}|!(RVO#oPl*fh~7FpY{r9ubBuNB zHzKPoRN#jcAWrUX=da!UKRcSK-^m(l$2lzlifEi&9X?gZkW2ejYKNJXv}BQkX|xyB zdo6aAun59NIBf_hz&2~ZU#nJlm4)4K#;ZgWE0f)qP2b)gnu!-2VH5C`UMV>MEgBEg zcG?`y&HdmVOK>a^D9jA{yLi}?&m9k_SCOXusvJK%P(|yqROK!Wj7>0j!h_webs+ou z4ut$Z^6{PJVR07(_S=gQZ@x+k=DIo$8kJTg|C4iXSx=?_0z}c2r87 zzVt%0t@f05^9pC-3`a{nm3UUeT*<%)x7;)Lo4y2wd0godcmB|}<)~F=hQpMH>7O60 z>EpR(tQ^46j%qmxb$oqf5C8pbuJ|aGR%1%Y?)xbQa0skr?z)+12j_SKom#&)Av81^ zJCD%|`Hda!TZa}H$$qNTh<_u_LtbUiXaox z?~4lM4qliS4}?kUgZ#CFO7{3P$6XqD@`j;AS(|tG+nhQ;5DX?kNFg;n0|Wg+=5F4) 
zL(4zV&;Pf;9b113%5ikv-aSuh-sZJTGOO)3|^hIS?Z7z(7 z&5vy!*p}G6+kc9F@gnIu#}z$Kc8@H1d%@&p<4IA6T7#z310stfo7EIn@Sok4{yJIb zaYIdHg2f|nx^BjvrijI^ExxWbjr_yHXBUPklq)BGIC^^UzE?om#%byrqc$IV8+#bp zvmE!QuXDeoQm#T89uZ4YE%jn8TX3m+oyVG8{GfqtPsxFGaz+1tSLG% zC~{MwNW*4fkFM>uepS#UWk<^MHCa`gUv^KC@=`nqSg``xy^CyzEPl%{?psjz+3yJH zI~;Z@ZXkAa#)6}6Q%bxZTpD!k&gy>=a$udH&8=6@tEytPTRVH_Ml3JCp>%2KW*(1M zPJ1HHgk)X$$FQ>QMYrRL&Dw{hzZnJHG`@VkS&l7dX%L&?Zqlc7U#z>)YFAd8W8g<7 z6@))Kc&rZ5F~|{G2AFg_@NNZkZK<`GGt7&tGZ}SbCWD6{_Oqb99Z=xJiBhE>Wgw?b zZ>(532j8ZPRS|n5ox#PCC+PT05Ti?5?dVf(#8xaZWJnC^>8|Z~Emz}@ZkFLAh(!cU zjsY`aUN#U9+v)Rr{2JS!ZXL14_b$KxVdZV(V7EJPinmkCwyq@(SDdn*v6HO@boi@@ zEml8_1;bPO3kNRCO2&TQ`DQ>XwZW)P*s$QXTa0dO>y|%v9^t*F?aAgem>xaJRK6f5 z<}9!Aa)Xw}!g+_P(;H`H9$m8Yn5eX%x0?g9=l3H$F1(>%CkFNx)P0M zYrY#z7cvItBpmjP%HWx5xu!+3{8RdlhtG76vc6a@Dp}0`epQ=vdg}Ht#$EB4{R8~2 zmw3B1?up&oTxK}S!KFhs|M-fwuK_Yqfwmb@{SObBZDh!l%hl`w=fkSrX`T;n>l$nu zI4-|ueNWE`o%+{mhX>@{{B+;cxYzYOOZBaf?CjHMXw!M4MOZ4%IoM*ij+dF@I#j)9 zT4rfO4*%}E@-hv*hjJFF{gd0tK3&)@6Re`|H1}TJp{&Pgi&%4YeSL>3UcCrDsN}Qs z_%kCJ-Yw<__tj20G zt}=l~H&_Y+!gQYze z3ky9iraqC@uyHQ7nXPwl^4Q+cvAqX{kgXpZ;p`x%vHJMa?@h zFW=Ey*<1Si+f8fE_q16{+~|$y?C2cMoju)H{(xDv%$+phqLFKd^&OK^O9Yz@%6-4^ z-7ar>T|H`E@i6yx-JGtoN>`27UpnjQ{m4c7FY{<=*}>-a)e>!YnCULj^0&W5?LSs$ z_M({7y|Ji3SFx<9VtQOz5dU|Z*)L3|H{hOLmTY36Zk5?Z|DO!K~cN zS=C=01qQFlN|fwwKA@u>D!(Q9gxrU975lF=`&F_oVh)%Tj(^HS$8`XHl$;h&5P& z^J5e6q3eO)h81*$kR6yx?@a%b?rS)ARRLWqPWOTL^h2Jt04(6cFW(Kfe7et=z0y%F z9#xHZd{m!NJNJ7X-O9W>TS2UV^%6jZ3=bCIWEi4H5H|{8Rl$J}xB*qTp%931_9_Es zt|~PxOss6E)mO{E`3Yj1JSi%xQ2Vf+iCeyxtc~Asg=S#N9;KgqoqP9a93;)a-?Xl?hm0>ZqBFhp8R09hX^wM#pa7f2`Q{3e zxKK&4;+2sTb}>nDdxhE=*4$VBAp;y^Q+*j}YIPEq@_<=DjTZT5M<{*Gh3Vipjgx@L zE`|hPNbyKb8b{#mD4>Zem< z2sFw768NGZgIRYDpw4J2nuydVUF1B4;)Vi+U>fGE-Sns}8gXtwoSKt3wd4_}9JEOR zGqA9VOe9QvCS{l$Szjp#-(~_KOnFd-C~sBq#hKnf0#?QWA$&U?bpBJ1!?U2L3s8og zHnzbWRX~Zy;AG%X~yw zw3w12N8V!dl0Qa-v10hx`BFq})5R+PxgLm5P}q)xf2gEgq;#p-9r;;;{BRDS~$Y6^bQ;^^->#$vDwv)%3KGf!SSG-x20;KU~ 
zbphJ6_N320mH^Wye77MMk=}2jNg1)+1M%r-_vU6gaPW%afC5ZLN^J0_uz5EHn*4%07TuZwZ+BTNU&Fvh`?X4$@ z5LGTr{I2ROgZX(tnjspFZCalTW@4MBQ$bg`pV}n9hDuMkPRhNsvIHUMgbTf)_*J|0mn=U}t8 ztB7j;iipPh1F{MDZT!``oIHFT0*D`m1tSapsX#uX%EH)iO3kJQlY9_yY`iB|$Ki*E zRRrF@%jx<;9QUN}8#+y}6V9e0ZKaNn$G#x^kqV~bn|A41ng^ncU;M8sdA4+%pN`pG zRDwoUj)xD6Y%=17jY(WcJQ5K({|vScWwsz`q9cq0604Avhp5ZyY4^y!lYoAO+)#Zt zEnFlUNtaakHI*JWQW_|dCSAPnYz|jU<#BY;&8y>D&>@9ocv4RBZ5=1Es5zT9Q~L4R zRyKO7j))M1)+B8CZsNKEA15G(tT_f`75{4pA3IVQvAqN8$8v)^rx*#`bQhc4Sqx@i zL`%!hz6?i2#2@)*(bzfkMMv-d6ul-}fzZFgCiL5oVg^!E0_EWRNoQ>zjH{`Pjom;M z3$rJbv8tlKgddD6LPYqC7WYLD{1gits94Z^F@76A851l3%Tu}VR#Sxr^wYKW7F!5T z8S_)xCso1<*`$FIcF82&ya(yq6d{;HJ0cEUA~S?>$&3R^I7@|JmMX%$Jd|c9-%}KF z?stADkjs7W&r{K}>LW$K5B_K(s#OMTOfH3`F<4PG;b3BOfi~&RT+Boz)j=}5@r%IPmlvD5vYJ{1DqQ)#JzMJTSR zS{J)v<%k(N_Z%5DIlzu6(yX>5{R?I|O0^lr%6J#~N)&EEDTpt@4L-U=7)8S3NmeSU zLr@HsV=k)OH$+@%NYv~}T-RsQqROzo7h3GHpO2&T* zBh-SyL_1=)$_X_jzZSnPDsvtLkKZc+Ps~ESjz$qz?zL=6BlN!%RK9EH0~YzRgvs0 zlTEiWHXL_s+3T}1u#b4XbkqD;zk-c#Z`uU}jl3KFGD38PIAkxV5B9t@h$b24Gkn9m8aygy6`?@ZCn6w(K;HhzyU|5V>>9hNvuG?`?gi()-k&+bd3N|D%53 zZwZFqA|JoMR&FlYdn0*$VOUuGk7d4`=RKZ@_^q&d2eq5SpRSCJ7rq=H6&1HiQ=fgt ztycC#(F@bxBaH6as9JBZ+xJvfhCO^?OU$n6cYUGOr<0Rz-EDm~Hd-FFtI)Vc+3E!I z-8N52cjg`cBhN>etWSIVm%lwJ6cp`X=KjXe1W!J6YgKZ6(z=ZP0sG|_n+|H3MwH+4 z?tQg-Q(mmllPrsrjoMl}o#uos*`s~K=FW5LWDUjj&9BM^kG0Szx>Mt~*jfA{-^R|a za?Y5t`0Nv_EIp|2oB5M@tAI&<=J~ARqumydEKc9f*9@O;)mmI$Uj6KIYFdPbimlqC zaBq<-EcdIxvpn*`q=Lb_Gr^ywc4K4F!etVE9IICXmYx&Zq18Jx-_+*kR$rBB587kf zC#c1)8gW!Q{oCBjVXQXQ?K;i918c)~vG1&pPER`}nyFS?YWot+NY0VOz z+Be-EV*8@&%AHGj%^lN#%$o0G%b&IWJ|F6mI6~|1(8*-rNREaGk_WD3Jcy>v;>+)$ zEynhE(*bRwxERRc)t^N;+g^LoL?HHR+xHXWNw`|JHZfKKM7avCpbYWj2Kj&zQN(#~ zYc>rXItsf19VQ%StC1Z*kRIA7b>-JPE`%V>l`R73P;;S(x>`G7iw@iy9^g1lk`%fE zTeH4d1j7oI3Ht>=jc6|bLY(WhPec%9mZG&=@48zqdvqK5VJO zF>>EbvA0njq6CVrWeOr3-{c=h5l+z>3Ws}A9TZbKjEI6C1d$D_h?7!?d6*Y?MOm}> z$sOvTQBg`UZ#a-k9mMR9QU{jx6vE+NVFF%Iz+v!p$=}IcPX8=m~;uwBK2Nc zS?1pc%bl)(VY+ySpbBAB2820LQZE?D=Ies3l>ef(eiD>%+F`kRxThp68WyyZFov09 
zL|hWk#2ZUz;R7GU*n9t+4zBVw3(bt|#am5Ftyf;_&M-TXY3`D+)~C*Gzu;2ekM9F8 z?KZ0|^L}&1CLUhqN_l-A`MdK~f_s#Hpo#JI#}7Or_sLJo2t>3-$<7fomAMImyXp(B zn61e>rhn+%bEhV8oomTIo!=*N)=AJu-BmEPEPbtL>4z%|eF}>JyYq@>ox8>&y%b4} zWjnhp`vPKO={l!piPpx>u^79!*bD1vXTHO9vNk$5h54`SJHEC(@F7dDI%occlP9D0 z`dqSBOuwuiBRm*i$1$vbm$2;9Z?)wa=et+klQ7(PNLbZ)h(m}39%k+drpsUNc<^{E z;&^9#;Zj$6;Sc^VDy`?Tqqa*p=&bedkJ61)$@P$M>>2D%-+Nszd$GJabKQ1PfsI2! z%lFDT$KL<-W|MgNspA=|cQXBU)aPGyoYLOfnRj-w;-RRR|8_R5TPCgGg-AeKjLiZ| zIm@tr1`IM+|EN&h%)0%n*_QRmsV5Vj+r}RXp~+W?-`Nh>)%E5L`%OmKFQ+E%N?xgc z`Eq4`Kn&hAAz91K=Z+c=>@f2WZ;0<2G5DOdV=xfQbnO&9Qd6(Z(B;xXUOy^0(|9gE z;F)|>;G6!%X^E#oq>&YGYl7Z$TzSmSMjlev$V`zq+3G5d45 zQeb_5ne(-ynXwVK=dLL(GiRCGM4idX3}A{mnjT;+W|<$mP&eG+W^7d7Y8AsSh-vQV z%#*)=ZlY&u%{s??`?@5dg8qdY3{sfZv#XodZ@@=8-n8sUP803C_@L$|>*!y{eN3MX z*e4b?T-?dd+Fqb_=SWXoZSK9@8{BmD>mFTOvdlWAG+HS+?}Oamn)loushkV7y>BC7 zv+&~64GaVQsyWvqx0W`_wdvg4bDK86cDkZ0blBx~-mOKpX1yuzRZl0+iuT`K)-TX& zGCP7T>PV9?XSa_ynFtTC3_L7E2t4_7R`j1cHVTE`Eoqs4`*tLDtKx@hwe~XGj~xBz zF8xKzLA+;W7@6yHqB;{NrjnO7P79L#m@G@RnY!d} z$QRZbN*{#d_Z5E5*p&VF!DDyB`G1-8c(j`Qx}vBF6VJVOix00Tw`ncy^mh>4(IW^kuuCo z_q##2JZ~Fz80h_F;%YR%Wx0`1v>4hizKVw`crnGk8vvZSn*CH$-)ab&1yDn3EqR!b z40nb|xQ*zO4T*Ep;T+Z3m@+JS1*@u@gXhU{(ahaVQj zK%ocVKbxN`CQ@Z;l)reqF6`q3+GKk4ic#2TPWP zdz?1&>^U#fI28;-=myoA`O%%(6%xTb?|K9VH>Y>qQ;k$}jH8r*nJ}85HCNlc$O%de9R&h}Wf73z+<&x+!Z)4=PEZ!EI{b+`M05*M zcY}dokQ#g$;1gTE~tJVK&^F%R){P6mny$UFvt*7Fg$-7Qf(tyhxBKL%Nl z5#65$FBdF86jC)ngb2z50{E9oA)+;phhmfuQKl?N^VISF3$uyAaEy$1=zjofD; zO%MxC=>6M?kbE6{$?~(9^h7K~Q+ZKY$o}P*FA#H~7=~%`3G4q6VHzFf1-%f5!sh#- zRKSbZNE3~xfH;ztyw3H#6IA#y9zz6Y^Xfh(9nuHIBIXwM-(|_gBkH9cDv-BjGgA$}|eulIiRX5-ek|Vg8 zl7kp!ebB+&Ark{Z471{6lpBw*spMzir(b@TkNKHUJGVq637RcMJjetx$hl|8>QYN1 zE!2`T4&5Fc74`3yqMt&VhC^aKVn+(kkwIh;Di$J+Gxkan)yY5xsX6a~OY}*o;&O?9 zm-t92qCr)Wkp<+`|NkBJ&8)JxN0nb+RYYqEkV4$Q-=b!J!-hSxv4%)Z0~*LQ!&Io^ z!CF!r?y4zHM5OZ4#!-ACj4Y{9_`2n9xgqsPxuO$59tq-G!1x3ia*5aJz!C{6D20L~ zFCx;PWq{1L>Wgvz6MUI3+0qlE6MyM{!+)rss3dzoaoQy{iC#xLZz_TO$D*6Heq4~3jUrTxEB@lh< 
z$O>2m_Ate)znKOTKzD2g-3Te4LaNyRngl}k2;i{|RCv%OlWb%$y%?X0Y}n0>VHF5rOGI3GvW(_0>&<6Z8!n>iiRK;xlQyHnC3& z;vXyLInt{Pw*GquZP@cNNlPGvWP>! zpPb!8xUfHw#xQL@4yk4UA1+iQ9+m-_uPZh&lmXN6+G2|N%&x730#s8DC9Jaj<_T(G z6=y>*W>4CyLiA<>3OT(HLcchCXKH&PFVUM|h0%qasH@3J-k5EIEtdYom4w=STPM~A0!gST#gtm z$Jr_qrZ}&q-GVKR zKITtZRk0aRdjfJ?Fr1e31A)|4?h&Ahc%#?De^#|ZFp8oJDz2g!RXuW%MXx2k0Nw~C z^IwyQWqiMkvEdw`his%`dnuI{j!`lklbdH8AVruS1BytEm$Q@R$3cH7!Ig=BR`+e4 z@rnd7H;w17Fw)F3;gs=WusIV_8=-h|CqlLwz}1YtGLo`&<{3jtLNA#AY!mjozT#88 xK&%3C(Td?gdCwYZgr6edRnq~o(v@+SbV+$HrXb933=4yQdtr)SNq_;v{s)-WZ{q*} diff --git a/dependencies/arrow-memory-core-10.0.1.jar b/dependencies/arrow-memory-core-10.0.1.jar index 176e8e711332a248958e3b16d825ad5482c35be5..264c9e373a65ef38ce96446d07a7231f092fc081 100644 GIT binary patch delta 6755 zcmZu#2Rzl?|G&r09vQdnnG!NfB&3}YX(CF=c0*P}U80^A;l{BtTBvY!t%qb}WUD+% zM(e5c^aw?RPUrJJpM5@`bG{cY@C$yCGQ-)y(aCM}rMH&7w5YjezQOxqa0lbbn?2e5&!3dG3$Y&NGJ1x% z>zWsijH!%gDhIDIhbT`Q~F!&ysT6Kavg}4J(^+E6!57uKcHtV!Qc1=a~Kv zX_a7;6)zw1!CVs_K%tm$Q7EK)D0~#cJs&^7 z?BO!S<8Ce!JW5mb@c0SnQVD(=sTz1ZOkIx0GU`Tx&fb@Db(RR4*xPw1J+(k;7|w#v zqk#f^_9x(G`zHi*5wOyA$-tOPjKJ}|YArl5i$d9g6kWo+UoVT1rJ1gvWEO@FGk^~X z{pY*Wjugal8G>v5fX-o8A1T62*_YqO3t%PAo|59l6S0L+*lc#AfFvMRm=%ZXFxQOp zK#JUTxBdtT7QQo^8`C0FLAjXVkK{tT*j4PuBxWjUALdi~T@lpdrP4FLph*x*Ga5U= zg>5$o%+AfBQYiMk|FL~jXBQR2{=M1BgC~wYy#V9gdqS^Sqp;oMf_n}E1$MvJVJwBc zvTcC$m*XK2&x5fU_NS+I@=_>aVobjjh0-v4kj#q->4o2T!1lL%JlFEf*LZeV9dk!4%3)S-Q3exsJ;OFmlAjr!t@Zy%8KE1lM zW5Wfa|1L`jJKGF*vPlQDB1Ux*igdU zvSPE&8!yzproYa}Z#GrU5m}h4s~%waK(0+}+3t=F#p(08%kBH}Kb2MT{*mQsxn<9C zw=VfRq35ZJ-(L2~PAVBxP@Q_;Z*sNw{jldkW~xV%f)}-2(`V3VLL{en{25i`W{+`5 zZ~V~4OutUX#jp1Ahi=Z!3vOI`cIT^yWzOF{*pkCTV$aqjix*kHlQ~lqZ~dW2Ny%z+ zlR(tIsBDL(EOGtR^p;2Ztwy1&uX$z^?k`W&MW(~m%-&1K#2JsI8-%+R7*tm7`kth^ zHf5VdNqUx_>!#QUQ6Ac&=78f&)f&ZwJrhYSk6%~OZ6+9(vfX@lkA05ie^qNf|Di(2 z@RM%q6;EYb9bJy^(qSBR4s*z#>&GwW=q4g9Vk}G9nf=0`uJYlM?QcfCj@-HEZF)p{ z`^e|kWs{=Qa|4z9RfiVs4Vg;nby@BES>u(3qfJwf1W{D}8)1%;D`0QXRn&bL1w9hV 
zD*_9KsX(N;sQL3_wt6l@y5DIU_OmSF!nYlWXfoDY)fd;~J@SdaWH=HAEs%2yZxqQs zF=dtcW+NBP-t~F>E~?Wb!%f^<4YwMs z3iVi5#c0{sej4>#zriE}hg4A_bwT=YsA*5XJs<|rYACaLLY84`B~WWF z)Zd|gyIOUq`=a0pfpU%at7V_Wg>2T(%jmr!FB*`e&80Iec}quL6~QvbLAr0=cKVi= zOGUq@?=EapE2~-B=e%6r`v5c2HAP)jUc77|XkumIqnPSMCyR!^pN7t_s#E44Dw^77 z&#*JS)@ZCy{REnc*?RY;w2{PO6x2`SIh0$0pq1rAuK#L?D`x_MoZq3kPO<&wLq z)lO>YuH1d~jP|+A7M<7;A?4TQch;6@?%%FjZLoE->B?0p4swA$32s-If=`-#XaZZV z-BjCGYOk4|FLci`maVz#3yWpeTCh{9eEYjaG8WhGG~EiwdP84b%Wu+^R&#_gO^dl% z%~~d-Fg?qsJcVt*=at#Dp?PWf==|DR(M_uoB&9u6YP<7ns)ldwoO8fSs!9J1^gI~>flVOE@SnEHkBaPtFJ z3Js|jyxMxxSU%1!brK3a)~+gA4rgL5n}hdkxn!OYNMD`9zsuSoHt^+#b)PQ{oVd&Q zm?I!CUr|rr++f~VJxkflqhsMj*R*kku-Qa*Nn+aHU(de2SR}kBE`Oz zwoYWPl<$x*&mW_Q#KsfovD+P4a_e0+c}s0XOv-}d-`Aa~%b8a%*z*3Kf*pPSfPe!i zcE4`6Qr&e@e179*z53kskN>_tZpOM<$7{)(TcpTyG&s_c5piL?$|WD+dHvnx1A}oE zm5gfJtJaIhawjik_>1YZY^_oWPCd0g&n)d&mD-vbA1lA)l7KFU52K?SciPmta!;>l z9W~L39y0Qq{Oqg#efg2dJmK{v{f>zyKCjN+w9YLzmKZO0esBA>{9H=v@`a@V-G5!R zI>BGOt>8`(^Fo_xN(8^HRlW5Vp??02CvTp!`^+%(>TMt3_I91TPV0GMbKy#BrehW> zP<*SplU3mAsB2kjr-gLuT8uBbU-`^}^4% zQ(h)RU4{z%?y3!0&pHY`XDhzUzCY}-BJGOAxr(l_ZO@$Utr{pofMQiwBz&zNvms%M#sC& zMyG;ra|d`o-QoCp!|HH_4&C^xvw;(U;iQ&L`t5GkcPI7g^>zLkG#ayZs;lWR>+gP9yKt4Qk-9 zia6k+QE42{HfWm%jEPqSJeLQS5G5Mk%>z1kGMF(RKEN*#$jS$@@KipS1E1vs z2}RsCP|Nu7PyWY*B1#!rsR;KJkS`n7_sjYxH0^c9j5Pego;h zlmki^#{%TTfiyI30l53a+&__vdTEHF^P5!HH38a-QDA6u(3_BhRi`9&gV0^D5N8Fw z0&#IufqRm`JeXGj)bOfJ5n2sb$I!e+APnP+fHN8o$mK5%OiT{z;5P`YcEKXmISY_eXmcK+h^5j=6u^?zBe&87he4w}1n=_#hOk zoH>;34ai3+hj3OyRV8G)Mfr>&LRWOkLIf$497zggAqRuK5!ev^W*~<}S%;NZXdp2+ zkxz9wklN7*smOF6lbAU&Ka&>*)F8hHso>gl zC}N?6!p3<-#nZ_&Twep!u&HxKV@C9mg6YWr1{@MUW@5%DAK<-aAcG~E-9r*jWfKI5 zUiGT*N!?6tX0#y6#sWh$&P8A5IM^Ha{m3FsoFeB4NAthIi$u~fk15W9O;yM@L}`&Md%sT* z9j43Z$7$7u!5>?AVI~iHF#b$qnKqP?rEnC*6~2rE68KDES)+t3+mRm|30b`G*?GVN zSNu43-tMmM#Yka3B#^Uxb$^D1w}Clk^tKK$diMo0QmO|^Sl0&)^;;jIttZh|O%9vi zP7_FGY^DTsz5|4m3Cnt`#s()4DgdE5%Y-xl%s9q8=86n4*xEP)|MfhkP!@4858?vM 
z4)%N~T?S}aISX{?00m~%kv9cyc&r5|V9L=dbS^`M(1*Lpxy3V=B(C{5G}1vu*#pkdWT*4DNUaZ@OVP=GnxCPf)Qmmw|_q2 zkux}p-Zmo`+Xhx+RbvO9$V(#;`%o3t=5U+EGSalC_kbvTkNP&o&pFA;|Az3-P&?zS z^>>#4#N$uB(Dx3~O=<;zHPv7tvm=WKmZ8{UwdrMuM7Jcur~!-1fHYneMRy2l@kYEs z9qy2nH)We@}C;K z?WgZ}V0G<`mA!H1Ihx4%6eN~2nM^#$WXdU?NhUp%>C_reB4yNY8^-CPOBfg%2%t?F zkW>Eciw1|IErUQJ8?3`Y(=M@GYJ%*HK`oq9^UH^bs4Fr5I!`0T5IJavauBS5044O# z@63EKrEunwb)V28GvZf@G9?enY5nhoG|E7A|8=C%Ao_6{dp7E)b9kX|^GrNW3k$X1 zM(HwubmXje=TFSwla9~EMFq!C&&5enPX!%$tN-mX*Uk{}lYt{@KzQyZD{fRkDU{vl MZ?!Ul=Me$;KeNq_KL7v# delta 6641 zcmZWt30zFy7k_uA&9v_eT5PGbi1xG*N;^p?5+$vO7S;UHA|UY+|^?b)=@n#p2}Pi={c&qrok$I_5ed% zK9j5;&NiccxhH^{TS?KLQZZR(+ z6z=cxS3}V+@7Ad;_384YmT*fN_V&8pdu_0I-?ML@pH89oqfZt@zo-wqh&~BH2zn4M1fz#$50;Th@EZbT@NX7afqz@UD*P)&RwI+4d4eU@ZJ{2NTx!@t#JOQGo~tsOoS#%IbQf-*9Kh#+Wb zJrJWNZ?*LtQWFT1d1ojw<<(q~ImpemQ`2r|$M%CcC)(onXS#YspOE^J_E08$ ziLYXcdmpvm?!mEH^G($Pd|3%!9u}~TYA+v)$+y#aG832lYfx?W)KJ9t5OOZ>PP+Cd zwgBs?Rjx(;7rx}7sYML|eR~6M4zVw#UL7})EY(~bJoM~>-ryFi>$i9Na(Gnens0jC zC$nCBufFGyU+CbmCcUww1MUJKQ(g63#K;nXiEY1?m)UA}>)w&S<=o>Y_~GPWZEBD! 
zqj=oVC{|J?EdJxA17=Iyi-ZIEWTyC)TZU#IJES?pY5y9{CnA86frEQz?mcn zM?0z~zq!mofAg~;hyg8!cOk7lUmE??47eDdrGX>~3mUqJl{ml#jNS6!5D)JLAFUm0 z)m=<~!##PZi$5@-j`|L13WV{Wn|nH*Um25|`#w5n>+WjH2+1w)UaY=nY@5m(a%SY$D*miGJ6jQI02;J!% zWSB4#pODZbqt9Prdgsd1)jQaace~h?aT#(a55{`%ITW5>O)pqLwOSE(P5hrNCB|la z=Icvrmf7_mDpxG-Z{37A77Oku)ak9MY&pgL%4*uHJ|pZ0*XcIxJ52)*`vRB0lVXny zH-A%lF8h#J>&dQ)Gv_I%PXxuUpFHnzghN?;p>qFqc6+n00m>Aa3on|V6*jvZs_V(M z&z*=--F zRlRZlon5`U3A4Gmczb`=;B8;6ox1UkQpSfqcThS}YaNURVO z)g3|`jaDzX+)m1i6TEqGRbJfIANo$Sx^tD0y|kgBb}yZo>VJMJhotnXUwn8e|y|sr>m#OKU-bnJl32MWFocYe8aAdH#K}Fcbf7I7^-pfn(3!i zk9^I@NSL^r(j+97V(T$%Sdvn;#3eDa#WnAG;mr)!pFU050evxfJCoT}Y(qV%?M@3K zrg874**sf>AI!f8w&%RulXfSZ?+)AbH%7!Xqi)*5zCM5GyrCiLTD`i4eG#9wUgOCD;Brr!aOMG?Zird%4hB7NhMj4K@cV@4L#|uRq z1Fp7phrNSqD#{2D;YX812_E(DL;=9!$Bt?`=!f<*VgtVL#Pn zqmBwqetbmcj+i>gX{&}P@I*T`BzZ+Y~QSSDqJ^1ue*GE!JFwDZqW&5Z1+CSaz@%U zSA$;Xt00=R?a_C1@!Q|_O%$U9%avHk`YR(eGqrWUcA1=&dKi6N* zfgq7Qv}{;R#^_)f{nnCB!k)4qkjjpqRK|6ggs(s3=IVAGD&qn)7j9`9&yP2_+$eBA zVG0-96*e6A3IKKxQ&V%w1i0mfz>XVV-bU-gPoN+08|1|GzKVT&S^{=bi`Y;#Lm-ZR zItbVpg*?EVFz~D+KMsUZuK{w1DFM3^giB;^>$L5Jr1D00*Pa9-J1$*A9y(QSgCYd=zo$ zrokDq5Y_@$59Ot2Lmp1UdMu2fdu{cY+xlpM{N4v0 z9$U6o8O%Drrm!&CWahqo!=X`Kr+TYC5m zjH-Y8WfGN>c5Z{aeyM+^chs(=9^1g#X+Esc_&0FYMx*B-=pxLuf9*xM z?#GX54%|KYsq00(A+^6bF}E{DY(WkYl;M(+1l7b*^!k7o8QAj^Oox4Ye z!i(|sHLCGUDCFxD&AQ!;YD|ex6EM0PDjOi-=e>E~%Hpw0{`XCCY*f|aw@x&jvAEnk zwr;bH02x0$VbISESNk2bqjRj{tqLNN&wcLE_+>yBP!u2z&a_`Gv97z5_0}-*1K+g|q z#-oZsj|IU?xaVIzSZB5A36q|$bWPlNdUAdfvA9zYI1p1b-WPcM(+lcUh=Ff-J0A3m zlt=(@S}Ssri0BU%@+2-RF9+}^rgoP464Wa*;t7rHoh`)7H){zZa+7sjTMglzT`7_b zAkll8fN;fIM-E=>vLkMv0urHBB{XR(8CoA{0s5d7h@b`9;2Ivu+qrd#NX~Y?At(e$ zB4iPkXB}!-40N$q1{zWf)?!;wdNEMN2QQ0(CO(iZ0r-mry;}+vqJbqq6u+{diIg#|vn>N#a5FEK0Vi3Q zasTt0`g1sv8$M0O-d0O+gj*#>X=IqE#$i9L_ID9n^?@CI+6d&(5GoKvgDRLX)0-h6 zYc8~08W^EJnwhg+m2mb31tSKvsR4pCD=Qwfp^_=}Xo1oJZo=SLc_4|_w?KkeT6tKr zbXblzmOww(18t!|Z!f|PgkP~*VqFjQNpKC6y`_5fUq1Er^aEW5m+X@27ml4L^K{n(6Uaj2#3hm4k4cK$2~`@cch@( 
z+kqs0W{u6Z98Uc{h;98V$4;i_?-|oD~OaAynczGeqB2K*6yS z1OY4QfQXCe@Cl_VsBr@RR_yOv-z+R)BaeU%OsgvG#v4N%n^%rH`^GS}3HhM(m| z5OtRQsR@KNBSA2o{kgv$PVNYHErw8>7oHzdsOoK|`rq4ky9mP& z;D-FNGVm$o4+ALdYq-&52Gh{FGN{ZUZJq)_)T*2rl%H=y$y^q3`&91$S)79Ymga2_ zA@pc?4(sjPnN6^D|2EUrya{Vsf58iuDvKb>EY`;6U~DO%KiilnYOCQ{qeVCev;rYs zl&yv-DXfB$3OWWK%cDhxYR1qj(#-7m&@mX3I1-!Eytx3#yDUu2`Ygt-?M6S>0CU`R z&X}E|eS|}EtAPl3ApAC<$+HRyzU&ri=o6-Cn-OuEFYKkVm*A#@r3G+Fk$a-f3U zK#FGgijx-gS{x1SVA^ef#bZAwE=7;s2D11Ot1z4U_=s898A|=#(2Z5HOd&$uITAa% zpcO2|Df!zi{+IA15keEzKccY>Ku8X=_TL?w5er%l8G!nh!NSYe1;l7)+9|94UH`Ds z!Tp?XjXzAV4v;3+OZiy_1)Z?($6gz>?jS*&9o#4dm~con@&uHZ?A;a&0m;L z49w9)n5AH~<3$Y{nIX`L)fis;dC4MFWk}RCgvSE z;27l0fXiRQ63q%eDS5OO{S5u^iqkS|qn9dai+qyG#3`vk!dR_H9NG1(p+zrxUcsQ~^;D VcKC=ukUjA4dN~BqD~5+4{{yL|N$das diff --git a/dependencies/arrow-memory-netty-10.0.1.jar b/dependencies/arrow-memory-netty-10.0.1.jar index 721e029c10cd8553c662bbf034dbb89f1db27911..fa3ff089dd6b6ba62cf0352b425d00fea637cdf5 100644 GIT binary patch delta 7255 zcmZu$1z42Lw_g_NlvKKw66usqSz<*=Dd}E11zlRCmnD{#5Rh(3X`}=t1f)S4N$Kz| zc>mw0|9j`z_c`;-iTBKzGiP>w?_?!#yAnvEp^APB7l4h84PXGT>FH9XWy48$VC-1C z_Z1n);kaMg$)E0p>alo!7lj@ivxF9W-X&)1Z>$ois!A4YX#HBn=3Y{(B-3uLqT``y z8r7{TO*W}>j*F2yI4NnNXE2T)B=mf4sQh6z@}oo+i-*rUueIp(b6g@LJk6ju%lvyo ziOm2NVS<&0&!?SM@gBfhkleQ(KbW_0?XK@Sv7>{vmVY|ra%iWZRY%3&gU1l_HuuNJ zDi3sV*qfd7Z!2aAqhUJyL?54XEF+@(gv2oDr)nYn;L0vp7~6}b$?9ce(#CVTD9P95 zIJW6sjz=e*$!2}9C1qr`a=5p!oirvFFvi%xL$BhW%-3V;t1=7FP{qR5rJ_vB#zF%C zq=5j`z;%28*nsQ!Aau}JKvbwW0sd&WP<&0zP$*E~hQ2_f zyP+9qEH|_Z?H>BIRAT>M;USWk(ffa>A zFL%V_;)*-L8YUBnPv>Vd$F@hNx(U1fDsCLydDaW-WE#2rOWAeE*|M^r%sl!P#H5!j zKtw_@iw~W-&EYnMkm@=cyxulht}ZdE2gxYE>j^h(fuTRo267PR2CBp3Ls1Cn_v{dZTvX5C^=G+@qeaQ=li zu2V^CHOyRn?h@bkNy!9Vu;6DK7IUnrI@Jo{@9aL>V<2m$E~(K~Qo` z#X5oPp(Td0hUqx<+VMt@)BLybBuhWWGE|P8E5P8V61|zWWxG>8GL~#uH*ojBI&Nju6V|+Ir0)0)T-Boixi}jd)BQTtLT;#ddVHS<7jHbrD zQ!H2>rj1qb2c+lgvI+EXmksGM#~k_&O>tGozF+<+1GIniYX19dW=4i$%iJtWu3m*d z9@Ie~HgRS3+ifR{4Zkx?UT?5jzQ7~=G(+W`^|UztqIbyliI$%xeYy79+(VqCYw%1jZD{zM$mm> 
zMI>F~-J){)zN}V`yNY)Y>pg@JsP`QFv6BDphSuJiDZ--ig?-fyQK9(=Ql{ z60VL72l4f!bbfSQ^xoXG>C3{%-hClt+HJxEi(BTRMU6;(VITcNWX=?pCor zA~DBap1u*Ph0`k#$0KP^-sf!{x0hUobMl|=a0E?oQIW^gy)vbuW7)i#QWwpyqAJXuIrlZw1kz>E>Jdvz_t*APR2Vu?~+H z=(GWSBlLl1u8s1L(>ADCf5C@7Y#ad*WU1STt{wWml)Ugrj5*#CqH)*0QJF1y)NBL9 z5WM!`+UGwV`}fSduCRDaW|%jHRb2-gjertW-Tmhq*H%#V-v;5}LI-P${>Dq(yn1g% z2~L$-Ud*}NovoN1_(SK-tU5oM<{}tst$B9n;V=OTaU_}&#_DPEF>g8EawgnM%?xwX zv(1XE-p675h^rmViLmjDff~sNjt&U_DA>|J7z0 zQNElWIJ~B*g+Z9gUL8ijRnGFl{|?>fZ}Mb{nrv(#85$ZzW#Lp~lg*R?OOUsHI~!52 zy_Dwg9T#T4e&bwjaz6RCFrZN)z%|XnACh>?m=gyL6ZW;ZyW9gA736|%2Sc1D{0U|- z0dC1z{K$5@2;NeKm8(Rp2_+i|p}7vq(Qey7M(Wn8&^%k@E-7pPI&>W1aN;Li57V3d z7(KA%H3joLI~|F$oHDi!aI&EJdTf<+u)f9|NpD^LrfJsvmCaF?&A8*JlR2*C2F(01 zc!$%Jeqy)Q-k#4OJ-pP_)r&QFT>F^{H9ejVw21O>9GLQ?WZ&7))Wt1Xb@!;B78%=} zg!4ss$XbFpDFZgcu{c1mPuvFInOeu-VBvSRj&&y$d+xpLP=-7?Sg74QNhOF`=oeR2 zs1ZfD544Y*7#Wcjl*L}CHXT8&wMh1Z`R`IO zg$6Ujn4R#^>DF~8GI+N-lVkVs$u7lJjZtd}@B@z-rXwlik|$hYnsFX0tv9`Et-}Zg z^bFr!a{ORT>E!9w{u1pj(NP%kRdd=A-7Nm@iZRhPY5Akpm$xHaT7%Age#y^D%(n$K z{ep)nPHeWx17C72no2M&KECJ-%A{E0yNJ}hm=1d8eEG5$t7U(FoK9j;I&*hk#IOee z%rHZe4SS4XcQ3sTGh@Za>`l?TtNVllCe$?#)$t`!azdS4>skiyvp64TzqSjd;tL8b zyr-}v&Rh%vHrS;!2MoIFCfVJBfLRv5x-!eV6j6-WHfHhDh!Cz4&0h8VED9MA8`nsq1s?toJoeS7NZdcE$xa#WL zy9S~BMl(%)`xxid?f8dR8a9Hiu+78x;kVy%O+N`P*&*o0$V&AX;m%b`jxK^s;ZLGV z@;Y@n!JzEqW_*xjUt$(qJX@im_vc+7Xcf_O5F4X0NUXG9L#%WOLi-Z4xwp+t&-(d+>M?vYVroRbiRW?r)8ktqREjZq??Q?tC{xIbewyQ4U zv&?02?qv`TRF7xDvWaOyyNQ)CAlH8!bV2B@@7&WmiypVDu zenlvsYQhr7FfqEO_vI0zI9f^{e7`;Xwi{E#R@TIuJQ=f)0Y$KQi);Nfts=;w-726S zq|EflVew|Qo^{SP2vcL z5U2Q*W+kne;MYxUf~;V>SDGm!nTjctR@n^G8@wQ z@!hb0^iQW&IMIvUpBgk&EtSnqgu=9j+zWF~E2(w^v7{$v)FH zELrHdf)s{Ey07J zDXQ<-)WnoZp@o?54U^nTF@J5@2pK(**Du+Wh^WPJsO)TV2ycy!*Aq)sNg==k!wMT@ zO-5Ke$%^LtXg~4bZo36}_q?tKgK+AD=$M(=V`vuL6XLtr2i4ot02ajGA+9xAlFtqv zKe5m1c(P~?XH>y8aA3S+r2 zFRWLI>cp8U(0<1|a1issweTZ{Lta#cf+AQ&C`bos^L5NnxqVHkXW5DVjQ47gOkSN% z(bWn&vC}H!Ly1|5sG?%F`RLEbl*XI$*(r7dh-%kF*emWKjEnu^Y&=oZPT;g{Ekfpg+Hhc1HX3(N0HEoplzOf)DpPQs* 
zqBIOH(X`Ts&`0C+n0JLVvIHRV_45;P;ZzI)fwDd9L~dVCvQ3{aMf$tBo# z6_AO5N$Rr@S2sAf7rv5!J2ZopHOBa@%?x^>@iu}54<9W?ukV+Y7>nobxEMa;bYc$e z(_CjD$6;Wavk}L%Z`dFJrVctE+JvVyR=8w$t0Nt=jjLN-X<{@n;F-mJFqV49xUyi{ z?#zdq1;lF=oHPc<3|)zBJlTh=Wo2v_1_r4xh`o5#J5U|dbagoAx2Ovb{?J$nj6loI zdGP%eHX%{Hb}VtiS7X0U+4~Nb4Kj)!HqEUu7O@}-r2@z#_a)~KUUeH}q-jcP)LXy3 z7cdL5^`M=h)qLgT$xiiZW+rGSfywxQoeaeG{mX9T-oeWw4*}+g%#a!-AVFaQ4R9&| z7Mz%Ow*fHR-dB~1MMF9m=S!Myg& z56u@C81V^v56Pk;URyB_zBwWle4s3j_io(I&MW6lcYS$Ch4||FXA|bf|lg#*oM~(4CxU&*e!3+Jo1WBZyv=uxsRu)ybI>+<^ z?7u{A_JnQTZX;OhRzD&85&t@d65+xsr5-v2`hfdjJtC5+f&Gy4vTxym!WQnWU>Jsb z01i7x@(vS!VKkI$;s<-S^!#mpAN~F&BhkV(?S<#q$$hv<(B)6qKNJ+-meD^Hsp~Bd zlq@J_DEC`EHa}ZG>g2l=J^G2(LyM05@_1p;Bu`-Ha}~bTp?2Mh3%54GHtz8yvN@`? z*93A78JG#JL_4uvLK_%-(=arvEdYC>^5YFz#OGu;8?c-a{p$NUSN!Nb%u4#Y5Z+nH z$(GDr#~>mvVg|28@1M78y%3sPOy9RzxA?=%iPW|fdA7Ui>xd{909i0;$1e1VD6Drc z{J*`re=#n@Ak~lkqbdxRHa&2~#6Wv-IzIV5c;GC2L>H|@I*e}3-4itNk^U_#cU`QS z`*wn)`R7PC*PR?q85RFix;$KE-#Ny&cqfR#y+y^&PytFT-m(sZOxZU>;Xudd@S1(( z+d>OXF*`ovP;nZP&VY{rWvHZU!BZRb4>xw`v#v(Owz{}{`TC9BCn$F*Gdn!6 z((&?%w2gzFqU$LT*4Hryfo7M^E2=B$*}SROa_FP(a!oy3Z@eoY20u36J}+;|DR(7; zIQ(!v^?r{yKEx|pBPvO`Sp($Tkcy*6K|Im7o(J_{te_4a2`NPXRH{fYU~ z<%-J6O9{%Umup@6JDm2a!osO%6)AVp;{^ z=Vc9pr0lSf=Y&8l(9>^cr+qpTyxSs7T88eQn|qU?+1c-7F7q20Ec>hQEWjf`UV)vJ(*dHx&Z=F z@EREa@e57?OV>28pW-EZxl=~GC6?j`-om^Uz`d!Xr}@>vocK`q_zckSQXYoQWS5xr5{c9w&7xbAs z_iaAo1LxW5uufVZ`*djdX{v<*wU93RF!6lbV(!HecR!Ia zt1JHE`JH69BgU)yD>C!SR9t4Bm&hp-d52@pWw8E0&m`&f#qQ7b{CB0V?{(EatmJtA zfxxc^?s4?v-M|6HBKg0OS3O;;Tj;2)sCt4?Ky<^xmOQ4o>BO2mK)+@ooZ0JO*8sYH z445neg?-?nl7|#U)fKg5?}H&qJYa}|q8_&zSW_z`<`Zr^FTVed5AL%!V6Vd!S<}Fb zPTw&H9UINcv#R%A`s6NHi~&42r*esqP4D^C$x2V-MfWFJ7|w;RLIFWGPE4!QseRd9 zxQ`&FLURLZ&BlKIfsM|(7aZVf#iZ%0o|B)KsUs(x-gg6M4gEb=^Gribf5EkAp6My% zV3Hqf)_=8D<9sdTyA9aBj?JGgYp9|D$R?T=!A2S)&|dK07oxG}@ERm>K4$$3;{q6q*1fq$$50J#BA(61dv zY{#)(rx1cVff|UwMh%G~oD=YGbjwH13caxa0OLCV0NY=>v$zOYoDjp`=sy7P;4h&f z62xZ%39kQM%Rdn?*}oLrC{YSxi1;Lmo7cDd*5^}T0sxle0084(g8Fw5R7}hWJp}%Z 
z>oAn-Ep&)X6HY{T0x82^&>0FrL8<@Kas&G45iTy2h)Cpb-Bcts@PHAe%R&AdHu=-t z>$qGZsDU)>H=z=Y=eu$2&w4HKkLl}}25_P*i6YPuBsX6Cx!uYB5_~Rg?v0?lTV#RBpiw`&2^w>vz>T}r)r+A}BGh_w0Z@7OZ_5n`Hb=(grE81fF5~W>?dQjH>X}JMbQGZk- v$RjPVyyBnX{{ITsf3$N07^DAC{5veRlI|1%y`%`D@GW@X zd-Z+4=ePSi^UR)_IcLty{P(ZdpoP~U0(B)MgbaVi51i#*P*Gfq;!2_5!V|O!A zh_*DVG^VaqKcz+TeDYSq`!zUOv1FfAF8krw*yxze)F+VPc>YA=A_Wj55u!h%68?zd z!B2H7!!K+^NrJZDlFXfraj|AU1&Z~7qb>7h008D59X&E9T{?k9ak|ctt9&gRI6f(sU%2LSU=rC?I8(`dXM+s83 z3mDSk<)fI}h0W-11&Lvp7^?L+$M0%0>h;6gXnkC4R_9epF4dAO1|m(j+U)dGhR6@z zQlDM@l!`rW(!EkV?H5kC#=T9a-(&mlYP*ekL&Ii-bA5(oG45@W)F0d%M!Rd6B8j5h zHEhW&P>~RvaL@RbyN)`Q5zM;=gvA1M*9g)t|HUip`HkeRrx0EK*H5Ehbl_bNi>VDV zzO)O!9;WPenJqE^@CXwC5QHa?3Z@XEz?58T$ zQu)Fxsd2Lg7Rcq)4b;WwANp+r0&BMp(HNoVxWe}-HHe{R`BK<4Gz+fY;_Dg4*Y4GU zh7+;&aK2uZ__R`w-Qn@fDm)S7HA{AYA|aqBq?;!Y@I2(T7}YLeMj<1B6DuK zy9}|;*iCr`m#PiDySpI%z$X=T`uIZ^Dr~Z9I~OnGN%*L5e*3V%z5uM+tj&tq_Va}k zmAC7mD8&U$hG2hjUUf8@U_$;xLSp+eCJR#d*cVG`HjPcI;b(3KD0IR44Ey>fEHXJ{ z`tHT|^D&B91WtuPp@;m+x*QoA?~KTcAWb0#9826Ygq{QFXZ=q4ovwWY>XSn{~; zrkdRcFw0CK>e}_fxhrg5^ZJPaZp{$5MR!Ayy2&1f{hSm7>YZNncv|_UL~F6W40;oy z#TmywvUvQmp|&juPbRQ}=&2}L*mJS$BHrv;nY9OBt|p01cbI6>(vfQT8XE0BDDH6a zzsL8od!r&1MU8jwk~UY&yMYeHyM74oW%1Z$H%n)&4Dk8K!Ldr%;+Qe4VD;??|26^7 z8*IuOpXvkpaN;K_u$Syp%JBOwappQon-13`!9+B56AfM*c}-Rv#OC-UrJ;&Ki@+3A z$GJCEmng_z|Fv2Q))5~$B2lp^C2xC5h)P`CRG`3~|s z)qZHY!dYLu%0EbeF+8rQ!!3t-mY0jX7R?4*On=YzAV4r%-+%w1LDhO#zor2P~Fz|=`4F0@?R%f$106Uktd z!O6Pxw=~LnHx}+U>+uRVH>kG-@%JjEu7vrY^T%xotOUYg&~hG zv~)ud2iAd(xkloSVLHF0GnOFht(MBO!XGDxcn^=K)fb?$$`yN+*@3@O; znIh{<4U-}$yz4xM!yF@7b%iuZIZaB1$_hR_&r{lbg(np~Q_bYK%Y8985)qZkKI8Qs zW-u_ZPRXARLke{a$7kx?F;~$w)Dp?jsW)DrDyMeM?B1j#(%N#gg?m5Lwyq}9J=5#f zZrj|B>}FMo_tYWl(f*jn@0wVFp~T}Q=;_`_MgkLG!Vk9`a!d9!NDbqN4_@Qa{pMc@7!uoxZFW96$!JxYWUw4@rii~^L z-1K_aj&U&rv4A(JMmlb%Y@HPgnLpGxmRz%F*XKhYF2i~&D%v`kiR zpd$%_3E-7?06Ru%Y(0p-}5axRfCO+ux-O=(zPD|>A0_7NE+y~YvU0=|KJQfS9%`>l z^Ub>ND~w8`eTtW~n%)_K`{&5yhVgvwJP&>nh=!~2Lgi7X-;OG@nbj@zioFSAa&vHqV*fa 
zo|od&Bx$54!g&&xTKN5#IkK{JU*^&|`Te_KtY__6k{k;|wm9|A zdbsm8U2~(bKCJF~l)^m3_N!$%!8=ci4n~`!*I0H!XPa{5Uc;0dp)ZpK7Wf=|S+G0< z7-}hQ$gtl}Ya~>VE4+kGs1m;KOo@#qUqQY+t5>CwTwAcHV55~^#mVZQ9~fy`DaAJO zKE2SqYx6TCjYC-V$Er3EVdS%&dOcPJ8dejf9nP5vQOp-g{p7eP!f!8WG zGscRqTOkd~BhO_jx!Kmbcml%>7mKJhL!M=_3>AO^txsRBe|HmFs23m9C1b1g&K&=; z;6sG%IV%NGibbQ304rn>4A@YP3 z_kM3)CM@H?INlSKO=v)+)_RuR0|-9w2>le)l)7D__$ z8c7{vvGzP~a^=O?cgWn2u{kT|+g+k1$N^ozJtQX&Z@H+6>@F%EkNl{)stgff=uuT&R-5cvf8t2ZO z)ZNMMdok+dq5B56$OlcIIh0V)$8r`&`^=J$4lM1A+S$pT4im^|1#(A5rT?!9=+~8xxT|9SZhM#^9N#!!>l{Z`w53 zFFouF7l@)32+MI7^VtF#tf*B+I&IZPh|C`o6ar#DG!%%IpCBBKUJI9wASAt- zgiY+_!6hsUsBetE5?pQL8)`!^ccS|JmOiUaF7^2CJc4p+M$))NijKY)9vhl>k{2bm+{dF}l=`HowHvDZE&zh_K zd{>oUa^;lwkNC_f^}X!1?V8;;uiLkTNtgV$-8y{3<5Nc7r`huo7U}r64et1;HTtr> z<%ND=O-0kv#rq(J1&>?E7Z}nqbz54`fMBw9Bb+9iX^T>`m zi36sDJCB;oBtCDqb4F)4NHH4xN~DQ?=*D^rc~tm57K+gM9F|&9?1`rQ&`I^ty5~iE zD7Y}I;JA0lg%W8Y{=vK?=|=3iAK1VyU&6*v$@;OfkjZz4GJlF{*P`_xSPq(c_^#OP zZR7Xz`M)Rk+dGC+$F3v}%FnyrG zJqVU(3q+37tz5Wl`h{d@1ZXUxY@WRtQwLg!D`{J^T~39>Z&v&`7hU#U7v{wQdpNe# zMvisZm6EYm!SgvXN}D3rIJLPw#)DthS<(i2DZ``tQE~JE~hyDSa;) z?ISW`NH*{d8Zb;HaghtJV^?=Hp8gSET~*U?yiBY#%2Ks>M~;=Ga`gSiJe9rLTFX98 zNQ1~31KLo#zh&3A&u%!!G-CL>b+eM>_B%Y15yUUCbXX?=J7$>ng;NcE)2+{Cj1|Ml zx3d-J-uko?jaL+U0QRcYPjU`iampq26k;0NyJ-2c?Nngc?5`LDgUP^?i&8`~JooR1 zk~UEOd~DY>kx;|8-Cu?JK*6<+7n-?wsb9FBGulW>YZ>`RGEBB{_1Hr1`E% zdP(GUD!0{r2ZD5YAIbatPTjIDNWtOC8sy4m#(o%M2s?mUTabGd;T+N)D<_7(!v#_5$U_T0v>-)a?+2J8&SLC}7 zROzWRPe0?m7o|B|jQ{SeT%`QNm7R6Z7<+-5a>d7)WA{L7Qb+Z#v9P1oeI@%X%bu6T zPjp}$h2%4=n1jS5{26bMUB#9|Aoc-yc+RH*14!F(owHpUwQ*MKGcw70@3xm&9% znb-?$_?~M~^EV8MyU)tC(_P{VFjNDtG~K#y3f9jrac*S6TvA`t zGo#)ifiMAPVgewD2#M$q$R7?&6uiTuMH6{>Z!fio)$q@QfTaI^@Bm*V+zK zxM2UrgN^{^T|I0cn7f%<+FAdPt@T7o&_fXXXW}m~o(5ss3 z0Y6N~x-*=g|HJ4zB7Q0eFGc=f{LBkiOH;bd9{6=SB}juG;V>6mDTCmyi!zfEbR`T| z@??S=XZ})!#Lzpii%FeDGPio1B>>5(Ck0t8w}xzq>tgY@H=1~?u2kM)a*{)C58pQh;Z}| zj0qtt@rQi3B|;u?>im0he&0fb|6r`P`dce5Tq=dY{Quik{=;7y672R{y#C!|evjEd 
z1spMdx62rj2LC*zAv*5xSE&ql@d^XR@{J)Fm~a39j(=EgGb+dv;nIV@GyR_wgexFo eSMCo_5a1$FS3*Ig*lmK*0UQxacs}9w?f(Gy(>a*{ diff --git a/dependencies/arrow-memory-unsafe-10.0.1.jar b/dependencies/arrow-memory-unsafe-10.0.1.jar index 5960de229902270694925f4c4c7d370798168966..45868d0e7907c43bb0910f77931577cbb24bfb64 100644 GIT binary patch delta 2670 zcmZuzc_5T)7k_8Q7Q>7TBa^imvPL6I(?nC*Mo5UX5RI#vFig~7jHO5_UPN6>_{x$c zvTt=sB$_B$ZYoqXWN1apeP_O}Zti#Af6jZ(^Lx%Y&v~Bn{GKwGd>5P(6(J%CNJvNk z^0t%ivpvXSaTowl-~fODbMaVq+Rko=HbZoi=e)%InjVotuJ!uFoi2;Jv8Ai2Kh5u5K49m@mVVX1UFf8x zGT{RR%5Y~B+pUJrnnE9=`?*AAIezWutKCkwi(+~j#7JIj(<5Y+h@_61md58f;b3Hw zMGh~Z(Nz3AO(KTesnjaB-z-(dRpMogHb^oy9Dfr2Q6=+W=o_Zlko@@r24JFpSdN3y zT*HZm?}w_LyqKMDyH9Jb^x9`rc_1gn)QKuC@oRbhTR1=h5EKsF4Z|bkAQ=f$70O(g zrcidl^n~&=jEE564B$FK=?NzZWhUHFFkYpgco2?Q3H{s>D?|w`i^>*o;SWnG93dD2 zk0{g&A)_MCU~I_X{-`1ATOG7pa1m&?008i2FakpL{rp1=0vQ2;hZrILhk~1h1*VxiEs>fZtlAVj{2Y#Q<%Pq_nea_FrS|6PG~gPLA3hl75gC6{PkeY z=)P#!UARQMct{b2;oWGUlCzzWzhsqJWhHuFnaE~RmzFw=V7ixA+6P`bIxjnurchVC znOoB3I8WByyf2Mqrh>8b(WY@?moKwaJ4)jl<&~RaT!mr@bR?CUs}% z=TWjuIRAWbz;w)`X_-KJ}> z)3Svf`pSnnabxq%0p$_BW3Kk&$M59y>~E^Ij*>fM$6a4?B*Qn&H^om5UWre;ZfRo` zmVR-yj{3W)`f&ZWv%%t*pS?jiMU`ib@lj@TK@S4YKH_}34%1_mPR)Fnw@z`WY_faz z0U3ddiAKg6=6dbq?E3wx_P0W0|J}VECg177eQYdQ-)AYW^|bw3te8&b#|qYy8rtH* zsMi2o?)}LH9&fChQ5{Bo&eaL-4CD?0gB#_`)!$}ay|MAU45luPq*8EoIb&7s!3ug{&Po2+A7dK9xXOTrVZ{gXAc zE>DcJ0!EQ(dsQzfwdD2fJyY_0(EV^@Bi$0^ll-W|OKb>xe4*tN##Uib_$ zWwd17b{cDVxC3N0%dz%WQmHj?PT-6Va@s; zpHkjpgUGVdV7=pBNBFR_muH!lYL z`6;6P_b!`a9hH(Zvup3VM&kD5pU<-@)y`-ol~v%&W@O!|29h;|fDJr)`QEL2b8PE( zruc?_2EQzR=hsE+t9uFUtI5u=S?bJ-)K8y_-RXb0rE_pvAHu>*83u*&Wv}jc26> znWltonyY(JQfXG3_u6(7yR0%q$@)*-|0lz>$kTb z<^;}0q5Iym3Q1MDE16fqjv)3IFzHn_{f_c+&c}N=*^bGi*NSmI+7kH$Ja){!W|x%?~A`8=^VY-bRkRmF`tC zoX&ia+q^mZ!bp=LiD!}+us%_NNGxjqU}Bsn*Jr-fIjvk>`khV0=aE%?aoC4zk|Ohy zdr*QTCx_MlmkX57@+>EUOTwZcDAemg{zY$E{mj z*gkbKK{TOjRnbW=#ELGS&y94cLW$T9>XOMB=*NoiMf8l*x&puSNqNjmY=J_W?^T81 zAeiuu`58eN8wST9F#rA_fX0O%cnPn)QSe;&VGzg_KNU1Ev&9yor5)Q{w&>eBSgmuS 
z3YJbwNo$d+0`RZrM{XIo O2!!isiXFxajDG<)>@MyA delta 2607 zcmaJ@c|4Ts7oUw8B70oI*w-3av)sYRzVExFY%#VmQb&fLW}Ln%`)b43q?~Q zjb?HwxfIDZ7?QQ{duMLz=lA>k{&?T>obx>AoadbPob!Fl+-lu~oNVC;ZU_p6g79SM zd7X+1UPE0F+DmkkZ*L~Us?VVh_||r1>76$Zvdr!4d;^Hx!cokM)tSA$&w4{%Ou>Ag zm(u@yEe_?%Q}G_KPuCI0Bp*ByF{dV+ZxFhge+WM;AQow-wxc-3(QaQ}BF=i|gA0>Z zVvr{yb#K=bPK6~0&r)h0OxauK3IjJOcg=Z#%c~F-E=OG2@*aH1tT6lQ&k|>7Ja*ZU zl7{eAg%05F@~HalY%DllE9gXcLzd0L;SnESO(AO^%#G|ex+=r7f{ys?kTB{*aFZ$E ztF)V^wdWjM%3kdfZgH)nbk`<))@-q>98ETO{D?6zy;azcJJT=If|0gKIU&=1I@x9f z;$(|Nof2c^0SbZOU=UEi$`AwvVPy#LfXc(UL7EJeX4C6XMK3k41qun#l=OR+!Gppd~Zx#bWCvE@$ledmt37A zhBWw3OV1jMtwcU14L{;V`RKa%JoN~>nbehiqy&#Axs|&%&c1YP0DAEx)9X8jSCgxl z$EzGY1KVQ}Md$Ew6&jn7pRgGbMI+NR)ulzVntT+o(|)+&Q{5Y>zC?HRDWkU5tOk-igY=(3k*kN|=Qv&a!-9Ak8whRbiRt&C`K4%urfhv6HHAjMBDS zsvd0&BmU)w$)2qnEu3jdS#T)27-`yQw?Nt(-pF6@;&y85Sbw7b_%f_lGb~J{MwM75 z=Z8W*N3Q$2rPh@Q#A^r-3N<`axAg8S)HV#k0azgPDwW}=-2}nQE zEJF5%>jXy}%@+|=lJy`RQRRJhZTw}=p!!39enUO2=-Ep8XCu)TRrA702w8p;c7_hrfFNa`HwEGrjMatvtFzL#z=FvG`+?8p0k zj|}XC#u-=;j8*uJMxv+S?8Sz#F^u;>l%7{d5|S!eU#Io|a93 zU{ZPusOBd#rcn4K4^GxQ6A^VC25ij_S4#K z5%@^w1AE-ZYgG6Q7YNo>MU1xjC#oSFd04*=Oj`7UtWP^pixctAec>~5KfTYQ*d!cT zVjl9TE4LHoYgz3QVDD>tle<{eaL0jwQvHv;To1Vqa!hTS#B-*`#KhFLWxl)5lpF2K zAyUIuX5Dh`#HkP9qj;tTxu5azEivzWT!s~PGZ{UWcb zhnFgge+?oLRn%1Db7jhYOGQ&pJ=-6k@TJ419bcWkMhzdLWvF zZ}Oge*6eM1?=+urC_~RP<%pp5XoHR)qC#cRMBUh-xBPioN9a(H%qnR?NXe}~ll$fi z?d2?`GV?yGp7Z2_Kx+!kYO=Glmq47i7W;rDTWk|E(&0YR!O>~-O^Z_|I;-1r6Ea&^ zp9P=9aVq_(*43nb;$$plS2knq<=IcgQo6DQPYtBF2*K~qFLxyQua)1&@nFyHe;tR) zkP@Zqdy)27+e*6kZdU;s4)f-tq&Srvdh#pu9i~cVsHhxS^IpWX)kokW@#3RCDr&c} zW)Cr9*L{bv>Dl-1icY_n5@f_SU&hkqx8&m!M`N#dYt9ykBz^WIWeW1{b+veyW{I7C4{^f=E#_&VtOk+W{=(y1TtFhxJ4FE7v5N89ezk&p`6-ZP- zv$I&9g6|u{axVjHDM+!=Sz1v7<^ThvRb+txMIN>Y4~lYF zB3+;n9i zCDH#i_rID0Y55s(QP8yk@K9wJJHF@OZMek1_-xtiFlaSOoE0umrz8rfDGRd22}--6 pl3%21=$~dVSX72TIl!Pn2iDR*)fad`=%+(BKxwi?^E8=H-78#|5dWXEjW*jCfnYSh>^+q`M=obNf``D#qbalso12{l_Z=3G8KP~RP*1y{@3OVm5jBBwqafK6L3SGGQ;Ke 
z2n&`$2x19#Zp_pq_rFh#!XzBKvLj-HlS}vu$?qkKd~OCOf_lXk`{wdYm#ol#UCDz` zWJ1B~9T^vB19m$C>_XzX>Tq&m==$}&gYg%&G0oOX{#v<;*QcOuyubq!CU&{SKJUi_ z&;}P{enzuNGMu6ms`=aGn{m(7T-m<9oO9;0!?h9YomwpyfFU=0*LM5+n_Oi~Oa960q`%ub-6 z-fbT!{ky#cWq7whP-^ghQ!@@l`);40_}(oywDdn`%`34uPVZ4R|MrCiz`sAMcm*o~ ze@BDCI22$=Z#*sZSeKy!P!Ny}Fd(Tb7A3Gd3m*rFpoECj95D=01^UkZdydBl2K$fA zX@f&l4*Uv4<^+!V;KKri+wh@)KY_>#K<*2uGsJ%s7tu1O6%Sl(7}PAhLA-676=8{l|q0CJe7SLuKEtI>w^f(f@9qOMkE6V8qGtz6Jh?v z1=DOFhkX0NSp*qnNP6SUf_*r<2*yEe&ccW7{fO8@3Xu%#Ux_r!n!zr8Ans(u-4C?d zhjH+Mj$VAjo$z&M|W0 z2SP|h^Zr02z&H2riFS6L32MGM(1QUQA%GD9nF0u4fB}veu+3{8Siv6)aMOhNg77}M z8ntLA5bx*}A94Ze-9wWJc=j?h2#5qG@UjGr2uK1;0N=b&h64%n-mJ+TuTK4zyfGNi z2oaJf^)EhZ^A`q0#gE*NP|#98kTf=0(g%W|$AEx+9|UO==?U!}Ew~^AL%yREF`Phx zcN7K=-~EB2=-?hcc;B_L1+m{-^}G;FJ`lVW&+q|e*hlJN0-nUl>fN#R)cBzc-A3;r}fdfa>1wO@~yLM z67Xvs5*;wU1Qiij>V-hkY<`Lf`oU2Q7ilegn~E!Nps+0rTl3T>nAMMz2^2AL(BCUk zj)gDgUl4h~_!$&>V5|!PIxrpq2C~_R4*Ln~Jx=;W8nzs45D@TWVDD#CHeiK20b#S_ z1onSo(kVej{_yo%JNj57v9mGTJCdFz@dO^Vijp4xk?x7$;B; zmjDr%vNX@Dc<1h~z)+E}YF??X-SAvS=&Ph7a31boulhFk~ik$@!z+S8y7yfbbz zP*lO*5ndY(>qjqTYee&pY9j4N{`)Zyt&VKK2=JSo2a(Y~ zDET8eImB=AX21Zab1;a2JxXuQR=yZKABl~AL!L(ZHxjUo9r7HN`t9|kG;66Hd|Ppn zw_XHEQ=k|?n-zR`fSJm;eHJrH*FBA-k4yoBnm`j(#z}c8sx-V+gA@}2GwrV`JDIyv zK5*GcHO8U4v3o9bA>ROK&tka7LA=1PQ>5e?>MwM1=#;xCvF3A)aHj056a$w;58+7B zlFUd@+e%8bg+Ya*I1g0@C?btWPQrw~5K1Hy7ux#8anP2f0IAYpCiyWzOd4~@ndbMwV&Os3ZT7in-h>}>mwsi{#jLxw11%|LKS>0e zF@|1GBxAS(yUIvizXqb|C(Y6fkK8B;`v(s4`^b#41129_gUR{&3%QGkE}d&S7Q*h`I|ReM~Ss2tY3VjZOFhZfQYf@cJ=y< zR`9^|?=JAa7x}+;rGLwhOzU*Q@K(IM6d)i}Z?$RSU}|PzW)J+k28Ic&a(%0WHEms& zWf?U8`|1O+7m~W~zt#p(_(&u4!gQcjK&di$Q=DU_L3H3hORusWF&s@nz*xwBlXg>xlJ)nY z4v~8YR-819`m0&8VP2op(EB41iQ^`4htnK|eKsRDf?DQs^2kL!M(qnO1AAp_~~x_Bz%%|XV9 z^mwXmS(B(K=M;^bYpLWtdo7}UVb=}(5mJU%$5+V$g5}u}P2E$X@0T};fJ)M6@^68< z1&@D{2hrpu+Ebl2>K#WICgBLf2jjj-yVBvFS&X2Eo8NI7q9M;{zck0Q`p=%4Ywxj| z)!7gp?u;Vgu$SXVZ_FH$+J^x=Z{6Vb-g@4p`15uN9Pmj)n^jBy17I452~r zWqAni>?Gl2enVq3A@#O*00_@%!Bqamf!fCMJ&1$eMuL45Onc#N%jvoQyIB2a)6|hA 
zbXxO)8_yq44yuqemN8%G1@x5wLC7uOlW}R-`XH=VfcaA}V4CqRa|A+fbLi{f)~ zE1lqgum|Sw%WuL_^6U`@>pG7UQs-g9Vi93iOW1W(K+YG|_if$PP^6iZz-$}@r} zH{|nkq>##VTtrl^17(}Ly!X%gNekJ3VqGGr9<)rb15xO^!4o`&5EIW zVaG=bv?ITh2hz?xet(BCRGI~?=C+X3Ml-d-Y*xXmp08Y#fzcR4RCjR81neqx;=Gc- z8Ge!~Egl7&Hpbx~J6ie~Y)$ ze1P*$H0Et7T0rxR?tYb@4{2d7y9zV{Zvam#n#WODVmj%?<-Rjs5#Mmo=~1-H6#_5u zmMc-fEzuRlzZ&V3|I_0_@SHlu*fYJ2Yy&n7GJU3=m|uf<`^$PX8l+=)m>t$5^>jq7 zHyBpnJGdesowB^cZ7e6J(q8f4Y)aF)bHF#o-Nr#lkj!pZ6x`}A!ES_h>ma14yv{5} z#R)+q`zKguZs$xO-dTARrwbOjTq%MVqfPs8bN`YB7BCqzh5UUDDX(3h*Tz~m22)u`k0TSghM^nu<4~BBx#O`-bnWHjPT`BYCz`p3XAJz zxYx?B&);)6P~&-3^7cmLSY%)raoiSRh~kI$WN{9DLvhp|MvpsYowpXQA>{?GEVyzOhvlm^?AoN9F7ZonyJm)t;^QgA}N!+*7wm z9~zvjI`Tm;z2vR5Wfy+IgS=6;&i|YI=h*47el|7rN|E?G$}+cC=nl(hus^SmpArC| z{$SS=ooqzQJO5zb`$K-{eAkPr8kX{v&z&w0i2${k@L+Zj>lVyp@AIf11nl!vbwe^` zn`Aw%!hYp2DJYLGuPD-nJG;tYP(9-3^B=*UMP#}ZEuP<_z@J{?x$K_<>x!eieJeZ& z%-muQ5_~zgk4IYOMqICu3k3cRNQaEL*<*aPeVxSS@|zH#q%j6 zhWG)W_k8@EHjxUVsruKVI^7Nz7F}<#n%r|mMv=|nCu}69@p7+&{e5YFckD6%a(OFw zizEikT=$@-QZr87umoGF0|W=iObD z8$@qg`1aAa$Q8#zwTE2>rW!{O>Eq|NrtK<8XuSj{^e2VheOk zhb082A7CK@^gLE1tIi)WRh!$xWYk8M27ft~bKk{GG@231Dm1OkZi_{IisF#4{wx!_ zatkmp6y#3Z45Owx^=tJHfB4!9f`b6nW0=-EihV-C7iG>r>@^gbj0ryxT44w!l;o<< z4}oh@a>xrCL?(m2he%Z-0M*QIV%4XeQEQgr^n+CvkVF<`zX-`6MIni`YL><1rLrSv zf<6KHbUEPa>f5)cJf@AaTZek{7X=euT#ddUOW@m*Kqh!O3Bson-f->Yo}eOf@@)?h zEXiTQn|AN9f_91CnXq7sM>3tqaP!D4IRy@e)*p1W8 zj(w6z0(bu0xbjcoJJ*&^ms{Q=g2>fXq1%TCrhSZ6zb*$J`Yn0eaQfG1@M8hyuY7ob z12!bIApZxTdME)%I;u=kn6YIer4^cz;)S5Wr8aw!C zFx-Lb-fz)U^eQ;?SVE$It+?gS=c^_?1c>9;Z=5dJQ2ay`M(A`d8kW}n5+&3DIe=t7 z@NHA5hJRX7MU-2u4t7b4H$v>r_7Yf#q&|6XV)q(;UMKJmg18X)(xhmdMaOQYSvXuUo+LegTH)ccr zpg}e*CZzK7*jet-9Qsk?-?4BxZUENoY&&O*q(^lZIYo;vJvCe%JPMl1(AReeW}h^W zydon9D^CmN5y%twnQv6wjX9%r_qkK@Qt!?1C?pnBtV>e%(<>ql4vd4kR?qp>Y?v$; zBQB;ljGG%3!1oW>#@d!Y1HgEb@Rd0mi~D%s>-hW`i8e?Ac{b{Oh=`!QSOD@;pD*2p z0_SVEk-K2{FStBuI?XP&&7D9du)eHbLQvs9`S9>6y0~r!oe+H?zsL^Gyy++Q?-fZ0 zJ%Xy7MW1b+ae1<;(}&$Zbs-_BSzXbk+Mo;b5tuGJa2xtI^{byOgW(qf|DXh3m~BvD 
zRv{CCq{}7@rrbzM3>;SR1tUOlIy+?tGP)`S5BJBX4b7nD5$Hi)vW9npoUELr#IY4dm{3rJ%9v8_2lGNsR# z60xJ$$3plBgv0x{aRAC0D7#-wU;E)9x9On&Vt7LfNe2tyNSpExiC_bsqj9`Hv(@p4 zA;N%4x5i3SU5#GdR&J<_W9CqS`9?I}(pfF=6}GbIYGE(tQ(dU@9nbZQ&{s z4qQU`sqwsh$QyKKo_B^h&jUHn11vAZ{8r8}Db=^F`abA_g$?G%dL{N%%_nf8%Q(!9{Q68N6)}VJxN5POuYz~6Y=pBu- zF*l}~JaR&Yg+>Dy_}8;hYOhzVyj1@6us<+R*9@``LEe$! zf37!$90U<3s}z6H-92e8#mvE&?ULu{2AIv}GAY^%#tF&^f#obVu)!~&aN;Q{R{iX+ z&m6uvdh+xRf}w_m*%|MZfFwRKE128f1t~6npzBoHO@1!CH032A&b`3}6U$^qi)<`&A(*AqpvheYMd0N|j?QS^{%(Shk;fYgMcv7lN~NV#QL{cYv*i2+%fAKqQMNZUZCmG|PR$A6$5zCQPKB2%j&YFa=80UP`j zWW|yZ8^Fa9=MZXPfjuZTc9s{Ute~yQ^#v0|X9Qmnxmx8%54jRXV(wJ$XhA%F@V;`Q zxv?*(y4Q9AEx_Pm(S)QHF<*$%h=;3S)`bjkz&0oO)~raR1?1F^F!636>25!@Yi|Oe0C3b`9=A zgi89`sZWOiB91k&x2zg_21s!oi;;B%ML}mf%&-6<3)F)%YS>5Y>Ko9SPGnNJO+{Yz zqMx!jCQB(?lyfbpon$~5dGE$6xA6y|_yzGqS0|P$bW+M7AdZI71sVkkYbgrl{Qw{A zh3XKgN_Iq^#E;(oQB@yvEr z)(XJXw>y>}8fc7&Q)WZGNhy3>FtNpV^qz8yA;KKO9FIz9>jms80SDR_Q`8gGG(psb z0zVMGP(|Vr>3?S~FC0d9aA*s_9dHa|{!Hk;NH1K zp#xKb)$8q_kW+-r&&Rm`N0maUPGHb|jza<9V#MT{B8XIC_Qje3WtKJdey6~WSO194 z(S&74AS!1RQHXCvR1#zvW*}2!^e5fZ?;Pw~3 zgj~$nXq69uvmHQdA$d3^+o$x0UE=sFI{NA%b+DUQCBIQP54#B+-^92KNb$7dc)=bZ zkFs7aT{m5VesfuaGKT1$w+nNZVeQMMRbx~Oef(pqJ{X&)zw)tf_upJ0pCmjf1Y4Ici;BkxMZx&{&Y9J5pSFw7uXu@KWAAR z;>2&|Amztx;YK^1ldWBbwF#*n7)UtUz3_>bas@J5f$hrD6nSjy{2nVrKtZ70=!4rc z1NVB)K9KG#Z_=g;a2g0PlL*7~{~cjb2eb~-@&RoK@$_U1D)|N~{lfPdHqj9mVA{(H zxR3p_FN7uLg=#9{9|Xzw`!}>;RH{e^w23@0pZG5dk4=%5(2Gcff5B(dwilW#69S;>}Quvnc2Z;@F#m z#ZNNUyMjj*6*Z!P+J74-35EQG8jiHXeX1Q1r(leBly3)Y{VL18D8)2XM0kkHvzbjn zRDXCgUmsdfuB$aEnPinab$wfI=lxP~yY#!Fuz2cZ(a4xnOM8Z~`68z>01&XLOGmh* zx!NZa^SP>u4TgGK4ZH!f9&UR%o4CsgVn%V&9{G0mcb^d6Ddxu&W z^Dvx&U~|$irrjmOzVHO&0*e07RY`gssgG&6|xe#HMJ zVceL=RD(>x+`%Os!30e>3h)zguOgADZcydt6GzOSIRdaH?0U#+gqYRnnE9fdwbijM z%CgmBng+R2L(1OO6IDNO&;v&29^Guxlvw1EhH8ysCC$TX7L92W6e)ZCW$@Lwa0@5M{H zO^Rdf%ObWb0XzkWS_~ArQQFCCrUbG=%T59|^(&5cy8CTNax~?+7fb^bp|#l8hhjP@ zsd%7T$(|Z>Bz$OQEC7ZX!Sb4*TdvTE%;5tADmO-1U4f!zkL>V53N6n&vm#l@U0*gN 
zSmjRENr=+R67viV&ZY7`r8EkptT1E8(}-Udj=B|TDGSC}i-C6>$?0$>gBstnJ(vtM z@rDTbl!t7)Etg||V0LIGOlVYq6U6N(v=-~NfDL$F>ta0jr^%03=Q9_HnRD^8|ReE)kv zR0%y^_cTjqXdgX~>@IqePpya(%4uDEA#M~;2HU$3lYoK{4!s6Xm-_${#LSaeNUXAh);rWR>n zD2ayGj{;y(rs@KV1)IqiH!og5F$EK%K!k@HMdzC@R}YpD#SCR)sBM(rCg&m&SK_Js z3shzSpGPUd9D^Z+*F=;4QP2878(Fp0(H4wR*2ov;e2^@_|@*Ezr(ph ztDqIy4{oqH6!`%7`14!HH60)6gzhcOJ*o6vn~I$Swzkz&V4~QqN^*E|M%yJ_edEdZ zee5HzAd_Yt^rq)fnM6k`g+CnM{^Ss}Z;(HZ95o}OM$4lL1@>X=Q73bsI;gj9BOx4Q z$WZ`B#nk)wDX*>}>7_zY+nd48dz4?4T-j_Fg>~LX1(dMag%-SpFij2US6^!`iCB2$>?~1 zg_D>>M68Tj%+#g}ux1E^K&}%9FN4h<8`NDq@dubO5;Lj3J6IGBv^d4D_#q zpi?=PZiq8H%UDV7Q-QYrEG@#BSM+KiUvaQ+E}|Nfoihvy>!siP(+9``7o!0K%6d_X zmY0hUrnXH9W*XE4B3a~I6ezdj-$WXWr`oDo$Z}514V&<@9@p?^c@_St1J#uRpclZL zl=G`hqudU1t@%dP)^aVCkbD2u)WF_+3VofTHRh4j%ViEd!@BL0v_V-D}F`dv;^Yu)?NYRS~rlIDmIbq8(<@)7Z(wt3(2fgS5+0?IxSr<4;u8`GM?Bco5x zgbz76ViIT2t<+xDGc~J8Crg7_i?PG3|H;rW{Qo;^X^yM08SZJGlkA-6iP^s=6sc;sseD z1P?4~hNR5+2D#atq>QZ!6DUp%3tQ*`souV@jjHlp^QK{Kz7=|l%f3qobfc#Xts1gb zG0FH*zddM$tLeO%yz(1c02;9NvyZMTxvQ@7Jg&dHQ6|`RG1>!yYSE~of2h0J88$T+ zv9(X=7^Rre=4F7%P>?lte)2%U1iMqnIrb@BSIMwMUdB>0ZyLw-2NX(rYq{~;S8Nqt)VtrHC0Xs12S6=EHj5jrIkCNhDAfSGnO!NiiuJVdcdOOE>0 zQo`6CPKs2i%wEntZi0!as<{91Cgxi%4IREvK#&%kU`n5+R2$nR>I+ti3XM3ptZMSUUb#t9lN$Udn;K?4plKy0>Dz*myvEkA!| z&p>9dUaA|2{uOpF%eQ5VZ)JH}hKZH=dhUI?KdJ9?EhBa5K z926;Zrp*W|XPCXD0{@rcrCT>|@9)wXi5t|gTv-`+DGhrm=|;pBCee2D4;?2Y4_+Au z=3zkdSTke2-9Di8t|R>}+UriqYT9In{YSU^$T8t{Lc=|IP z>mDZkj0LNJOBa#bb$kXP3=HV*SAE1tov1NOiW3E>NC^x_I!w3*%o7GIk+z2|bnNX@Y6h{ZoaK2Y;ltT_{ZTk-B#-|Sz+a7ITv(1~gHxNPG>2bZllaAg z*JC=ee3#@3UL}HCd3|%ct}6!e>W*>wLVVf%JF1xMNfdH!Qcdk-fhaL9Qlh(qnmg14 zx$(&S{^3Xih5xYPo$fa}tZT%$0jg7V_d<*PNau8q+OFSJM>ZIpbeL6J_v@Ita5AVX zGBhptYW$#Zjze6Boeu%IPo)Oe%9T(2(~RpUs_SV?3Ad(yqavy0zdOVIIqWLIeuez^ z`^JBN#rPkc<^Kp#gRsRuyT6Gl{DA_?h~j`=xIkf)fX!#Yfq?x`Z3d-YG3hWc#apmh z-gFc`)=Kq`LzOgKO!dY7{TX$0T&Q&*X2$sbcw!V0~C{w`U(uf@1SeAjGs zTH^pd!o=DG{q~Wb?SobU=UaNoVE=qk0)=1k;hG)$p-exB33Q0DA1KTUIrjs}F(XEP 
zASW@b|Huq;?H~%l-lb#nnlehXZw?**sX&QS2cVFf{Tv}QK;LJ0bfk8A3+P0G$qZCf zLWTuGwLp>pnG;ZOn(;4C=ROp9!jc?!)VLrZK54)NK{z@fqY)}pvrrkP7VdizU-Dt1 zKlFl%gSbo|sPzW60Pnr^q89-j{e69a$rK{HE!%O$J^TgTHfNfSI272+j7TD3~9<#|}|6J`h|m-1CPpva=5Q9_>9L zak)^A9iSrw7IX9I0Q7&lbl_wCM?tw)3@-f984nXD=zrY6jC>Tb&pdd*$1^TtOj?}x z1iFtb=Ot1!V{ZUM?{IEeE z2owT8=tJvDDVaZi^|sC%;J^!HSfb_*A`~L9cb(omCgDik_nlHUc2}b z6U`Q#vx-l}c)PX8&KK|368KrXYoL)gQo>v3_(79`(j5;l>3?<`339p=a}(?r$)%yT z2Iq0{|AzM6g>xPz)np#Wq@r_Tj6o}$+0fdf2a^S0?yEA!Ac2Nv>0SpV&HlaqgcB7l zM%JJzdhTLGX*whq9O_ji*n=ZqOOCpeVMaoZB&SCbSt3UvT71x~MV+94u;VhEk`qow z&Q=m^>iK1$AY1Pk)2VAx7P;{2FZAgU56iF)egpuSD(JB5i><{g2v(_p8GL-}SC#8f zD}PkLb)R7=&SJw5L=uYwNrfK?@p)&0uPM$5TNK2VlPW@*h{Vjm`j51&X6lT|ebRc+ zuvVEuID;EFZ9;@Ne^-&D3Egz3%7hAlOn&_4G?>;`kpbJ_d~;ic$9kT+);^@_bSp}$VQoxOG6y&k8#&PuuHe5l;FI2f{=YS{k`qrQOp3pA`n1o-Gk##a49X#l5Qq zxR0ulVwxPe6sg#V9&8~K)kSF8AlwWPng z4ngyFQ<-L^z*b!xlmX`E%i#toU7Q<0rq`g$%a0SqyO5XZC(ZX`5j)w51RVLWs*1Qx zv@Ravoj>!>WX!7dm@LbrKLwaY-rC6C{x!4YeiAKnDcFT8*EHx<3`@h%5wREpRDlt? z%0%F6%q*?bji3hJ{9O+3!?bLXW51DX`Jo+6v!T7U&lr&+x(hXJl16uQPXB2moJT9c zr}E0m-V*-GeXlF>v-B9VH_ootPbYj-XG=9)d`|?ERIA^~y|@Jux?e#H928NKcx%;ZQ4K9dW1!zVyDXVZp z_jk;!gZ#P@ET)RDw3;l#$$c|>45720k?BEC_vm7KDJOhPQC!k!s8N*lqiJYJLFaHv zl4h(xpo^oH@L_~Q*`BiH*1A+N-3_cVnT{D?&H1y*Tezdz#coAmG@@s3qac!alEqMg#U7LDsI+=8#9rY3GoiJ#1R>*tib|*p_>r#bmE2F zeBe=wdhSP1;CG*C7Blt5zO!3Cn>EGC5g-}^COvn)YpDa zG4^rQssj=qjQp?5Qyz@0aEBbKw&l6}U;D9^&kdHZN12A7-&8l6$ABU+8&_SVX8h}Y zsE9e5%1M;TF_GD9rW`lE1>x))JTKE(U#&mQ!7Vf0B3r-hBVFnfpt`E`B6!Wy0YCIf zu;#$4y{=H`&=nSs8QknKM{kMfo^=W52wir;p#>9~V~Xu3iL@VG!K&e{sTOq!IpT~A z9yz|o_`|#>qq`h1|pK$qUa*` z(*X#iReg1-0X2OqS#?``;#K7iJvC%CL;{Dt1l|lS?c3!MuRYk_mfZ++uZW5hbGt!a#4Q4pU_;U z`{cy)6-?dg@4}7cpG>BJeAq%~{=E_8EMsKk@7`_bJ8ByOXK-g|J2CJ!@cpAJ3N_(p z7(qzW`+4wIyonKpS4(}4ei@oXk`GV&2C;4Jx(Rj%0w(|_mGql|3kD`Y-z$eZ$dwC_ z3$*Wk;2zZZO5OpIW3;(^rt^+Z#>RS7qGOO^Ti|@B~)xn9g!y$%FH9@u2!;72I zhSRfjn!94yJ=1A7hd1PWrh2X5>GNaoq02DSn9<@oqy~0?M*)$?H%ku{!9hmUhtjj( z&Ep)deBXFCHI3F7E03IgygEA-8SOY 
zIiHwJZI&*fEsAi*c>2mF0->K}FEmbMux!6-4ctkVShXRnzcEKWT`|v%I*qD%NaEDl zz#pWIWE-W#pqUz_40M@-Z}Y}FI$uTw$ZF(Vx;6ka87L%stG#hQlX{17$HXT{F@M76 z-=);JfYrGe85Z3zrIBLDs*03rBZ+SH_2CzI{dA=@4fT#{#9CSn)cIUB{sn`yS8gb& z#bCvlziKwpU%#Txxp317Gn~;^j9}(U>LDU;*_E=?J!nGkyRvuh0!d29%moyCx4fs( zv|0neMtLW2IGBsyXX@)^OO^2494r517FC}4xJeH^Tq6D7@k)lj(FOC?n5-6#rLI`> ze2GU_pRCwGT-iDI;@5Sqr?49j=W z<=8%Frp;p&?2m)va8QyS3^0d-bDs?awaAhHG=qkLm%9*E-q_fk)25DC{SROYKMQAj zO__-{zaw`}bG!)PWV?Sg`Jsf|&A|Sqs)mcCRMfM!p`52gQM9y&%Lz}Hp5jPPq2G=# zqgYI|RB`M?39Yl9>Pa?qu0>gx9_GJ|!jP!MDp$uVE2`(+le1}r1M-U$gt_ZEqGKHa zO5l7N1be->X|H`>5flA-uU)(6pM2FYjLOwn810mo1utoMZ$^g+^tNIUhp}Er!oxv{ z1f4Gu!r};?AMC`9qK5L|C#L!@sC4TBl$6)_NRv9`5|*cxIoLU}`_jk%@IbsuATI=A<8UYlLN z)p6G?|4>t%1!?U;D5kw#yci$ecF`i)Zf1?`6stWkM32mHHZ0lP3SMIiM>A&c{kEHddJm)QF zN`z`fuai5{)Pf1pT|Vl?8rFPyvW6UMK_S#QW=F!tkXv>FU+t5nWIa4Wr~FwNdUo_D zrDn^Vl3ix&cD+f;WM;+3<4h2s%){b-CaAu3N)IDQ` z@npyLTm*#f)ze)LnWg@+sjgEyImguFyec8^`ILQqO^EK`bM#yxnw&;rV|BsF)&)s2BVf$|{Aqs2fEi(x z^Rb+Z1fJ?1hna`NjbG5i(Y51j%-GcG@({xSXm0$sa?f04|I#7MfU1O zNUQ!rnq&4RS!1{Yxqyx)*x%3=5%8I_?M({Kk?ZF|oGr*1{z}hjMsj8u)n#TxyqXn~ z#=oLxw?wXk&q*E@vh41Vtr5c%&9YIeN~c!PD?-xj79%^`DPKBAPk!q$0A5Qo?5-8s z?*E#!{YAPoi#BKJUtYHU1O4xG`0p9;@udv<#~+yg9S_oZJ}Cxo$AbuvcmV87-}5y{>IlKD(lBCr zlwW_k1f0kx`7Q_;O+!bFkraY5KOxX2Oe>aK*i#_%wN|M!X3fF11>JN&-$Xz)h)zLa zEt3?Q>hOFjX)J7IB?+WP*aAzb2e1f79&Ac&4`1vcnAeF96(P#}=t{Jo5VmJz_}=gc z#}OoAy-A4m!XZ{%DCw^g|yiRA+AT{QKp^}BIsdJWa#<4-JnM*+IWN0G%C ze1^D8;hRl?P3BRiOGSivv-DE!7;h#c`DOT8s3O8SEZr&3Y@^*FwK^N+6o5)8f^Lo- z4>b`n9*Tfl;7-Ur#LagGsJ;ZU3#h?mjNzZ%JAnnfGL!lch1&bs zw9`6~^SO=e7@LQ9;u4Z(#xljJ#siPnJXft$pEN=PDQ_s}C2ef~u!lo%V(exq@f+X?A{pLXf zvNY-^qK+yAA7OKuSjs)Y#!Q*zv>vy<^`<_#2~)|cvH~nf(R`pK2Jmx;IVpq1eBxK0 z{$yv;Y$mGbDJDu8-fYc5b{uxsngOb3b`9OIp$rQ;E{$=zK>^9rLzGrz_-4d?x|+83zM99d6I(;(mq9j7C?4a- z5r4GZVoH890_yEhR`U_y`rFnoG~32GSEReK(^{RUwipV>J^M|Cq!D-_VaQXkug2vN z@;Kro)>lHo-4ekC*q$Va(Ip<(Zm9Als}5ESJ(X-M=o&86mtuDc_>Pf$DAgybzM-_+ z-niIjT>ryA1QALHk1HFAKXmSN&9x=LR_}69_Nm28`J6t!9e7|ZHoqY!A415>QV~N6 
zrWKh;nR&;K*mv6luzs=i9`O@U2u`*N7na z@R^&;ZH_O~XqSfwJVt(t$tXIyRS2Nhok(UL4z;%rm?kx+Rp+G4^xDRwKZiTj1? zk8UMrSdq&7mDyJ#`k8?3(p_fyOIy z5vmvl@AlURSZ|&gXN^JbGu6nK!gNqHOtW{`>~oadJYUWtd5qq`LfDN+s+=Yt54ZJw zI;T}nr^nD@5zFDXm32IoK=In2OKH5?-qFIkurj>}wd_MY|pG9XXu9l|eY;(Dolot*hLEZp?ZA4f0=o_Pidg z(S$|g&I8vQrRn=l9r2NIUhw`k6~>gI{ml5!Yxm7^YPl9O@#fAY+Qm0bB9NN@>6{q@_U;mKSScONk5*BBeB@=*h|NKOX8bNH9K^n)o*8&=e4MUeHYXx zf?8P`bgz9rFh)l?f1`3emL(Ye^s?T+lfhNze4BZ$9pHFp z@G80agW6ELNh~Q>e=os%i~GJz`(OD@dZ6qY(7uGiYjTRjVf=>9Jcc(g*GhCV<`oeu ze`G*SIf4?b5h5`AvDJv1`vp&E5 zNfrjku!_YD*w;MSc|u9cT01n^RB;yGV-5Kh)X&1t64~jbMlhVRgA;oQIIl8AHvn6- ziNltyn7=Qef0o3^=V$+udhD`F=VCkRlXe8#fujG z8a@dL0vtIl1{{Q-?=bmN<^lD%P+}Rc^qzEiH2>lay6H*vK@(Ud4chr%QmDISeCa?I zxz5H{&udhx=%%$Q?Tey9p@2+@gbdLM&$zwD^E#yoUskGB7X(=epJdicXyUN>^lhvi zNsS{vk4wfd@n&P2I-MLGsx^B|Xxx6!k*}?b2q3?o6a`_v=*XFwqqvu}l8r8qfbgf8LJj(3oxQ)aT|!c1Lx zD%coKalFl;FW=Iijx=gPZf+iiQR@Fk)metc6)fu-*WfO}g1b8ecMa~Y!QBUUhcH-h zcXtZ}5AG1$-8IN1nSIaM=U+X`rdM}Qcdve{zG7?3RI94^oyrj#_9L?@9EJ~8rs7hR z$y_Z^sFqn`0dNVUk{-F}4kb7M1D()M9Mb=_JNgxqV9G?7%!f)%TNRH(*JqjDmd>fc zMf~E^1FS-P%d+`}uJT4CeigOD$L^5!rQa$>jk(~y0kLsD*W_)iv+botbUf3h@YUVB z#<8VlRl%MciKJ*#XwNr1}PknBSiegN*gzvU$idxgQ8HRg1boD!xN zl2qA>OL>OT)?b7*89kSpWTSu$phrb-Ir>d~=BIogWR`zr%f!xf+Vn0-j7G!ssS_Fp`H;e-csk^VEgk}XonjvTZ2h+MgWm${4e`2L|6tnw z`@M;#r5=DpzNkUE1bXQXN-DGTOY^56>fgZMKmH22Nfp014<)kau{VqK-S)L+!zn7R zf>pEVn?(H9tY&qO2`C*CNsvXw#Qjk59C+$iO%HC~O>W=sB0aH`!QsJX@}%_;%Wv>w zE!~eq62_CEZ^5_~efLfJT)u;|+4ncjD2k~B?m81dFELUi^*i+Nn53dV6f zHr;aTo=aI7i_OB>AY#C z_XtFzs84bKqom>D3n2i42#US99-!yV&me+>#4&{2Ay{DkJ!Ogx76NoEo(#4HbY1ue zN+`&x(S|(*A%+%j_)nm+>=mQig36+ZfGURk59@?=+#&px4cLMGClS(I^%=eFWydVXL3`AfCzSAt&=C>Eb9uVgu0vu@M z0@2D4cz}jA1kgZ6PC}Sw9~=CB^na~V@xb#G1OFbAkM&o4g0lG&Ec#DS%jIsuV}ags zY#E;lRNI0I*bX3z-T{RYWJR|_k%F*ouGb?eki`Lo4VdGAd-ktf-{UCi#DPlNC=|e| zIRc{QDG7K3$bV=2+t-=J{+{{Noa2Pz2kJ{CLr^H7r|OXs%z{)ZMvHJ~LDi@^i#-JU zuLVAD-~&O&Hslc9KzC>>;mLq1C`}Vn4}{Jd1tBa!#{@;u;XqcFF#3P~G2I1-v>+sg 
z=qow`_uc~&ApUBJ5dD8iAEajFOk7dWY_o+7o&WGJuPbZG;NCPk|>2^#umj-*{NZI z{8Oki2-2n#lRthM0%SW&$)sRuxiMB6$R+GweUIC}nD<^Gvmd5FNmZgJ=Oy(TACi`V zd?27o&{rNF5>JeXx6Xpw`X0^13niFF9$`qHV45Hmsvan2Ib56u2<=+bCU>F4(_zm2 zrZ|+!V&cc0{y{J+DSR+GSXYbUqHD`5HuNTwu1;o$A)*HpNi?>~O0LB}HUye%DS9M6 z3hzOFA#q|aot*ARLHhNqBpXJwsZWB5@_f}Lf}@`-D~<$>v7{K#2b%9-7w3)Gazr$( ziNMx9b{M|_$3fvaWl4^SN`)y78(M{DjJBesud-52C#9@^PKPa<&znMFpYd2{1sx%&&)$4pHK- z0B$X(!3{8v{q-9%{2NqVe}!yvh7Y?+RsoU)k4Sh2Jy?Y@KzNzTgcY}W_;A>R`7AHt z_{Yx7fwf4LVwVpOo&wCi3G0)HU~H&G(6AwUa5bV6>$AiwA8wW_Jmf$RO1mtD1Q;_< z-LME-qvyNO2(#jaP9gmU8VU(J%jiLLf>xdB7sK=dImVV=Ej2J^F*X(h2is|BfmpkWR)W1JiCMzzzZjx}T31 zzRq0Ay+D53m;NwSnz*1?v|k$8sxCbGc98fa)dZkWO)(}$loV<-y8S&1!5-VZlN-4I zN|Vi#Z+O;>Vs#Y8DlM6&z~&neGJD+Y_ebYLg!rrf2tcK6H3Xr+Y2epjb}(&VzG$1F zuLWr;B)WPXso{lQdzhMW{&CuY7iU}6c0*X{Tsu10&4FO^qhaYSnj^svFnq>9ZbT!B z178REZyMRehl`N`**d>Ed@&*v;ttCEp95Fh_2w!Cf3^rYYQLzgJzKu z{|H?vYA_NH#6xS=EzBl=zgkZ7$6~c{*S6w>Zp!TTYRlc=z{n+|Qqfel1e+P!nv7X( z)($RUof0DVr>6mM%%YE8T&B!gq|u|t9DlE`a{^EViTJ<|4YNxd!+gcYF=40|N^&R9 zt2K6IJ#ea^Df0Y|iz~F3TpKj$fND-b>-4R|TVs6NPkL!r=%-_K6h;kOTMKTsqFoiN zMUvvi+4?*_WHH}u%cNQ(0Kbta=YRm_lzmpwzd33Sc{0u$^~@#f^eE8?dr+!!cTBq_ zha1pzXCcS+0R<{2v~ud$1$DLWI)EdO#Q0v5hfE_=Xr40ccla0%E}G3Pbp{~@?g`%9 zEW|bMDuhnb8xE|#206{ilE}iZ`GM6iw3CR$Fq>_!w2SyjZUqP_YNiFZAGk1 z@Hq;mbHT?pUpR$(@J;h+K1da@(F}!JaOnalCLYw~0ELbNg1rH1-kJ#DO|*thg%x5L ztPO0Vt`4qyUY!I6;wNBT=po6+JQeqmnBL~!gg%*FPk#rO@r@xOS(}Jp4YTZn56d6d zizRlmHK(SD*l;kKv+W%%tX`nW%rjc=msr0wSut=3fRz@PgA6qc*=IAlGbkaPInDsM zzWEBxWQ5C3p4`T`QsHkll!+|*b+OE^PB!^a_Iw!;X)nT8JeSXIJPS73T*VIV$Dy`N zdSw*l{$(z=#d$a{XK@fC9vUyGB8_wO;yu0ob=~&simeB6YnUTfd)XZT>3<&Iki&HbmC={0iwCsq3os29lJgKOPtqNx z;kbzo>9TagTj9%(Z#a3Gdh6D=N$;Yr0cQ}rS<*U{rNE>x^{k0?K_%GM@sD%fwC5F! 
z*N8B(RJ;%Cj6uE)TbIAI`hQiAkL=-kmIS2N(2*@XD)(I)&LY4dED%gl-n(`HW`#=7 z%6lC~;sTQUGjg64R4kl@XNoaeEA=t@QiQMsj`&0M;fT(-yCHtpHR5MP*`0Ool(K8q z9|+QoFfN@w<_xu);*<^2pR|{s@SALWKd95CdNe|W-Po~=6(>SD{~gh7Sm|?9vSuke zc&v;Wuw#WS?m&KKI6?6{p8snC;91PC>!c^7$>;iTC${O_=22wQ!!(f3Qja;#vE+rj z&9h*4qZwJZ)tX^#E7t06ckAljmntv9JrgLg41hcKR_dTV?4N5*aH*~%Y#A5Lt13&c z+CIWohnO=QPrYa&Itlk3DCe#Xw6nCJOtyMH+<%#}X=Nh92XI69U@Q^>Aj!5mY2+OS z-GEHN5?z^eC{T1UJBemL=bJFT&gqKAB1EfT6o^THmZjW?3V0ANU0i;V zF_q)s$7>1hax%qo6MQ;Z^7$PUv%lpgRZ>^^I@S5eB|$9pYNi^k zczCX$z}rOy>s z4l7KkoNOknt!2}cBB-{9O-axb{N5d0-%HbVivxj7h#f}BiN>rKAuv)iZlAlm6*g}XFg=0obiJO065tA-t zj*OFmbOyAFlDp*LX0RB!h)MOPhMc2>b~bC6{&JRJty-m=?AkDo6fz&-(2}~1i3X1} z$CI36r_$~D`3p_iMnr9(AL<5Mwx3ExbVr1_u(%@})jT|fd;tsR)S*(n<~oVK6Avk> z^AqpRr;VJH?0k%11ZB#La^XM8^hWp5j1lpn$>{)&oF&l_1W46@g^D2_ESj&H>c?OJ zpDPk4Q|cj=ukZ=mMYy>zoLl-2axpZ15!?O7Xv(tOiPq>Ecl}%&bAC>@`JZCE1;aC~ zkh-2KcM$mbwb8Y~N|R&#trc)^KNEZ+@FuBM+OT0cL_H0PV;&=^^i^kp8qI*%;GmV7 zl$Qc9!4HXZ9a4ORYCT2*eDuLhY(M*RzxrC^{w@B8_m$gZ_X(1BkYIF`bn{!pbNxd1 z2`UT`q`*n8k|p@$&VhW&(xiHts&Wtik6K2}kwJdwnV$s(7!s6DgL#i5EBt35MmCInH=-Vf3(Sgh(%8> zp{(k%$$K?7ZJ0C+UD2%&zA22vTxJteJt6QzkNPCKCKyE!i%+znmGTPhZB6W+KdM~- z3Sk@2sL0=4IGjzttHdl5v=2nGCG3uleIHx;#Kn<_La&oQ*C)>{Kls67g42O6Y_{Cu zho#lx%l?LsyM+U|tX|=PQ#Ri~=5CrDbEC=Nut4*$K}WO0)kig#qPVlqE7Znzg(^d8 z^Fxd`7h0$$R_(NLrC*%TCbSSLIht%k00KWXQ7$jHd0Es4<62yA%fEY0Q2X2q4SDfi zNJOzSk%k36!~ka1-oghco4+t9Jt2LC#Z~nkU=1e>8e$8N|6z3ucI5L4Jzp3&ZU8yR zl1Wn-7@BHKrik0{)jA_`0HHHf&_#BRt3niJu|_PGIREG|sB%4i|~LM23<7i3{D$Yo)ij|y;*@shd~6Pe?L}QEjB~5i(OH9 z)(Q}@c2N@#YZK&U^W@PWW>K}ShzeDomjFHPSvsgIV-fE=!k)t$%)$g@+|b!1EQ#-w`U9N9 zIY16dm=H}4qa^+#nIdzVY!=$sFTT=K4l=Pf2g^H@H(%{AtyN>unoi4JIe`*^5aNEh z(x2L_PSRKSxU9Dnh3S?0=-^ctO~?VZ-ZAx1D1lETy!IS7oYRbTxYU1UN&%8at>zh9 zWu|^pu0s}Nf%*u|(`yMgVFmUTs_j(8HV6teeM5E?$41Who?wW-|2+Yqy?5t5RB33-b#pM+R*96&!`a$F*T=Ui^|%-l*0mPBt3<2}MO$9e}knXq|(k zR+MJXC-t@%6ui4ixhdRpr=EzurQm1KVbO%Lf!XSwn!O4W$3i=3@(_^MuM>crG5IlU zij?Z(MDdqU1xwvjS}r{YVXWV@es8`Ox1nY5I#D*s>2r0vsR9S~#i{^-2wP~vD^#7d 
zA58oHx$mw@unLlsLZamhit1`8Uv-cLq)K7kWuS3^E~8na)2_=>u|}15RxlH%B}1~l zqs}GYt^Ld;KdfrMw6`llKpjempLm_t%XHn{rp4|VDbT4PC+govwES7os;&9!Nju>l? zxzg3*06PUk&BObNYQiCus}*l7D2A+`c$Iqk&-S3JtmaF?mqw{3RW1@AFZ=shbLW-~ zj0uLDzASGY1P1eVx71LbUiHq|!atcypWjZ;-|SNioIQeMH^y55Q`g9D`y=YOzjMJA z+cyf9GMk%Um5W3LO(JsZ$&5;idG;ROR2erJ25zY~BCGY3tvryKl?yp&lGl6>ql{+#J4c;#MtKP%D>#j%V|IJ0Bf!R5L0((UH?Nob)?|PEpkd6bCGz= z-6CuX=(z59U1rbuvYzWs{Q55S3k&zZ)A0M=8_xeoKmXmN*Vv{p*@poG<0Jr*tRPVU z&mst60oF!tMq^sG&NJMouWJUi79-JpSbz2euh8*xKC;3dA|bn^W@N+~)5Ri7UP(Jm z%Td0)xKZ$N$U{Iu2neF;Xbb%5DtFz6mAnAck4l#9V|(|n2^Ot3rFkW{w%M0+=GOT3 zD-}ydWKnjU&%atXD3LHZv~)UC=?g_AcK*ngHeeGiU^o4m1~@MU2EUK&=fHB(%^g%r z&28HIAUfpBcoOV?B8Hnv_kuP6*Iy@x`c>UOzJ>WV&@zkdJAB%wb*Ef$!P=a?#II~< zW@5~u)8st%rYuAYj^YBJOUl&q3mt94F+Ae9f9~ENa0MbnP?op{jhkq09Mb6ct76mk z=>R65d3OuNm6fZkwBRUJ)~0R7AWtuIVU$0O#%J-!3&pIM_Rtd*)IZo8+dfjq?q7TU z%GZ^?VDfc+b{VXphzq~%`^*$9zBo8UZb8--VAL8mTB(mnhb zhHDJw#%uVZ(*lAF4)nY|TZjhyB-9)90{|5`|2f6WPOuQ;pFRzo^H7H+?>|jkxOWri zEgpzWj4(`p`d<6N*2i4%MuuO{n`Tg-I-HaSo{>BJ(loOd>Go-A=a=3HdRjyQ{ z#UsmxJB)e4bjMm!Fexb(RE&noVW5A(6``sa>2asYcZMtbxZLbC(C5pQU**-AnE@t- z2OSrfiZM+4bO;jVA7@A?t36=*<-T&nS@8E)gzJf29H**AczxW*7e(9G5~X-7IrG4aztTHH3XChDsHVihV4 zH1RQ3^E{`YWeX~!))nN3E~k4XJYP{!v9%^9b8l+drJ_#)+Q8++L#kSc zcp!WRycl^63thD2^#dR2H=o5GFJ=B0t6}zxDxTim;CA|-CwU^0lnzo{m`Gt4P5cT9 zvz6QNCRf57y$2y_=y*ZEz z!0R#OQ1+wzlnpUx(CNXSe|@v9fHDgFCbySx&Tb?mj<4Yp;3BowJqupK^(1F_k%gs( z5vkhC!oJZJ0dd|$y?+NS?z)&e_!IgV?E2G*3$KAHnCL6X)~E3aoV6vu2?u95q?frb3?1l*@YE=4t&HIs>=b{1CGN~Mt5`h69a(b!R>TbE5BwLQH_@iS>6g*4zAKFi=I7#gi7}DyU`id&7tFg zd^G`Y0h)HOSF{a)yqTp7`0h1R-xY_pVcI;FLcU)t+gPiIFY{Vf-5BPlUrP-QQ9pAl zur66WwFET^UuxU*(-m1dSolMl`|*LHc@saTZyRA4mbtW0q}#nA)@}x^j*0d*9qbhw zY_g}x;PY1M1cou@j z2K0tw=3{P~#a}QXNNvGZE0r1sovRLIHT06T<;v@Rk0;Mpe=&ckyo;-|Azi1;U8kyi zD_~;STVgvna~Qs?3w@8&=h3fH(`?jJ&A8%pF|oc6?cDch_0!?1$-r9w>DF*A5zp*z zuvxwcO|TdT@JF_%-^SMm`)D`Df8fKIIsb9To4!f5TOeug`nfA)7`ROK;=93M;c25G%Vob^!aClke?F-16C&wL3N8z&Var(%#Tx#1^f@JoMh+>Ne)POm55?Wbb_2}9=74Ev4{s7Xob+bC;+8`1Ya*2#CFeCS9RErE 
zmDoUX=!W)!8-#9k(G}Mb!77GI3gw%UUTVToBa#18Y_Ln03@q=s6{w$xn&7spEa*wrggiAK*a`e zTH_wgGlRKsj?l7B@JSm>^V-AYnf!=!;>6{W{ofVQ|2E}+H}YQ^DLsNN4gZfO9qrV5 zbG|#38GraEW!`MpgzgH0T-3)fk06i|nS}BP{x9GxEG5za`n_{h00ST(h(Ymrv8?ED z&5ADAOyK{5?OwK7At&Ag=9+1CEfWS;+aFTmGX^Erk#SAS(qIRS86rj#uY>2i_Nl z6=*=iukb`b!%#wuX67#hN1!YL=wE}j+uvV#6B4Mj1oQ7SUi01^?i?tlFS6|5F7chl zHDH78-Y6n~X-*u$?)wjtYe4$X&)f{UBtX7nVMh$mbC@08sb1g&CNn61MArlEA?Cj_ zHuk{yf=C&`Bt8?U1s*;l#)GVLY3w49wT6qH0J6A@Q3XLbGI+6aa`8RL?jjNBn1Di* zH2DJwC=`qUmm*1s4V?5qLu>Z>j~9yP z1NNZ4E8Htoyt875b4aA-_o(0Of1!fKG+$9bU4dKtvikhJgGoUDjU#Xvo9CKf38_q z#cT#C{b%)ef0=#nt))=FA8^QIz&HwYIH3G0K05HT5++JBuM6%esJtvYNME4-{r=cz zt7rcA7|;VrV8S#i8L+62fUvnU0ahKgK;7Be! z5kSv=`F*b0*La9g)3$+7&3K;ABg1<43@uwOmnM`-Urak?H|zG}auZ3!Dw8?ZCL+22 zlj;5aE1dA!X8=ra4qtz8to-wk@{lv52xDOCSMpD$ZnYAviSQ^NaFH~$=sm*8DA<^< zX!%XzbaMzXzu2aQ1A=_(?V-$#X&iR%VE~4(I5NZNtC0AmH-^lqkjRmtaycUWa9WZ2 zOPgS%C}SpQ`q(nzs+gEWn>D%uHp9JWQTH&a>nD>d@V3{5;9j$!eWC{FVN5HTOV1yEfuS}tS0kouW- zE=qI^!;yG4WpDTO)ix$*wFbBWoXv5r)M4763DL8mTM5a^SID>eg2h`t-+@v3a;HhO zQhBn?oEzq~51XH}t6?oZ-z0N{0cPmpu;Q;vb)_8DcC>^GK~ zBExS@89!+|hT71$nwG5@8os)3NGkOMs2=FDq#I!!=T~Z|G)?E*sTMiB(lrJTJBM83 zj*z&_$#s5yAIr%J zfS2v)f`Bav)YtLq)a^VB-O|;pSwBlg@*VlG^muBA7zH(~_e={bxYh0YqNrQl?&h7G zp{Nd0SKu9Yzt|=^PEJx=vgt1Hs)7gmQvc*o9H51%|Kxu@8VSf^?ce-DnLE)CPdcl; zI?B6|NEc5ETWUy;MTMyts{Jm1cBvmv20=hOJ_z8St%aQ2{oUhk=XF|-h#T%7#I&Uf zU#S>EgceT}fm`%NpcgDR^&F9P3<8yO4YybflbCfk$|2k>KMlPv0fq;uHByqy39d*o z%Y})J(J}ICMPBx2Q{z=?4;#oS|8CT<3Li&5(h~e()zca47gX~S0dqFE!cg)(Xv^1~ zODVu7wH=OFh#w+xIQ*lwhWvh-s7nhUcGE7Qc5w-f)brePTjG`1Cs)JSskU@JWipP-P^0IAK^IF_rd%8@G*+ zO_CyWKRJU*3X9l86a+2@wn*b#h+8v4aitxgeT5|ckK6gm*getj=Cd>H@A`l85V_nP z+-7*}@0Yng>gtJP^D^!{d5))a535F zkXl0;mYeDDE<`G(8zU~K6_b#o$Q*!+XgfmVcZP%KME9+lN_po&MuICM7Ts5x$1UR! 
zU~3<2?y)I6H1J$<^hJFds#T7L$Cc`LpWbGAsHefF5BLr4#nUX6%j^glxJ@_ zFpcqUo8B^0I6NQBTpV&i^&D?wrfvG;aUs}J!ewvrxG4|yj{_}9`0f>sVbR20!Mthw z8_uN;-FrP%G=V%<^EhDFaIcQYnK)V!JsqxHAAl>>_j*Mz=LXVd9zi(o zz!rf;W63p{xJ%lXhU2_D&IYBS$4UDLeUB=YEEtZ)LWXsgbpAjrBcN1<6VJd<0MOw% zq)eb$@-`n~tWrxt4r-t8kVr-KzJ5?skWjuX6|1b5Rs3_A5Wm6t-`>< z7gj7||5T)bGUP;C7F`jQJ)~k11IRlit0i4HR6ErU{&M>P@wtTH9=sx^5LduPC>2Wh z1(gCD>v>4@m6rg?a%!ICYlD0_6dDXs-WvZ;f=3NPtrik}Q0-2p)y+?>**( z-Y`wJ!mnx91mS$EYpeJ5`m42xo*(o@eqParSnMFjX#Tpbj2?5;#dyUidjmjqpEa4{ z>TTlH&aJ7nXVfa1je01da{ztXi5KXXtuNYKzjiVwnq^W$rww4*>gcbI9t*gcn|Diu`nI;=c=(6d`&Ffs(a&6I0ZwxGs3M6KKlodTMB=&~DF~gJ)&h)g;dF?ejoVd^v1fzmKx} z7xMCq!OOJDAkToqJo9ZUJu^qIIsViPdyHpjQ##RUvEB+|33@-cOG1Dx=SF!{bH;69?>5fG&^8Kf)jV}!b(wytsRw*NH+*CazM@ZIpb+th zOE>tkVtjD=ty|$@S_bYiy0Cv!c@vGR^C1m3imo`%B9K5^qy2 z+aLC;(jTYjIwhQKwjb2p5fIcZw@bS)kEJ0ymZ|i=(m_f$vS@r@qJ4w>J7@my%=q8m zf`8{s4a7gU^6xH;%kN(?pZ+<*B_UvA0OakrIZ-?RX#DbmoK;XMJ*}0FcD)Y3U0oL6 zkw?Ys4@`Lv+=-Y?Ovd_j0XY9a(<-w{6uFijBpSJs!DUYrc6kVG0bfWccwC-GN7Zy0$&jFH@5@C#q+OQNaG05#JUd-6|`NBv=DiivYey|W?rznp|W3t^Zh=uqQ zyx-KTT4g3vS}M=L2}jT=p1(+mmHtJ$s*EBKuMe{>d3qmcBStM(Ix%g?> z@~TtB_VC7*uP{xY8^Jf;PhSD>i2MgkEqYkk9uArt(yB83Z8La8axe|l;Y*?XDwBqc zG983j9jwM;kJY=QS>_#FR|I$NZ6r;i6T$FBJiAc>kt3o%ZQ?ePHF5JZJfdjyCb;kS zZGEo1;9BDRaCA0FBVtvplr451A4K^YzE9*e()Dse_?gGh$m~lJQy>78^EgKyKZDyf zXP1^ND>-u;4ivAoL{uTIW+Y?!BTZLguX(2+zU!2}Xr)>(;v~{o!YkD<}dWDD^dSo@O8_alhf`a@k8G8l}{YWVFkki%H2r6YJ#Kz&Fx6r;QE4b^P@ z>-@B4*8^Tx&k&W>F7_bcrqH$=MHri#!rG^%Q1aODJM-ByZc+;oNooBvft2dfI#?)V zPoWV5o@hZ8rEgP(zd)eZ*N!_6Y6q0pEdgw7s+XV?dvz*746jT*rt^(N)E5d%)4i(~ zg6L1JIBEyaebP)URv0#o)_q;2*G7mTPiN0$l(W+9t(~S{BgD-CR-Tw&5U<^{)NtcA zkL?n=U&*4WPumRu1oe`TDcxfXi8V&-o^+XSd*F9rXt;i)o|>9k+E4!#!+jfAY}?0kE)y!!jz*o^DmY=A3I6LN3v>ai9~d4C@A}n> zKWJ;5kvlUf9&pM_^;pJos}a&z>5_xpn9kh6fFMp6^3Lhd-#J}U@0R`^TEH0lJ^+!i z5+2-m{Nu9&I}twsUM0a%D&$he(wE{S)Bi*lSCi%iuL!PxS~cD(B_O_~_9E6&(1aJu zDb(${5xc$>x-n5Vqjg9$5Ko2NvHPmuL)QXp+ow2^HCp1Q85!dS!&dya^}cnuGI>^E 
zPkfiQ8i`YAogh_ggg%1FXTi-@ae&5DwRjdhCB|BKMN`0|helvYgc)H<&KW&lX`Hhkb|m0&!8qS4lCqn7)WM|WtH5XE zl;i0rzv#y3o=!!48!#PRf=;b_fSRG7=Ct@0hU=*URzE-v)rkw^2sYt$Ndo91iEsb$ zFn2J9d*;>L+_~v-!5TWnz)y^1@L?cm3RT1R0BQY)LC|It;zlYuVWOnzhc= zDrE36mc>T5<`xlR__DUjLkp-F>Rm_;wW|zlEFJmTpkA(YEq2IG_?2=j`M5i6ay6+} zHfuv1K@a2dkAwf&YWEjB-bc%iWfTh~k=?Q>tF62D)}Kv6y~miF+N5ltXS}?=@4pAo z3)0%?FABN>T;{xb3GF}jMTNa*KxOC+iJ$d^`7P>uCkO`+Y|;D13mtl! zEPX98#ye`foZJ~KBt_6Or%b}T&Gs%s>WiOi-Iv4P z&}dPLS+E?(LV{`MeQ3S^t6!^gdPc zw4}F(X3SbTudkvGt|fQ?{=WRId7D+K!TH|IG`|SFVza?jp|T!|%-2uF+So3drg*h6 zOcg)pnTHD_*e z;BCZ?0+fnGD76Ci0uS1J*s)wgRWdxAr+O{&V(j&hStT_#sE?cgk5lU`_0X|d+#<1R zxsF8SnD^1!4aTu5NNB3WRu7?v6Jnx-mzQwk#Jxw4`B7sD)2_NZu{h_c+nh7V2oWaK zNW7IDsfMZ>>Db8Vfx*_b)-YZerZy>tv7RYU&K{QF8(a&<(oMyI=63Lx^Zh7-NUL8a zGkBS&1&d}#UBFZTBq+^%dd)edTISd*)=`~Swo~>cOS+;HEtNQ@=nBp{mRe<@?dX4` z7*$0aQK-;HRH|2_o%2)&92?*ecPQkdcH)=jy+^~j@AX5WW}=n z8c?D4fMG=P)tjG7Nl!f3S;_m3z5)z*&mNmq{YRU@*c65sFc9*YDSOnI$h;UWbAm(x z@yPlp7|}8|HXBvL<5fxJrAVI9#gAF_*)duT4L6hY>)da*Mj&jA`({DAUu0j0Tu;+A zE!Cc@7af^kp#IV;N(FnhQ_>vQ`1GSF2L&h9`7g^W`0V?YHI?UizQG%l+KpPXDE>WZ zFoP~`q36tGKs%ZiB@s=UjJBYibJm4nUe{qcMl!#-r_iYKmV>1V*| zkzL`RcMphoS)%$1LwM!D61-2HnFAM8=opO!i3wfQ^})1^tmrp+eqlXSNn)naaMF7s ztjXw6a?*l~SSN)NWVry9;l)g|GC24M+?lN~jr<==&}kRR3Bfp_@C{6@>A55v+t6AF zk3n)mnDXecWmYmCFa*vf;if9I7+Lbd_OJAhE=A#*=5p9tW*TIvFqd+ zdh;xcZ^%8{8CYZkkq`oNS*R?R!*FUn;T;$DMXG*_r;NbScYTFgH#Ck`N@p$jcyAlRbK< z310}I81sVFs38D}b<#wqCF;kV_mTY|u6zZp^?3F7O=$iPaj`19p{2PCA7*@Aq|I_g zuRi;o&q14y;70zq2S$pmVkkXwv6gjWVe@8*Y~a^W-BFqSKD{Om3>hgdBSWh6slS-z zohbJ^dP=Yj4Fiq82=2=itJEIi&>l5uGfdKv@1$h%PL=|ajqw;YKco?r?Pp?bO1-^> zScq(09qhL_UFrq#zLhY{v8V)qQRZNBE@J>tYxgB(e1d-h44!!V|Ts`Y`;6AF|Q zkV;)Gyet;rCap(LghV1v(eg*w!jfDD9)}#ehsDgnw^X4H;?sOs$Fs;TN;D$mrGwbJ zFiNwZ7^*t2Om-gYLWGl~YfV&+p1A{4l3VdNpov4|Pnr<^s~Z2`U+85KrU^u^rZo?R z7VClPQSrK0&_~a5h~{_a#2bv8c_f46CbDk_25)`mWxi>rABK9U)bD;LFt+@?|)C7&Nd9%aE!}8mbwWG6x-zKtu za)pc+S0aDy;$*ADJY0gS*&?4KrAQV`vcMFtJZGClA32X}N$xj)5mV=Jvq#tD?^>T{ 
z3IH^?X)Uu}*rdqd(NCG0!4|Be_E0FVP3m}V!_&)a2=D;Lu|(4lF5WVFhHz#0AS+rqLD)jpeW%iFjsU8G^a+r|9l3 z&TZ)HA55WQ3$O6a4-qGFJ`SefA5tA?zc_1f1o{D4le7nl!EYo>MsEj{gVy*ilBV^{ zT8splc=Wq1##GR4xN=mmz>7&L5TubBnCR$J+pt53?mEf7LRgFlBgMBmj44dHv;w-` zI8TlTkuY61e@2pj_t z7*eO3!c+oHpKb6QjE?2-tmGR! zVzwqN8Hr#*jp=ED%X}D8iV7m1==*orqFNa1I~uUE}|V-$SW41 z%~YZ83*tDUw9>x*KB{KDns85s5K?+i~Dcbu)Ou|{qND;@-gDwluSz0bK0 zzU{EP0O*7TNZ;;r#1)>)?5U{i&lUlfK`qPEG@PE#Xupo}bi{4%;y#+T0c{S-mLmjM zQ0%L%Y`)!9QP%SSIK9V_gE^lj6xvR!g?w^0CcNfr_deBlL7r7H{20&yI7nXCBBY=0 zBS8yx?=pMY$jm<;#w_^h{Yk3|cb=Iq@-X+y=f&W$__Bh&f%vDV3+jyCv9CtMg)`CU zh313vi&vM2qQMKK^hOyx zEMgr~JI;^i#jup_Z+D5poKbIwDQ4j@C2GfjzYB-|T^{`R4%HbCm*!()(n-eq?vxZ2 z7#KTHco~Tus1OYg3;4~647fLVSHz+mi_xe{x+C&gIX!taX>-0D^jO{Fz5n@*n!zMO zReI#OQ{nX;m=fR1BdPq5fJ(aT<;-h>d&}Fkjr~*eaB#;q zI7WKNPbuU1=wEF3J=ulPzd~sx#?2T-tPP1D{kr%8@`gj034lPfQYqR1^7vlru8|m3 z?_ly+(%f2Odb3{3IC{MLUrg0@I8+woTkxMq7wm8{J2%adpmdXBRNcuT#@RpcWXWJ` z3`5HmDj@V5mx(YMb91fG5>+E55P-K;dhR9Y%o=&>N9KOi@y<)=IdV_Kp`C4Dh;|V2 zbEYJ-?-0EbKnI-Q$o{E*lWwZvDAXY-ukfM@_yfql$3KrhW&B_COT{qu0K8hT0k8Hz^(OGvC zjp@kV?_FFBy-oDDQ`pEI1|!-|;u!+>2A37Ok^;UR!iOc0M#6q<8;&TvHthHenJ=e; zS66Vcou-%Z6vIP4vM~_y17L(omu3;=0An+XQ3Ckl#CAK1B`NuB6AkHu+~Vyptd}B{ zj11fD*RQcHU>Qqj2=45+=US;CN_v2eqSpuLRbB|xIxP!7_^OJ^aJ!TA$XAe(oT8tk|1#}46g$HQ^Y zxD^2en_Xo-ucr7$w%4~8Rp4E>e@3*pjZBL}>dUiQVePDmr-2+=SAghwsivXvFT+u` zO$}VU7M$Erklo~=kuYV1gZ=qZMy3RA)kiwTl>+tAw_p6h?5EI~d4_-QoK)iauO9wTiHWCF)23`@k_U|_XwFq5Hd|yiS zj$~|IuMm03f@VyNeG(=#vf5zixqx_s> ziTrz8p=R$I5#?@cXSNI{v)c}X^LRyq>+ojvjywiM=Ix2)rlGFUp&}CCeAOyri|-0V zud|ZVP8r7V`E8hk3M>Iww%lyAVWK0-tv~F~Q>}&BI#Y_`B*idR2MZcu%f1ouxtP;A zS2<+`n}vXxRsK>_N-D`Lg%7q}gueqvbCXkDHA)-FG0Y8JqUDsS|Il6$KG;f&fNRN{ zV;CE0eDJMm5v}6FD$&puvXK48fIER1aDiCyO%&VFh>z~{Xk8{6H9f57=zcYuu7)i7 z^OH8T1CQ=Qcrr^+_+r9o6MmVoc~z;_1Z~^0?y29=$oYVp_-=GteG0*X!Vx(6zFhyf zf(=LhsM;^yuwK;J)E0SP(onyvp1|Mu*6M6$c5Q;kfe<#b*FWV5Bli!%5sXVpd(3zxdaQHu$`o~ zsxi_!X4{@G}GfOI#~DGicJ!_rDf z!_uYFjj*)Rf=Ekuw;)Kzvoijk&-?q|dClB0bIpa98vyab_Gz(xDW$13< 
z^8|uQqa=r-PsL92-pn@)P$68Zq3zH~Gq#uttfQ8N2^V}+M!?^W`k#}igeQrvL<5SuY>Fq8#lPUPts4C3+1l;$8 zQzn-l9lt0Ez^}t&v5BPJC3JE4-h7g#ue`7x(mY`}iA@=Jc{Q#rrvIi>kX*YpcUK|c zjpNVbycBH}&PvR%=h|9&9rr_;tYmGZ2lGGUbcoegMQzXv@8Kw7%C+a*id}0vuLsv46_P_zyNF8C)H?v&!$U&?R?0-z_ z;xsHwq6*Nhp(46&Db*>Av!pTi=NFg1=v&J_Ri)nIeJYyrTW!lvrw9v$1g?Fxl8h&) zi1F#k=xXURRVn!MajfSe3F^DVo0cls+Pbiqgq~D>%u&KiK`qZs?&VO4|N&1>FOoMvUZ`sUlU_Z3$I%8V`(MRSW)7X#y zJfZ1N19zP3X315N$8%&US);(vU9XNK!xiE$+%2j4Eq@T;*nLkHUr^j)`lB2M6_F_| zOR2&A5l!LyqYuxTRH;B53CDpKLIeMS0lF;zvD^;KjVm1W13R4JM^ecjsZo?YVqJdk z4%F?7C$9-U5Kz}zP%vN)Mspx4-O#pZ=}?|e5w2^BRARTdZJY4OocwI2P=C=}Z`SjR zc^4;)=|?js+YO84CqAcx`$@LS&W}9XSVX*8cO=dM$1QcWS(Lc#-0^}h6jaKh>$n?I zRWyV)!Gr>oP4R5TI4q$}X))}zxFH^SsL6FSJ45k&A`X}NGRwIU@a(bQ&>$lDrv1{U z`4rxJr4K1Pq&+?Aw9}ztUt->zhfH#tD_)oo*K1@jinmzy;!{wehPs5Ln}FWG>y5B< zhmW|lKpZ!IE#tH}FGR)5St#C#T8JQf=(L9t3Xb@?_|?J*n#N*yXR`it?VKq48{Pd% z^ZFlH?W!1dTj4o#7yC4=d2kK~`e@2!Jt=;jzalqT53UkzQ8SztoO?4Z*+;989|3io z>O&`}Y~T&bW#0jGMQhoGm^g*42)Y+^>_vBm3ijY@zUHc3N%!&uw8l|vTnzY2(!>^X zDe${%dZcsRoXYohw`*Qy#zzcAORC2XHm!@(tc%}O{0aN0Swjo#Tf@koEA1qlacYnW zC}GrE*RW64Lmx!K1NhV@N^zm&V)Xfu`-1b!C2N<5`P}+VbVW_y6DmDwl&s3JOO?aP zKJE7WP9COawLrPa*wT*Y>Lz`a*nX(q9jx$chqnFL{;8~B0zOLa+Zz=g;pJWSyEJxv zMcs;{nqu;mx1gtAhkY9_9usK)>&5*K82U>LTdE}$1%x`x%Ms3S9tn8wLhx)MlHZX; zA=gb5SkSOtltO^G=5|R=3-HPIEHOBNItbwD@XlBm>#hH99Rz-; zk98;Z+`m=5gK!;^ib0VVFiHWgs*e)t7GTjhBLo8~Je9{duTdWPtIG>}30*J}UmXab z9X{E{A%aQ;W9=h83ir(EfVKhAlTSnx4FFrNrkl9tkw*yqPKL}2TxP>8q4MJ-kONZW zg-3ebbPoy?xEnNn#15btXb1BiAP>^gvS%Vu!@*=n6bO$GK;;D0&t&8iK3+=g`$V>N z*rhN)R-@23AZ~&L*pMT6ViYK56Ef=KzzsDh;=l}vY2-?P<+_bF0C0NI=s)R)!}hde zK>%fRp_c+3f^$KE4aLR7%mo%$&>3wU=oc)DIS*J44JgY1arf>uQmp*ry4F0wZv)mf zO$~$n~EX$D`%!3Be6vRS@e&xq90os;j z5!V9z>pq5Ep=YqzMv?>)%nB>*z`j7}44i3pQzgkpgM%{x&On}kD3KuM$%GhC%nhVA z;P=wviY4+$$%Hh@fkqxLH_HJb1bFz(zBsc$?TPG8BpQI!t(;733Un=cAU6OO;w=gV zHDI}Y!Knfmo=;>DUQj-c7sEmZ6Rv_01z>g8N^l;fb6!u8X3N8X+$O5OPw27e1Z<;@ z5Ulc7k+fr9-#LY{Bv!fc*_$+HoVcPz3_TY@y1BExM1e}9@>Ys53KN;j~cz?#^za&{Yo 
zy+RL3=%K_1)ysM$;WN}Gt#Ro$k&s1w1v4d`lEv$7AzWrVorINJKAchZ(XHV|5AhV7 zM`-Hce=MR4lcW1(8Cz&Timv^G4wu(uD3&%!BOOyt{CWFs7-4d0s5PEvhML@7QaY{g znmJ)3OrvuuIw(lhV_$yYCB-|g+ei<^@9{?$41=_pjn7_rW3RwB+a18K$v)3LNdnv7 zv#yj%>x@+yl7om*+csvEQTx_E5G5_JMxLqqBMdaDqNvi+pR6tGmY%)e<|M{|ID{?A zs<(AX{=)wtc0BThU!Gt@jq-$}+u$x0BMPs>A13%*rGhrTiYc9(K@;R7BFX-xa?(#(|!$zZIfcG2z@cE@bZ(Ml}@^0MT zU={0!)!)~E1-}+!>|jZBRo`_Ql8DG-{yEktWxyF#eLl8Pp#Cc4V;52v=$RcWPLBc7 zj*TWdkaM?I$i?hZr1tp7(!o|ZnYYNw2FFkIzKBoIpXOy zJi_8nrL8=Zy5aUHtx5khGtbB_?9AWFYDX}^jEE8IPp^q8pni7@<$j93TiCl7`9H-1 zvWi?-lF!)m9jds{x|V8Zv5QFM%*5W!#?mxAL&!-ya2UuTbm^nJ@uL+GQ=mb)julIM@1a8u+q{jD zHH!|PuBfi<& zwC8Hw_|oC<3Z9yKU{g0*w6ChLxJPfwcJM>l@JffKF!7m}5BHiZewAg>%D1{^f~ps& z*n88MaAb`e7U;vO^UlJE;K=rkx?ecUxe2 zBdHUtLjvGN>oNCIu3A?yF_!WPRw5(XCtG=qm+*s~po9*C>HwjKvd{v2$se}U3TWV0 zQNA6D9}~Azti5C2>B}WmGnVpFVz2hmQTzn>M)y!z}uT$tEAhug`8 zVLi%4_to9;8rs?9dX>;(=a0=zA&UzkW%HLE&&v;_-$#wuXPA!*cuQAUu2-y;gMN2# z9blWw&!k&Q2m`8%h5XDk>x%{d&|Zfg;f$aR8-q?!aN5%_38*0K(f|`g$#tjjSyD-%GxtJ7B@}RjRsHu z=Vz}ddQOdd>NCS>tAF8dokh(-Og8yYic@0L6TzG_O+HA6K^1fTK3W?DTsY?Vwn8y| zU-NItP2szZX7l+nNlhK~({*08OcZxwlOx(~IrpD&(LJ+^t($_+J! zt_Ow=6mr&nDj$9i&-^+|xuA^L7 z-#Z;G>k0@$_F6Ix6YtFV%#HXSdUi%@oV@}R?ajn)B%7wd^_&pZ)56l5(2L0!#~~hR z_WSpBH}Vls=ex(e_Wyou|K~TG!kByf9{u~j|E93O{7C{hI0#c3hzP9H>e3ApOTQz! 
zgO53%tCYd`DD{+R(J;!5@LQ1-zxwZsRDy0ad_`&`i6b|KcjL^1E^e=@z)JEX-TNv| znPt*=5E7N6xQc7_g;df)N($G(KN9xe-gebM&Ss`kT5u+AFLlm#8&uIposo@sjIWN zMhRb9VKc;TPN~ghG`!Du(J=1x1wow0fc81%IL}Msi?KAGoe8v=&1(GgnM&9H`~h=A)W^$Udnl3_uB0aYDCngxd4 zHOKfj{cS`Z>IeaIVP8o(M@NBJk8D94U~4+I(y`B)&l$B3jXNmMKbQm~34 z3PtH7tOQO|;301U*3B)>0bpgVV!$*&VHtH$t}0?o;BHsVKz4wB=wL4?9_rt-#k<7F zz`P$BLBoJ=*j|8e30PAl2$8^9j5@4jU{O>qQH+&f58=OQcu<(z8Ul1Vw!=vQZa7gC zBNwn{d9g|Xt3;l-0I(4}h)(i2bbUZDBH3ex6z_;HnIf#$R5Fksw<3>%2ko6i_W>@!&xvdk2#;J836_Dr zWI-0da!}qhSslV6nT6#4B(Fh@DRCiB5^*u1)FMPTKwn`Zq$prKZ$@(IN8h^`W`OZYtnxLMTD7ntyH=utdaQOf(#zjk^V@ z68d9;=JP`HcYkv@5g~~VILv9Pg!m9#e^`k{c~?>zV8y|@#Q&^~b|g9ASpEsxzZ!U* zeel{qie8Wvu|5zjJ**;)1MSGp(2#&*%9R*ln2#YuM<0?o@I%P|XvX3JdpTaAKISJv zf`usAp^c3AvB1kwphx^K{ND^$QMj#*mJE3AC{@cG9;dEST)-mZ<&O7 zm7g|;soHDpQCy_WYKbu#TiaHLHSlBWiz}32z7C6Fa*K9G`(k%$H?3uJ=`Hbua0@*{b83Td7ibIVqpQZZ(=M}fk}A7im!uGO(B`L4Qs&SHs@B9+}!K0 zI4HJ%EMpbi!p1iy3|;$E)j?C3z}E$Xtwf$DO&T#LUyFq`?Ubr~Iuma)d#;5pfg6uh zdP22N**L316$aKHqe|i#B76@OE{}0=X^Pc)RR&JRQ5j3U0?DGMEB)6N6h0SHbh<)wV3!7brYLOst8)F@A)zm`G~Poqv?c%0VO>X$CgjmncSLjpY0 z+GH;rPlfIa!T^o#i=k-0rotsg?SiyVkd6C~PIL0kJlAmwLFvdff_`~FUM3ZTKRnft z;Zzu+NxzPu^~FfOGu_$$6K2cIig_kcVsatNWLbWNAV7?XjZii&*g1nk+xyP3 ztbltc<8YTn7ytD{^``O_0{O^n*%klD{Z?jC>0E|q!M z6++2d?t3qkMYJ#v>Vk6pH4$CXUx0D*nB9mK zscZ1nkwGVu-CQF4v&V8U-7v#`g7{DB#f5$!U;FY6chwxd)iTGRF1a+Wy^;w}=T`ZX z46CedlEWs&BVX=1!h$*!?$kzWw?Qw;^hF;jhSU)A$SX#{pdaCUhOsqgp2mMtsP?Uv zI9ZGc1p)e!R9aPfrPVLm(P^-T46Q=Ri)-&1ZX1c}=;CCVi6q?x_QG|84>#&Rnc9!1?nFygW+Z|Uh>)o}_`=Ha){k@jPO<^GWxTCR| zEbKlK{X1z#6ss@SLeawQ&F$$fouynDGsoH)S8^g3mS*v7!nAX{wM5`w6Ro(i-~1|7 zo)knypO+>IPKa!L6XQ8x)Pr0^)oSOYvR&!m{IDik;NKssGoz> zNp4b7=cQ3`Ap*R___v)QeYRaT(Wms_2h&=}N5Z`sM4`9pH~3-@BCPowovfCNkvVMA zG#ruN9wr+DT;vVIDmepSWs|Q1)INqh(>jy8(}Nha@L3Zas^8&?R9NQ9Ocv!qJ}ABE z$jPJ_T$K3wWe*Xk#jn)L^C^>sCTp!onw`jPIo!prwsvNAmu65?!#cypDfx0j{dVbD zHW_MBu8Dk|x|GfKzkc)9ksI%mDLWi^=HFWOVtEy7w9;v}&5tqz++~I|)AxoTEr=+p zMMRH{r_>*dTG!AiJYT{TBwM-8Sn%`qNaEX1^=0XM7XgMcp;l(NMzfrI^rGQ4^osSB 
zdZs`yF1oVHw=f@Qvtx}}j$a>oi(1j!*$H_TU%EP;F)m<=*l^!9uDiRMmu2$Y zQt~Kvtg@DdxigPtwtRqKr3dLOw#Jalu)fGlbdj)I((N}l5d8UD!JqF2lmc!27&uJg zOBO2Z3aVP~W>KCERSIY$9+Otun}RNKmLYU14)trV7ptijYbs|Wv|UR?F0!0%uQA4?VnvOa9wzp0N-m$f zdiP>@xpDW!c=0GyAs*Y?Uc1vht z2=s@0<8ZG%C{T&We0?Gu%%;80s{cF{G8;L5S#eVD*9#6B<{oYxK`nNFv!o>z8{!D^ z?G3JU75Ge*{K6*A&>Q8(*-yo$+r`PrvUPMBky~~sL$^cZGUS7ki@3LZ)7lJmR;t_7 z7fPvuDusn>ljE&l=r%=Z17l6AAKZEKnD>&^u!Xb`iE89?^=>E7rcwvuf=;``+l5%k27@bVauv0LU2-LpXM*G<$s>T zeJ_XjAm*rEK140W!xHCXhkXYD$19%xGJdPg(qqY3m>O8vM=zd0(*;hHwRN`Jq zFsUA9<&I__)##hb$jLg(TX(d|)-W9E{yp-XjCV^5E*7G$cz^`~i-i2c0%rn40j@Y`;aZOl*DAX5l0{ z$Qf5Tik@hsGBaeu9E?65-2SdFHk1n7H+Mg5*ENx6K)r=|*V7MZ6>-cE@I=1*(5n@G z){t^Ze6HZt*|bqKspi)xnt?5yS2~^_x2UC@krJpl{A4o+6a>#1HnTsWb%H&O;S4^& z!#rba0f%I0_kjJl3?t0Lc)S1C!w9+;r_@k@5O~~Ww6#)yp;1$qera!-xO&NcHEn;} zp+L%m;x!|~Fp3yFkf^o=%fO;%jR=zTmn!(^SjGwx%lg9wY^V&U3B^2`&WsSP&60q!#+b?(k1$b8p+Y> zMMm7tin!pa8M)SY_YZ;>saqXW3&OZJXtiL9U}N%(Q<|2dSGpMI;GoTreP(KF*@ySO zDGr2Ur!&(Y`F9kl3{5 zoG!+VJ+86PbSN1_jp`Ka=TqnvMx%YlNhEIJ3GjPSC-NsWR>VF@n05(nxSaagSGl_~bZ;sd&-eRZ#Mo1rr{p2A zi9UPlow(v8C3AqrFzz@bt=y@g=t;kA7tdI#A;2eHem0QyoU!`10~zi!og|Zl;8$oU znd(&qxUb7UqUKapS`NGOmMA^JpJXI)9KSUh@^X%RE%iD?tfB>MF#q&guX?zw-ojn& zf+c~M3niz@!*-ffp>j-2UkBB#E5x+&Q2cbDlq@<~tG}T0VT-)1e1-aZpyAsE`lMdr z0$rQhZbTETXXgoBiJ%m#VwTWFaeSRsV&A?CsA_Zew27417eAjWh%J5hR7j-+ zv%aId{jEAwp^k3_tXh=kx~%rzG2K2ZRehSbEDj%WAd8H{NvpkoK+ZUwq_Id`P)ERM ztu4frMj_dOey2gc{@xZ_Dz!$>MvgwB`Z_EBL7>e6siV(gZXWZB(b*H)F!NqEr=?9X z=w&ls=b9|ijaaHIzm3x#^T9hY`iby*BmL9+wp=|<)=whB;JwT2*9V)CCOd`6c0RSx zDmu~n@EbD)Ic3`s-MK)u-sq6kl&B2y^{*Kp;C`^Nl9k`Ee=;4#xYx<(5^W#f;5YbK z)6>)-@Qbmi*Q>)G(%g76DcuHLvs>6>`1O-Ze4FtTmeL5Su9n%t>zQ!lBOOB)9W0eR zwT2&NQ9I(((coQ+vFum=gB-@Dq65uoDK9JJXP))kl~#Ei+o}f+w1~QA)J1OC{?=O{ zK6g|TAFjd6lz1iyYTl#S`V*w|2|r}gXWrSjtg5fM4l}sn0$z(AvP)lnR%){G-kGVu z^X&DHXWHwP3&%62N$Q}t7;{lvX52pnCiS@a;RtFtI`Y6F@}x~1->K=66ZF4Wbw1f| zkeKUK#Sb7QB`TsY7agP^)Wj)88vF{1INbn_M?*@RYSFDvt%$xd|l|DSB$X8*`~_JN zk^Nf&C+mcoZAgJd)?Zl>G){+FQ%g5|vRO_Pf$cj(mzrI!WW7s__!5!hP-WLMoq> 
zak2FK25a@otv{{YM;m6-vZ>u%6%_^La6)Rw>uv&Rb@X~1N7SO%;&{(#&rZyIOYgi2~ze8{9{ay5q?LM4aEJ)-b z3yS*MmtDKCaiv~6SdfbN>#8XXu!WTce);%Y!UE?c-!858^?)O+E_n3uoyp+(&cT?r z53V~p5&LP}ZN%4yGKM=aMga}=(;wwBZ&Dmw;lPazPiJ=bKQgNyvi?+@{T_p7G+M1I z*kx-8OZPB$<@!}wsQYZCf zi2g{b)X3@?tz9h1&D-hUjS{a6vaSzmqo3W5T2^UebJh}>m#cr-XEjZyu_(Eg-TP>} z--rPAUmgq+k$>1Ry_Jx*AeOXQBa|EwPW%vjfx<#K7(jJU26wqTiWT=^1!GyBqE9v` z?5rojfa<1COI8>|CT|Q*QuAAsnBxOdTiVIiHx2B!2ph$H2y`O6xEx=f{pM(sgb+N_ z7DRmOqPEAaS{DP7T;Sp5T243chP&Q+e(1mf-o+T-dcMFErmQ#9PprB!B3swX{6(Tu zuzt|KF|A1G2i#X}TC<2_)(g7SG52%(*lh?y#)sBC^Fws|xJbWten~%54sz~SCh9YC zk~T}Kg$Mm=nTlnCUl(1b28X?_C<2>!Zb}~#6)&;-qg_($7@pA@QuNE{b5CB#K>8|3 z!Fs#%yz>H=2Zz!pL~_VmdSL|xvh#bnMo9g3x*3DOrq*6M!-)SQ9JwN9`_XtOY6^0B9i!jNHe=z_bN~ z#}Y|@713}ZYPJ}d&~G%L1^^Nh`;PQabje5%;R5J4ustJ^0PFVy0TRN9O#RpoN)bic z4s=nG!(s)HP6=m1c);?9;QYg^uU-<800b1Zz~tFTNRSdLtW*#KB^uP}0w*6pbd+?j0z}FgjR`XP3JD)##z%>k2BE-*nmZ#s1Nsh$VJ{R8lj zV4`<+SR|!Xk8?i!Li>lg&;Ley3aliKBF4X3VayEp|FB>xZDKm$Z2LC+KQ;}n9fcnf z1|kwrxc2W3!!9{)F#x%$C~|2Dzla<`DR&?_e(g7fi{I9ftSTpim&CX~VSSMxVe02-R1Rs(@E(eHMo~9Y*)MMShe- z{M(KUqH#ol36*(+Q3oK{`>=;m`cya?n6j-mjYN16d|%vP8Zt z{-;D0#>Ytan5VU-Zk#jjW@RbH8SGO>*zw+EZRkHd7!9gsIi?Y`l1vOa6+T5Y4AFx5 zB*vtN!t0o>kZq_zr`XrnRUMZj=-N&wZWquBJ<-D$8IEjt76nS^2>L3vBdWvYNy6EILydQ z4*W!y!p0~F+R)mc=H}N2QQb^Sm7fIX_v+E1V-$s&*3%`MFcxkZeuAONN6&vJZax_q zAV`bfw~93ud1tq}JUO>!O36(e^DenltBTqEm|Ft=1#0&ahMhpJsD1_yO4Btog`AX{ zlZ^XoNqGF%$yjclAVT_RSSsYGRAc7r^pj47biafb*~l23r!8felq7 zW#)QVDrB^`m(;5&+J-X*x8a6iFv*yz)Drgxu8shXet#(Xup9yro|e$Nax@UJ*fmn* zu+RD4VIIlnpif(;0@%1+llf0lEIuZ>$ngl4P4n@tK4=0#mplJYMbf+>@GYwJbM z#e37aPMQ%8^L$tBqcsCD5}$AClqOZE7F4XuJlIRK%Dv1I71eZ$ayY9m$!~0**W5m( z83xxQ#O1XTBljj-Q}R(#GfC5W8d#(7d&RaV@2h-MQ8XC*VK<#fl#lT-=n0-MN89!5 z_jv@zLGCL7MdNTN9f|Fu5n!cHO?A?rBJm`dHQUTc{ela`D7a;qgR%8^LUO2{+GCrt zA^J|jzeg|YP~{-V*gv6id|mN|HI0+6S#aBs)o7GVE-lHU6FrBSj5`t2gz@q@wkKV@%Gv3yrWX+#GAm;<8eAqYO%4> z==KUd@^arf&g`v(6++#DDd)Vx`R5S+yfjdt>J$`L_oqrb<|xm9g3xIn?q0f)nv96I z|GW>67xa0OH4o#wPQpFVfX*w#wn!vCO#4ei>G)|=6>f@bkiFE=*YKnE4}aL+ykNo{ 
z)=}UDvdA{zePR5Z>+CRFAuv_CMjC-D3z7Ip>KU(} z(abO}zf?DdPt?>&x$}k5PcYtJpK{i|C|IRY6G?D;#@H@o9|2oBCTzc(K_h}<${of@ zVMcNHcmHbU?Z{s3wy74lDOi<{^K(v3@FpvV!+P1DSWb&^J%_Ig9CYHlMU^(^v^>lU| z-a%3+z>EM13kx;w1J@rXhNx5#iYLH)iu}XRr&qYsNcVY z!YRmyl4~<@Bk#g_ub#flaSM%IeUSbxifLofkX?&gJo4>Ea1fkdi2V!>^(Z9!YkyYG z+*O}e^!!bq5t5X@;1#VXO&!86wy-Rr2ifFM&UsmPt4rg)TKEm+!F9tkqcZqVsyKUP zTRQ;;a#Fb7dZC|(DaN;A0$-Cm`wX#ae;dtDYkJjoNNk z4jNU{A^BNFmSPyW?0?8R5<7$W$;n63QgA`B(S81NK^$h*tLo#pjk}D+1iM0qMSZq8 zH?{I3D^fOloKFv|-8dc>S&>fElb}qqiU#fI)v(=#EtuL%@Rp>XAvN@paOcoQOTtk5Wh+ zxAI)LhtRPxB!5i$c~%FLpsvG>6J4K={S!6tXK?5{Lc~9-O}vX04(XqKXdPbneUjZ@ zUxRXLfnB$^=Ov6-{v2%@a?Ucs-$@kb&Hpiv6^uxa)~2LbT(uFhA%Lg9;N07mRh;oK zT4CQZOLO%o%l%&$;s26Yz8@fQqdj^tsY!&wp0JJRJ7UN~BNiZY4E;5RlLKsTmtm1D zkw5xyEE7oVfWOL32I2v%yEB-W>tj@4{P2Sjh#a7`ViYAnKw$C_|1EHuyQL`k9S{(l zh9Hyyz58+rk6<(&SnO087C_Jp+kj`rhFD@DGo*2#;y@7$K??xeT}g<72?z?ND+#Fp z>&GpU59(vzpyKb*{V*r5@jncB2#f)b4BZ*QfYIP#u^Cie1Jw@jpJwQgvjCTC5Zl&j zl?V>*`%^$b8K$(10a?PL#D)s?VBHWs1{}*%{(}mT+6M%BXvbTeU|=w{G4g-815Tbu z@qmv<1oe7B3-bkC5FjT-i2n&6pu_n1%0R%79`@FY71qY%k;a|Y3oA+Mw2$_$#%*FB z%0JEusvPk@U->-*I~eeDGg4`F5@Ei$hz%rh2a6q|j!OxGyoiReK46l?B+zf6WGR4G z9{C0L4zPTDasN^4?R}6h0V(KgCiDX!RJydl3jrMdszwrPz*6-?A%FU~Tv~5XE&(xL z=C|usQ?T%2E(ww-fzA#wiHGr8U?hHYC=Dgz)+5uTzPMBGA6rCjj5Gm~$c`=u*_NTi z`+IG;&`&r7nLx^b{s(TWOxR6)wE3$g%nn&$qr`!jTT`Myz9hit^sv?-sHHk$IdJ_X zhUBC`7-gPHb_mEnD?P!$1;{Fx7x-d;wcms$4oqJl9YCH1!P&tSfF4le^Z~1(&58G?!U*ff=O9`cUFlK z;R-1I+G2*m@z}Tcgd3On@wa5rG2>hZdleyQfEN$OL?D8?{X`iD#v@%MX8)AsIzXEd{e*}fGJVpxcMiBeIb(hg#dqXtIqws_fw`&Ysu$t!_4{rM@yVnlm zZt%{TfmjYNt?HuoH;ZvqKZ0|sl$j*@0r#des5hg`6DbLMk>F2(H|K|r4O{zfy;|Xq zNh0aw9eCi&zjEP*hW4Kklr~p3$&;37GcBMqt}?tsDV9>1%x4H4Phfwa%k*O0k(ZtG zm8~G-u`c}{zonFzZ&49k4&YvQe*-elRP6>&sb z2m0zy_NvY5;i{}|wUczF2AZj|uZRmRa_#n#qEMU!i%XyC8zkJXryayO{G{zk7(UYKu7Gb!t(&LwDH4ETRZ+yJC{vlpXR`kazW{M zJ>zj`N^gL?AHP|1*qPmCR&cm(rX$w`ak?IfPwwHwT#5^4^6zC7&6B2ZTioZQf(0b9 
zmXp6Z^jFVrEAz^mb&uan_@`0$J}FcPxzCErYEZq1Q3Fr=O}tS{B&+6jSJS#61Mi7=#?9TxpdZ~(>5=hx8un=eRstFZEzxh~m0RfWxar8bX}UjFyuUaaPPN?>EfQRr@XJodXws ziT8CsRz9sVE+{mChX@}FtjvPXdBrm!K=M@9;$@UbuPi%Fe!TsvKXceoCtj>iEUx=z z?6;fj{V4cxf+`EH4aQisXgNILDnm^1f~Zmq*|qe6cg!z;XK^vmXb8OX4b@ij*`r7g z8Tbe`=_#bF`lIX{L}ZvN&$yobjwu^2?&NptLsm#x)-$t?Jwc@@+AOA4M``XWZwc6!Z4M;IIk)}=g(?D?7MxdaW|=20gX5 zrW$!9zKkW2@7@VVP z_MLBeie@Ec(#&F;80Pcq@cdnh1xvVYyKSAN;75%XI|=z+olSa6+`Aonc;j?;XR&gg zcf5WH-AI=`Su4r|?;TJG3li#+Rt7+1_7}z|)JseZXlJZH?<31Es_(!jYY&ByMNXO` z_}3k?@g$*nRdM2c_tfSZ+`fs7Fr%k>}W`7BQI0 z`*nNOEV+E?^wZ^e&JQkoQ$@!&jBdwMf^=eZjR{*7v5A?4;CclmGE?e_-J7fdm3hrd zIPa>!_Z!hdrld@1nwU9E7uSZ4@x7w;<1gNDtCV2KnSDsJl~LxtXjH776#4wbc0Lx--~X(6eQkK<4L^buAKY7u zR?5R@g{?Pq;dT#~!b9wcNS7B7%*)s8;LHrAP$_UA0gbwKe<*(=rYqk*4#i-Ea|p7g zAUZe-^^RsG2l*e)83}CNg?x;tTy(n9>M8oJj{bHcb`D%!z#e_=9R6m%UTMG5k*}-- z!&K(;{OQph+vkU@8;T{!pSIm?&J@;*$))B-@ZtVTQ13Tt+V2fi=Bnu}iaSG%S;w^b zyFz!aD{~hg-Qp+Swq9v5+3(vG9Q0P^ey>2A9oX+fF@lNrDL60*Qc>?$nJ{y|h$dxs zhLi>9a5jPkbxF3m^e(V>g?Magr|p-yld;?@Jc866h+XgXkM9aw7IHgpjI*l}?rsCG z<-cQwthmY_hpmN+59%=+^IB5OktL%NFtt}o8`8(ol`VLr@$j4d8lHdUo95=Kclnax z$m+-YFZR?E^KmYgq-H^B0uEdQIShMmIr^@9G-5=-N%_@7*7d&!IXImQH$U^cI5XUd zJnw$$GwAN&iyxALtRFa8H1jLQsGSAzd)`2Iru56_7s}!obYRxJ@$-|M|NyiZ-%3`NQInaFgzr)E^V)izi-9hEQm1cYGezH;JrGY?rqJ{6CcEBxA7Gj`cuUNm_@l?N> zF^MEeX`si+qiu{Pm$__@;+1iDQGE47B(3bzSG7Sao6P}h)5sL*CN~|&6CIo)uJKnb z$Br~4I}-68J<=vOn3_Q*Gj{0TUD4y4(mqpru3FAMm7Rd9*7GNGz>*CqK;X>C7$D7F@`3A#=uyuZ?K9 z*X1w&4GhwR_q4Ch8G{t1#Gt=!;+zNCg5f<`;!81^p=pTadjz$G5^iVd|xZ-JxqVq+SEQ5~+ZKZVgk^R;Lb=brtMKeaW zsgj82@x~=tYjQ9XMhXJ|h;foWbMcj0Csn!@w#3iK;R=>Dh?glcu#ws_YShir_297J zO0+pL#@)_*nNla@R2RL5>p2UzOGi0pN^$K);RiNtbu}hkR{`HzzJ40Xy_l7C^r~xiTKCz;_?3Xkx50<@X%rV$NC7M;8me^y9rr z@2BkPy$io23~<+Aoc@8QMOJ4JL-x{_OCDtKO?OV_gsVio{LcsEnNhCyBdbm-IuBvV z3Ar+0e?psAGmUWNr!$CgSjkUDQyL*tk%wyP0ke(ms%lJg+MI>jp=aE-jLYG&-A?EE z&rZigJ%1ihE)gz&N%ZHF_ZGkAbSSf! 
zB<^gF*mj!`huxHov_l9y9n}I0e!A?%N`ZOI{KEy9RIINJmJVXX2VS%_ymHD(R%G33 zhChmney=UI&|Hjtr1Eq(3`!8?S~$Tu^t8`@?l*x1r?lGgUT3ab(bU;byDkD(-#?an8`LjTeKNs&(`YLe$Kqwd^u^rXuSWxFhI$g%Ql;E%rULQAoBy< z2c|G?-P)?(=7W}Wr7WLVH@fIh2=88VHOxi|PLHuGb-x!}D8k*FmahNvv$p!o7vZF; zR)1kssP)<~!JOe+|GmYiq%x7o{Ycp@pB|Tsyh2IgujG-!TRN@C#nq5Z>ydAJS7){7 z$rppHD(yY|M-XqO{doOdQJFkh4uKwUtD7oS=c3PsFR$J;p+Wns@h>}ls`Vr}lh-O< zgY_>wH;=P~k`<0mx9)Us&$QeGWzv4DMMoN1y<6WQcC{mV``PGRI_Fc;7cmzVq6*dD z^`l(S9|M6-em}=j7L>hI_I=;X=o(n-wLl&eZsE8PK+rN%ZpY&77WIRQcYa7Txv{f(KdiURoYNue8c|5$=%`}{1f z)fg^!mGo%-`+~tqqR&5FjIBupE5LXIV$KCYOde8KjmmG^>KC=_&HGp+cZ66Hq6YBTJ`YUYH8&>ICUv_0X|XUm2%2} z`xgNeztn=6TwHiYiuszkWwu7GhO=UueaXLHiof3jqW||n_}{1E zFVrO#hbjyJ0D;j69RNJ=6uVx89z~(|ngwSrm|KlXUxD3-k-x()jPN2&-4J;}^c!Nw%$S4lh1rXkV7*bJU zK}S<@N`M|u&&Y{@9!}ga;=>~{lOK-oPaPv#9(5mp9pw;lw*bn4^k#*27{u5KBnp)# zME(ezRw^Nk0qS~9EkBsVz!I_SBq5fqupQh{K2!`ya5yCfG0oQKbUy zvkFY>C;(~jvx<)gs?298;R)S={U0)AV+Q&BIi3h;&`-)jYxXjx$>*&lg(r21KMusl z!9O)@EHC&tt*8kI+@F&hDga0Q51uQ{7{L52)G5P#qJ7s`;B%kA*`X~C{O7XR)PuzO zT=Au{@LxVZ%aO;x`<%5|S&XUA^(f9A&CBr7EG@*RYI05um!S3MBAUWyvj@nqlK-PL z+e5DSydkL@a>~C_V+`Ker})fm8W!bWr<&KHNj{PBwOu@d&+N5}Ix2JP52Vcr8hDNT zk^L9n4-emJL5Cmx3F8_<8coT4%pT~6R!#tp4Coh&PuUuV32y$`=@d0c@lVwKg$Jzd zvj@b6i29#UY(iCQuKmZojiLU$W8uu^E$%uk70j7M>YBka|Vew1saZ)~X1iOa@MBZ^x|%fx>F^7pL+-w+Y4 zh=O{pRCYO9x?uho3NPkT0vwOuhlD7y(S_& zDM`~f!CYl(QARy*#eVSx3U!&wlF}~fHc1Pl!c;)9{)7VE5{Wo9GG4`i`hah`Hh*(j zKi%NhQbF6anf7K;CC zkO%>3m~hE}%AJhas=kIolu>YYzXD1npyq&1Hpzzy_j?QCaojJC#N7%75CKvJvHEQz zvhsYgl@I`zYqu8<-saZU2rMcA_QAv0H5X5i0e`R0gBEIx>()Vf72R*-X1 zf}X@GRS&dW+bL>K(nQPz86dWiI=A-xprqz?jfzN2vHCq!WIGF7sFt$*D~@60_(6aN zYfqLQ66*M$@q1vDyXuYK>+g#vA0AIvTBfbYq^LV!(#ifV!Ir}C(<|4_ucSMmC~5v^ z9^td6IvKQ(d1N>35l53mA_6Q8^^-yMK7)P{yAflr0rjei9yY%AA%KR8UYltVPO=#c z>?oW{7HBEItJeHH5GsL8B-AJlTcB=`DWEvf&=vzk+ka;w+MA4e*mb}s>0>bEo@p>n z?L%0D`>O%g2C?Irz27(X8}5u}dN;|qo#EE~`Uj$-|A#vAPJOzn0^g1NyO7N*S^0jg zbW3XQ!XLM%CXM*sF@VjoWM`r?ye#f0@@aAlHr%WrW2m8xey`XQk>`t#etor&lw(iO)K4o=S?^14FH!b9LS#VWa 
z<3e(!;z=^U1PiTAkutHUoBn($GQ&tZwOh(O*Dc5|eQDh6B@Eyb1y4hh{~j@JtuRSZ zD+Jypt#ZtvdO$o*rF<~QJ)VuvtqBsyltf7r$UyD_a<_eav`m)+96mMHuU&bdtx+vB zdGK@#3L1jDHKolZjCL?nUWy6tNGPx?5_h>rbESv~PVB^y{wmZow`I8ClUjXLXCCXf z5|^S~x+|Vd`xVgl4(ocKy5{gB#G&!I>_;ubjDz3~0+LJ0CAlJa$AUEDWH!gcOeApd z7sYanKA&FzP@j`ey=!P#$*%IZ`b&QqnGJ0FATY@|<#KM@51mqf+P}X7Oj?Ohg#FeB zo4@g@K`dC`X0GLr=*)x82OlWX=8MM<`+1H&%%3QY`ptmva_rDnf*vehW#?*jdFI++ z=16>clLQAyjMZ?{sgQVc($BEb=yf_{LeFQpj*Q=gKu*bLR+tC6=rRwECLbcFN*-a; z1=L;YX8L8ai4n?_ty~!}Ja>OnWp9S#{;=T^|ALKwJ_}bp=h;DmUW641&RRa)<=;>t zB&w1Cgy{ij;F@m3`isN6rCLHDa=_{bG1kg(bS}hDboD8#kndsk1$Z9q1zkyf)hQ8| zhQjlp8K-86I?9B=6TYZNu|!-pOQ+I)fkr%rSpo-O%WD6^xQj%Xhsc6YH+#bNMt_Zp zy6rWs{$^~;lPhv&)Db@zK*eXV3@71BlM$(i8rub!aX3E$DQD!Oz)Htjeu>ml&t@IE z8ZVJL+ND}Eq8|98=V^e+%G^-!)MP-4tJbRkoY>m@VIXbJj$>qkgnfpVks099ynlqT zf7F~vOipPFN>;C3T`MGREp6jSE4a#*t2vOSLTwdoW+?&33E}A#sO`Kv3k?QUt6}U4 zHc0}YF@9S5MnX!Hb|zgSKm#-p%qju3zGVS?+BBFe;G3Dlati%n1DYH~y+!vS{=VF=_q#|d=2M2@@cyy?-GXer-r zA{)UGr{TnXSST=xC_S;aH}LfV z2zqkAL3+I0)FI#R-rwIYFM<6nUG0LtEKAE$H0hX1Fdkdd-3=qk)xMx{S zdfbs>=<5KA5`etT{=E1Z6G43hT)cT6uOvk{HtUDJMMgc^Z({L9g1ImGs4BDUe=3+A{9&X+O-5aB_;kO;PP1%ED;0OHZ_-?6Dp zqGR%ZH!kKh_hHVsd9_rOzR~RdET(OeuZQqDmo*hrP)r&M8RAIHofl(XYhll)7%I&9DrHnSbOVHaluHA7;Er z^Ro#x<;3jD5?+i(TrZQeEP2&mDfQ@9qgobqd8v?sZ&G^QYt{w-^epci>9A2Z9l_Vb zBNpGnfEC)O{_^tx+rc({UmIPBlIZ25mf5SwMr5G_iB2P!+v3t${Wv;;Q9vN)pp*rI zZO}z5k1jMXeIhp~J#01EuRS&XFhx=EGFJLHM|&_XPhq9hS^rR{<^cT8B3ZH{dI72Hupmo*p3| z9I0&JSJD!U9(|t6hN!;LQvG!yO%$I@mAcF zSBL@_e@h+lwoWpv&}>L8q)*kJWxmrcrh!Q;O>M9mZ-Ib+_sC9n{RJ5L6&qWa%VCe{ z29vR|=kwkp_sv*w?j*eGIru@kv3y@8|dRuJ!<F7gnHp@*nm0s9SDT9@cUE-=r3OoQ#8?mA`46}~#akylSuSnvA=g=YA* z-TFOR9CtVRUS$@}3zm##t**K`h$SJ8l`pVa&$GZwU1>y9_kZZJkd_5KnMy|Z%AOvkkuoj;- z8o>&XQ-xBkB*lyywEQkgj`!PL@-|EqI%bUEzW$n=li=n}B8o?sVUHT&7>zxko6pj+jAyOzb)Xlqpu61m{v0}oXR<(73 zzUaHv!BInezRs~=I@4bc8?jjYX{w;v&jWQ$?zk_jIS4WAAvN*cu$+KY4wYXw`fA{p z6sG;1b7uhka<8%S+P(S91TBq{_`J=1VWHO_PIPm+%^nx!oJSt|EyH!_h<4|f!}%k} 
zw!t784>P3+xKzjuyUY^o5#%>weALs-Ig#O-I1v0EtrE{qV3Wx!>m!vGaW#O<;nZ^lpxtlw$QVKU%OHDLGTAEvz1{-w7Q`=j&Inj)C{Dg{_-)TRzm(`$ zYK{S-5htzZM&sC@Cg_BKfAeUB(Y7ee8me*xvH!jRN0rp6&GdbTzQXF@HoBynLI;_A z>3x5pzHse;J@nnai~hL*>gfQ<`|&bjLXOUmc1q@z&;3)7mg|=}kDXguc!g$8(AI+J zjoIU+h0DO))!m587HPakEqCup_Qfx2Fi9dnCHP2adC|*$fZxz~6!Y)-N2BcIU0n-Q zBc*ijb1e{p>n{(pj=sfBo~92{8*BScJf^Qc-9OzKugdS)Q_r5eDM#$1biuY=+it>u zbJx6S#lELFKJXp3=eQ-c1YQcNvtzwPy|_FIJ$@UujowI|)sJtIo7R!YJTdhThY8Mr zlql42i1sv&m)!)JHH#ra-+dokk;beXgN=kzY!s$h$4%Sp(@nP<3DLFDls2uPoA(Sp zN^A79-;&7VJ_37{9GBP8Q*YW9EgH&hk?{4=<(S|&|*T6lYfAJeGl`qp!%szbA5pFpiy z3?o{`kmaQger*SERtT<@h&h;)bStC4J%O(u-DVWT z%qM;G$gLoBy@9d!>IrUTrxZf(bv2}IBCU5F1sHZjaI8jC`rU+yJ&?BZd z4O6v9bTt>tVmFCLorx~heDRnox6b7CPwM)D&n9sc`e@vB1E*xg21D~4DZ=hGL+6ja zX}gJ^rj>abKy)33dK&D=k0IEuO;Gk*j_=!KcPz=Htq{>29GTQO7RDDkdJ4H;jBS@G z40~;EQ8z&~Sb9i;Aq1G zIU+mHgL9LLeG{1In+$vdxL(UI31<%5`SwBkRZ2KKjORR2B{|m0Jk>i#URK`u`|RwCe{M&52|R6(tK7Tgz-5k#HnlS-(6&k4>=-RpXbX4mLiRR4^@t) zXw{Xd;yqLnHc?0!?;Al#sb(a}CqccST~aYq zoHx=Q8;Y^|Pjn2>R+IRFGjkJb*)*@q-1eJg_@XV=n#pL4_dPW~)>1lh7ttKV!V3f@ z)EA;A7hH&9B6D$&l3B_dyI%I~_7~8gZpfzdESm=ziHOhmXwhRL0krB%_sj?U7hc=i z-`TxK9Xu*>3Kt#%fH{Gf*O}l;j$#v$RAG@4Fz*D-jkeo&g_5@RqNO@T#2pP9LgR{u zxsHK6r}znyNsuaUHU6w!z%O9_9R=eoP18CQIkqT#1t#NmS>YB7zHvcRFJckqgH#7E z<_S8r7pxfdLx1_L0XeeV2ueG|2RA2iixXK3eSEhluCg+*Uxsd+1t(Z;=61H)_^*R- z)(>hQ)^g2%v+{}+QYG+Y4$L&v<)3FqztZqCdY|+E^fH%PoUdB_O6v>&b5Sv(#pWe2 zIwc9EYgFZZ7cRIGQJz&M`{J~Y1Z-tOWeJwc>l#jV$FZbd0i=#opWKY>ZF5p3E`wI^ zlNC_KfU7$Gp);tuymkqUWmbN*k@Bk2T78K@ZvxR0jdfq*3q{KEWs`(n%CgN00v!%o z#?O67wj#_Col7bzpe&mvPSvSK{!8nv$k({MyVdujh@OLrAkaD~1Q80-$V*ji$kG(g zxf1Dj3T0lYK47tUClpkJ(wpmN;}~a_`@C>8IF5FPI))34n%D2kK}FPi?{E-wV#JeE z6g~E`aYf|*pDQ!gI4{uAuTS8-n&F+-mwc!Ua6jYGXT^Ef>=u)`Z<1F9&e6V4AlTYn zr#tf(8A6+L6I$&WyS{0Z$fz|r+n~25$JpAStiE`Mv3BXOw}i#YAa2%MTlQyhFLax%)$2YN=f7rn zC-H<9GeK@gCJ;Y8y3|FqG_z!YDjIj^|GM5@ z2a8a|w+^_`t4&-Vd0t!TYc(xl!aMq|-+=(*bpfiGV9yqkhU9{ZOuoaaz7NZL`Adi- zE*z>q+ZW$~TuY++O7O{2L!CRQ)FEEz%6spf=MKl?f}7Gz2{^<{4x;r7up^1L-3@ti 
zJHg2Ecfa4eF}Dvh28rftE=JjD^luhAjBjCK%oc$6#KR?=OthJl)!YnWa1P4py~gT< zDABMV@H$^9G%^j+{t&GhU~bi*@eXpEuh`Na*$FDO?k2SGzx0P&LkM_{-MCHy9l?gR1pQ74+TtNZl<#a&qSSEdmA_D?C`$pQa0(BS@O>ug`%DEOlX?6 zXr2M0m32cU$?4Bee^kQ(cqz-{duUb2)|*kjR@Uy^ytBKWqM=VOiNgGV+9@B|kip6P zs)oi57fR51aq;dsRu33VADh3VJ?)xZjz-mVuUfUYzxL}WV|{%3(w8u|<$qx?i*4kD zEX9KknU+_cznH*`CQI}RmjyK51}0OkBX0pP8CB&&3e5tA%T6ALkp0R-)#vw!bqPFe zdHU63*oFNlcrqSm+e6S6UW@B-UCj)1lOso~riD$0LbKFupT3a!`jPVbh!^IAM|DV6 zokYJad~L(>Kb`&y^C!Kc5T9`7K&O-OG`0CprH3I{>@fOGYcJ=>d)SwxY^#o9PJT6j zsNr_ctkbs32Lk3y(~<3%y;Ay)PaFFq)@+L+2$Q#l?AQxk;Hk)9|NHBl)F(uxa-a z?0oaQ5(D(9y}QhOdE7lE1`!_zVBIg3P#bgQZ3eEgt=LV4b~E-jZ%F<_1Z5z{)JV}G_){w>OO zll0|qSav!u)Xfxk%F$i`kotWuq;bppc58a0;(jYK(P2~qQzp zb;j+f(+|M;N@%#(aHq^vWM|K7`G>F3?kYZa`sXCL^~K2b*hN_Ap8=TbzKk3p(Xk&n zJKdPsrD+>74uXTenY?%M8fbsL$SP&=@uR(vQ1#!1U*a43Ge!ia7vy1f~TKLa#RAzZzORo`J1IR!Ca znc&=7lFE!*a$m5=00J-+gb?9hz9qNoQJhuzS>^xkm>VuDBs}bAYBajycId@*Kz{z~ zb@HpAW-#&U%*Gai#?_;XvA)9$%%%1&o1ThY=dF{a?eDf>Wd7W_A82qcYnd!rweL+$ z4+g;B+Tz}kQb8%YkXz4)hs%d4aw>0^_D(tDtof3JH#KPwe@(%+ zep6dktMf?D>EO%#Q`>UuMhlFdfsR4K{$HEiw1-<9kAU7Vp^ev)Y^_U|duZ6+fafIy z)?+sl*^U%falr9A1D97Ss1o2hm$ISt{qc*l%Z(imM$(;0fFNSHpjAT!`GtKMF`W|% z{sX9CvgGo8p1P+TfcBbq$-x)nxQZmRc0~$0#6Ei_f!{T6w#;9FfbJ%_J-cFGXHK$n z(=GfG(x2lsa_$wsK(Ntw(I{;Fsq4J-+8+Ez)MLYb96->m%cnLwMLxxQv@1y~^X&+= z6ItKjeVb{hY~Y?ssV<#9+;$U1n-b_**K_XB>dy5(O@Ar&h5R*CZE@Gh1Jv zWKYSu4q#wwk=^?WA!#eUjS(K$bJ&(}po9y1DUA7z=AihN+M# zL!#Gzeo+7C$nO7kB>0b?tY!RwEc9_42!ZgSDF0xMDVE_Ae554GE3ONnbRTI(PeD>} zsqw3y(E5lq!%2r^lLad^*Hh2Hd9QBjn}AIKuFk340{a5;_J#Hzro8R9rWm;W;fV_8 z()<&Q%wgUFhwm{b%FEM4aK=<9rW|Ou>xhwJ{fU$VFkn=KnK0duNKyxA1ZWWn&18j) z%0;G>aW)}g0H}m74j7Q7M=S@pA_EU>5o7dr=D{ZBdZLue{Bp1nl(4Ns4B$-1B!&}8 zX>2JD323&gnta)~@hL*Z-@A^{WK1VvpqtUUgI%-K_G1_{Ic<_Vo%+KBAxxxuTzkmo*{ECS)xGpq-F$@okOuFV6_{jk zD7gny1Nft<`SuMXD~zQhaB=>cAv|zCJ_l-!c&o40$m0+^QC%!3E~Mc_nxgfKu`#bs zoFZeNLYVIg)Y|L?DXHh_l@W!pW+*D{uA@l7BjO znYY7w{fb~`FG%3Q4{&=e^Yo+InG%A=A&ISJYCz4^(~lt9Nsf-ekiS?87HYVey~F_p 
zW42P*OCi2}EGfG=bx^OUN%4Zs+x)cyY+lxde804}3WYhMvd2dPGu9LmCr#z^zN_F8 z6KQD}?AB-d|s-@062(w=8JCpzMx;7;QZL#JgoHFhr$vZ?nR zm&SN4X}Y&Ev6dS9V&awpu+0ejECk`GwRk%0Er# z1+_cG+_++DRMn8zp-u@MIr|#EUv)0RSde$2r4KuWC_9Y)Poe znx_lA|M7Z$cf|Uj&HN{0LV7_y{Ve_b0e}422kStV#D-7vsmyykME-E$_^&w)M96_p z`CnQbPgepkE)0yP_4gP?!)MFlYfu57_6B+S(0!kyk3(kau>X)Lm1BQ;Dj0=+44lLX zHsv$Jgc{m<68qz8E$n}O53Ro0II^(+h%v@-JrisnkIf%QdId!cWD&C_0;mw zAEL2U8zB@t-AjVJ)jF#FK_RdXW}0vlDa#{aE=G+C?bu^I6n!qgm$e>3%AD}n8K`i~O zv1t#F`T3b49i-Oh{a%G(TR*@3-VYxL^N&|kiK18a^@Dr#4i4=428|EskNr=3cSJAY z@QpGXa} zYY~5f`NC4|1ispB8Y=(<(^Pdt=;J^R%xF$}2d2;3m{LVe%_(H5ps>)v$Pi$W49d>zl=rwK#1V(->|@&wRl=>w8Vgg3JJFGhGNuqmH;N_;Te* zIZ4eIWRWjD$m!Jhbo$~!Ef%7FCC{fp)N+K(4H70}Ma0>2`{z4wRg?mhyeN|y4|cYG zTz&^)d8_*kRfk5Gywu4zc0}fg`%-sp{^!PMRE|i}0s+j9Is4879`cd3YqFrVH*WZ) z;tdm|>jTcIOi2K;aTT6wQiaEBMrok4P($R%wr(XMfQ&XPW^7X;>FIh^LrF*BsuTQD)_7 z`?aU;p4M7Iz(DkuZ#%>G-#D`aAFR*>e9Q|X49q0!pvu^9somCc&oU=soU(#8@NlK8*GXp}WOkZ+PV2k1zsgnE$hy{@+)= zn25G|?GGOrOWOY(rSw*Yj|RBV*>qlSLj|lF)-OK?$f&Vhs8YwKl*Q^>GbQk+X30@G z?*&Lp?WU7y$0wh^-1q)s0!iEs6UZvcZl5{m&krBIwC}}^&%!43044SThQU6P#E|Ga z;EYTS|D~E|&}Z-8;hPYNEB3<(VA#utPG^hlBFgy~Vh3${c|Cv&7{9ciHrYs!f8_ll)!49;oc7a-@xBTGMb zzC~(ji!Cl!Zf>#i?E)?-y@Od~NM+mlGX&fC6j14Qbw&baMS%Djm~3-Ih8#)9OlDHE zMN}x$a%@Jlc17J!_6a37)}z|Bt6!4*FQlfj^#qT=bzJ$l|5Sp>d)p28Q^Wo|{guu< zH{c%EXAE)Bz#EKL$N8-IBK1NfTm^~8oApRKKSTYcBuOUo00yA!oCH!mB65sG&644= zllz|yC^c2CZ)2kl`p?>VmA~(XQ;r7w|lP~dC9fX7H zFs*nfm_=%9C&i90aYY1R!?3;z#+NVbzr>dZJVy=*4(B7I;s+=WjZdiemmxsChN!NA zesqJhmZ#!@SpbEj>CUe${HVh~ZM^R%+|NI7UAA5^@@B4^d1nIstJW?=-2K%1!H!iw~gDW<75Eig=B9K_L09CJOEM z2P@FYBmO#jJ=2>YnB^EHaSoA9Y{7NwWRwC~$2%1&u_MT>jXEq}`F1;1qCQ30#ug(p z+Nmq%%*r+DETPXSiCrT%K+gPoIq?r-ZJ!Mv@BYLPy>Vd6j+PiGl#H?AT^f6|d+}q4 zk9U{-9SZ=4OcQDY5_uQ=%f1f$Onk5JMXRqeq~yMuKH_U;taexDXY8wiLg(a z^F4JEcaG9PmWlS$*bsjS1!p zEeh@-REi0MHN#g*(dU!G_9~D3t6wB|+Aj$G;Cds-7(1kfL9vM+-VfB;G$}gC^=?LI z`CrtgTZ?EIcLLSt$e$>RzXWFYPc>vA=ULBPp>5N^)GF5U=q}`JGx@y!>DaKJZNIHw zE*w?W!RBl7DmEs5V?kRSO&e_R-~oMM&SxCH^`r>J_;YU~E!@N)_|Rkw0_*n89aN*g 
zB6*L^hAipiVfy60*)QaNVTH1j6{@QrX&VX9c>Dgt^yV7}D~?i_$I9=x!OB%gDt{5m z3hlmIMGMf%hNYm4iQ;I=s56)sN@}lXc7Pm}mSkHi;l>DrC;NzJT&9~`D>J*0I z$Pn@@xV3MIt!aKIy1Gsb^N*xrWDl!CF6hPtXHI!m8L1`=$MVcZ)vZIkFY3zN+uILTXHe}UJ>`ZePdKtR9`gVV%89&CvXRUK^+GuwW? z-2sE)qV;nfa^a6IF8)kkbaG*-@#IJ}z*_x@0$SxJ61GKdt0zQQH~>?f2|J;qJQU#E z-O33Wn*#^Q<@(D)bhUmzHa-`sIhfmvF4g8Vz#iV&K0cqE8wuh0`P>DD$XQoAQ1_M< znDp0h&eLb4frRmH@f?ZkTOd)wxF4=+?dUO=&XBwZ@h+-NMNNmOHeF@0|GQlUfKi)M zk>ys1Q{!z5nN{{SklrIBLaDoP@fdtf)Goo;D%zc5Z%eOwVqMtk>X2iB@CV4#B^ol; zz@rB@^juzmk3c=io+o{kNtsJ8o>_U4KxBxHCx52 z`-v~MIOr&QZf+Li-o5f#ZOs!sfQ~;@>aU=8Re4FMlLQT%R3;_oG?QP7w(6rBVOH`8 zH{eYMs+P%3LQYo3Ou(6s@g?CBCVY(UtA!W9{J@mDqV)hSL2!A_56|qI*MfO5@sibY zbYtmihG+?t`HZibHKn!MOL@C0*HV^cv6*3n1#U5f!lNx^u_2tN1hK>*0B*ZklnEBYR*Zhf zSRAF@Q5JBl@7)f(9Dhy$%$EX3e6sIGRYNboq!bMehW=iTgtIk<8t6a;&3wa?O~Lc0 ztMEflh6~gGDqvK*%5bo=3()DUP>1_j>8YDd&+!Hd8Lgn@J+XQz<1B?s&c3Pu`AZ`$ zTD7YM6+w>tT|12~_N}KOnED&xjLWI5G~#rGM)M>tp2>ICpA(IS06G50VNf1mBXi^F zbDN;!ex8=)*eD~t9Tf!f-H5MBcKrTF(Dz@Vi32|Od-pwxdO=l0V{wD_vN{A^++<(! 
z2zeyPo}(DFU#V5YS&bD%_RB>R%-8GooVt5c^%a}r(twuOJ@Aigk%NNulIxbY5nRW` z@_H7pz2NDd#?Ylfz&*06i~v&YOJ~p(Zb*q6ma62Jip^V`!wmH`)Vjw%S<>5Qh^Tl;4n zReh6el0!r0i@NUlT&KMhzq7RZ^2kKC$)*Qev{co4!-Mw#jZWM zg9q!k6=&5AfQw9K3hlj#kI{zTyK+f+VJV+E*WclCE7>PxghV)HM_1p-9V+NNsU~|i z)n)}lkkRKG+g&jq5V#huLMX< zO{r^RMKUU>F@1lx^krl)2$;r$$dxzN*3wCn?m%XClBSfk8Tu?8bc3}SonC+^G1ei% zX=f6c1Jczb8D@8|bM$y~aKiXMEQ6vGtq-F0xMEi-kWj@eQQXV205PC!V#vK(H4B-# zKgQtl<6GzpE~{O1p`3Ir^Op}%zd%q%ia1#-2dFjGorU8tgwPhzmD~GrG;Hw9Rg0Lk zHpJqZe;C!>1w>sq_-ba{33I*S6h?THmRyH8v`t>!*dt)x~u+|q(MQl&9931G7Dx48=eZ(N!NE@_KTG^YL3(#~B?Ci{B)Htz)n&fTM zW)di=bZO6P<(|SqzIN|;`U*)gT!u_!HHeNPKW=5bQ%#u2p|qWK-C4~F#7|$esIU=> zP&(zu8)hw^KQA)VWJG1aDM{o#@yR-mb9OAGfDzDJb=AYsXU5m$vYR2I*6a^6q|3nV z35_bL(3*CD1ALS4JwCyQku5FgtE}Pqg@#YCOkY1Z$@1-vhZy}^v|SSap^FC^F##V1 zpQF?7A*oXQZ>;1x`!huB-!nEglhiGC0mBBtgp|U2DUmh|*C0TYiCq+Vk#{n#927kR zfkpz%>+xcUg7$KY5PEDK#30w{9uF&D;wpw7?ROJ-5dcOFn0di%1u4}xr%l11r2RJ| z>VED*1$+(OMRV~kwMrYb7>{tnRdd6$N{@f8?)C9IyrH)DyE<`b8rqL!kSWoecM5H} zshxQc|H9l!S>~lZiDmH*Bb8%byv%H68+lxaIAwD-&z^K&Yd0$Ypy|MhbU<60&KNmc z@BU8pvB&f2a=3b4eq(^#TU_mkeOIiN=k8A9xDf(r$wx=+W-y#PrOnGdqa?&7hu`;; z;7!aLBK6P+E6`ILiz$rR=WRCxMh@_w#3Oc$l-ZpSF0GQDjm<)O`%su{;O7>8i|tv* z*WE*1P^3J2ESccxX($wqL;nt(eNn&uCnVG)`(K;!!%6i2fSCVn)9*o9PVpaDkTLMv zAQ~=Eg8>)ye<$*u({PUwg`B&1jd8>puho}Yq0^es-R$oNNJ)vvgE4@}Xz;&WZ9+;V zB+@7@I{T6mb^hw<(eK~!g#OsA;q4rJ|Jep{BRFI2QL8w2nr94F)0qxbDWQ1{9{)yg znB=4x7q%QrJ`6^qw`~5%q@7iXgR#lyo&~fuQGtYib-&&lcX9CrcTRAU3ciuC@MSzt z^gma@(V~BYaSk??CL80B6_(-~>mtu!PE!ch%iHuR95%KTuBKcyr@i$zD#4I;(S0`N zt7(jM^wSi_l_1|`L-lJeGAu<} zOLL63&jjaJ;JL7nrm2|@@bIMXwT&tLIMLHP^o+MiQH0CH{+((?fb{CvCB_cuawOFr z`Q0G7}> zv9y*0MV}<$P;Avr9a_Nb>1k{sw^lk;+bWmAsWdEY<_KMm+xEb89L{MZ%(AUh`p=H~ zoIX6Dpd($Q_DeUm$btme-@*Y2ZB_ab;+JslvwFAkwYpO0Nk!f(<59R!L3*OChX|R4 z3gD^hPU$!}yD04nDLX@p0zS$kaUjMQ&d^f0`i;M|bgr!<4SvX9ofB?VB})-RJhw@y z@(d1=CnYS4+aO1OdG(-T?8CU`AQiP!4g?K8I=YlCGH@DI6>R8YtHA-_e9z|*>6vFO znX$`jHHCtP9+FL<%9w1-6RQ+rz7=kUz>&V4c~I|g6f5-%%Fb|*jdt5G(6pn$1IA-$ 
zw}2`_@KyGw$ts?V{L-uBs+Wn9%Lxz$Gpj;A#xwqFm)y0-;P}9h4QE};#Mv5>+N^EG zPwaqcMvK>RC0Cxjr-TAH488RE?S9hbk+S>bzf4z`NifRMArvL-sc;6eS0ruL#|TwS zH0Wj^CAFM!#=3yI_2&kwzCG~&}t2!-zdH;$X^=>GXdL3iL zohGVtSYf=zOXQxZXfn{nAwk$*=l61BY!VVVKt4Ol8w~@5g&Neg05%x{I;*zyKmr7XBEi!?Sy6W=Xfg)z%i}NY+ZalCsxYy(@--h zPg%^al3G13M+J-+N74JP!b^8Y&~GJTsL3jF`z&@HoBp^f;}{!eRRA?#{$XFuKB3rY z2f!>?u|TFRsFz`RMYy>lwYG?w`hxIkL_kZ>w|N93U<^TOD&%C-I4Ei_( z&SnMyA^Eh5?moak2H;uSuS?u^`VC7Ug3&3LHT4l9X;|sjA84$qu`v|4=fOl{wkL9x zNTndtS5*G>WB)_q{v2=iwHj5-XKKdV{JZ&@_Zk`Vxc6l}%QpdO^299`HZ4_Tzmj>f z&AGIMC>nU*j9VIt86&TGQin-nw2){qR&8-sQ3eBy64u=Bv;aPM3JDP)s|h_;NrdcJ zGHZYzUwX_;sA)gAgip?xxRCGQBEDGi)^)H=!BZ*&2}mq2Mn@CVnQ0D<@uch ztjp?p8ne9$nym;HHJ8r?Sq82QTlZWS1zK$$oReQ98)YbIy^0v{70M%i0{IeO?0`g0 zxBDY|&=kH>6W}5{K=Tnkh_G~CIl4ky2j^400$Q3siZJekmlvl9C(WTq+owtj8-YcI zlTXVq)O_upRB~2mVBfgbrp9tEann$GaMlf79a}oMnrNwaM~a}8=wI_b=7E5hV-Kqj zu1b^*UJ1IH_ZA!zd=U^Kk1&A*)K8?@Bgb2m3Yp;u1CX@Cg6I^IS|XR2a@Z!h2c1~_ z6sI7TY|N!5Hs9adKDA=Wi3wF@YbX1jbn|TSB_n$0G7w=TcZ|94#uLaghcoxhaFtaC z#Kvif>Mq2xnc5O-PVrhmn>~)X4Z0v?GMWijTf&_IXIs+s~paAwMvUDIj zu|iL(46tg|&97gpFBrR$#GwEE1oCH_I|H#dmry7!vq*=rV2if}rez*c%$~(!rwNgm zIP+#rzueie>=OR=9qvY_;W42t{D<$v96TE_U$uJ$6S}Rwdjb?#1*C_)G8;)jw-AaW zC8{QhV}exgGFY*=V`R-SQ=tVeNREHWolQLmG{BvC`z5>04|oHz»X_tE2mp#=B zvRmY#PFkhZL8H)%YBfxvY@;u2j{=b(8L(;YA)5t69?*Bddg_g^*1d$3SHS}4L*`jt zeTfhOhN83#oL>TV-WklXsG%A07)7PL<8+;24Tg4I{>Els-zfuuysW9iWjmq?dM4nPcT5(Vi29OLAjvi}zdL+1hfue=S zsV1c@l9A!ROHGwtj=7Hp8HU9}hX1}s8zOOJuB8GFR<2P8>(GmXYT;Wp^Cre>gg7Jj zqY2HBs)Z?&5r)i(g?k`qRZ$LbXY=Bj0;IaadyXDqF3D1oaQN5$6XYJFOBvgtn6~+3Pu17XLQ%GYJ59suC z3f0B_lTk}n7gD56kO+y0cI_1*{cU}+X}|?%66(6u_-jn#+iL@hE+#?D3}ztsD|~>E zSPw$8_kItlp7l;Ofd>VCyo3Oe-W#%mu&Myu&r(c z|Ao_tPma@|v;j1+{k2;E+09MPJlIQXo0~hc_a=DocL4L%elZ77suLorN2(`2#Qaj} z_CQyzZ}c`p3R(q1q=nRsj5)9=u9Z2}(r_JpwY{d2**(y8XoFtqk;DkJgrG*lBA}YvoOZA%zc4g?tk&%MEl9}@eJ4fVlrA;s%hTTZbN?7*IqRP;Y zTdcw@sayl62Gmq;l_N1UBmhYq8w^|Fk0^{d$C7wmCw2j84fz69E!-qc3SrvQQy?FD 
z%XLxPDAiv!LH-G@3Aa>PTSONdbM=M&D3kTt-&#FNLfse5-(e=P=|0Gn7+kf2jdKxMORju3Y0JWQ0jN4GO2XP4&F%m8$a&<`@ z2S-FKUt_>gdb)Eh>tlfja}w$KyCb+?CS;KkMfb~igg;7TECPVLFIJ640Ek$w)1XD) zzQ02F{{8w2>&TpbV$etz!R457FyaS6#|C1IFRL23>#S$rqHXLc8l2!UZ~bGJAfktd z_y5<|Re)8sv}>A8Y}jVB|^fWnrhDkLf04J@dIz9=__aqhj3=w~d=u z{V@FOW;?*?WXW)?os)8&%{klSv6=tv?tQZoC%ApPK6d=o?J1W>cx~uxbIAH#yRoS; zNw=&OpN1^Qqi)-MpX&t&S;LM!C&QmAMF3YKxXYsh?+@<%vjbrZjdRX+I?U(#d z`v=YH({cIK)Ar^~1M67&E7rSxUo`N|pAE_-_fAG@*YS62;%*<)%XjRG?1S-JJD>QT zv&CusfoIbWuBltA{e^wb7s^xO{q1kgZFGA0)b_I9Gv%?%PUo)M7br9JzPI;s$+%}p zzZ`>E}9mpudGeyZ=3N3Qq|9tKqaHHOwz!RPG?w5EEANZ=~`~1MBLeqel z>B_TugEJH3*IVv-lAJ&DT(6WySlKaji1=ODrq$BVOQ_@yWPwOdS3MO*swjKX!I%9 z4lzHyb_{x7Y7y69$h~8>6AXHPoi->jK;zE*p=*v6wZ7ZGQ~y13$B|w44RM+t^WyxM zxb8Eq#NE2{G`Fe2^`Vz*tnPTDtI7ERgMaVRITO*&K1K8QpQZOp4T>i38{mC6=cC{2 z@wsRIoEY}%h0{Y4jO%Uu$`hJ&*2OCYg{@`H!nFntDp7*W5QNP!)v0kk<P%e!n(2#xp-|Wzwg*%1twyTq@l)DX-hc z7oD`q3ezI5h3GWc`QXye7t`zeUoOmZbl5d_i$RJ-$4|#{rkJjHI=3ZRH1Ct%jm|&r zNBqp|(*DKQ%pp@#`)u5Ie}#oly9;{)*ZpH?R9;t@w{hLMZ$EB58k`WaM4ppklbnA# zV?e_Lc0FR+gxEhFn11rW@bc&{5jkJv->=mj)i8d`+dtRvQ>YiSN9KNMleBxH*6J>w zy3Ray%x1Jl*Ovh)uR6{=l-B-wba~&yeG{*GkN7plGAuLS$Y=7!YmbKa-`jpy&HhFu z<)xnH-A?)Mu}B=)VAS4Crw5jPh;zGq>c@>jHT&AW*N-KboT}jvJok6{SnrRsGQIDV zT|byOtGMxxsmZ^0mm1l&^^2%g{G_wz&7F@AF3z{ZnW`9-tFj+@t zY=86Y#Y~6hVVxf;Zu|CBn7x{w^84QPHYtg*!TK6|vyVCU>_66i(2B)Jo_}0&I{MaQ z^(nzt5q>d;bWX0d*2;c-xASw`J~I~X-rB8>Yf#(npYra6JRG#-%D@ND3P-=IF|X69 z+s`~sOdsC9XGr|^33-XvJ-2>7HsZyvJ&9^DttKTMy>5FhKr7jPae~m>#9_*v$#J?R zMukCVS7wHu$ZZ(XX?Kse8@d?27|?KOzqb=S9rk-%M$ZR4jhl6IW zL1|>@UQ<2tV7Mr?U??cbgW}?w%4W66cY= zCjL?9jWkL43j+r-Phz%}CLD74f0Vda=uo3drfU+D!CD?<>@d;Ght&6rni=Zj+`X!m zSmKLRjj);uh3%k)0tuv~nV$ClF7bRV5`II+l_XTu(fwhtenpEUx|+HX zhl;PXKGpF{PpCj-gK1^_(a@1gYKc&<)!V5!NYDS|dViU`lmMnhjEOy>L}ZC#p{ZH* z%as)NYfjxL6#-GAK+g~qc<=e&VSW1z=r`22YQ0bf)^&n(GOnmwRPl>a3TUV5hliWO zw9O-`iAkzcE@_p;RgCrRJ+imNBJKBYhKmD$Rt;%7Lhxh5Vay1jizE7Gm$mn`WtQlw zt!?>l(p6~BP8+SuI;?N1(uHwexpT?d9HS!BSxqghf(`#aJMEQ8W!F|r)N>#qTg2*e 
znu}5mwj+6w+5#yGR~nF#at&lIz7^>|CL$5m(upJrdbLR7WFHu zQ4BiWr07Gz#UTFmlD^W6Dfm^d$=c~qa1;uHBno!dC;s`Cenh>K(wr#>b%lb}4e3{Z zGS^jU#=b^(#Md98T&-V$xh}C@kD8_yw;F)-3}F{4@ZCVP7#A~ts2!sHjnD%dNG#?y zCwYk)zGS8w9*&DUEEqBmJ1jQ-=>`5afAI&k=KS-WmA1^FT_YI0(2jqNSZJy*PAGgb zr}fpxXUXCzDDIZOirca?FRq_~(wsHh;luAjI-|*AFsV0``03ZJqDRX}YJxfD;W3uH zGcrMk^kVJxh=B#ztEqWO1mDN-2{c)*6j-l&6bgsS;`!Hyo$&(|v+_*k#4k+j3Jo@( zD?A*=>E23hXHw**G-Oux{;Rq2E{;-~;fF=)NUZE1P4YHd2a(llDti1MX!(>({JrJ% z$lCx>w>$v0Q&Z_|+>rDSP_kcjAQx-ntIZ@nN2gkuB2jo38iEe-j_QqjZb4W61~%mPc(F#_6~ka!FLcITQJwT2 zrQ=Q<)0JA}Qm#;g^@q?o=ooQztLfJ#QJ;*gRZgrK4<~-KRVQnRSv|6Sfs#*C^3b{Q1}80LnI1+K2Rs&bCr>-yz0GB+nqiW z#RQ=a*)-13Qe`3jG+OAeA!(N))X4C=BPT`9G6&XxtZx)HShasUI@efqt}u!IyGBfg zq%IKTq+67Q83`LJwqC>tSo`c$p;Md88=-Vye)Jj*N?830ikxg6t@L1&6kqCLJFYXV zm(r9J`6}(155-;Qe7T4+E=KFNlK3#NZ>1(jZ)HvHa%4lbNfi&X0R$sZ617Ct913%a_DCMEBMHfSqn?YQ?>(uU93rsZb4~ z_+w~6=FJo*shCu__IW2I*^*^e8V+pDXZAO)YVFbl^RqkZG)PkYS$jGEuqZsTdPXL; z@rv6})gqsh999BD6yFvc*dUo;uycA{QYuqgXuLhl8Sp#02INy2x;aG$?+-$D$|+95 zRS+5RJ0y58H(KoOHt;xH@J6#rtD)gJ&VRJO(vPuT{x5cOJ$O7E({?+F#{;i&R{AqO zBu7K($E@ZDq23*Ca?(%_Q)@Esf!LMHYl5V>MM?G~*h48W1OJ{MW^^6O>tRi(Eiusl z4%b3|9+>zEx>_VU-`c2hiMXf>eajzlCi?TDBspGgLIz{CVXk;Og0A(vk`BNsC*4mN z8v&6b8_ zZh_d;?LMOh07_X6ux%f(v5YMJEU*hf#P2TXgS(auXqDF!^cd>)oJQo`oqw-ArvMNGOf*pvRcTk$(a=bM+{~AVBa{9 zlkh9jGUB^m;L~b3#jHkkRU`$u8!0#ArYIWQH??l_5il_w|D^u8)>jG1NVoSoCYAWz zwk?!?73pPDFuSzrZ*E%Y|1OsWQ!?Ry0sNQ3l&1}^<6a{ zs$8K;y22}CIX`>j+9O09=!cJ>n`r#2B3TiGqawm@7@Gel3+UdfzM5JsNsoIvS~R`W z+>Gd@iz8ytICxb&g_3HppU=)5WNk(Y$BD@Jf_JNlbXmN)XJPDr7NsbZN1zd zqNsW~MDNd0sjk768{L8b>HB0ZK?jMOr_!oRVtR`!l@ozfc{(S>5`nKCH#$c|d!kqg zC>AP8^_QKteqj|Y6BF=r;uPew!F=I--j&C|D1YOyAiTD{P;e(v(Z+g2XTLTBgKVPS z0U)U{Wh*E9^fu8Up_?r#ky-9Jcqg3VWO}tQGhV-vT09HFTkdn})H60VDnz&P0j-OV zoRT}pMn+yf5L>|OCrD{V#ALDHM|_V10H`qA@~EuOi&z0^?K42y`-YQpJIKt*I14Sd zRbWMNiJ5n8CxA)kMc6;9dJJ9f>T8ilTLgDjF?ev@iH84YkRSS0+ZJ z*Y^ta)@s(Ug;>AV;9jQ`Uqm`lp&INOCvkBXx6m;guAdo%-e-V4g>-5SapU}eyyoN; 
zwv5dFcnUO48gWusO9Nd}&{vG*++T)*28D1^StRbo)>mw=y^Z-R}xq^6>0lSkPAAGqnMMK%rB zwIpRV#nBbjX~>H_tmV1DTT<+2ypan5(7BMdsp7!)6jvwRQ3=tXpO=%ILE?sFw9D?v zv%nvyU41dIzE@2~KTF{In_E6RyOPpq>~D;URXk3l;F@5@_CHZ4SDfVZMJ1fh3Rg(5 zmeI|%Tk|zHKETfsz&~ka**@X?hwK%mI8p{squB3;h28lXSaMsqRL2EYD;E0I16qLTpN2L|N99u(I z$%2xsGJnv$&d>l(k6xn`_L&6`{Tj$gxw8;lD(nFpO&72m(Ag&6huAi#QBSc z0BEePXP~GIDVDc_DWMCeCk(bWCD9Q|9xq=qvh@NQDOitLLK?C--c_Yfmy|TOl#w|t zExA(W$*U%%R^Q;s4@ftajFW!dRgz{T{U|mGs^I39ba+zOgVNneiX&DM%W>j8a-9n5sck9~~7ixBt~6%*q4NU&mh+Fgu2RSrF22=z&= zcK6gJ3yN)g$;uvvb}GAHEha9t!dz*BGazYCxE8KUW13+Ma4_9%GuDqc0Pl88!P1yn z=N6(f3`r`s3QAT-|FtMM_k!f6BiwnZwJtH4Ak-v>dnuimwtHIwn;*q;XoneB3P&9J za%~w=$+|?*1Y_viXu+Hb#S?#jJv6!@iq}?BP0i!PYVtcHcr)IH9{V2eLfl|rW_1G> zQgzAu#X<|lX7B2fo%ugI#I}Rv&fc8wZ_8~?Ku};!8+x+f;iE@nNVVDJg z2~~gDX#vH$P@Axi~*?B|mN!4Hqb&sho9IK!BQHJv)JGTn7&Gb}On5d@MtAUvTpX=+an-!+zz zBhB@y)Jf{Whki}}r_||Zx*3l&Y)IB{V@EcxGAqy8uRORHh5L?wKB_{}r9I&lIvNfu zNz4j?pM4Bl3U4R9hVeCm134l?d~SMyc=EU+4YaPGP?FZVw(L}!{McY^z;?pS(1xoo zM<)W)yfvx5V)}hWeZIqC{q*aWMx@q6aUJ=EmeBn92i8)Bx};r((wa?Gm%kglH$l5h zMY~8x#$t7%eg-kdbXRl|$2JBAnXg1uJ7!?ILP`K5&HX$4 zlWxxZ1r@&*aR^q{Vy65`Vd{#qlEhBhN8l?fxWU9G=Vef}00oy$f`9BKGICp;Tk?Qd76s%BYgQqZTorhv-HXORZv}A@PQ=;yp)omtYHDthjV-0LpkRg07{c-p3>!h+1eW`*I+&QCo4 zeNN*(*yVUi`1k$8_<>PzXn>iG6(qy8 z@ZLIwlNii>`%O!qjHQt@TDn!!Ko6SC=_PB;Ey?R?;w+jbgVyUaL{UA&B^Zov;Eib+-#6q_l!$I04b(dGMw=hBJ z%nBioy>k8;y2EeG>rxovwVv~{LKNh}WL67s6tu`?qottHc%256Y`*M1=5AC)N!9-2ICqO8Hf8qL)vG$tlVf~7`g zyr}tj@q&SPUJN~tf0m(AMWJn>9-Swy)M^LjXP_xoq%zuxnSOut+k(uKJo*tF@ z7**l$ozug*+S-xpk41aE5H!@BuFxlGmdbkM+Z7YT%57M2Gte?U^i}%?1=*i0m@@40 zT(|M5BKWZYezcObaO+n3eE;zvT~n#reUXVB_WbXBl8jj5R}7%fervMrHqkG8iXZ2X zfU48#PUPKlbwd(ot>i$xUbFW*O3`PVV+ND%!868+0s#v|%$nmzLO#Pkia3HKem+bB zZ>+Kjo3&zJV2f05qCSPI0Hdu8EGiGsFC~NaZ?Zl`xE^G-x`s(5?L3v6ks`_|UWyg& zMS)sjD*v3>AJgyXg-RP{NqquHWoeA$Nm@-1!^(M)_-s9yefH06jYXHv@Ix*@edj66 z(Hdm?1ukO2?`=f>$>Vf578jE=|Tc1JRIWxrU zokK=2+NxoO0%{DV(C2Ila@Ae5HtMr*->%ZJGVL}^Bz5Rh5r27(wm7E{& 
z$dtqmHmDNfN^by5x3W+~6(07BGc_j5uZy!<$z&M3wTml2j7wRG$vxABJ?&wzF$_wN zPbTi;`~a%_bRmvbaNUFSs~rWtGScROttknOHa8+(8^mqTVf*rHh0xa*`npKE#l*w< zB>X>lR}ytnbZgKFxOMdyRp>;voDf`C6<;keSW5RI8v){^lkLcR=qjIe>XU7&1vAEb zfA>PmI^bRU7q8nx#v4Xb4~o_n0NiSxd8|+TPZ+zBB1Z%r@0AJ;OwZD}hmYsM+iUnI zov;@_69sI;^~@`4bVsIAsqvhdup_VVKbV?>a$~P%*uOWzxw{ngEqw*vcLud8w;X>i z!=0cnsya3%uI@@R21$!HX@7l(PWBT^D)y(UzwEU1Z~CNY4MK`~_fg+9F>UqNSu^4n z7DE@n*E%Xry2=&&)TeM4@Rc1Yq5p-6A=b_YKR>rY2fhnj;4G1`*EFCq8TytcM1PcM zBHh!qbQxv@W9+6}CG3ZE89V7!Ut5<19~Z2dOR0Tg`u#?OoVTu?Ca_sNX%xKvi*L~o31Ak*l zb7ePG@hixsA}nNM7Kxh>yZTriFEm%#*T;*Z;slybvlTA1tRjm21~QFX$&FbWE;H(M zQN7)K82x+aQ4~={^5JW2@9F$opJpAMZ_P;ww*?P|={v8fb#N-_{SnoK8*r>OS!`W9 zR6MAEm3TI121dqS^mu7qtoVoXV}oHunpMQ&laIp8jE+@E7mp&U4iCEAc8W2h@wR#x zWK!Uj*oE`6jX~;797Il^Ch%O}?aru+Pcc>Kugi=LDwA>_-hVa!IO=^Tdb70O3_|W3 z>zy>H9+rx_Q|JTJQco7#dMO)+;n)Y5sfnb)$iON>@PtRUZ4esKNE^aQ2(t6-_4i@; zT7Mk>3b@TXLz&KKyCSs9+`l@{n-tE<;QV(RZ5uX+v&PgKF|a`ig#O+GI8Le}gv4AQ z)|kobQOiS+jYkW}^yRM?`0$I{1yXBd-%(Q5E};Q4@X)F-coiH917zo6|5GVYimO#>r>oK5pe&Vy)*chOdCeP9n@HjjIepZW`R6p7LDTh~AnES@ZF* z?oiBskoyJ{*Hlu%<1a;9De5M~VT@9nd`G?pn*?G23pK3O$%0>i{W!HYArn8@%E{4Y zViV7#Nx18*IUR@oGLkh`Q;YZ}T2&tJXYD|Fe))2q0-CUoA34`1pLU45qESJ~Kj{Wy zGaPFzajaJhCWK(UA<0QbnqT=sQRdH`+WzU-sfEN<@6~xdxuNnvOxqhTl`K(pg@=H5H2PxZvY!O z7;Utca@3?r!aUD;rAC8~{m`EbFfB+y%*+Fn)t794E;Fp$6Mhuo!=1drWgEpHxTpKIy*0W-Z9?AphYZ)c6Jn)(h zbgN-~c=a^D=16&cSbe*Ehs^&aIMZ|ilgi`oZ;Q4icffGmgAz(H;lOZ`_XnD@YM2_4 zQb#~ORVw!nn%wH%gwt8hK$wP@{wB1MzuJ!~w};LkN!9mi%J}gPw2TZ^=WE+y8gxu* z#uY^eBA%H$UL1}^_ffh{xSc?KFn%EBr%!DbrCl4U&ET%IF$!9!wCIz9+L!}rw49BX zdwtvf=!Qx2l}&YA-K`ZBvL!KtMO>{wq))@BJ^zZtJ_{BnWLgd+w69B7eV8RiCL?qz`|T?l> zsflqj@}~8Q<^Y!a?XYyHm?+lmcY>rDA!P1U@rc{5~ zX}{0QiS<3P!_INBj!Fe;e}~j663aJrv{%}&nXUTGqK%j+m>W{X2<)>)zS$(pqjrxAVRvCtJ)bx#xoIx0=F;s2eE#h)2amv!lpF(JaOM z&$tag;e3`F{B$I3>vnWz8lsWzsUBJwk@r3Ie8{{ZqCve|Q1K4uVffrgk{nJotzF;D zcN^Ruh>za#Gq!$Od)`^-Z<>^G#mJ+NPNU5piMMS z`th%T#WJ$PP&|&C{SQdpY9qgL4ZuWvhL| zQMMM6#<+Hp+3_V|PXt%i4l8G(uIJwuRUomV%8j|=rl5`eDB4Gki;S#4D`u7%!?Z>> 
z1M=Qd>5k2e4H-P9BJkB66UAXUqmTjxg!Xk8!_e}7T@o$e*cdq0O5&J>E9Z~4ur^es zvoZ$@a6L|AO+#)dp-hX&=8ALhN}56ZtVZ&uj^qmxDMZOUqK1M79)N2ue-* za2_tqT2wuN@E$Kkp)dq+pCzuaHtJe*yZjZ)zd_Q(0<3CmOGJ+$$m|S$>yLSPlm_5a zb1J-)5d(8E2VR|CYCC?4>&Cr99g=D&tK8E3q=y)a2b9e2K^c?R{zxjLGw#vxAoY08 zNIG}g*T-$Xz5P228pQ8u|G(-Y0X3{ zOxv?aGAo+z`27D`V!ozv+b~JdCIoUJmZ)lYP)uu#RE%*cKvkqcM!IGPAcLLy#{g`@ z1?Ctz71w($4_ps_0im8+)s1v)pb`>OcO9+^JP7i{I54px(ETrfJn3Y!wUJ7QrtZoJ zqP#{#bE%kB5fiYb)p4{`3XKyuRv2tUaK&lbN zN!ZUAk(QT@tHkl|-9Y19!xQ6eMqs;LBR@x^DXiQ_*@! z0L#+tjoUEp6ob4L1edAkJuD`4(cE-=DRlNht6(x${bi@=jp0IbVs&jQlfyOZ6#k+2 zsP3a)!&KZ9ZFe5a`B^BoO@9MJa@Il5kijc9(a84c=FK2^190Ns0XGh4B{5d-l@(|6 zG>F=C0w*y@ogSzmC!s6FY4>t6>OUm8il{IXH)K}t7mG4(UUd0sxKXxLzL^GZvJJHPd$So#%&q?l^J6j7*D z$BZ1sY|1M6qfvPC>2Ub_%FB^h) zG3de*Orx#O*Krccg2}QXNVzNp)ndDmlQ2)pNpXt6j{%0Ibm-|%Qf8r;w|8&`U|4-} z_7gBZUA5%&T4(G|PAb`e={stpz~2XPnOb^p8$4`;){%PHYM)A#HtC_G@T}bL%e%tO z_=B9zZfdl|7iOx>ZkWnAL`m$nm@XN07F|x2!_ai|(Xk6CzL%fv2mGhD0E8q)LpA8Vk<~ z&rrH2DH@5WEN)Km3+m=3PA)E3gJ%PgMM}Ef&%2c0ogmC=Bz=n@Vm8bz`M}65`}t_G z4MR+<>Mw4Gv1@wdy_b@`>tIM+as+;bz5cXEmwTg3ZLs1rml&I6Ncr9A{UfK!i1^@9 zFx1PKt2(&asxnjLP3qRJH<4v2ysutE^WC^61U~ut__9_+zdHc$cM*#XlNc@R$XQwH zNbVECnN7qyUoEOhi192;#Fj%iL(y>y9nx!&cyWBLrM2P}dtE`pCbf!&?Gr>WwaOT7 z4+f)p~$DVd9DLHZj;Zhc566^e0hl9Q_3ljLtgA2S=~V+!POYJ8!Yoct4N_Xs^jz)AECn_K=Wj;+UYX6A zLQf!(=-Os696a}I_%&n9u?>LerHwdsA>&7(waAW90)GhO-Y97LiL5(#PAM7ozV?+F z4hyI=y^&}~%XPN3c%Lx-9c13GqF?Pv$wAzVy;pJN$R`V=dIuSa0aBKy(6&U&ki4#V zP@w!Rcw7&w#4SkFyNX0w3O;KVaMFNyvG*s}$Jek6^s6nYh_qM};~HX{+1CIauO=`$ zYG0csdmAx`$KYe3uHEeqsFHU!)e`_NJT&D8`m>LV2YzkWz&XVWl`cIpZ;hBmw|Jp> z-4STY-K87QJ-G+J)(@B$vi?znu=m{h%Q>>sITBvr`qEdR< z%Ih<3`TWL9LJU20FTdpazmb-vk_$1UwSKvH4@{ufmfqTP5<^09IN@gF<2=2bHKvA2 z%8FP6B(YB0u05pwb*RZba@)G6pd``6+b21+k@ z4|-r|6{R@*)+Qz&1pf9zaf?i_7(hMS7z;w1L)X43f{Ws2AT-yr zvBaTngD5|{5vCyXt|0Y^UT9}Ce^Tjz9TbiXg^+aTP(GLoWxbObkZnuEW!_@n=uS=| ze=CWR1w%PMyGBMK4=JXhJ_-&mA!kj)BSA&iC4*ke_@#D6FQ#Yh2NF3Qz{`Tf`r_hC zz1`yWGX?QPjeaAl$cHwwv>+k7#qCj{Dd;+*DBYbXa)4=?ZMLWk!|2f~>@&x5a$5yP 
zR;O9w9w>6@w7v9dc%OdN&-16v=KPhX)n|_?_!S=Jv#%$VPFuy9hBd*3LF}h^LZHzf4o+|vC#%gnYtRd9v+SH$d*Z{rTBaOvgKhK4LdKBbVF%mjxo%;E} z{)s|Ls!4o%n^fM*FQLe)#d9vi4&%dyTWXWbNg`-cn}7ND0W842Z>tZvoIhNM-4s7D zOT0@m^;X9I#qew^Jd<{j3-w7}EA%V+i^62iC>wNnxLo;Th8JkM1^+*-@du!^Dt@FiEmp&J&2I8=fdY?N0FGH@bFGsT3T=89(nD|8f z>6n?%zNtRfPTs(U<}bDp*exX1c+Z2*TwVvf(RNYwp*I3YZZN`1iz|SYP`VZ^&9pL= zHeIAdXv;|Juciz#GPQ>H-MkA?iT|Mzr6c-;jP&k-&d%bWH4n&`46DgCjVIVHCy9%( zQ`2zdObi3Ru{Snh`6jIQtf0MDyEdnncq(f#dwFHXOWwfV-#H^aBRFeoJ^pY6V>g;o z^vW6$m*WV6NxST>-juxtK_03dLs$&Z#Xa~hoA@t89z@V6PDgkXalRBWm24P&5}@;F zPO1oR%E;km2wBpt?!+avdeWGi8V+2g)iGxb6Jmi>+w(dK1`?T%Tk2@ejK+Mm0rQ&_ zbWTj-hLD0_%lWAd{~Py)UkAS%I=qLZCk0NV{I+ELL=pNq(UY~<6#lhKnz$z%Crq`x z0Kp_EkuGRG=5f~5uSC@Ssz>g-mer@2|!Yxboz}$Df-+eYX#-0x*`AjX|FnGsEjQ^ zGBIBeU(p*?_s;XLEbdyGf;T8zwG%DPr9iRwr7HBN@nX>P)Qlg&M{qO~hZR{Gmwi^q z`Dt7h;FKS|{>+XX_ft?))5WGo3aE7NN!Zz&SGpvjfjHa5cP=gHgnqak$Wj_$81z*o z=ty?76aAX&xZlRPrkdK4*y?`y{N(&-XC30O9tnW-DGCN%6T6Xt4lp9atm`1@h`XIF zNzWe!j-=%mG0*iz`w{6%G_>AdFxS!DlHD9*d9KcL#5~slXWF)T1JJb%ik&37)4eT8 z{%nEYckYm}*zgwm&+?Jwj`|m^h7AaRG_9cA^JF~0P(5gr4jV1J7DZX{zyZHF zKKo6)lL9BQlTjns<*WX((@r(!tP!JeHS{I2Tv&&X(nN#%x921Vap!4@If6wlM~dl{ zHu_*#m`V|i$i{b6@w0?c9*Jty;4WDN{)Z@3xP*Uh=PLYj>nUB?g!OE#_hX*UFu$vc zE+cm)in*t4S7N{{=&m9u$l{ga=6qd^^*UKFMe9#mcgZoFALsg*8}S+}8zF(mmp7UY z{uAJrZr;**sq{Osh%nHrnVoOHDl-wHW()k(Plt|;J&bn!hCr{ZUK z{fx+pSTk8=wnW=WsK1AUsxU2{Bsekj?K1R2dHgyME5T-tlq;?#!-G>GPHwOLm^X8L-G{*)s=1~tXMf(A)iU(Qzqvf zos1{==&=-A&Zc!fFslZ1e}!&o6CPQ@`Tt;-#Ku70A(S?H6|X3Elz}Jz6qWnnUi9+U z73A#4q-D^La|)>nYO_7$fCkm-;pg_5r#<=wFLP}8uY zm+w7EnaoI=w8lX-6Po}r&8{|#hHTaDxK43ap{&79*N+|PboT@hP%;o#b4f*A^|Rtf z^-V`9`ID%Z?vHtyM}tfNeGw8~y_Qz|orG*tB|pqx3AC;m5Nkyf0X)gxu_m@O5rCOA zy>nX2xHeKizf=bdDPye2`SD%=E*v`2LYka^sC-QTuVh> zWQfYh$q;>>x6G8;Q#(-_{A0oSSqze0 zOS3Hzmz+a4cJ3Vl=+*JBy+%iGCWJ_pHG7YWnUA8U=rsc>C^CXHPZ@C-YK%t&N?+l23IjfU(RAmyA{HeK3>A26=kH*Gp#&_)CxDa zMfdrCKVi_JIp@cTDnI4FYy$)85&SEbN$1L^UuF6jnymvzW3sD|p6H14M*2aN{O~49szv5}$6d;^^o4WU 
z_E9uJblx_VlncpmQ1aJW)*5)O0X~by7Bwi`?%z*oF_)uqp8<1kd57M2;vss6Il6i=;5)F$z;h^(sZcNs() z43fqqm%iXGd`}f$Vk;j}^kgQyEdzp+?k!Hg3*)uhF#PIj@V*9bsH8l3nl{8F0Wat$e6SUBFlpD@3UH=*7`IvS zBIO?B!vGOuUcy+^+>1N}e0Cy+i$p|_HV~f==|NMJGJqh!W5z0kp_&U<@|(?M4-9Bt z$0O-^AMglc+aJcdokBdo@anI)bI9i@iw05wx6aIs#Zv zXg6@XQ)P2q@qsf1a8d$RtR5mj{Xt?t$RP@&uV_FW5Y5!hu+b#A*uVs8R1U> zNE1RiiJf_e9^<1r^A0GXjX8xqYv4E>Ss$ugxrS-5W!9S0@y4_*OZR8?<~@bztd2G` zAJdoIdy0J--01Rscz)z9ppOecA7PR{rNuGct3rBA*0E8+nkCe*>xrp-s}cymbg z?jJYQhTh?hW^D1zexGy1a{TvU@TED>Jx1Aud3Ll1obI1(qe@Fe3f3fCB#Tjq=&4&{ zFq}BZ+;Jlnk2T@Zs+*>U!w8}=2cTk{da4Z{t4Hdc2uteN!^)#3*vA>ClaX%YYVfV}J-ch3~{%Jj<6bC_{Y z3Y-iuYDR=vRWkD~O#r!7ZBAz4%fjvI{8XKpEUMq>cP?sc3QV?;w8ZGvTnOPwca`_K zBQUuw@5?DbqAZxFO@59LE3Nf*n0uMT=LL)$V#7EZxAQfg9;uXsyEq%H zH8bD+`@3T{rN7@H1q&ljRLGdHrwCZ{y*M{-*?b(EtTL}YChDAG?073wQTdX5621Eo zc$rdcNyTgM`Xqm&7=o~;hpwG(b2_kN!p7yKwlK}NvEHe zcuH~wT}bI`q**t#bV>~}Z{>Dp^R7ecE$%&#=OrprAH?t-> zRU=PsVQTT$<)iP^-79XK)lXMho6S_vdwmqj50dq7tBhXG2SY1?_%s66_h~ z>%3^)pOMh)4eTq$X6@QD){Le2aV2A*Nyc9F*PNXsnq4|`&7Hh#jLCtKB7fB%OLLU) gYsPSX{N%HdWpt!AA}T1O#vhFf0qSbk<0zc`KgMmlrvLx| delta 101820 zcmZ5{WmsLYvNrDS?(R^mK=I=4?ry~$b}7YOH|`WDt{a!)?oiyVxD|(w!g=nw--RDD zE18)jvyz=;=3Vto&Ihf|gi5lIPzYdfaByJe!8}^DXE!kqU|?W`5MW?fz!z&mTtHOZ zicAk1TF@z^SBO_Y0W)K9(_#3paK_CFAI99&MWeJt&f|(NUkI5Q-JhBeMl$Z4cSm$| z^i2qVqxh9(V#l5*#;dR3l}*Cn2U9HlF;WsfXiCFjL$0UZQSM-vQK8cXI*^kMD z4GgIWDiIm1nWm}QNe7eRM#gEL=>>?_4taquA&`GDV;3OQ3~sxE;bPuc*Q88|_YfRKO&1rMq8 zT0jKFo~#1{n=k^2@SkVVH}zF0p*NZdCHO{-Al}XALi7Bmo*o#xPl%L&27>^p@@mK! z>f>95F%$#vt0W|J!YBm7e=N`gt0-X*6Hs28_}2$QNZvO}4j};f&o!ek3~%%vhX0N7 z!b+hKyt;(^lbu5~I2c$JBoIXhjsTFPs%*Q&hUQOtB!u_ZuD2$$2N6z(TSaF`!z$Nj zAi^xu-o8eyK(+R0Ua>l1%(g}(8)fCjujtgZ2vyUl(+MgyRNuLf?aAyxFxWPA^#z$7Or1@rTT%@kaYU9I@19~TkCVO0iPx4V6i#Nv{^$4bvvhyD)V~4K@floiUIX< z#{7g$#-+-q8o%IZlG-<^WhfoB$)feRK~;4)TZg;Svv!Z?7MM%5$nNqoN6yC|gd>jA z(M2VJzet`Dvt)+~Ji^MF;b-&e9BZCr8!Zann{;f_#$F)pW}UqO--zO%UWm6yT{qXC zvY@9cREPHJEsb%Do0mseD|1ib%Tu&GsC! 
z6nN7LJq1;+P2~Blh1MsTU4u{7YkF8tW1ZiMO{lUv8wPeaI?z;||K-c-3RJ z`E-%AvY4qj?yLR@XuR>oIXzPM%rlfmvju!{;i3`5?HmwX4<9xF5#CUt7Ug{Ki$<;X z_ZG6h4i>Aw?ep(YV@>)(47XaWwBrZx4||BOLY~AT&Ko-ZChZP?CVKkvx+>uQ`yRo- z{`(X1Z_=wY8uRNa8kKqd|p#wYZa|rA3I`n4;o-hTu?S(oRMO z4akB(lcL-3r=|lf(zN(I1FnM2=RhYud<=tySPi+w2?;_Jwp5L=0rTT3E$&z= z@s6EbA_wf_f-iepQ(+QQsFOdoxhZ`BYJBY1G z4Vpj`mNKHAvQ5?QNJqn|Q`?jMpup6WaD4(-h17M0;ULVR4Q~<`?2fymM+mkoU=6E2 zJAVV3Sg9QyR%KDyzA0q%A(9cc#;5^MB15m%=jYVVa<-lOc~X{EiR>HRROvG}#+YWX z&EwpddI!y*11RD%8L_P@Rc7BdWou?3NT|L?Y}Qh}roQSZTZe$RO(3?olL=GyjJbCV zm1BhAJD9jTOjfChc!Ru1=Yis3Z*w%w)eTRlKrS6Y5vM%my?utlw6bthHj6(X8(=W; zbbI0;pey%9HEnHj)c3D(bIO>dXZnQaUtxfpC8HrA429F&~B? z`}2xM#$eJ&jskW)T?#^*dA_d^12>`dlP5FXE_hKQ;elw)5ok)Zbo=V{|H)0gw`p>= z)m{CV#(#AXdu}Z$RskSU+YDf8E1e9@!x}xi!iA48jlWda%`Wc>^MRAGyXBC1BubB? z`Gwae+mNxrNlh-%s@QtAeKaWcX$@|ezR_4tS0S_HgGWqib=V7zE_!_dlo}2KsEZw?ZK)=sh?4k8@hBB*A+@} zY)KbVWDLSu01FA80&4aCNX}785dJdsD;Xa-m8D{}XZjS23fZ6$R9(;R3htlU52O>7 z)AIu-%gk&l&5dYTh|6Za0 z|JwcFi0CKyy17}=038PLD1eWUkVpU>`!7qp7|#YdFUE7DuuXdjrzfT>1#U}Fwt6`h-A^E(roML<`1I1Etf#CW1da=# z|JUaa?fyYJa}^#}dM)7$j}bn2NtKGo*KjlVdjY+_!YOo0zIJso^E)IueJbSP9XF{vT_k{KFqYf^MYQuI_B)0%Oy^%>)nV-lR zg4(32F9hIN7F*k7jg$K43*c&SnwE}VhHWC={KPkzN_p4emS}5L{p%&a2Sb7@i%`j+ zh%E!h2yu~!aFrEKHacvU9@`@5)0nh_Da$hc6LD-at6_ijA+@rAw0Xu5BEtA{YHD`n z1lO_-{6bo``#qLInwKH_sqL;`zD4%TQ3qv*UeG>kdESFzgE4Wh9su5dH{_D+n$7aQ zax{M+a7KTCYz<)-c4q}c#@?5cm&v%cX^sV>Fjh}o7K_7voRFrG+kPS~XvhDijZ^~u z6NCN3xcui|RH?8?kocs@M8nvsgH&`ZxN1z=&JkT9#w8GE+p9@L*T3}Vp~fM%y9HGEcB_5i+ZyEQ9j_Z%SKFH0>4A-Tw0OklFLc9r#_wVZjar*d7?RxBGS zuGx;rXz=xckHd0L*g9=~Q4*pfzFpFsP{4LXARj*}zwX*HR2L#? 
zJzFf2fQjG&M+s$=pjo&q4hl#Cv%UIaJa!15Jl}KEXud>q@d-`OKlD4_FJhqrm&q-U zAQg&Un<()b!>IUYhzy3zF;>P%kKf#8***#P z0eifE&GU6{rDE3X1~r@bn*(iXu>Rzd=~iU2f1Q^|rP+7}1ltgo-Uh>eD!a~`Qc_bs z+DV7OPKEu}j531pn5k5(+OUkhs#M4svXGg%jbp4@)@4_ERv43jLj zP293}#imtLFnkjKTFi#O$sGA@RxkC}-W(0zCO3Tna_?SOOi!F%5_fJ8M6(Ju##x(} zMrd)+k7k|EfFGP`@V?LBjfu&#ohF&hjrH)6k3SF(HxKG2g*XkBJ~kOZ_-x-HfDdUh z8SNG$y>6tHt78U<^XE9~d1UQG@`uu>P?jRM{w9jDQLfq69vg#MB8zOWK=1s=eEg+mwYF z2-;lj2JBA67yMpxA3jsEqJC3xSz&uz_V;<=dMKwjrp0ZkVtU8LUU(uRw)L}J=v11+F?*D`EZAZ5TEjk< zz64-d@6uGdX?prG@&`=$D%KG}IPdh5YWV6)_Gta728pp7GpTv-)Jw{7Qs15;>iH`6 zg1QbMJx)ld)vT9tq8_P#nm!z2Otf)FmtB2tU-2X|%YKNxeQ zCX6tlvL!Bkua;ng_-?x$4|_%SV?OK7uIQs?SdXOVk3S(gbeAUuQ=f>A47Jh3U4jA3 zcJ=DRMg0P{cZIV=sJmt%5vjJ)r0`RsZo?4a4W1#ni>{Hm_Z3-=N~nb@FgZlxwce40 ze)_Usc(r0LKv$@!?aL(FO|^`BQ5`Rk|IQ!(J!SmgyF)z4bXFY-42%H*42<&i>TLSO z%pB-Bh=&4HfJDRubUD0UAlg`~Uf2%xbKKKp>|^9j%F=s6C(HH4SD{3dnXIhss^g@R z3d-;TUfMnsY}2J*DP7HhNlV4~Uha+(atL}qz(lVn$SG@&bXq%p{8fNK|0H|0$n}d% z&aIqdqdD*K|Vda8q`wK&x=3^tBFvGnP3-7C&I$YzQKF_U z+Mh`*2M4L9qoa^J1z8Hciw?OH45rF)`W?lDiFW(+3KF?UH=hRaz7}j$ZT-m_pQCO5{p|6Q@>PZj|5qv^=wBg2jws@4HN`1G1Rg*BNL877Ixx!Z==?~` zT%0ipteVv23`bVYr7zsM(8){j$`DPS^0Hh2mfR_wFYy$SmNE=QR^Zf^+VJey^qby4 zx99X2w*aDKGULX-Lz4*TI<+vAovm93St`?G<9<~*|BBXUYUGVtZM;lVd4R0!f9sk@21e?u8!&}+57eK9ePbl7D6F;nAo0&e%XagOFh z`b6JAVZ=dMGL<)0jJz3hJmKCRF;}tAjdA-4Q6Ub%7jiG99Ew#Rr|^vL+VUj&c`mpj zip0SeYdK?oQAx6}0NO5=&%jeihvhg?;IWkLMo)+;djiAPGqU*{7sq%_qxPp}PnLIb zB~CWyRlB3CaY4YOESNL3b6KgRY+h*+{GqenL?2(G zpn|5bp)ANR?C)~S@tx9zCAz|zsj<`JQQ6Mip-fkugC*`TBQK@W8d=ncUi%BeZqRK6 zLn~&W6RU_d=8>7Lm=NQmG3w`sjcrbv-%cPvi+xvp4@GUAAQFD?9sf8}q~w`BXuuGF zDUtYhIRwe~9XWXv%Qp9l{Oo!$PqjcS+O zB(>8)wiW@BWI4yfOB@w^TkeQ*NEbl0uzs{_w({wE^OM)%&i-3WuSBZxO{ zu@nUo|BY)mK>CaI#x3`w(PO@Gcp6yhxNqF~20ZUO2Ym-4_s%`E;flX=u!`Ug>~C_C z?fCrfWo)i6=I@+46oD2l{M(Rl@X-DMJAvSwAfvD7*tq{9haim)1Pu6ZV`!X83%Vfz z10zrbP9`9~kD;QA5CPP7go;c0CTt%JSpfdVDHvi*y&DMVCTM#vbM!}`0EheM4(pZe z0^r711Rfy34SVxVfwl;(1@5hrL`Beei17c!X4>&?C4gO)m~=(5F{<|#f=aSq;P}0@ZK)!5;6+Jo9>Y<&JFmRX&18w 
zt{9-7AR-U&qZ=V|?K*wQd)LJ4jO&5qT#!sar9EV5P>wKS-@6rMU$p<|ewYdr76%>$ zV^D&S>kwYM_3vF*=HNBHa|0M~R#0zceF?}KL~p)pnC}Jefd>O4#((o2$aak2FZ`Pf z$Qn5S;$P?&0NHGbaDbYVgczX2KsXubH|Y>c z7|a9<A^0M`byA)I<@SAWv5|0u@9#m11ts`>mI}}z zUZK7%3VJssP7iD_FnV5~vKJx=Q0agW23V5(I?u0`2;#6&{zV57B+c$GUkB$+1}tL6 zp$EviEJ~rZ7cso#M(EY3Bdqc+lnwj*szj)VQo&IeO5bFYf|S!pn~av|qv{2$IkcgX z!3WH*M!2D;9n9ajjVEYMV4?uYJ-X=FKA>cN{dik5+cT27>sUx}z(JY#eV+K?4{oPo zg@`nYOkgD^;BFI}SB8MOFRqK5wHnUu=SRRz6Q?m@=dJ+v8YxoC9*_;41XlBV7>0K? z;s;7G3mVZB#wQ-R2`m?Vk&0AUQIo)IsJy#GYs~@rdt@9PIZK!sJ!j%E@d=s_VxlI7 z=GEWBR(>;XwyG;5U+Qx557o5~ujM7Fx=j48g05v%>@6 zX~d=$IgBdMRnUz%;7mHuZ*L#Sl$1qc zejDvQV6)cln*b%x$1}BM9IE$(2_J%ShTjus(I8z1BWZn&H=PkcPPzF&R{wTiaIJL~RK*R#CzxCM)Nz1fE*D(< zPdn|e>*dTOs>`&YKGO~&`qiGw675{H$6^hEjtKLi z`c28xtdN8BTnj~Zeh*0Qkf#HFN81mc;xLtS^b#g|wiM!kcEeLw>F(lOmLl`FIHEJ~ zr+0`lO;KHt=10TAqqwNTV~K&ApA^m=y8Y;WJN~Gnm$a?)I2KLQvVoiZo4Tit^%-T( zTg!P3xPe^*!OXWi?Z+XTOpsSz`TS=SV5oz1dMG)XRHRj=E&M6B0Y?j98)_<&xCG6q zY!E%PqR-aN`a6$-AWuENOIIP#h7pr(@|*0uZb4Lb6V6aM2RD`=s7ZDHN!l~ZYQ4CK zXjl2D;A-7#yP#}FfVya>eopsrp%-9RWL%gTMvtxQCY?ExJSsi>ZC15&^Ji#s#Wh6p z0D%X&R6Q^sVJ$I)$!Q7Dj!BYxwGcP!{3R}W{`#6hy3GzxZ?J+1oUMFTAgNPdqhTXF zeaU0FOW%Rtpmr(XzG5*`u2tV5T3l#rXcIH&u7NE6R~cQIhnV9RDS!C<3#oA@Qd3uP zzNXTiUMua7Gf0<^1Yb1FWg1HFa}u{#HPp{ISBc8AMs%|WdlnJ^p53j;FAU7*%}rMQ zss<%6e$SndYd>}Dm(QmQ>XoCo#=FGA!9t)K*+cKyC+%|Tk7fBX$u_a?e=gX!HD)JV zcuL>R9A7twn8VP1+Zz$Euo$a^4KWL*8QtpO z)A(|8!kgGji^n5K_zb5w{&%y;x^(YdMY2S8s~U#3q%-+9J*rOvN%5_?qo*>J zifPk&Vy;fDdm&gSaM@n{9e!HbsJ`wm+NPu{>IR`DY;=zg09Z32^?}6(Tilr761jwF z$xxrs(P0!bf~ga-h};Xo?^)gB(&hmUnH)~dGeMLp@qg4mM&=KGXK}5079bp+96EXt zxJ4-qU!guf*>0kG9*+NAO1G+J&;}sRZ4i?qMIbV@Tu+3s`$43=~Vj0(#xf@a+vz?SjB zz^LC7NlLUh*nrn?G&kD6aP&srPT-37RCA2WRl$!#23w(GUoYjGyOWfzEf}__< z;6pSP^)Rb-JdJ!ZYPI-9me0w_MwrUk?lq$)B&kf&@d!}-(r>J^`7r1W= zcDo;B7jd1ju-IWp=|L;2=atco_4gS)#aM+$Ymu@;S4JpI)E4A~Xlp5>a^&{qWpJSb zaw=m9%8-ew0lg&}wyDkEXnGCxg$GUHJ&#FTy-5H>bv~H}-FbSh_oNC!Hye3=?sF%9 z`TIZdehTcKK2a`O`*=LH0$xNA?eYs?XkO 
z2b|LY+8?VT5q*F#de;;MzfX$N){#9DxWo7^dy1YW80DP?Otc-|D@@Vy?5$tib;O(K zDUy2GhFuEZ$fR!NcI*F3++x%7#StBOKEisP1D~)G~etclGrvK9f^G=M|CmPnP ziQ^ABu-CnoUgdTp-h63W2-?U86IQr$TXZuG*Z-$Ljk`Ps)nPQ1SJ5 zK*P?OoWVk}t$LHsmMs;aZhy+m47}H#eCEj@dGX1)t880+bQt50N}Ogw&AUJ5i?@suNJxUo4dH$Mmd#% z+A4}+a8(1wH;UG%vLwd6A%&9K!(MYT0H!d$gfPf+;HAgLxj12+3y!6ymCV@nBY&~& z;fZKA0ttA8DY{BiaurHt%Kqk2mXa~k3{5m)8!ZG!6LeSfYBX8UkE8;X1XdFLF9lNO zGnRh*s-JV8*z1r{{{HRc@5YLnYnuyqU}C>S5}g{JYXIGnnM@b<7cNOlJS}ar0m8lQ zLb(o`L=e*$14{DDTz^j9<$+E$@Sa&emMLk?EC+Vi8~wN_ojZwosz=R*#`19QUT9r< z7-m-m!XIB~K(4}aF6X_#RJdspd3d&UWcL>YY5*{XNZ@T1e5ggxLG-$&o~-UNtaUG6Q-_or85cXByin zAALustK0SOP^a3rj*f(ENu~o6qP_79{Ng#S#&(XW{sb}gE&f)FV2>}X1dJHvUtBRW z+jZ_ZI_Rw-JqIv^fX)V@cJm75RG9RxmC`pm7-er-!?Yf+e9I2Faa_OEH(LuYWu&9l zkR#P>pqW2C_<)pCnr6p`Mv;Gm@HF&3$u-sMz;EzJcthK^Sp*TB81LHLt1YeLbNF>i zvf#`o>d^!mMPX(}QrB3a0OE3h`zq(V%*18eT^8x9R#`dcE5tD8f(jI4@qtYev$(d# z$W~W}pyice`rRf@Rhq3r)wTuRtjNVbkdBPX)2BSE$#C=0x;>=6+bkqc3*8q9wxojo zyOJ@>8<0Xit^1Ow-p0Zo&B!bK(&&V|iP(aMzYwGH&T*2uTP+HS>P)Z74FrK19AT~0 z?f8snpK5zLe5oFNP+s)^+CF+a8MOwk`SWqV!t!U`6oZ~lgW(hGzo6s)-c9~{x&L=1 z;XtDBrhiR(wh+E2J+1W7kO9j7a-ROI6EB^Oez87+);AE5or`3*N`j)orAAd+x$PT#xasq@tTtlLJcNIB4H48 z#<1=#1j|SSQ^bg+IiX`(uA{a<38qp}Scps2V8qj>2=}CJ*9ruV0i?t*nqwo&wK57> z$OTOi00ODeW2}Q2l9As9W<_)SyK4(Yhmr-T3CRJmAB`s2wu>_)zV}a5PyLW;$l=&N zhrptZiDym5M^8@~bRERh+>mo(f3B*cneCKeWx6gM7?mmeOlWvDfD0zd_2L9j)k0DD zld_=zQX~=a{Y)y&1mK7-8u?{`3NFjRhQp*wjb7x`dDNSv(@ATH9G+s11y!trgzn-) zkc%(FV0~LVQnUU0k~=+0HfgdV7O_$*-vhdx_yPFB9n4sg&TC?g!17=?M(yeSSIP<^ z8QK-*uY_x0fLgT*6_^+|hA#=LHx(C?60dt5!#uXdulr;8b$|dt!dNFQWrvs`%46J` zDjg{QfHWbUzKK}R5Vdc5vQUv`e%qI$9Zy~|f!!B*(C9Jg(Sx&n9~~wd^79dSTq6uy zV!o<&v8i6{Pc{hrz%}b%5Q2N)Wh@5Uu3ZD7WvP*IZ14$;Stg#UZ0u^h=!WNV?2Ps- z@+B7r9lttq^Z`;iX?j>EMJnM@Ef(NxYwF~ds4N5rt36*mpaq*Ou*e>SlEz3dlbcnq zUua1X!$!q=G0ycH#m4fgVuIUWVyPVHK64D#QjG^o$)C<#!OA)!)=HQ(o6r)YmK1P# z$^;g1d0J?bune~dXBXEWxHYXEq`6}?syqhwizJ3r83I~Sf286-x$SAI$x-j3?mq;e z^Ys)xtq{x|`WzS4scg1Q^LOGleb>i>i`8c1r7qrd40pwYI2BU9gESozhDM45DLPEH 
zOU?@r!uXzDild(=Gq4mY(8d@)gk+u^YDxhS%3D4Nmf(L%U38BXyF^ZWWQ;p*= zRa_(v+XmRqJjeZzi}7s<)Q^Zt%!o~~Kgx}X!FLfter|o?Xl!nxdZFl8yT%UZ&gWQ< zaHb6SknuMv@0gxo$~x&F&kK)MU{mi1To(zCe4MK%zHWV|?9ovs6#uteY(@UW*G4v{ zpL|3lg1jpZhU1A$!{&MllYf1E)f@?O4(HtJQt|;a>IgC)8amVOkw-${Gw|a>Mt-R| zi7Czg4zre6io~EWapb(1u}qyVk07P@%8I_Evtb_)E`vtJW-kkUBBv;rQh7?$I8f9~ zfYXa+Kbj9t#JA|-;*R@p!T*t_W7aa+saBwQo|nXw0BJ!i=7`-%h*qg}9m;jfTBmP~ zP8k7!WHm}sn|YK8gVxN2<@`N9DXzmv>GO^H0-qPVrS#}84z=jnCFbaWF9dsIa2Fq) zB0JHE(1g>Rv8fBq;(V)4wO1b>yRr#( z9BEElu$@YFz-%j4ei`bH2h&-R>`tq7G(p^3Cr{ zfUh3AC6K>6XCb#C^hJ!hfh)akaF|pwGZK$=04Qpf$16j%J zXPj{#8*kLn9qWqDkS(G-KZgu-WkUPx*?cxzv$%F(kfEQ+ETn zpJ4)70r0Y&eRN~Lze(E}gn8y#`~eI#PnUAQcTIkk!0jBMH)PLnk2{<-efW@|_x-14 zr|9f08wTeR#79Gw);eoH)y83ZKVi$q&pLpX(sdbV4=wr;>W~P-f=1V$^HvxDnLQ*Q z#UuJ35=yD<8ICIZy!Hr;A7E6bBwH_W!aOvI)&v)ZvasdX1hI|AqaNgByUGBbC)`lB z#!RZMg#?Vpi7(m26=LHK8~xEb$6sZR3*8onOY2ljXh^iIPN15n+=nYD%}wtX?59Hs z+gJ>)tPVCy)f**BFgi~@-9sn*W!F%Av@EMvdX`SIEh1N~)|pLL?`xng_#v#0!qbP+ zn8Jk_UstOPIYnRB%0xeCI3NYE*X&;5I{$l3a{BG|o83tH+DY~hG!Q z%tKMINK#iza#e=RD1fzMExqa=cVk~+Lhl6B$EBc`#ocB$2(Zc)ipgyZdFuFR(<-U{ zJ|Cm&Lzg5pElIIm_d{Qj?G`sNUoBPsqy})U77h)oJZu>vQTJ?kDUGwNudl8BJ8As) zJn?^Tv{j+{d$knEI>305`oI*= zM8-g4(=|YrEBMG0ZD}J9O=;;w0Ihb;OL5$FTPBt>2o**tVIq=TZKM-LDhZG|v;Mhm zZl#97ha1VPT~Hu7kiuRLa23S&=cWEY!hzvv4HIz3rHNQB&D=@4ImwF* zAG6*&C5&#T@VPVpBc1~Hw{hI?p2JZ1ruDF*Yq^QkbM0UWg5E`z#`{kS<+Rb$$q_7f zFyRv;Yi%9Y`u12h3nvLS2Dy}R%VpTpJTIBO$dN5y8un?vt)(&?FSBdQo$dj+M0RL z=@T^DuZ*lvmIdk35N)%>o4SiE<<(j?AGlf%{D3bonKz`(dCY)v&85doN&gfWHd*h} zqx_Mnsq_*yOlV9Vycn7~qSN_S9eqL#v)oql4cow%Pe~(<7i7V0tODWCf;O&gnhkTp z_YdSjSV18l14YHJIpb2CJ}?EUEeq?2{5qK7Yre#S6d-epLT{wi{xIC&vBfzl4fyP~ zI^@n6l`{iXj#33kQ3%kroeV>e!|Kfwj_<SA6uGoFiT0ht-WV1mwaWDpe;vCwGuNlzbHDCVWXf*Zb z0f~glM$P02UdRdQ*TOl6%gWw|>(DogkjJ^;8&1lhDV6cyp(Kjh?4d7V{P87t zIFyLz{PNOK!20NO+kswA6RVPn%>4yMf>$0i!N&`8$I0I@e6`vhF0=~9gl#1r-Qh%2 z+G@WEcdfAG3Mm#f%08#r>P@nlhq&tG{t%e+F@Q^y0hr9**L|SpNb;3fJlXuLAh+QB zQ>>|_qS$LXF+@anPuYRJ^!L)wp&gjo^4MDHp}+(Mc>NNx9imgF_MP5bai}ExpZ$p? 
z6vgJ;rMDh9Pvv1p&W1;@3@IM`*BBl3=>w_U=DE5p$;78N_5|Nd#NEIexVg;liAjo? za2tOlO#`S`scXXjnusP_M#y@%xBXmSK47F{y)uh5h3`HAKM9lf`_A)$KxWQKYowm5 zkdIVp)YZ%M_sBI7Mef3Mj8VSQbB8;i1w%E=^UtzF%6%7}*4X<>3K zPlu^Jd1=zeFQQiPf>$4?8%Wo@78w&KP@CWoy5eHTDg7!!#OI!}1nJtZt(`w@LAYN1 zlo&ZvqPt}OahJ|**}@P~h`ukc#l`_Md7}F?_M0Il;Nxw&a*Q`?agOZWU*@~ZaSWxl z(YSelr|q1BpP8WHRMqSO|2D}AKEIKc-d6epLPgX>+m0@wkX_K#WoPR_SFAs~R~Psb z4XTu)WKY9z`PWe^f}cc5I$u?EHP+bz=aHFs70|y1mSlX@8LQCuBe2nYZqMB7G2Z&Kui<%QRH9TWq7mV z{R<2Y0%R7FXw5c8{S4Y}V!Z9*|1vUUk7{_-^EbQTcdWHs9LG%`==|c*qcLtrTO@IL zn!vl`UN!Mn&$DVy(Lkv%&|>_|h_m2aTwHf-8L9j<)vCMYuKB0mT+uv;-&K6t4piO+ zP;?3fZ6iLdB|LdSdgADJlnq`1&29 z0NlYhMdz2P6W|Q-YV-cO5ha|8e#2dpnGW7M8iX^)htNs$=q>6`75aZ_K9|!MpD0vl z%9T-MMxPL6`iNLsRa>kjkcK|>kN$oEw20rs2&J@pY3ih@wczai}1rWy-Y z$i1rs5CZ>pwF()_voEFr?_ux0)dlZn4h~tIlJ|Q%1EW7QDpR6_ zU}8Hok381i=ARO;8oP_IA4tvw{p9)R{yK4{e#tK%tB4;^UDh`~sz^Lkj7=7Epc|WJjMlaG=Xk9#I>2gZ4#fa1)sjF z*V}d2uwKR5-zWUCDjIjEFrStM-8ZI>>E7Yjc$zA(xkvmxr0S#eW%NNTB3yM@; zbMREq@6})b5)y&TLlEuW6YJ0Zs62RY4O)N2f`12n_)Z}zy(~aKXSxfH9XSoWO`a za3m1&F%D#0CVZ8O7agD(%2LlW&$Q&p_ zhW`pu@I%1>D%lee0)^9H;ekr!u+SiyQk=4PX=GC>C+yeuJQ9HH3rLLrjKKpDj-g1B z8`0rGq1339?=4%*;@o4rp$~q4g~fZvzIX}2Q(?Z9p>?CBy`wRZVj=V2aSXGXGD-}u z*bhfwfFL3%&@loM74#<_QsZ6DKpiIjKN|L!G4I^&JyN8^o4uuVSn~HW$xsOPckbl? z_47MMBr$~nX#m*OM!*65N=5_+L{K6^09LqT@4sPKChpeefO3bZf0(?m(!UoDkzk*a2n9%UwoAvtx zi~7dZ%2GnVBQcD<(T?9MvvhI(ymQfpIEU{ukHsf0qUm+qc#uGFJJi?LZwm(p>_j9Y z0`^@Zql0P=k$=6bJYixozR#z>NQCI`$drI3gi6S_uDMgy1%|&)K`v-ut|unR+t+_U zVNy6~kl_+^6U3WLQvf)B=xb9fkU(Q0$ak4!F(OP*R0~GlI~Io>8*K{w4R6xqD2JQz z8cfv#qT!;E0%Jd*pn?wdkXzruJ;eJMtZ;9>H^9Su?ZQ8}%kAn(deiIoVhQl?=3hT6 z#0HrH;8xxjU_TG0CoJ+m!Cwze24Sy#9e#zZaD)Cvqy2n$6fhC(@ST%!gkyWxl4ZgF z1^K2mDk7%&4*&xKZpa~W0Tob*U_rLXDA^G3|7?oxl&J&)SL6^s0?VymYmVG;GT&?! 
zc~QNihJfDR5trXy=p6u&{jIL{=*t|;ThWRFX8OBx?Q76>q25N8!%C?2_St392;?d9 zzW}q5ZJVFu>j=LBX?O4_0QDO3ic7+1ZItJv61x!b>1vk;68Nu!GvFa9>Q z3~D<(sbGgWU*kXQQ9z?Aa7c^Tx*qZQvN}@X2ev05oXZYH*)@W8kB_*&IXr+RM~-C1 zB5Kiy<@oysR#F&LEQ|nELY9PZbAv(_`-@I+6uF$*Mck-bR8ARh1pt1A&Um|Fb(EPQ z(u4Uu_RtHX8h3ewDK^y!IY{jW4IK+n%%MaZLT^Hp4>ym^f?=D2X0=RBAPYY>NVw2a z;7}{2y1a*UM(2tMWKziY#jKsqv~V*Bo#l~=X!xS=azT`M5zq2`=HopKwRuq)6gl!X zfo9G*d*?{3+EkJSJiwBoGiitbDkVZGE6!9?M{eOg5haR&*M;$M3j&^J%S~Y<~6j!3w|;eE2-84&ePe=8LD_ zHE8AeC#r2S_5@~vbhQ)pi{~GRc+;N1nA95VSUHE4o+=yrPtx^J{Iq`!Eb-1lVb#(? zU2rISF2IwjOyhH9ni}+Hso3h+;1!!n`lLI`BSQb)WZm3=$))C=bd^7$o(Xh-!6loZ zW*BEtMeDLk5G4R6VAi-~XNlHLQ7MwGGAi2}PXq5Hb^`9|iAj3>!8{cF7VvRY*-tPW zJ4xBE?xy)B_CD*0#@MH9X=LtCrlbKiA~8kES347j90&6(Rq%01wBolKbxNjK>2`aA z_ZU!qYLNe&RL=-n|Ee-hmq{hVJpK(3s2;qg=b0DatktYwJ=|T~ ztNyJpgySM4MOC{uR=Az6HffCJ*b+3)bAUZ%PQ0cv*CTG|(Eut%u2bzXWl3T1V07$H z8Kp6dSVGdXK0EI5>um1BP3z56ca)(_thqU}*%?)#oENJLPfOnh$=Xmz*@GB!Vh|J&jJXMG|4-yNP42lYwq6{Z>f z3S6Z6|M29mNCH@3MmI7tV8ta}0u3-O_Z$)x51Xi*d#p+uomu>B#p$*k8MpO=HSI%2 zK`HvgR_vzJRfosTN$1yCrk24v)N@a}b$7tS_0kT?>CQCm8OCcIWGgZx2Se!t%2Oir z?YJV1%^+1s0#%II4{?mZ=^Mr7uOK~93>PV?5zIj@x!4|=@KU`EK=*(!M7qq_o`6Ow z;<`)4d5C3x*9N*Ns>By8oOW$p(a%}2Y$F` zc0vXoyp#xiLiDYJNL39Hx&AWFgwoVj>91e5iIZ_$);{F-IZG9r z0{K@yHHut+Yzg312E2M)upVs8*RJmd@ z(%k>jldLAC&nOS!YDUk#cU}jZY97#6qH)OL7L<+$PzOus2bk<%`QhCX-`1a^K4wWY z5k;PN2E0Q^@;qhb05Gcvn37S62L2r`h*PoVC*70(`wJZB)I!^FfMT^Q(LT-B6R zysT_q^E{5<@WgJkhalO}!6k4NU0$(h@;G{Z;wg#4O#{WP_|14W8B!xbQzJujV$(lb zixjg2MV7+E0eOSgalSO>HLkbyNdvEX}y=d2N;lA1y;cpxT<5i<7W*OA!0%ji!l`FMsM@~S+E(%#9XoSL*f)~B6pY& zoQ;;9pAJ(b@y}64M~&kVTrXG~jEJMk$^~(q(00h(0jW201{ToeD`DXX%AC&f*UN=4 zKIw6|5!HjT8qhZei-Q4XC=|ipyCdlPtABAGeApcS9!!uhFNIMMLo*bitLelUs$x1c zfDpd)jsMG&z${j~Sz1u)^q?sbk#SK3`;I1FNxjxE#&Wza*sAOW(gf%g2PZz@rIU`0 zh_jQy2hi-%!hekL%h+eot_RU4*f{`cdr)oMHVOI?Mh;aZE*0da5m+M=C11*h@ww!| zBE-Khk`{^TnwG(0bL(ug1byz7g9K2m5i{CB|F!ZnH&HCqGNQ3?7Sz9z*I)>#jGEUiX8J{s7nyj=O!=n@*jM+Zia2lco0w&v#$? 
z4`UQz07RrwPhHvlTZ2{lx*L#kwM#=6OU8`{CIHvvb#96y8u zn-zdAR?-7brUQ_zig#2F7Y_aGkd7mpgrp0=T^ZKfC?1gI5E>~O$p65I*)(D+rmZus zqXT~UApguWf!Se;$*XtNoLy3X&EgYPB;|~5JEs>SoMG=ic~|x&-aMhA8cROO2NH(Y zF_o(t9xm#f+Ed`lG`N&Ptpph~A`U>-Q+Cj`{`}=4-jt;t+u`{9X|28*nB7zoPAiB564Tyg4xgL*|B-iD!I>G}$ zmL>#*@yF>!6{~e^@LGIXaA9e6aa5j+r8i32^!5(f)}AdhN&}`nGG4r#5(9IDPb2H; zehh=ACE55&qW_PotBi_c*}Ay9ySux)yA#|!I3xrgf;$8o+zA#uKyY`L;2sF>K>`83 z33I>q?)x!ooz-34CEZh}dhdNsJxUNapG*+nUzPk!KLa|c?5mf0hDH<$#t4}R*gIL8 zt`SMw8^j-u98q>CGL|+JOB!@2Q@yRkZ~m?yT}psu)&y|m8P;-8D#fj2Q!{OfCM>l8 zkOvwbhcLN+VUI5fVnrVt|1K$f8)MXQ^j6mdCqO*6&Pl~}G=!}E_Tm!qnRW@9)e{h= zk#IF%S`HQS}hSb6D)Zs%zkZe1AqkkJPq-aKX?e=AsS8M^h^`TX?)Sz-= zi8&eVu?vt$KX4lWc<<&ylIk4svBFwVjQKn!31@(<>xM@|Dd5M*W#sZ@?V-i)YH5T? zrz?#@wFhB{Hlt$mR!BIX`rQiZJZCfWp0Mn1G!M#LNJqRhQM_oAscVZJlFyCe8A%>arxvnxuZzPE^EcITZRV zv7CZQ-##mNDJXe$Jly4SbJN%<9PKz6n$Ju~t4wAK-W_W^;YYw9UiIOR&VM}Je;m)ubjS)>`?ZQr8PjifcfxpB;PR75FgWb?fs#Kg?# zYOA4PiwF|(ZJQGfmlpwankQfk5biSayIsR4&=YdSWV0r5n132l-Bw~MORgbO_6Kr` zbQg1AsH<|A6Xv9pUT%tvB`rQZzisr$uwIE9_+mD^^b*-hsy_K#K?p2$DNHP7tT-|A zq0pH7kkE~_J8t!1*md}gwSoilAi$yVNyiD{!m7?bI1n@J$-oKWc{<0V_P8P<{d=TLJwW& ziU&t|9}@d$to^d-%j))O8!{Q0XB{Ja}HSAgrsi2)YG`iXMiOd#9gx7ABgyh2yMU2StEbsEwBsrIt@3mBva+ zD}aJI2Dv7B)o66?csN@w=wcj|{d8V2PWJ@NkUE;8p9doo0+w=RMjofUI5n~q9a+0k zvHR>Q;nqj*t`Hi^HV+Fsu7!U=sn!jwbK+ZzTxk;kHY}PfOl>bv)%yh`sCH0nWY|g} zb>oZZR|OYS8_1aN&7=NUENScvt_}5S&*~?N8a^TsxAqe6D$pr}+(R^9hKY#pFi)#p z+itzXMWfY=k(tE1ZKc85ds4kr_WSlDgHQ*rXQsn3p+n`8kr`!h@)SM2yK9+ zR49NB;E3OvH%BO|j0YDuFyeU7tsGKk73N&EJom9`8ou1<(o;*j39jsru*dY!S!pL; zhsFCE;ss!0;bvz{xOO=EgYguZOwdHSU0o6<=6EQyuh3rz@*#OP!{bo|PdXaj*NcXB zCCznRN+bx!v6&yfwBJVKieMpo|F66ZNmIK`~`%C)gbTy7KHvn;nW6WF@ zC!VjgzJtcd>Q{d)Nu4u+C3q%rvd^-bN4jZVw4+L)WHJcF&-^W|QPVB&t2G?dvAOsv z02&^j4#Ms~@w3cS5}cM7<-J!bxfpN`{;E_e6)=f#G_#O0O2p;`jphXxIIONIkGgut z)8#C*a6061XuG>R?>5~hQ8U-Tyspyua@&(OA>y;r;U+_rrIr}0V~@X+!M2bDz&l? 
zx2z~k7%D9uUB6ZvNl_=B&VEJ;_%f9kFUA$$nCyS9jqoR)-`PJXU-Kw8>|{;tlnV`r zayRGF;4^)6vnJ)ydgmC5Hy_=FTQ*h7g3e8CJwfGxU1Cp?0nLVWgvLP@tK@;bE)NjM zRKzW#(khwt4Ne3s+)!0Vvl7e{Ak~?oQ^n?RF(c>hQEA1v-~njXhq9M zW*V8VpO(qB+ODc}wsmL7!ie!dz`xbRShU0Y*Iwb1<{W^>dd%Vy=Ct2H7M~-UvROi)gGAN&fpgvQb@C`R|rG zQJ+=oi0tvTDt*3pe7rSAx=)HUO|Gu;EZV-r(y5#~@g8IPJce+!m%9fT%S#(mYYqMFdP^spim&%v*W+HQ0`i*Rcj-{^Uburrj?VHLKvuJMsrd@w7 z+wW+Vn!>AWtoRjP9t42HgUR9-ecv_9w+r05?M)da<}I<>K2LkYlATbznc=f5H+UAP zI*d%SJaqQF1l&?4_;@u>Vp}4)$km!wtk~@c9yk*<5~QUc6&s}C7JaK9rTicn`kZxD zij1IDKC`pOG7eSNr$`w9kQ_t+K%1(nx*QvCC;RdM(bzW zef^!5lBqo={zi33|D3)nrDi0Z^}e~z`p441l)MuekEP(?pPJ;HoEdXzLi@4k)bUNq z)ft&)=0)czrN>GY9rY zj}X!wC*wxMn0KRMN1fSQ%9d@G)npZqGAT~dRTY72O?!J%7iA1D^jmQsGx+%1>RrXv8S|+s3%5IeXF~3A?fU*M{%Nc} z(27BIb>#|uD-~8x!}a*rDgYxL`>54-ULm$Z$YO3Y&-N((F=CVtl(nPdbn^bhSce$m zdzr$B>11GT>TMO}WRQ-AAFoE*r(L(h>xd+YO#1iIqfe(*W4Dtw)O-z5jOPpL73kg} zRly3XSWZSmsTC59>$RUvR(yXqdo(b%AX4~LiRCzJyA6hNBD+4?n*-LVzr($opYw}v z{n^PX^FHkB*sKWG;J$M(;}f9oiI&vwC+g=n!{@DiJi})JF$MutHj2pp$%?3IiW+_R z^pDVUHvGnZO$H_3c2j5;g}lF1_BB;QCufY3GG4%cg@r6*DYU95OqjCAXM>d=((XD} zFmavicy#|_Iixl=byNb#r9I5mYx>FROy`<-eAje*RsH605~1eY2JIT^E@8faR1`P9 z{kg{LEwQDgd9S|4p6nmShdifp%ew6p!v6PLT-p3TxW(*nmfkks`(gb44TQ_&`oPzZ zN0JjCC~7U_@pOR}Jkbce;9d$uuTtUdq&U$1W#;?#c5%X$wbKi);ssjVWcIgDN2(K}q95}{og&-Vy81^p(LI;XcC1GsN;U;K@dOfX{ zT;huiii@m52Wr|NkO3uPL9xW2<>5KOLBTH-_`k7UA?Kfs*!SR2;CyqW2Qa;&K{|vSv13LNMyzoPC4p1-=6bFZ3Un%|ea{pTSpu5JSEI_l(bxfUE-4vm;=EE+P4U zNFU^eZu&VeFVq~yFXssw-w24;c7coVr4eHS-w8s;y}m(Fyci8&_Gj&D5*`nTn8<(! 
zUN(Z*9}Zd)6rjciEKalkCXz0=%cErkli)%6Gyv5IK33{XKtc3Mc9`ViCBVN%BzLpH zfg+$moJvE7TnIQwxs4GH3LMhBLxbvV1#0&>6!-=G%kcmO*hx^EYsm;h{uv`H=q>Em zTTQv)9fR4u-zTH}f61G6U_lB7U-Nfe$}=>SiJ9q{Y10U)r)MuvcxAO*Vrf+uUv zmc|xCe&wy6@ogF6K^JSl0x!d{$bhas$gsf9SOlbI5k9y^Fk=)YPS_2$@IGKDLjwNN zHF;ST^J1Xu@Ib^a_CKJposA2-m8*-bgB_c#vzwcP^<=jol%#bm}Az>NY zkr=fKnsHhu;N3--K5W)99ZzP{RHtzAS9@aisEPISO;Wv!e5QFcT5ss*pu~5Dg?YUv zPZTV@E58hpYGT8Wi}g1F5TrA~GYErHjx3XQdK!eMRVz2SXPKCoGR~Mr^Bb}du$Dof z9F-j%5gl(aiao5b>(EEZ35Lm2LVA|)m#|Y3wKyB?B^+Aqt$KGzfH=toesyzi-}w zS@Av~Rm;C@?43;3nf>k+ht=8})xeK7`r|i>rI)Ej2ZCDnNvREm2(JgbDExVwE(v-c zMihkrS5rLu^A5ZcFY}prY+=j3CjSA6%}tSB8rDXR#57)_E=QML( zWDi<&$tu6BE4+(J+bb`!u~O8fW|-9GX(CTxzJK{q(jH%!KC*KvYz`59us7i$gRziF zx%M2#Bdf4cA;rFW&zpDpTN>vU_B3Rc{cIv-R(DQWuKiDp^b`4CC-d(Y4UCH5{>{Vt z_Z>Vo?D$>=atDQ&|Biix2R`Far6vUN~&8DPY+Qe$_>rFh^h%)aj%v z#Zn*y;|eD66?iSLmss~)0?tp^6g9bUg%!}vcF3+L#`*C{MMcGeOE1Fd8F)UM)1<>n ziQ6Ft^n2JY{3Q}8wLnDT^x;ql+cXxg!bb{RmJ%uWWCaVHJiKr|ZgfN^Ks5%CAr=kN zQYJ#2NIjh0v?taMfpO6^a=xa;K!B!Ui)cYB;3J_$9nany!LcCZ6@6cAoXHHOmh|Ku z|Dt<;3yEr2rj9M212?*b3~vpCBGKA8*gf~@ z)1L5tpcAwd5d#lj?Im3gU@fuEdYNlV02`4nr?L@){#+SHkT(`4-4ky2iw|mgU6NX6 zC*I(0m+Xvh2U-TVP|8~}T`@V=$W_fhNBgsG^fzy;%Xl?!^?GnQMb{z5M(RUVM>b-s zoGDDc7-J<;I?86p{AN7hLMBn;HZ(e7;vRYV2}_lLi15IP@&h9G4)DXm|45@cB6;yg zGxoPLGm{(nMYbO4GxyEkYjZEdJ6?zlgMON|~%x7+cVA>9k6=WjG7$%AT!_Z_qYP zEu*>%a1N2zhyvEM)0FykvGaP&s3Mz4){=ZJBruuY7 z{EgGwIiQU&X9Q1QQZPwt4a01cwOP5CW0Ql&tVX@LuSW0U?nycc0-?ahshtiIu#Obhz}Dzf zLS6oM8(cfC>A^Kct#6}$l<;)?mWDZqQyM7hbEfxQcdz)beiNtuCSG2^6m4m2KCe#H zJVbFAf~6`Wz*Kyb*h|sMK>q%_abLy=)2iYK{;Don1wkcXW>J>;>g~-4FxMqmUOiwl z-lo9bizY2daMsDH`ugo^Kiqs3d&YyMFm4i-P%wS7#m%3Q-}<*zG2ezw9@}lJp^wxw z5Qq#NrfJoq8GpmGC2y1Q&wHc7YDxo?s&Z&$_-)I@BaC|CAG4S{>*2<~k#9t9dWZS$rC!m9s+G7WeIsWJ`N>?2%}d zF5T{wkTb7WsahiX81~%5YJdFF8AWkm@E|Q(ju2qv{MNKLy?v-!x@tSDbT!{W_v=PD z4OgMR@wQNSGo|P5L}|I1y)AMzr7>WCyK)_?JvIgxu=W$sNyyL=TOQ(VruO9X+jy`! 
z^>cVct|}21-=1?|wv^}2ANnjMA-9CI>0GSPsRjW-jKE@%3rce5F2LZwZYwkEL9S^KaPi7aQamD4< zu3H=2FIxob#JTV=MxWKnPvDc}Uk?}(#(jKHxu-D^Vsni8HA9hpQmt92PlcT1B|W#wl#s903-5-=sm z{ljQ3t==G1$B%7Oa9>!D-v9X?({D1~hDM9k(w58iXYh?DSx=rjA1`w?jPPhQ&rdm9 z#^c)|d0`aOFiQg_{m)BO+QH_9EWAo;NYF8Ws4#2)&!PH}^9Mn7br_-ILV~1ZXiGYD z!2?>W$`paL6WNL5#&4TO=V7y;@Di+|dVPAWk&uxqqp`TFEi)!1+&bTQ@bR-4l}H`7 zj6?Sd%32M|n-V*JRAftkpk~gdKk0&&Ukh7`!0(Mj8hX5EwXIFvvux`dAM@Di1mw$-E@tiMfaQrFxG$es^5 zF$!unFbgFO!Ba3 zcRfq$b!#ya;g9nZ*vX?0&ra!qRKX~S3_SfdK{Cj={xHt?B0j%=)_5A;n*^D4h)!YE zYA)^u7N){)vecyFzx-4tqd4v{sE8q1l<|kk#%_aFLi1j7HVyic3s1f~-h;KZA(;zb zH+q%ayBj9hyNh(U?#ly@S=f|(1qkzAMC2s=vO#%BYSjSn6oTkRQ3HVh-KSl_g23yd{G8(GpI!AQA;T>J(PA zBK(Ud=4viqbLl;~qyszA2%G26# zt=c`QIYW@u3_NOb8GWbIL>ruv@5G8BH9ZCI>QEY+uM{)0d|B71NDXLNVpVqJPP5DO z_3IAT-ZyfC`&}-E;pA3pWlYq>@U^a9ptZMcRt{5f5M$RSv|6zxO53@f4zVf5k9=*X zFZ_Fma(p(AbdOxlR7B>-lQ`YpH*gUJ{mzEeY^Ki5r}-W(~jyC1%{ zI>cowVEw-H-r3YXO$d~-{ahadrcrxRf^}T4)ctN^(#5gC8ZZ}*JiUNdqDrAnZ7bD- z&5Eu)7ce7s_-*6et?r{!D!S41?jAIc&r{dit7ET+-LGSQl;KH$wGTGW<|q^RcAq`f ztP;+=qH*ZHe`|oQs!W%V_KYjRrD{%nVs;|MFq;=4KoDk0362d0P+{{|zN;eahG9gp z6~n_{bHvs8_I;8sB0dgsK4NvA^&6{tqBd~_RSK`jwa-OJ%3+VgqNi~?HcTUaM0^#e z7puaKy%SPuuZF|N&bx#uxH~ByjKz*(uO-|ijxdYcVMvRQ44mC5QTY|A!^o0S-RAd& z4s$M?(tnbfTSCRq0P}drKc8-3N4g5N87l4af)MXyK2Kja-sH(UKqMk>@-9G0tX7#3 zV55pDB>whTDYoJ(&g_}5G7M%75H{5#K($z9JzHrcVqmaxXqG%njo5+~rD!z7{+Qtz zVwO+jJ*WS6>`d>yn0%qGXU0gnE^gI4l3wDUEKBrNQ8(!PDS$FM{5f%talXFt5h$!; z`ua=IL1yG2PRMr6@#Q%e!u4(%PW=qrXL2#8%(5ZlEVd8djr4ir14)e3?{3Nzu>O2K zQS&JrfIDBg@)jQoY+W9Hr<^P#a^lso)F$;P_CyIafJ)n?>H9cSlYKtzsiBB;ryNa; zbfV7GG^sITPY9T+=-8dx-118PqyfEFeMO`k5W(s7=7HAp==xzDt|jshZ!%LWzmZJi ziA&aP?oI@}5gT;Fq{YwOM6Qe4^My-6{b3xy`E)FqIdW!xc8SiUBuy3EJkh4S$ctbq zlFN=BVQ*W*t#*lsvsoTS`|4X02CeQPAgIZTp_0yzT)^S znu|Vnc9H!c($rmN2a(I`#8!c6z^B4*uMpRIvSQeM4FB(o-sj z|D?^vRi0@k5t1O~MNsorz1qpyg;x|5_*ubd*$i zbZPWVqLe*g^Q9whXpo(gB4MMcKTm!Y&XvEesb`TiHbvaUN6Xxzt({^EuiT;c>4TTc zk5W$k-{p49?Ehj5(C>#{0hk|a^vxXVVcbzq&%>O`g;eiIavA|C=M$LK6~T~bBFb+@|IBv 
z=iLQvslaojxst#%lf}beg5kX!I!qDvSIlT?09_e;Y!n5G2n-x*JHQ8n<>Z4)q<0e_ z{ILj#{u_=5h^LH<*8D<^bqAKBs}4eb17EAzg91tm_%~nP?N69dFtRyAOcV@0#xMXK z3$~^aaQ^`!IrRvoz<_013(CkVQUkOOMY?&_8mEUucrk)dC69k;zg^t0b-`Gfrj)(* zzfeH(Z^W>`gm@CT=Gjs_6EIjtz2I^FS3GVH1(xq`If3N+e&u*FU_|OrA1fDZ$seN& zfURglEMc$(hY#9rDXI>8r4-``(Kx{^q#^bpY zB={x-sxgwFH#d4?l!N>8PM#sV2vlYj3Rvfb_m3zXl*kYUc#1-T+1z@G+5s*&^@w`V z1S*IH0!ToX zGc+1#s~P?E3R@J#sS4E2wC}h)o3dYCYni*}Yl~Pu$C3 zIFUgHn)wyHY+h7B{t5LO4T8R<`$GyC`vgl5EQ}>4YR+nfG6vVKB1Jd@eqz4_;HY0Q z;)Xj+061-;H!E~GG@6VHzkW9DIdvHb2sdN^qTwNn0ap32{>n_@vrD-hgzy~b7sYmU z-oJ|%Ep1tvlyULQ@e5YakWztSfiPTnf7P|g2BG!mR(|cPI@ZSl%MW#rs{=`Mc`gkp zYaAslIO1d^@bd8D6S?A9&(2(w=idFHww#8RA#*>V>RDN)N>1H&0CGAgJnG@Fw^ zJ9b170GDjBxeQa1B1Kd&M|2Gm_4jML(BgRN0>=UU!O$5<11{SZGhG$M*>4@?bPEc{&C+(-F)AS0?B2@pTrtl+j)ie~y8a$@cv_d!NM8fhYN zHCMPTPTxgBCy{wfTTQlKX`%uZs!jANky-jNR-8C(V6ENgDMD6Zd1?cJM?Q3DT& z2Y{<@#}Qh~2pOqbAM4)}ap2(Yns?H~>_PtC_ixcBOG=2T1eiSg%Zg2k`=^^3v#J4i zub>56+P&Mp85*0ieA?V2%rN_-^xW~y+fRXj0I|ukxkZXrLM0x~`f|f_^l66o!eF0e z2QlHxkY*j*LFT7M7<&8zLP2f9Mejr+m{>p$A(zXS$Agt+Jv*)pbCF%8K+8{*7_3q+ z-dk09GsJX9w*Y_-*|VrXFoQ>ms|#;U+2xm}8rJp7qM{!c;``@mwH!s*Y+ogTLAse; zGhP=tv0pUqmSRyf*N&D@)mTg*rr^1G#(0uBl{>_zPIh^mVht)tCQS$)$#WN!@O}fR zav}FUuq#s5)cQOIv9)ZxrKq8))6B1;Hm0MJW>xxDvP*BbxP|2q6H2#LO@eFbq=2CK zg(w};&)SmQd|A|p{VYib4)#MQPUG*2z2gUSCSXJLxRyCGrGsct_(Fzd@Eh)THDF1{ z++(Z_t%I4MzR5@R<(LyK2#NuI@{a%r1(G8L<^sQnwO+4?cYg_hbHn6U)TL9Mf?2{eX`&+lB`isZnx-|pY1kZwV&(hjlGr09I^jW>w4uxKGN@a| zyb4+iy9piS%;Pwo4kxV4BU7#;PoSt3er@bbQP5V3!7B`cis@U<>c=RL=_9; z?WMNdl?aYjH$UF$n~Q5OPiKcY+Im9YJo(m+{Z2PqMeVe`ZwWTW*Ki3I!fvkMhnxA} zVxXg+JaOc;k-#5r(jS!L0Qi&{6~aT-tayd0zXYB8xJhMC;_Zl4s$KCOttr4L?CH=s z1&?!%bVu_F?N8UC>^&wN>9SnpYA@tzelA8y2OL>r8bWU7gP>=ynn=I zlPpZW5oGh=ZQN-{&;!klGmMPoiRiNFVz|wcbdvX?Geq6`M=OI*=X40qe(n(gE5Sml zzh9Q9MOM#rR(C-tDIOjB_^P2LqoZUNa|jdAe`6nbn@I+6-2a|ClG`E|-DYN5$r_e| z45VgTutBUNKfo-?0qj3~?j%7b$HSP^mDDSP$^UvGC9zD&t!KqE_AwWSm^t_J$4^`x z+&zX``yFh%gV4?Z-bb=F^V7#3>SVX_%2X=>FQ$4X2r?2$+L+RDbg$MQWgCi6rAhQe 
z2<>&=%(U@$hL5tOwzy^M)Pa4`-mNc>`;;%(-=m0$Hg##z*Z_}00xtX-q5~nKOFone z0jQ~u)Y-GqPoZQ6Or6#y^gTh?A>YRWy~^|%v&v|f84{k6@3PJZ)AT$=7m$W{vJANg zJ=MAR;lkM2i=mUDRn%&$L4q};F{%#|rZp*y0Kk_Sx-Z?IYe(S|AaXAWexgj?H ze8FDO*z;kuRCCmGLg5w(36bHh0Ojp3ieiNikOl-$>9PQr&T*C5-p++j)ofz&=Cph< z7%n`@$zH;R&_7uP?9@PeX&p#k(#4mGSj?~~5$aI;pv|Indl%sd9zFt4mH-K&!^@?}fe<U1;QN3WW>c6T%rv3h4on|4Ao6HBX7FihosSI7t@DmI}|oEkHQq$)&_2h`zAO3@*p~2JtWU4_C%`yQidw@(TVl0Mjw{3nqCAqmO|rgV zmvg>>tZaKt=5B?kslzWpJFpW@`FpZom?imTJevB;Yx21NF6I6$2l=~UCL?OG$=}#c zJrIrlmn>G5)sbXXRFh*@RFjd_XH!ws)(J}U)H$+*f?6h8^V%Iz(zeLA^CtMS^+&`;DjAyr>@CZTCUuB2f+hp%*D;%H_OA8 zD$&tQ0MKGonnM0_C_dyyZfo)}!u2!c=ndI#@+w@2Qgf$_`ghAP-Tls4mQt$3+Vp4R zbWZLnR=&_a3dFcnLH-lm@OD2WtjIV_akHj}IBS_n-gS4dHrC`KJ$n+j3%81BclIJy zMJ0S9>ni4!^!3UWNZ)0t0BtS`kMRE^ME-Y({`=ATm*sRhm+cq>eLAp%&X6Q)!r=h7 zAfeF!@gQowQ}0MP)?>w_MB`>D_rRsFusRZ_!DR2C8=@%qbmgxkQF~5=kBLxrrKP}FoV!cld4|AA-A1TWM71=_ld8jsL;@-C!!IL*+pW7 zzELJ0n~sj`UWy-IcCcy_M?8L3tcMsiQS3OC-mtgo(mihCxp`~#uwf_0kor%^u-#(r z@v?X2m@0WsOzwYSxDE-LBP3HF&^G;93J6x0AC5-$pNc6JrGOujxwmz?=$2v(jlL22 z@xJ^GriE4SGU1o9g6Q>fuM}3PN{1a!`g}D$t+j?{+VCHoyr~WHNxW_Ua(s^G3#V#K2xiw(?$+=Q%zC z2K>dmwk3geuQfrqfwH3?wn1ez4YAVi^SkT@W(S)wj1?N}^D&=IyD7oSjW%+C`T0@{ zifmF;X-#&YmIO)?>a8hK;(gy-isQPa!DpT$N9i@1C8ztV9BE3$ukki}iV9&#X# z5xpgZDahRXW2TT6Aw$}>en?g;Z*F!WpH>FQ#?5fEj)~AH79>DUW1ZE%qQR;5OvQ^F z!Dp6L^*}?(&bD}al*E}!P4C{9=aXCd*s<}p@k)^Jlv(sRRqS?>vZ-k6#LiK`-Gshw zm5NZh8;}Uv!51C>MVLYJg8(Blv-_ThevRv4co#-6nSha{B+XD!;&76=VRd3e0P*+aL z2f(k=vIFnvcnnzE?`RDZu7?_VB@5D?}kbNZjR4K;9lX=sET}Ckn1w zB{U_FhZIoP`f_~JdP!oy8lxKr{9bu#q*L^ixolQQNVufo!RhEIi-w2+K!Qx@I3q1H z6=+6_$|D^UH+WQ= zOl4VaEz`|O|I`+ZY2*1&j>4tAxrIzJ&>(K_^a<8$sQG8D)h%=XH4wjX6y{i$hU|6; z8BPLclCsCoWRnXsg!U~oAkxn!XgL;63H?*U8$-tK)OML%*`$zK1%>h^9EH7+qLA7$OD^yhoEf&l%CZ*!s^!CxP( ze+5|vBrs3a435`|=mFfRLwwCONz2K?Mklix6jXdL_fymS(Qwm}j^8JJB-G%SefP=I zCe+AJE{-0hJ-$KJu69c3M4VOeJFK&ECkwj=gf2H8oXcPf<-Ec0&zs&JqKJk)sm9hL zw-;kCuW#oJsI9NS_2dN%O=Vru~BEn78le`W)1=-KBmRp}bN!O7vOJ`L_pR{ivV%!0Sx 
zO@w8Qr%fD^D|KQlqcdZYg(7={Wjl_DpBnV!%d`w&Bdr&5^^9GS(XE#oZ3iyxxdkTH z@qGzSwx6hgb>iayPC|w!G8gDIAT`}-ubkD7jr7a|Ue$T-E$YU+jA)h1))|J6rg3Ur z=AY+tABMT6-&$CH)jL}zy!l%F)=11n7(cmY`qt}Kf4?X@RguuPha`y@EnJb!R_e!v zF)eo?52}G(jam!J2WhfADC0p7%EXhe91hq+Kq+P1nF=U*fCB)7IkyH;FvHUynIOQ* zn<}w$D4T%p!p~1HrV?k(;xg+k;(64)yK&k`MVgI2liSTSZJkX*w4SV+{*l0nH68bZ z;kCTKY$~tqs^FPe%V;iQWt)0Q?LObvFU_tnUVxA~_gGzCPaP{DaHNYbkHY9Z!4aOn zlY@9?(Qj`@!13knV9`!`89e69Cn-#B--;5_`npW7Ri2$amwF&`cw2TO7e9qAqMP1_ z=Z}i)lnWTo!t+8}&+b14<1=M@y)4xp>mPT?etJ4OEQ2Chg_?)IDmgL=7#)<|JK0wr zC9HF9Lab5EF`YTx4+6KpQ z@|>fAf}c^}(BCRwdjMWq2YXzWlrNOuPPZ&n0k02x*_u*MrKU7XXL!nya&#RxqUHnr zSQ1X(++N~~h@H>_6(6ku(c= z&^3H9e8c_CAefKF?Z}0|!nj?~X=G(bS5I@xrS#3`fRUSSae#v-dDb50G<%-=2kfbm z(R2sxTBTK0OI%ay^+;kP=SIAhM&S$UvYjsMKe$uHD|9Wk4Tg(Vx0t5_gw?CFa4PJJ^Sc?-=_b(GaxcwBJ8qQh(GLLvkZ zg`zJ9;V%VL=_KU*%di18R)`6KLCYvG!0sDJgk~s5#5FKgwmph{^2)C?#Bbb#wf$-R zJn(IxL`y?9;3QFI2_TL3iYEyM@FJNAr8)5&f8;eVpwtYPfZ#Q`(;rH~pRk~0OLS@e zM6bD>5Sz18&_FwX{q0d2J^U1yzB{plc?4gn+y)2EE>-!2i3+w@ek1;~LD*J5S}T~9 zThqk^rIZ5Qq+z_Li{zCV3R=R!Ab92C){K5-41h*O4#*yYME_q|Kc61l`D?{B0%-q` zX+T}Ve+2kd{TTnGXF4yzO99hPv7-oyU<=TL5f8TXDPb?bdt6CQK?rsN*-f#y(u}c= zU!Oi6`NkmH@Dvh(8QdKHdRSA8 z*J~Z(aaq8}KAfUHgDrUx7WVj4=gxPusXqy_Bj_z z2K<1KFo^>&ULVy42?7(Cuyf7Z&bI`8al0e@E9Vo(0f^$F{(PLGu(DTqBsVn?xf#08^ypTF8gs zfPiPYU-sdk-oI{V26UTDA%m-a$gJtP>K4bL(>VSOSE*Ms1!WVORWl?B zK}out2QF7fv2x%`(^n%HwXGI87J#N<9|V>Rxj9enV5uDkKCBd-id*4*Ty2bm4Sugg z(J$8Vs93uUw{0ItsaZL~e*5v3$a5Cmc-$6QARpjgYJeK%MZKKHAoc}ZfIWI}H2!4niK);CB}UIC0&U>JYC zr|AN?m$5A}6Y(}=i+r1cpKnKncIj}uC|&GbIl3NEoa2f0+g3UJW1X`2lhKHi0O%-_bsn1vn+(8atLS zeTCGKW-Gv^#h@oWkXOtmfzJd~dOtY(3x-wNb4AI`-LrhWgi#wRk`#&itGT8mGU-e3 z<`IA^e$b@xFZ((cT#Te=H2x-{2QW^%cJlI7;-Y-ps=J)`jL)%a^zS!79 z=i=)XljscqH%;h}sWMa};aKV@vh{WFs)1;p_G3Mz=-rcN7apw5?Co;o8>;o6w2#w% zg!POExEG?Ic~H_#-qJDRjm$=O59t@v?^1VKuLaag7Yn+jISzZrpKmvK}Wt7HXTs)NIgHk2s))Y%?8qy%Elsrt7STakU?-9*4b2 zJVlYj@l-Ovp~^><)fSA}tK!=!io2)0%5ytG<*t;TfecV-TpQr!69RiP 
zS~PM$*6XATM8TW|&uOPIP+0U}j}?ZL3HR#VfTs^6e+4Wq|5Cu5Mg5cgumKVf1@o68N8f<9@p_PNJ33<>)ldnD^QWh1ox#r(xLPrzP8Q5}?LH#F3AyNdC5LiB< zFN9?`4RWfQpdh|@XgZ+V20T14?}Zo}2#`iSdcD>AS*WjI^~j7STr}9Jh-u^Tft{aE zyvEBQsND?EU{T1c{>0=RXf_WB!nk;?xM7Kt3f%D}1!O3&Ma&P41GdQbQBlEGmkk~% zc=GOgA>u)Ot^928U~uN|1-KuZR9 zO0Wb$Ic&`%hMiHFPsaPx3=v+;Q6VB<+T zE{W93fgv#yK=Fa#V1-gq*S-MRy@Uec_W#KG%CIKU@ArQ9{;P+X?&RVHd^X_oRM)d|K59_LFc|%Amcbu zRzD6bz$H&?B2TLD`$9c27yZt}nYFE#n>E_-@+D&O-$VPK-~Hd6l}kqD#1@>|Gyv8f zh3EZO&5eTUd8foG@NM&KQ~bx|feBPwwy)vC_Ud?YQcBhtmCCksmP4q&wme^t#(isz zSW+>SP4t{{t-;F>l@JX)aH8kcCUA8kSw|}{jf^oV&uuN{LISqxwm50B9{!#iSxgyJfs_I*oFTX zh#qvRxl&F2z#jUu#*ifH_j~w4nlT=3YG0+gJ=uHXI~ppPw6J&8Q54H1D59z&MwkUg zSqe`ecqT7tsd+W?1o`vUJdD}g*MJt{n5~%2*qsu9{|03oA|Z23^^P5%C}j*{5w-tY zX}~RX043U#Zy=r^P4C)hE#~{Oz5I6+F6|r@U$Kq3|JbvR?`d1@<*?XlP~rJ}l6UUW z{;)sm;=`Vi?whSSS$vsl+2t{zMgLdRrAD=qWiqx}9$ePjbL37{HS^9+9$>s|sPQgt zNTc@-xm+=X+b4sKtB6Cv&wJax7lZy0IaKxi=LpMy*dXPIXFODFZ@;NW<%+5**NVQq zM&ecXZ@cBJONFvmp<)c1?vY-bg6D(ij^pi|U9s)@ehkR=W!^^HBbVx0<<;m5?j;J9 zl1CytmAx*3oaITID3_r{5dc|fB_}t+nxQ-)jvEzawYk<`K7; z$SCiN^Y_jou<^rWQYET72ne$1uQ`8;PtP`6aGDY7mii8{LN!(29@7Ih5HGd`5_Xpk zZDn-6DXZ{P3iDUSQd8(hBr#-N^T*;Zo?%yTj3MmIdm-shPA^?AApt&H%)noSnCgfX z$o9z|kpmCUA2ToET6e)>ZTt?53+u5p<=knRmk#Z$!?1J3WphBhisTvjFy{g62C4p9^L}ov=Ro57R|2-q&7a(9I$Nfdo4IQx+`R8@iyrqtNaCwZ0J(3d?*;Kt zTFEa?SIDAo6Z(^?aOY|S_wpPyxO1c@y>1TOywmS3))Uf1HclDlZg!OYyv|=BkN^8m zm=xo`F^|7~**Cll48Sw=aDaEAvhwFGa5SvrHJoj80V;>|p5I=;%s{xr(Nsv- z5LM``Dxy}1AVH_!2P#?cJJ0?91PM$M&@CZz$3B30g51qws1$ey{!(`lAo2wXhoopg zFpvTEMH5KD?)5PYE@WwZc@V-N7N;6kHN;Xo#~FoS7UWAf%@E66o~Qys17=|%rht1P z7;C3uKCpq^6&3XV02TN^QXf)`hInOph|B@B391`}S)3PvbpRn0@1PPNLDm-?3*+aD z)lem>59z$am0%y@-RXj`C?LP6C>aD<$huU=|MmF(I^ku$IAG`X@8`|93;!DCrGEPU zB*n@VxF{}I=sV#Oo*tm*V&i7%!eMLfVLo91u8jKz<8Ni$PYYJiEx=Tan>tR1bpff^ zW8vu0wR}OcpgO*^0~V##jKsHn!vKjS-uhI*&64uA6{?70Z|^tadrof{1MY9JMMG~` zQi~Ibmr;Hy_vRBY2hc6dbA+Zj>GE4LR8=S`#$T~e9hz| zKtx4ErdiUQw6sxyOL#MKTXcDGKEbT4LrtwUXfJ)CWN97q-g>mVem2iMN}py751A0Y z3uq<5zBs^zVh`c@=|3v)rq$&S;d_ 
z5&eHy?-?yZ8<_y-b5dtQRZb7aRrytHeDWT(-+|hT1gUZp`|bs@8Kc$Cq|C!3jPaT( zRo&=8v7c;L-y-zRs&2+g@P0hDc5XN3pzTq7ojSR@ICv}OBBH1YxQM!}-Wro82~K&O zVTx5&)1|VVXT!&x2Vp6>&^<&2L*X0P?zNA{_uMj>Wmf}0P4i!E3m-S&-{dk;6VY)g z!+43k2^--qGDaIZn*>ny4Q%&?_k0i)RoVUn+b%@eK^_-=K7yi=k~E)cw3em(DB@i_ z+iJGgX)FAISh<0CO0j_7-#%9tDeGN%TO1GOCU)(1QmR=qDS22aa)a^M^gp}C_^4kh zE!z{AbCc5hWC9{b^1ZmEj%1@qEfe2TJ!-yw#zn>E5+4H>=(3r4PZWL;3E=y(TU5@z zTFi9BYY^kIhGpK8m`v*3f<5BjN$FkJ+>f6+*tZ4^h|jxk2t8Fr{lw7|Zv3qWmy95c zD0jAw@Sb-z0**KJ_3^MGUPL8FB#`xksgZ|TQi&C#DbjT^a&Sq1Q7U}LP<%+~&{%sY zI~q3Yn4QRtA<>W0VNGCoew&2~0`z=q{V*DNOa%*_Jo|oj{P;q5OlIM>#V?#Hhdoyi zeSjh#017cb)P~wcSTGx=GU&uoXjg~5s9sJ|h{(qSek|j2L#LbJXG0oPqA2xIdM`@v z-#aPAIPZRx#%xIV@((-|9}#WYW1!FE2+DrkPBl1bvfD~wX3n& zgt%+>GlAjwE9sqg40j>ldWwwS@Nq4A$g$G4mjiGrqxP@J+{PzAFc8hwmT4o%ul7_k z_AV-Y38WB8(%e09_y`m$mD8I9Fdn3%HIDAZyz_crhowRibZ<&6az{&Vv+IcE4+R4 zcnSb++$MHzM(xt^#t4ciVt5_&CyGHBhBGk}#_->v-F%J|ll0-d;p@K=2Dx&&qV8)a zGotT%{9L&#vOiLc#t0`F6@ux+;=6r=Jw);t9jM5A*NIYKw701P^i0Ha!au&MJ z(Kj=|@4VwS68GpGHa5(nMOeXoKz43X3rjgbho>7;FtnM4vt@QjGx#+vhlXi!VP`f= ztVp>NgIAY_Vrs!kl}C*l(WctQdZ1m3L0zFO4k$Fh)cSWMX{NR*-BLD&{pIRqgm2DZ z>IW}y4Z=Tfb=04EcjIFTOQrG6w}Eg=5o=PyhMp14C}N*B1y8U z&`0LKv}Pl_8oaaFk_aQ@#<;oQ?72ECB;Iw)=?(8U)_g^Q;8+J%bf7uBI(rAtvTXim zG^ehMQ*4MUKR4b&zESN)^XBWQ+L&_WuPco}ut zEi3t2XIK1AEYV_a&R?WGy7~Mmmrv+L&K5l|@EFg6y-6`o@4u05hilZE4w^5o!~+0o zh1Gqa#E7qH3M}@P*R-GAVg$p^@V#qXwpYKr)mAEh-R3T(b*hVyJ}EM0DxI%LD*3F- zmYLi#|2xz;YgzbRR%_>5wpi1W-r=n8@D;9O_eQC1sKdXv4wa}bkMMurv>kjnDX3`& z#x0V{RJWe~O4*ckWIlM+J{`B11)2bSX(o4mwcGhEe}N8do6?EVy9eWzH-AE;PcW>Q zVb|w})ER5RLQ8ySbaRK*<^kVh%e-&&{e7s?^W`lSylxgX;Nwv)w%SjEe8&-x78)sL}UIdu27T4jvezb3pWfcQrTO@cl9(NI=%X%!>%o~5&s#Z@4o{Bg!~iI z_QrXq(5%CZ-8+4sV*Fk^c=d)@F@y7Pm}xh%8P0LX>I~EehQG4ODxw2}LN;z&<;;fh z5`(bIhm~2Z%yF1wz5$AQyWj6qA?Gj+S}7BeW8?5zT|5MvpIzY-U&bV+d@P0ACtO!% zPs(snA<*=vw&*Uj8J!WFKqBPgcGbqqQcrc^hVHx5ycn6sYQa+e!8}D9%`0pD?Kmmc zqk9otdQXaAGdXNdoWTAUMd0*!^kM(IP$sYEY5C$sN;H7a)S*`BcGn5D>re(} 
z0%N?Ya=n?8I=WO0l%i2(59!ZLf!%<^SYDwXJY<=l6iIg{_|BFgB=75IOohOXQa6w~H+Moa`=p=#b?$u2i^bUzapY?yv$CYPr znM~;p8Og|Wj$Nz#^v>3qM|^kCDs9taBJ)TTaQ$bcrsfwwR55iNF?0mgHTGdcDEtoa zep@(Wind{#dX9;XRCJzIFv`lZLb~8$VCb629CznwkCNVK|f}Cp~F)NV2%_ z15azn)!*&{zS`T*r2+paAGjg)K>e7G1oFNXkpEuxwOT+oJCe_+x>CS8etrIpcx_`? zWf-YfW*o6<-WO;+jqI7VpxGpbVGhUye!~)z`^dB*Tl(be|GdF;QtWQ7O^5(9x*T zNaK`p@On~D9pp($vj~!rZ0BHPi-rcJWtiEj5jDEmlEw|Lc7!pwD=iSRsUzIh%EnO^ z{kltb2jDoH49Gbc4U&J=sp-qdnl78mho``!J&#_NrB_47{Y?pLQ>fqxM;^yeOI-WO zAxE@OOxk7OAWqew`1nKTJGC!WwC8wpI|0=3%hCv5eA~o$EiDyN`mU@)=L=AV`QMm7 zss~muWIp%wBB1=F$F_q$2n(a?#i(;j*cFiUZ3onsl8S~vQWhC@qFzW4YkbrWm#OTpSY}mOdWy8f{n0M+Bq4Se5i{=S zTo~ZVv*|-iOB)^3E0LWh$TnvuBZXgOe2so}&S1XY8#{V#_lSJ#%e`0GClW7`op2IiaB}psB4aFVIAKOt;v!tR!e5s9$6qF;{6mAVA8th?9 zhdX2*4|PPx%ZTE45K+i#h=uJ6LF8|;vKz&UM=1siozYqQ+%N%i%3SwcKkLv}heCEiVT-Z{0(qTq*O^OJ0$8=TWG@ra8uyKi#%~j+s}=&KW@? zMqWC(y4&ayn1zNaX6vQW4*3>TIia(F7N_{0400g}P`l$2;~UyHA8L|t1O3hx%B1usJS`x?0g&vQu4WAn7hf`Rj22&;_{f za(m9~Beg*DT)7Hp26+RYdbcrh1hK-Ct*AU2Y;khqee>B`TL|LZAwM98a&C~Vad;nuzQ_gF zrt9L7I`-byT-y3kvRsEkKPSf;fYf_tDhCDfdD4pvkYRL~R++cL3@B7k|D(j<>GsHI zAluP_Fog+RA2eKUUpP}tl;0Hpjv+zhALiP<$-ZInSg`{Qs?M1}J-PpW`QiB2N_*z+ zR^{7Ain|`vzR9jiUC*dK^N-Ikp`IVr`eSJxc4Z6_%QhNgjzsRNOP^uX$-q1I|Ln{E zy<>w3eI1~p)+AXBp5$Ce^|L71Sw zRWRfdK|%Sa0Y5nqi-Fd$Q2HULUFH@JD*zn4^+yJ7XQQ$Low~tk_fDmx7@&4C_zbw0 z(COG{-OM*2#wIiuutowI8K}pLF9-r`LPx`Z1p}7XvH;NO>9k zpSosxFT5=JO9WMbig@q^H)~kd@PPnmRasbskZ>)t8SE}(wZs)+!BE=YA40bpI3xeX zx~ISmhs=RQ7{UGpPi%O!g%IXI@|H-s@GsOIT0#YBOt71eLk0!K2Xr1G!Uvu|pfG^* z@3H=3V>?F(p&Q<_x@Hfxe4MJ8`GYu&5gYgeTyTxDYsQuuMNDl{3D2FoiAgp?&MEy; zSjXQ_TMkUTM1^-AmTI=evLho(h2o`@rg1THV&raTfV5)PQGMlXv*|X}S|2>_OWC`P zswGRvJ_nU+XPePv*wTlE4u(gD!U4bzotk&WB~<>D(9R79!p1HxQDle=n@eTL{(-a_ zl7gBnhHH6eA5xt@8LCA#J(Z>(*H>*kN?!bEx0tX7Qu;w$beCM0R5e*}25Gn8oCN#I z9zpug!pg1pJE9la9(5o3Op)G?`ymBTSCwGfHZR34a;)dyr))^F@Z$;H)B^6}BdkeA zXc}zNkkaJWJ*(@3xbODW_tbf{&GyiS6FCu-NmXVC^CA~lD9r`)reNhi!-b{MOwk_*8+Cqk8+c@TOek z(B${|DV}hLmjr7^9bI%~z 
zpv$0+EegY~EsIO?CK)qWNGqazTHD~xe`#17tPppsM~T_M7Lhv6fda_Ezn|X|S@)bO zLSFs0nU$h)XF|(&S67abq3J>~=@n8*_xabhg!HcZGV!}Z{m-1K*^2f!t}I?SfB{)2 z{6Aw>RZ?jY14fDR3fLBMVd1AdoJv$gKR)Bh6>Y_y4d=5Fyu;E*_+)`agwTdbJ7u-R zM2*j3fhpQJumBy;G|mX1$&?(&HO?gg8uusBy^{J$0y8a*m9`DV}i^p zlcYuu@*R1MqA!uvE)+uKdgfK2hLFcjNUTXJ%y zASWOCCsl)ee?mU--9rTFm!+C?R%mNa3pqhTLd~Z(qeg@sUkkixAZuK7($r7si@nw2 z-NQtpPt$`sb5qNj9LBF)SV!sl_PZr1t(5DS#i*%Uoltnw@X`~61SHaIlcnc$SX2hw z3pA5rKG(Mt1c>thn!Z%k_ZHkqR+Cf`oILCe8n0FEx4bDP!NUGCS1$$SWJTbzZ%;Z) zqhQ!3zc)G0BZ>c~bj3zyh>fY$`n%4@rFR^fwR`rTHs48#dQDBNiYn5E6D}{ML@?JH zaQ6#9@iM;~=GSU9fPWW@CAA$Nog{B`=_VY z5;@}^BH>@Ia?d%w%QpnSqmXf`<5k{Ph_~X_WW}^P8${d>Q*JTnJ9A~hmn>{Eq-jm9 zcuq~YbMZ^=qXXVYaO>mxkDDD%TbyS3xN;;!$Ko3lGb!@9}I+4?La{SE+QRD?$HBLCqSd8|fjJ@sbo zoV$PM;dZI}L?Zrqu55v^!Pr!E|7;N{Su0|F2+_Bbt>ZtwQlq@$!P|I0w}HLyOYFRwuXNO z|4+-Q(cdh=O*LnZ?rP$E`uv`FZ}ze)Hy`I@60375KWzT)cNQ6dE`xsfST`;rzCY^c zuYXMjm||1uJ|5RicD7uwp~~U^HAbg5$u@YWbQ!A?-~@$vdLshDxSynVI%sm)o1FTL z`h&%T*rl}(+3%pKwQ>?Gn$g3}RE%}LESO$*>T`pg-w!+W;zK!VpG2AX>beQOBhO>Y zuc=&BKO-uA3-975@T~=>`F}(U`^E~a-1pStOL>u~9u-iVQwt@)b*{h8wX2b>J7OKY zxOqX%{(IB>-+Uc;bC%OHZ~#mg3ut}<&j|RU2d>a0h~r8|<#crTwo?SrFENU;ig?ut$rfq}#Ixc5SmWfq! 
zpU+jRrxjjpn{g2rv@Kto%b_!xGavP})wyP3TZ^b9)oiskLmR%Ys!@9Nsht!GtE9lW20cCOovVQCn{7yrbI7h z$U?7*)K{{`n-!dXYrl~ckE+JPPwaqG91TG(0=6vtmOU11l>=QvDu!41Q4}H=FyNF- zpV{@nW*LQ~u~fjo^yeKu)T`S8{E8YdQR7(AWvm+zD7Wkh?^1U$-?T*pkH#(%S5M4J+5r1Tmz4*)jRTCS@>O={}D-fl!vE> zrzA)~k}r9o=V0(qP7pwdd@k-bCcRucz2&|IWYK?y_hg~91 z*f)z`d`#U@*k>!A5%jUJuJBfw3=Vs?=VQbhnLU2letBm?^FK6vN19Gfe1E{jFRAo3 z@8m1!IJL}t-L%o|Rcf5R{qkY`aQn&Gf22V1ZN$;FseJB~qyKw=9j>U)K?dpZZ1)cZ zzBiUIs;?MN4U{b~Z+&_uFvnA&=xh70Py2lQqIbN?cs%id@Ysi$xQKSFO!#DLd>?|r zC3yc>C`^7bJMM-$$W|}6+~BUwWIRnb(qJ#p>a67+P0Q;M{@f}o&aT1dAz{<*(0el7ZPqcC;Uel3 z1X`^bq*)SPT!*U}+Xj>kBt4R^_;}oK<{!@CVET^rg7SA?1NNF>5bGpCN4KEW6O6u< z>mn~NdRKO>BZ{{Vp$Eku1}0M?+rp$V%HK|7et3$U7Q#C>#cR=2=QB;XyJbL>6v%t{ zHP~DnNUnP+h+$1+WLyBv*2A2VQD}?MQn->Zxo?Wh>A*M}NT(Y)#6X-c{qZap6k zHm~%z)?AND0YFfrwgyifRw?dHID8tO({@az0GgrHt}LCgIe-zA&o1q#ZP-zRtHq3D zqKvr0SLV@WmGz8L92*LoC((|AI zm(KUBGci7V$e;`0WisnZdKe)`sv?(c8on}mW)Jfq0(b(%MC62DoyjXtqp?2YSKjN8 z?_y5LJ5{-JY*WaVZ{TH|uSe+(;Qh`&Ddv!)u9dnkGId^_9b!OA>(k?|#nDD03jOju zZ*F7wv570|2mo6+lO8EKjrK&Dk{7MZ9@Q+e!vj#g$jp2c4Ifh8k66~aqEP1{w?C~Q_j}ZV zI6hAD$he)GV-Wwj;=-$+;M%(GkDK^_BL% zN5H3U-<7scOr996_uQI4mt@{-tHGuaK-oq>O$Atr(w|SMn+|vz!&3giHTOJt$-I*% z`oG)3e}gsP@Cq>87^@C)PBTJMfas%r5+(xkSpS~lrrU_!Ae@JVY*cHCm&2uMM*YGl z7^Nsnfz*)Sz6Y~5rqA&nA#jG1J>E40cxc1Ixq?{P$t2v645iRZj4i?!IANi26CDNU z_X=7D2H!r14Kt6+5rjV=R z&0^}?ugaU$C9}tn=qP`#@ml)#k&EUs@H347BWxQ~*QQ5%r8V&nT zb$2f8%Aip2JtN6nL6krzQB-V@xG-!Tr29HqR5}QgsCtmDwHSOGIxIw#??tN+oP&l2 z?7jr6`ca?|qk?81h#Dd7oPw~RAs*+3xWE?$!oN5M$kGw38u6vE6Fu%JgugSEP7n`4 zE@He1q9Il(FD^M8_TR2WC0d{I(ZT#VAt)%;WM~;i5Thq-5Bf_sQiG61K}7PFfN;ZW z$?}((HuMM`LfGRKNkYQWU>Ic*T;84^izro0wr%!F>g{@AXTPr+dpYYpR9$lqGxVCSxJbQONe&X0#|l@Nq;gs~f-` zt$@daGw>dNKsn)}kSq3%O=-uRN&%-eg4n2zOT}24AqM%r=)%oQfXbDE>)WNQ1I#sP zE(d(E+X!xbMY8QGkA7H!msnkx^1tUYW?LvbTOJq^yB#x zoj!IsLA6vXw&YGEKI7UP2QUJ6pUY$$QhE_sQmALvjpaW?i3~X0HYm?sX@J^R?2K7L zJc_(#7KA;vg6&@?_k5&pUcRD6(@zQ(^hQ2yXbl_yNisnN zO#qXSJc$VpNw@@rGH1gr1#pEXiVyp|ke9IYS#I-+u}|T(`ki3*ljsu{J>`R*-s6m$ 
zV0yv@z7T2rWcb{U7n~>IXNh~+0OGhwk&sV2f!GhNaTi(#i3xG4gY#8 ziab}$MpQ!Lw_NoR4hCd2W{Ye9lMA?wtiXuQeB5(|1r+m#`Ru4Jvt2|@ecPM_u9VUY7%mMT>j}`v zrV-qb!()fZ$Dejfn>YyczJaOUiahot#_Y!0r@)(>(4=VIT_L2(P3L6&iElu7b;QqU z#63)Hq{nlDD7B}N;4_7w^aXgs*}lzo4!~WqgdMj@s=qyEfok)8rGVGU7ZO1r>GX_x zW6j-AEc9W?30vCGZ@WYZ5QrZ=cLEDisV628^xhU&=MC{=VVm`5#tjXm#tuuJ!MEh5 z{J~gf_r(t3*l@UZLASk8CtCy!@EPhCPJ!?s4l=nk zZov^ZPQ3tfzc7ezSUr$NAa85Ohmbg@KXBE^V?z25g3y;^UEc#AXqBWmBwxG_&%ctX zR%>rpi;&m8uI5iOPxKs4%xp~pIKpu@V+V^q8HeeCgZ>V#aRUo>(Lf%{N)z7I7JZR(f} zUM}I|T&-We5Q&G&s2_TTrHfg*;Wwf1Ihs2Xict;ARzyAxm_}bjyDvQoH4fyMS}LpM zeu^W?Po`a06Tj1yk*{if+*~vOyU5{~Se@;uz|~JYHn>&Hw0Eky^+L1rYp>Ef;}_5q zr!|eGo2V^SIRQb9;UG=|8%|Gd2e-Gcx;7Q?6MUAaw!RF)6NL=89@U}HYAdKqu*sHa zzCOsMV{3Zz<0sSkO170$<{Yt^bPtU>732K08lzQog05Z(=-pa=;QQ!W!dDfYT5s2t8RH0$Yi^2pTzNCb9Z3%Zb0Cxc z{v(@Ux`c^^4OA0Z#3mv~_F_1etp`(Uz8TYRnRl;0%b5^6jM{sCsMvG;rR+7BY)ENj zIBa)K)u{c4<7PGN15i#kGHoh;%xo<_w3p=S!?ZrOK9kQ$cy|xZpEYRnfN)A^48xuw zm_CUh_&x+o!0TS>QNA=N{`ytY{pBq>pXhdx>UXlzGCC;UVme>*3w~ss5vAA<5oCWZ zTvZl8wQ&T8Q6Jreng;>n(mr0*sA(oEv6}bEAM>=_$eN^%ab2hb$-GXY9H8<gskNm zhwxaw5-I$}9qsIbcEvV&mZ9s6&+d&V3D;MB<5%q27r$w5=IE=)3mNpu%U_?+V1L($ z$X%!>kOnXj+^O=tPltyVqL%n`OX;GWz@}Fy{K~>ZJX(4wVg|E;Uy?77XUh!QI;9;>z>_V%7;Omsi7z+>w#cxmfws$6= zFG)d%a;aroHbsFwBC^l0BQe3MG511U*=OZb(}94(WI~_coZf09P-JL5>g zuj}GKg}kyxN<`{^yh>tNI!{!N7fBQL!2hU!7GST9Xvq!df266MF?H%V4r~cVE0Uag zALS1CHgk2AJ*3cDX~TPrx4dAWwC218mtVkK#Wm6O-txm@vRDhJ4cK~KwfGnOxZbRaXEp;zgWpoO1$r4EPV~Ajb8+g0%^^UH62m(FAxfx>tK$NGq(b#fx8_h?xj`1sF-Z3zRmE zBRP_f_Q14>OS@l|>aX!fP~pZq6;~nGqs1ME;RACyFDVg1~)a;gOu?ut*^hScX-wY(vORkgxmv4R23(h z7JRc!ro?|SQ{s#JoPpwxX;b2Kd!3v%O%yF!v3zxKdZ<`zYXR-6_5D9m2AinRDwe1Vuo&YDY=58VAS3>m_j9EJMt zRZHvoaB3{CM|6}0M6c3RXPN+Ss`k#hE)ED)tT~I>&n`+u7dyWViG3}qZ=EBdiMx$R zKg}%urLi*^oT;XBg__}E%dljp-NwZElZoac|E<}HW3FAwK4EL1S|zNO`)DxfsL!hV zvo8V1Z1LPvHfs159e3KkT_1q5osB1#bh?b)vz~frIhBKBZ8cb8Zl!VE9uX(#{=&pl?^g(s9 zUGGTYIYq9>c7)NwOt^|~t9pps?|hOQkE(2b*k|MXxa{&*w@b&gAw8J#I2Dc49T+|| 
zDk1;A-{liv{8%@dxOfI_H}v!F{=k#;8&n1;-`>$~`PwsT(4PlialB~MuRU36Q1ZZ{*fmb}m z_{<{m;K1)eLW=UrFZ#&RW5VdY7QcgCq4uv^odI&2NvsrH)CO;~rRP%;9TZ`2Nf{Ci zr(+Vw=m@EGM6vsiF*vi}GTCS&&FDsk*+$X@aOtE0mVVi+hcU4V(u~9wybLM7k%@Uq zPHqs3HiuAgF47Ydl#1X^35`|0((`{$VoUlNCuvx&FYEDQ)wVuSXv3+o6(2TBx_GU71YVXK?sC+U7%Yuwu znsOAZbnfLeW>l|!*a%f+7h&`FN#h0#T;`f-6V3fUQlYr#J>H4$Dq5$lqp7snb_HAk z?(uI`dEPYIX(p&Ouo>L)BDmN7Zd(={E1Ek=awU)qy%pwP?ueId7?(h7Z< zj5{C^)b`En$0~CB6EG{Lw#c?X=J6zRF#me<@gwym-h;j%)FOWuVXNr&ZespHAh|hD*53vzz8iv{wXjhrD7(i_w!1~kHNeU#h@a8VT+vE zH(c`{rRU~%l)%C*68t3t$V%~$8M@^@#NQ;v(rmHwnq`6M2LRc< z75%_3;N$umLfeNgZ{-W>gVUw2dWJ%fv)>p)(-CNeu!>Hc=D*8Nb6dz3y^VtQOK6qL zq*RzO#-9zRrbE#EjlLkq+#%0lMvhLZvdu&Z>$+$yc2`Tq#2~HEYN7A)0V;X)+jN{* zKBDw2n}#Hdr2BlAZDyqTLnF(zkP^TuvR7_~#1QX$h^8L~c7k(KKN^7mM;OZ(tlVZ| z#9-0_O2JY=Vfyn|7c&^4lvSMGxgxID8X3KN8zQedM5VT2 zU_bppU9p6@?U0iil*_VOhm{tspC58_j%38LxNhVq0Y}3AUJ5zS9jr_cyFmTe>?iCz zEje_)niZCr6KSWI!(&Cp&<>NbR-4M^KU{$me7n@Hf2MFLa7cy}posTV8SGL9B=gwY z`;_6;rB}kp*V?9n;*begao0xyii6s@$n}MraZBgrQg2E+X9cY(cd)8Nt5Tr47BzGR zVur;dKGD5Bt zoPt_8Wj^sj&EX002%cs(iQN00R$8LGc3;gOgYHs0STde;mUNg>I9k8*}m9I_Mdf(>M%> zrEGwHeeYo>s*C~!Zw|nCSXtV#k%58 zbrewnt4RyO7_?pLcK&_?eVzSK{Ru$B`jrQ6F%T!{&sAhE=Pm+h7Z(Q^I(>Y+yJW)q7rKbFuk?gi& zC!3GvTZgk%m(9aQTjgTe-YjiTAzg75>c%oB9JxN}8#e0PfUMb!S(*V-A>v~{5IJr0 zF_xj<0*@|Hg9pl0>TE-qAI5&)4mt7Mk$3jEbH^HT{~IzyD8WuF>e6p^<>_Sfq4C^& z&$n}d<#{6{fop&m16H-1=>-;0xVUH=B`@yG=8eLj*l-@)uiN3=q*SJ7ava6!19Va_d|-G<)+lSP?G3L-MjB>}__Q-?CP#vw zmaM@4sAgGKD;>5}%iHmPnz$QYztL)^TqxQ5#DVGQlAR)Q%~t(r9U%ryr1C5>P=K57 z*KH)WvvOJdh_Iux`x5v5Z_nX>e2Tvj3(qzzhJQ#q*H~vO7-^@2ev!LM<0C)-&aRSd zzreyu{rHy<09I;%+Kxl9#^WOm^R>G#dly53E1~+(#m}smoE}wy^pt{K~1X3VaRpuq@79s7gAl!ch zaIirQ3xvNQ>`;p<==YGvt|}v{K?t8jG#!%3U{W92KeZXVVmNT7^Lh{*BuMeo z7eyD6?Th$f4}}8^=KK6rMdJk3#K9UuzhLOZqKJJ!ZV3!tV23IhKB&tTR|5j*{}I6` zfLK!o2>B3;avWUE{H2-MGE6NHI{}srIBfu~?&FY5h6A!~BTGT%#cXT5-OG+#EQ2dV23<*6rT9{-#~d`?;+L*GBG|R%CO~t^a1iWpG$Pgmlh3T zhMka(`MYuOA)yL~3H%_4#9Ff3J8>znKMG3#VU_+zqzxA|CXZzf`A8yG;4>i+8X*As 
zTSx-3wE}t_@=K&Z!KNKr9+&_^p#_2?6mUSla~u@V;|-B3BzyUZ9+nCcc=(Brfd0!WnyF;V0}`4=KEV|@;jw*Ac&iKlxhXTzMnl+eu+tDj%X z02P5P_5^q@9M1thE4zlIWclFE=&e_aCcSlrO`TV(rDIDgSx;;rYtEO{P?|~S&T&b^ zu3P<(`-N>-F<*eS8YZ6CbD)1$H@5tf7-#A|)mTZA>P##l?5(t9-2TBfWBh`rLWfn0 zrwang#U-DcL0v$dEoYQ z$;Gy9+sP%l*tTt(8{4*R=VIG7?_%4@Ki|9GTkl){s;#Z5+1}pSo~oIi?x&v*wqAH5 zsg;0-qe`pDi_SA40bvr^v@jdcyp-SYv5TdtHg&?uC0VSy6u&KsAj;#;JVo@LpJ;2$ zK~9OQ<1hS46F|WzPg}G;SHSr3AF6cGc}VXleHM4$?flV2px8)aA}^J-MgE_y%&7Bi z!{>5ML>mLYYzr86C9~z6iKx|sV@FvncELpQ=ziIsvyw30b8yoCx=%3PN2qUeGn7IG1b@9vZw>3z z;Ex)Y9dEeo^v`=_8mL=f9ShA>!yJz=@*xbla~Bh~8NZBTh7O2GUiC}Pbn$UXe%PFK zgi7wW13ad^45hkNp|~W?F9R(o(Uk}%^MyN&gjyOxf_wADVBhGZy;)~4x)CTMIS(k! z>pzMB4hQ}%T4KE06y!@beCdNte<*Qb0v?=wi{7~fW^#-#FjPP4as>9o^unh>o-FFQ z7l?0z_DPpYADucV>TY7Z7pRw)Y>7S}akW9)0cRjgXbJ~nhshbg>IQ_jS%{n4q|S7> zU2w7Zh0>~N4->_!a9#o9TdtJ6&`<&l#-)vL;@a=~7)@_12w5tL~-DJA&hI+>`(cITO z16cogwIE5gwJ^=B%{=C%{c-;$DC5)XfveUgbo3bTQIVUXkL-FWuzP{gvZrenHX9>N z_7>Vd%!Nh5DGZQK_0V#V=I$rlO!#oIikWdgVhlN-zjd|gO{x0Gx*C582JEj^%;y(1 z5-Wx@`#2X@=(zfwTb#CPhCj44N8Fe%GZ=0k{7VM-zgLIljN&D!6nA zd^~0lmgz?0R*2WYuii#bI&j=h4r4lu93B4_y+YXvOO>2NX24A43=xL_ zdV0YqCfbjMI48?{PGWGZ7@Y_TRxa-9L2<4lK{U!NA|P@3nnfQpl_LE)HS-mTY{14I zTScPkL3cBA!bxm8L{5rzj-uIqbaGgEmCcg*pmn#4Ohv@p9muDy{SET|&F*psVTr@L zcZq-*01|WnyZOd+&?-OJ!Vsxte)VsRmfVfJgWX8bS-;10|XBii%If&nl3Rb~A7>g@b@n3)6L z>xls)3pQrh?&X_KPV^ScbZ`mr8<$`25%ZfNhsQj4^gte=Pa5^jH$Nz%ByyXm@mwka zxq+FMDR-J;B9Y5Km3qu@4OXK`g}$S$8W7Us6^I4~_k{+8kx?|>^Q@Ls9{)umH)c@+ zU|3kGM@=P+E9`n1;g@!Of9Rmj_ix5a;u!`9vaviFiwQZRb_p~>Cx6_W+8#Flu7+xx z$~L8ZDaCf#Udyh@R+qM{y~S@fBD}`)No*S_iyY7Uj#hTZ!1Z}6sl9c_xJirg_Q2UW4TDlBZroD zBQrFELlm&N#QXz5px~+zsE~Vl}C8;=P;=NqI^C++&-h2B6{G zQ9HE|nrTp~R{HZ(Z9h-#WW+VmRy*>b7p*qW(KgV_Uhl{g?ZU#0OOf;d=2#*aKl~+v z9&>Rpu9at`l_M#0ZH}%xUs=?0?C9;M7Bx`VgW_Q|-k;E9Xdxyi$&=CESkWTMa-G7< zn!?Iy4}=~BNQ@gC@Xo9HDs8{D5iZB|NzE&ya@wq7l^dRbm8eXFyV-Mc1CNLe)9_Sp0KZT?Z5A8)Ldnl!A%<`EowH0Mo%?b!>qeS z)^*i{w;o{`l&@dI*k=9M)iCV+YtV-r?WGo3(^Nz&%^ 
z#U(?G3u}aS0rVHf0`?|n)EEg#h&ZRakU|fyP5beIr)a!W+?UfAv1abeTJlFz+V>t` zgO6M&jI4k=AOs$a`b_rri(-|5VaZsN2U>_*eENrvQ8cnar?I32sx&l^d{g~yoG z{r>#`*|6zo7H9GU&2v0I(FX>4Nl!D^t>En_?+mL&YVUWMFPD6IRNp7)e;$Va_ZI$t zpRE7$Fz5;BP~?8C9!#)J>4k;HO>u_-fdyDQu8T}$2*{r7k&4?xJ)>a(JQ}*kKa5T z6m165lE)!maZju#2>_QAm*}-EAd;!qJaW62BWZC(9lvDONpu~hLUA)0nYdF^RFoLV z*VVf}^s3?@l%iILS(WvLXnz)c(gN&2)y`&nbh59Q!UJ_h;;AL8T4_7@X8_V~l|cu9 zjmMSKg;p~a52WY`5^_+J--^S52#_Xfy#zAqN`H!dg9TiaTiiRav~_pG6AL({qPGxF z{yzF{2AViofSt=8bzx61s0K+2^b>Fa9qaDG zZB#S|>=zQPLdzvhb|s7LdzBw~9#vw+v}tHNJU{A0QR|gDv6Z7cxe{`{zyD;##+hD! zE9v6q^I*f2k(txG$%vivKJj7d@L)5-0RhLgm!krD0~AO`p@cWixFk^&#vj%D`n7t0 zlqe0vb@iO9ZU2mI`-(^~NF8u(ke0~NK^ltz&0}6KMU|&+oWMG}cywCR^#S!V1ufUO zrvlAK75?@^%OMpUeiy@bs~Y`PQ}bs5DE*Lm`ZRGWE+ zl#~fNdW-$xDZ4PA7wb<{AgCOZhFdwMlBqdwN86_qw629#NuSA*(!yA-{zy>u+(O+x ze>x)O?yOlpR~nw%OTz)hqnB?;%Nn)v7JWYNixv2)6%2GYi_dKuRTYL>dLrN_&u?%+ zqFY9@@!eb4yZ!|2SYg#7^%ZdnIdK?dWw5)pvt(7Y*>x%#4~@Vd31$5}dY8`pL&`V)7|A{T7B3rQyTuN7x`mPtt`<^yk7a}LmZ$}pkz zp0ZGx{JrFyaBu)ex*qw{8*vPLL4EpuLgmX(t62-JERh@%qUY_Sw3}bOkeMZj92R!= zNStuCxpoq2Vz*T7K)KIF*+l5Y%vMuvOKwZr&y@Dz936GRxGvFW#Ynk1AlE zr!Y7dZnnAY$=)$(i3IK4k_WC*JN2O2XR|?TobPqu4%%AX)Bt+Zz_it|cM^t6XA+;Lg zoIPq0vcrMq0Xxb+HinBUh-!97844#;XRtAmD4cQ?u#G1Al-F!@H=$I;7RgvsR3hfh zim+(zljaljZsSEu6M~-Xu?W^VfG9=zLOV^StMEeIl_xtkNi1H}zad0Id+;TkoeK_1 zf*Szh^v-N?7=G1gGul?@`k?Yk>AC4PL~d7B6rO#7__`=A~o^x#=4z)C!=rMlxNaEM{OvQ7Bu5s>A}U zxcn@WijXG+-ki7d?0?ahmU^EkN&VAAmdtnrQDHCKpIjPIj>(SBYT?}N2qm)$l zvsn~XLSH;b*rgc37_iX*XxEMw@;rl~ocD zQxM*7K5i+6jFpqa!d)n1nqa?BI*^vZt*Ui0WM-eH>}Q#lG9YEi5P<66$H_35RXmYE z3dJA;6KSugCoWr&vxsr1=PPid(h#2bucFa{z zko3fC>-nO_m$56-6eoQU4PHK8bs>|dU*@!qhHf5JfzT(YQ>R6aj7&nZ3QUE%r4di` zlh**g76$@HCmDiL&~o&wQnwUnKn}o3ez@Nym_t+r#j9Lg;230U^eiR=5zFyIFr2@g zGpNvy@GHCe3SbOD3VpQ5bJ5uVmX8Y4N@-RUZC9dLSPU>iQhy}lEk_WxPgbN@!7qJ^ z06;uQ*z&zmA`&o@u@p@tDDd3IN=om;d8GZDeXB$X#}xy}i$)Ji-6_spX?s^GoQ z(z;;^KQEm!2l2zdH@e3UzA4JJBZLwLB7|O*Wf3vM8c|y^%K2o;oiE9-tO^CrsU}}~ zL)pvjglQe;8;L6pHNar$eGd^(h!@bx|5?P8NUxlLZwS|WttBK%^n|A?wc-u@V;5G$ 
zb=(VtUP*~V7oF&dH7{xna53VLI)-V6&Ba1QJ+qjHq!&QfbW6n6Ye(xRV+glZw_($F zV3sPIAL6Agu0@>3eF+1SRZbEVh6fd6>6~svWxgz zVdG%Z+RxffWq(|((awHF8RVPm1on2U6B-y5`8GX>;?Gk^QE+FfSjhVDOw|A zR6)zA+btzvpkX%wv<4ZA??a`}IW4_svo8!(YJ%xJm_8GYlf`A`?bhRqUwb;!ON{a= zYHkOCm-6`p^oCmM0+$G@tWgvLANYyqaLvxw#d%=>3gAi)F=s#Zo8sMk)wjTtZzUZ) zJtVlkrD~-&Y936OANQu44T`9)$8&8Zy8Gq{Yf&H-xnv#y&O>nJEGpH)A8@E#lX9+K zMfi&q;b;-ld~k=JLpq+!{MzGIOeM{+%@wX+7F$xWj5DG(3=nk?wQ$7b?o}$2`nq;K z?d@}4J??2o$XX{;8`E#95?8RV3Tn{BzhQ_>KJ+;xgZYFnk2x zKaG7Y_3+ydAl5`LATZE>%E?oNK=5Hp!YDQ^Qk74q^zX0&zejJ>UsIZ zqb_oc2h-W<>-`ua_KRBi^#Wev$&?$E#xwvHXuj720pg@N!G1?`S2qgisfX1dicW6* z#f8~Q?@?`@mL?9g1JJU*+Asbm=@EG)2CZYl0XLWg;7!Wu17ae7ynd*E8CS{6GMYy_ zUBV!tH4;|)jgY%Zeo7|gYal^izDr8oa1;5*r@y}G` zRu&i8+%L+riKv{XhUJN4@FJJv0n!2)Uw6UTWZjk2!hWVX(kT~5kD`gl z*~s)VqbOg?L2I+;&K74IF<>peMT4>yWx}Lv)-(fUPR+m}ZvNryF;$Y7HxqE#%ND|F zOF@!jFOe&_eD|>N2_V4Gp&CSZEq>KF{2)Or?@}@qiER!>v)oE2e2kluiq?GXQ1B(8 z23&5|UK!1_7_SgYe4^9e>^#T*aBg91p3L`$_89qT6qDy}%ZBycGOF%Me0*NzkqBB$ zs7us5-O9dnL{&2$wipE!7-3;Kne^htEsodllc4<2&vP<+zDeLb!^N5bR}Eo8a^k@s z-`Ey&cr2B61+VE^R?YRittO^zh<)Oz1aJb}e{?%|_QMuR8XmOLvZIt|186Ib26;ib0P9TKcrO$l?ZR8*IA4+zu`EJRdA6_zi2K8>*v?S2juJUa zi>Q~9N@Oj&KoG1&N7&jw0`tO}d!g!NwAbfIWZkp-K^Xa`jm(&Z{j2mk7!B9A06=i4 z2PUbW7ttvw3v^$>$kMpvkLd^u_L=rr3V;yBl0GfoIx1&WIk%Jts0!VRAEt){cqZBV z*%{BIWc|xrk~w#&S%pBA_Q_#8T$QU4cX>R{gKHjke{4D5=KB)!G%%^hsz@Bo{VaFs zl#JJQ0z-N8k(w#n=DCNoL4%aAD&QAO7KuW{&!DZ+g8@^l2&gadXLPA?eo!g)0j(*M z@DEmGEM%wpI*SZo-EYXC0}3GD^1rXuR#+ERSa>^+fnd*$5kt{A+ZXLMkd3PA77els zr&)yop2MrGh_b?|zmwMI-K{y-z$F(h3`UvhczMwX-^U*KqoB7bCy5PlD*`Ygj?yn> zm7^+61I0GWaD+_99mT-CPGK@o{eKOBX2E_ zgn$K(Rk|&aR)bfYE4z;`bOs<%u}iL@b{2)y;MWw4c6JiS!<*w|;|$6QS7A_xxa3(% zh)m8lWWK|i9a8Jqg|_Zj#%}d8`~!}Lq0$`Dfr0`WjvIxaK8i$Jl4hdR(jYS%M3LL$ zL`pnCn9!ur9;WvFE(VbqlX$Ta3<;L^nbZRfbO|?Uw?vj*#z_1o&;wx3ovUWS8H zanrB;Rtpr!f8J6)(ikoTH{MF_Trs*GATLtA)ef*+M$XmI@pfwCnRXxoJnO6xv68 zT|~Mb$%3vsGV2f~v{pWoYwcDUZ3B0>MFBbb6Z+?x#p31onb}fZ{^?v%Rs}l4ixZ-; zw+kMGbxzqP9(AgZ46Udu#C#3iJecaqD~CiRIn)yI0k0w$CS!A!LD2_ELR{TT2nZ};q|p9i 
z{9FID#>g!UP|3jZH$$WXUwEb)7nEQ*#o9O0#tg3nEKd0=HQ2ACzlJqg)y`%% zQ*|~HQtCz7O)*1bmNyFDP~9CF5Xz-qNX)ubd$T5G=h@i5M`j?S<-4TWB9}GCcE4#z zHpAR4{Zd0%gYyYH(JTwkuKA3OylKAG+Kje0PdYGS_JP)q)~#hhx2#|ucDuFVphW!7I(V8O`ti|} z1|vP76{_RM$R-u++$_lhgz>OXTTy|FXldYxpG84kRY|rKGZ?1q$gG^pn@x7PV{R~eG`6tDmGYIu>k@C5*SE0n0SV4_Iq{>bcQ&n!c!h-yW|`c)fpVwzNK8BsNWZ>Ox7SSda4t=?#h`ngQurpK9H$qV4J z4x~R?`O!+1`}G2K^=2YQmT{!4&>I}WGC`ByXt5Fc>*jA}P9LsqoIcE^cOyH2KFd`_ zv$JLD4qpJ%Jy)cBdquCP?+F6=j`yim zIhi_{x>G}@4Jq-+#gz^o@YN-lVOr6EV6Fw-&}lkT&h=8_lovxJtcujBqb6uZXKz-neR~58FUhoA<8(%LN)> zy|hg&p_dS7xY488hY!yhE$G(!o+qk4;(VPQlFMpp$Q$(&Rip9IPU7ulj0P#S+HAug z>jwd~IHar{PrtKA6ROM& zO~KsP`N9m{LGHZXh(r_G(wrUuZsqh|!stJkerWy*Vt-jk&4@m<*;XMKS(t84Td~Bu z_yLX2369SUl4J5=&G__D=Gsy)WXmT(dyp!j6&l?R$F^+vF+e^pc&V_eMv`$mmmTzA zzZ@JX?K5PY#90yMAd1up`rKF@s(rN)4`V2ZrWeW-%RGp($A$}j^HOeBR+L+r+3Z0AEJi2z8f0PCIfsErwt>mpyGq6%~IFFNUm{?SnknDIOux|`G{~U zvwS%`|F|4-cp0q$qR@lY)ip5mGoNkMP^RKzVSo?L{x%HA`L152JUz)|U=!^y0n8&|Vr zB_0NC+iL0-)RVj&<9D}7(Ref@f}y~UWxf1;Hl7a64-}ArWh)ybhAhcrq2_%OE}aF^ zY{mO1F}ey+brM_K1lZ|6@xFsDW?h2U_W-0yICX{N`91li^TYD39#LVXq8Fq5(dIo- zGUH+8E(6N#E2TGN06f(!Q*DA;(F7VaP&I&)uN!~vHvW5S5p5y<;8~$4( zUWxImqyCGj!~9~zn7_zETZb0OKGgr&{bq`>zBsD?2*779q;uTA(WFw0+)?{q;9qVE zV$C<277ij{qCDY13JsUeOB;@bpNWteMmGsN{JhT6l}>lM8^L2u{0+zGR9xc0!Sf(^~rzMMz$0u z^sgMdzj=6H+RT@Kpy661X7azU*iC;A_*Wxk0m9gyl%AAtEG+~d5b%Go#s<+Mf1JPn z4tEn?9P01ZX&?yTf4p8dLg7Csj+F@m`rkhDLwNgtTc5k&|3fkt>eQNhzZ&lUguZ?y zs^Fx+M*y$?t?%6riu9iqF?+CEVd1Nx%fA2Tw*2dAq-aTNhx!V)`wwdiy~eVE`kQIO zxVY}{U%{_1yE}0Flw1uQ#1=1a)c=LxWvQe6FI&aQT4=fZs~q`%-iI&g0uMBLN|qxw zOoJ`q>c4};--Z2`VAmq)gYZA7u<8@ze~JUQOW5asRB8SOCTi?hLsR9Urx0#|3by3cVsQS05ux!Ry#M_YffT0GKj6;f4t(JsdDv_S zlC|~)&IqIavpD=GF=%*n0sqSvHWNT|Lj0xp499HY7QTvcpr-KmAmOEa6@yLjh{1+S zfO3Ux8CV701^?TU5ZGXDAb)dE@{5G&E5CwA%n<%r9{=XK7U06o|BF7k#K3g?7pjK^ zK>LIKSM_*>0QSESlucx?FU!Gy7@~-?14hUfaafnKD+SHklIH?Z`j5b@Y6dR+$1GB% zL7@NJcTop%jPmzH;XEU4Vf{TK{94!|kiTqUkb{Xw)|Z%S|F4(|BSlFS8xgQ+`=xmV zd@(}1I}g2(TILuJax`VwaRoX%}0X(3MPR*@ig(6>o5W(kbbRo%jU1p`H4noh{ 
zuh-4x%PA+fw-;2Lep9@HnN;66y<~`h%)vXv(N=ZlAcn*svD94!epdw87~keneyHzd z1PM^MQZzyGUMd(|QoJO!L4cnVau~QkLZNT~p0sE>I{hidz(C6yp@ZcO8r*nw2UyFp z3q2V%8u4GOQwlr&a7>rseV~ev=+I1=wkWzZup>?UMJYV+I8zH?$gtG5jo>aWlndDE ziE%9cbdIwDm*0&h-*Ddk)V8UVD(bjK%jgN7V6OYdfqZ&S?IdDhQZ6`%@ zSJ@H?S5X=RP_^WXO|kVj-O(B_pEK&4XbUTnh`({xt0X$s9r@9z@7Sd;t`%21B<$Hj z$=#&-P7(OaFZkUGumHk6;PyRO-mPz}p_T(GDtg_x+9LHFK+zMfOJdNI;R*99Pt#L4CHG;`+io-K}x3LC=WH! z){vniha}uYVh_`F;-@pCcKO`DEFP<8gNMaG7c#i(2RCo&cLR80QjDOn+5L^5@{reF zo)m(lC|luFF%(k*qw(DE*Nh6FrqhNjTVOtnsF&<}rjD@b_8P@PeRtJc7nBDBTE(g5 z>+MYiNdr*XGkbfmVI%_l*xej3Je%5zXe5Qb*L~r6GqlX03snr8ryNO{-ChweNbI;; zo8sEu!WRe#B%+c#c%P`6@Zpok;Ee_>5kE^Ni2*(VvV~Fzd7%RH5#JE6OFU& zj0%{-vK%lex81N)J<}zd$AuBk43tOS`)&|lZ8xE)jv(o7&(+lKuB81`q^N7M@|+LR z9@H2O_5m;^FbSvQXz8wL>V}J$$Zi+180$4bO^}u3$waRRck{HI`t~&;O9yD)Q45Pm zcO%4(v6eH|q5N?3Bak^hF(fod-kxeDO08AdyDjbmKSGe^nz0pbaQ($^%zeN@vRG0a z_Ih4?{%kD;W2r{xJW_@h1yb#Sm=RbbRVM{_g8=N`uZJKV)sbAUhpgQX(-nQW+a7Gv zMoPq#^Ea!viSIoQ3-MfKHV|@A)JnT-f85l@#RJY|1Xul(b5QYd6wjB74|kPbRICW@ zyfR)qU(g!yQBPz_JPn#LKc&zl2`xLM|3HsRhDGl4k<3(Z0z$BHXIBOb6&z;G`xB2Z zZ~*c(Dc~{SCTjir50K5!wu6sSXE@D8CvrY(hBkcOgiqGy?dQgC02&@{v~1hP{!Qmp z_4OO&0gFUHjG8W1P>q{%7H?8#VGcqD@A3Ju;tPVYhFDYm_8YyEC`vwVsC3&v4}R{IrJ}+rZr)IjUPaH`TUy@`hPFq|8MW;KR)DHQtKh| z%S&AA^c>S4XOSQ4beYscNHXsN^;o>$yl*3=eVvo;B+BVNnbuSVrp!xcmF?h z$Tq(3yTF+PrM%4A z5|_m}Wp6Ey+?-X5m)V;rY#h~9*pI2mcodBeZWiPeDF+tlYhNVmmNFTNn`nafO!)%= zn{NnwWLk*8xJS$20fP46lPRNFBM};v)T4IevdpdJermZ^tz&Z}IpR~9=b%P65km@E zi;yg0p_uYtmI*NnEAgt`oQyUdUd$*PTif4(q0tdH9!73BS$~HT(-}E`;iV$4o|1XEI1zIAU14rfWj-=QQ};p3GSDnT>!yaex9- z{_N>_QIe&A?>#>`IB=5CT$0V_&~3>KwqIwk|Gqu`vtRVfjaY_mN>{2lA;R&=g=*-q zJM=RjPl<9E4^SQ@l=9Rf26hpXQ0~yqqXntyF;Uhx*XMh#*5ti5z{&`d zXps7-Lz)Ul2Wl(_Igfgql;vDy`6QMz>&ruiZMR=n1QJSB2V0-)SpBc46r_V*ZJr_e z>E($2tXQDN*nN)iaCKm26W0KCq#e@Ixe5|+7mr4@WplI~lyT#@?w}Y=EQCL&3VkRl zj#X>G9dXPA96s4fH-{ve<2lEkfEY0IDt`ddC~>2X(Q3ZS>E1#+XU~C+YiZh)okQgH>eY*Z0CwX}`=Ecw$u+-otZwiLe_7ocIHH^Y}CepU*CFfs?)uErZy8#y_JwDDF 
zH+d>fJMMGP(QSVKYZHGmM90L}o3eEZ)YLIhq4RvEj(rCWNO0_Q2q5YpafSbJ*50E5xProsQP%~*p7-KX zN;q7%6@hQ8fxcL#k?axnhhJtA6&7Nz`Wa*DXXB>e^SuQO2hYL!`;s0PU35-1-iaL;Mp6X@@6wOy!y(q6fzCHdLtw<$T)1E4Zo zzCv_N^u?F(U&_Fkd$&f4g^uG}9WPs+LssMw)b*Ig)ZL(x$2PH)@etm+DzSf|~>K zMSHyjuDJ;SAp(_2K3P3&>ho3z`Tgxb_3YjgqK6Yk7FFh`Y$5}^W&R> z_%Vt;e~PB#@8d$ni5VUpRdW`AvIa(R z+B_@mxsbewky*G%->-q{flI0udra&FrtRX17Z>#_zNwB@lXc7z4U&Z4nz(K3|^2K9^ZFWRwCx-$5Rxc-*fO_Wdec|N1yNZGj6X}L&lFY zTFB?&SBRIHzK4szJMgySM-Bk@fpHe@&U~);@`H>6wk0DThMnR3jpUWJInd1v}Z6s##?{f6Do zcacmtTJ;h~rWLC))&>zwo705tO>6DXczgB24FnD#GgF z227z7{C;u%_SqdFOE)DuP3C(sIlgTA8#y|zaWhW9kk*%1($I_cG^H>Hw?}~c8wQUn z#=(>k*Eg_H86S+CtS?N3kvrsJRs&=UlwE|EV$Hn$p&(2%%NOa@0*YmoK2BMH*BL1} zu_4q)Xu4bKFkas~=|EWA0^;c`1?|Tx8NkOcZMEf)|Aubi&r}iTujV7yF=Qvm9!MVA zh$4*zqDZ-zquaV0jGd;SSc z!PIH)!tD_iLN2sQW|2_7IlNcgD32&_6%t2_T&0%qjVBdxL_S#5rXgA0A!&roq4*R! zp#`33MqkjiZci#7y*0UQATXxH1e%0paW=Y|<~OW5ny95wKtl4$~NWCboDJswS+q#_J){`%1B*orB$h69rf*s&JlWoxe2 z51q%p>xsDT4i-Y@|?A>SK-I z(g%47VEw3y#HjYAk9nMS<7>=yw?SQm)0iCuCgA?RqZ!OFAxfaTY`BTn0u588*R@w?gV`S)e#kx9ja-iD2 zXAXa@uyz1OEBc;C^rH3; z5P^f#TV*)KbytnLUch`=QKC`s+(ul5&!xLr0*Rx=okH3oK=W^`#bCq?;V_ zjqL-%s2C$i8B9TpJ^<Wr=aN$88u?Ib7OniEl*6X%bzj&8N|m?#>pQbFm( zOU%d;GsB4Jw{Tem$H0cYGS%?MoJ{1J+7;Y?X2!6w7x)3JM|=PyJ&4*MJ~SJFaRDzr z{USfn$5|s+lE!d*5pqNN+Xm(P3RS?wLY5-`vQvBee-Isa~Vj)^Kl__(<3 zqL~_I!Wcx@^e7M_0=S?x+H3lJl|AB8Wp#5Zd?y<^n^vIRU(_DLT+n;e-f;4tx z8g42kMWNSP@Btfemq$gH{!8sOIfOl#0hiCBkEX;4Dmg2wc<*=Tf$(U~h#0wowt2tL zOO`nQRIS2^>;6oi?I16~Drv|7aa(l#F?v?YnTz^qK$FUW6f?-IK{bGkcnuby_u^Y+ zN!K?sSA5<9soh~-VW4W6#rtwrXoDH8Mv4^z=gD|p(F(BCujNx4#!#Lj8lnuHqyzuO zhN$bng|^kx2=AKLn}e+r_Z`$IP3JIi1?=FcD*lQYLV=HF4v8Uh?FzbKv{0k8W?NqM zMLBnt3mlM1Egp>RP4RRanO)`sOw05e{g&+Rsvjo%28{t|o6;aQd|5n=rowc3p!@Tj zL3vSJT@{}}Ga!15Mw@K4cV{Kd74V+r;+PS3$g^`B<8kiy|-Vjmy-vcgS$Bo zDJFZ4IDP)E>iG z%hh|t&F?HHUq~mW8u^>ObOHPoX(=4+%$(1O92j()B05Gny$Yzz5}LFds}HYL>NA#y z9yef171pYsPZD|qPMb8Bu_f(0ihsEXBxzAP4VTF`BITyN65)5ar@f$$AKo55qH)Gz 
zyP0OKhd0fZw@*O%U@`j|SEeA{>rHNBY)Z*bZ4^)ZvcaMoa^Kf?l9DbvxYO;!8Ho6a_-A>`M# z#_Ud@61`T@B<;E>Yqt?Mnt%{9$1TSqL}EgAwSVc+jSz0bcc5NO%|T4|O-VQGER6v! zCefen;MLJaWQVh{e}7>nl%%tZwSSH)yloYI2aNbH?=pPnT1`-38zzmMGRy((j`7PT zIisnkQ?z8J-$78;4WP=E3ku`1*P-@=BJ9VDY1vAG^%yqdf<*L<;0NHX>cQ7|QDa(P zb8olTo3&!H(1^q);BcgWsEeiUwbueRE7C*jrqEsP2^ST(J3%EpJS*^A%0m~0);l%A9IJuCeR(vj(0@-%o z9W!&Z^2Po99tky-CQyQ-#NCL%(osE%4fTYzFx5#-(Pi5rr+Io3TliXBp+^9ikm#`k zH^;*=$w;Hbvo^GiaEDlb!~@H3KiHjSL~K80dcg?$0XhtbO|u&d|J;`IvKE{*af&F~ zv8Y{FVSyOIeCBd=s`*)>XP;;r{GvqX99pg_rIP3PQ#dLZ`Nrbr{w69DbswkVzL{_q z4$*v3oVaK^4K$7QVQ6*Wdd&_{Q1I)AJ(dG8y3Lvz%KFX7!&LwGZ6GW=yl`^~C4(IK zIlAIAvLQjWWcSF%cFNxLVwvmp-`>j`JMryBc(3D< zU@ZiXBKG)siAdU?=Rc3}n>U%+paQvs7U|%Wq~#Rf#(>F_mKifCgcZR7P4{JIrIhm+ z0}(6%AB$|=Zcde_V^U)@#YkGz)>KHRp2gbBQQRSgZoP(rWXM5^tpT)G>LBLz*-E}L z^4PFwZ=4pY#J#^m;6dsltAB`ZyQwpLivJJ4*aXX~n0rRyB*>mXLq}q|e6%K3XRXq5)o${Tqam zBu>_V#aBY$6s2IBBgZKt%EzJJ5aVwT3~2dQ-GHKLW6<)B)oU`=hJ`Rfmx0iz)35%4 ztz0f^f_IDy{>j|H6KtoTl&(lnbtTPy{xo9)TzDxfxAG#_+5OjfZ94Bj#~#sUhis&a zWk{6T@s41B98lB!NFEhEW*E7BVny3sB%H&QAj+BXGhdyCq)>5nlMym)r)Xd&aa66e ze2-FW0#cJqaBSs=$f5e2Av-GwyrXGGyM=FvUZBASpBJwQvlx%^(H-P^2q# zvsfS0M0QS?Qpy6DF=%DPyIzR`EuQ8RnGnxoYZl7sB?r>-Vz;_D!S<9lZ=KE>u=`HY zqBBMt8jY%$ujR=lUA>wE?|)ntz)B^tn9YX*fnciobU-ilyMztT@P0ce;#;YcqY*Ka z@#+Q6*Zz4e)mVYb-USs--f^#A;-Oeo)g56TtrZ80mwFFyEiH>!ytkk21*FbD>2cWk zUJ&Gim6UK{Ac45x)f$eaTc_T+yf#&kD6W1=du=-3iFX$oOq`*XGUpx>`+Xa2yO+t)`x@CBC0&4)uUlPL-666a3o!gU-t#qV9T{t$T>D6NSZi zH9W-b%)br*-KN`0o~(e*IJ{PYsekaE7I3Ba16Jge^~76L@i@Z|u_+Uf3_9uKhMj~8 zi&y|Ml4mtqDY=A|Z6SxVJ1wc(PfmV>r;mHRT zaK}2R@VUNQTH)qU(YWj3y_7-SiB1lea5IyIz_cXLnAfT`ce0!-+$&y@{m4JHCOj6WSbxSrFh#O@p*mwbJTqRv+wq}1fcU^ zjT>0RMgKU5-SWn8cZ|Q(!hyuqRU_n{1!80B>ZIssM`<6Wj&I$*35-}>ZAJUfi(4~R~ z$y4Fg{eOL32UJwK*M}MUF!cTg1e7kt4u}GFY+x^|fL&23cCi5ViYQ7H8z=-L%T zU3F~;uDyX`FNm&P(eIb!G4SwzzBz}pYi`L+a+BQLdy^Nb`=DGIaQe=YPwA;;e@t(j zdbah}fCJIHd{ZC2Gq%ymKX~K#xK{i6mUy2ZJLINj($b^*-*)IJ@8y(wX3)DsCnjf> z_fNjrdFF$CJB{{i*qNMk@50HBU50$x=>BzGK%2O8?H{g~J1}XnO~*g`?LD_+=7#1@ 
z2hPh3%dZbKA9HcgY9S(id{N#^zc#(y_TN45?*`u{g97ZPoAiAWBD)>`->HQu4O6Ww zUr*4QHovdm{0++*2Te;lJ-^HRl^XS)&ThNtY*D90F^9bNZu%T|eSdEEGdE(J7j1rF zm-cUqj~CahSgXIeXYjmMtq<=X`{>&EZu_&RIvyI@D*c21KRI{yT~)3%*fV$NlLl`O z*NfS5y`|~VkN)en3MEgz-Y*UxoY^|_>fDre7l(A#{M3B5eus^%yR_b7M5gBKX_^%J zynN-=h55}+wmo-a=t{ZbwRz!lih|R&&{t1IO%lq@FgJs<;sF zJ!?rbtx=!OTo2Yv^9+C9EqP7J{E*JUvcCeh445%7+$MYavDycQ{E@$Jc$w!WW!Gjd zx|iQ6D~>+g8Q)lWe7VPd{eR|Ud>I+3dAZlC^$QHi72#WHWTO}N8s9mPAKU+Pk8W{0 zXP@u$(eAE8;LX8FTgQa0@S5cLdUo+{uVmNk$f)D1Qm9 z;M>Oa%gPpCRG8dU$d@y327KR}cyso7d(*>S0l|;QC>u4sb#{W@Ys-L?!y^y6bRTbg^NqE^ z!Go(Oefyeg9a_&>l@p=&XuHE*=iCzs<2okU{?Vq)*7LQ|*1Uoik0U1z>6CM5L(CTI zqNinpn!Z}PGpui&m&e}4#&rucpJTkr?_gAJzGL}_n;#VhqfU6)tZDqc-|3Quucukg zjQKd5cs;+JVjX)R&T!Je2dfu&O>>Rh`93P?+5XI}-^P?iC~g|sSuU9|$*yo{&!RjJ>gBLd-~+# zJ2P_M1dsa9H1KAhGP7eh|11wVliQ^A!xu@x^ZppOy|~q#R;B%umU{eLdB4!+O-#n% ztJ52_9s09*;mutuwNtZ0I_)}jf1GlA)X|X#V+|g35Eiuw*R-jBbV1I&fZ!0B<}8%S9i>W@ec|gBw2neG48q6V(dAC!zZS;b{zHX-_8w}jGXYa!HGHM zPq#K5SG@Pn;sSZ=Hx-jUKe^=CuKo{_`}%F<>({TIKYBCH-SycyS<({oB-!vDZ38!U z3I2RNWYjhJkBtKl^)$)W82GKjh*ukQ?zNt?XY;;BNz>PQ2i>!pdt2`KO=W6Rw4rPN zRZWr_WS+84jKL%I^=4_>1W(ZKbk#|xbF+!968jmyuvjuX$>a0`tyL~3DsFsk{Lh3& zK?fde*IR3GcSifUk8t^>!x*Z;B|GW^ltWW#CasDHkg^ei9Iy>#@P|G_< zU*zq3w(gv4b#jvX(=&EUraiL!{-c%PvH#93{mpIHPiiD;J>7Lnx(gWLA3>%Kty z!=N8~qq^O^92D2%UYUH-ACtEG8j>AbEPUIY z&Au7!JZAKTS2LWyt(=~5+4$yM=iQh83<%4+|L|u28K+afcure5!F!bXzf=0}YUSFb z{-)xCS1(@6?=+yNr`z-XjUw|}4D0)E?$r%VBa3$3ZV;VWvisVMKWdM4l<7|IHnscb zg>&~fer~wG_R=F)l%?Nv-_<>sa-m>MnC7(Im%_&0_5bnx1icPk71XO9?qgX_3k`bl z)D}Um*Pg#%)xlM}kbehv?LDI>Ji{nslKD3?mIIQu#o|ppmKSp9p4^u|YSwD$MUB(Q zkXp{ZlH2gmd?i=* zL|VonJDo#KPuR88V&7@WbDNi!FHL*a_9(iVfydj#OwQ8=iH~%v1h7#?WJj1EI(JQhaAG%#v<3qQW zBw1UfbEwrKWdU!t&o%8{{QEwKo0(+sT|P9ADaOQnHSzJ<%*t%S=;dA-B-b)6>%!0_ok#KQAHAM+kv( z{EN862n_>hG##p<{vq8^{F!LH2_1|P85B2o$gm-?z7ZWLGo+&d1yrOWUJzspG>Kh; zAjoOKbXssgldO&xYO#V`TJTsiJw?Zgl*bDOv`e>zIiaxxYfCAP+j(=#YX` zQFlB;ffVaT)wL-}o1)_Cy_A+d_F+m)3PV+{j$qjP=zNPwP15|CLf)0rrtgA<+j1{ZiYlo^0r*ov_ 
zPl6fARv7CNBSFR0`sa4cz_>BR+l=NjPrkK}rVO zX_N1fh9(Torpcq!@AVE~2Yx?9LnBPWFLnJR6T|YphwOp%E^@e}9X~o1w3%M|>aYJ2M)# zQg|PS%--ddU_;_lAQ+G-DFPS4GycWJBCt#cOALqlw>f31E-{@cG$76LtgWlq&7T2w zryVHBja;4~)L{)VsZ+M909=Z}rG=UYRjP&&92GCUYLnbWsya-l1xLVT%rFkh+8`K_ zT44$u65LhAj2d!rF$V4q29DMOQhgGhh0%3L72&)50dJJS;pP4+C$`+}Nk!@0-MlnD z9|T`gRgMhg-~`bAvlQe^_MAiy%wH%lV~iB8hAjF|IjE!)l9;X62wX`Y?Qha%JU;Lj z+*CuOzNA?>&p3Ym1Hq6yXs+U{VwayyYr0rtd`~cI?Rm3r|dYd{|BW z+nmx`lWdqTc#zR0f&-H^uQ#~F$av|L@s_4!&o(Fkoi!CJt?v!NG{1tr_EPhp{k#yn z#C*YyL~J!NC&qrFf`|7U{d_N`O^#(v1rIai_`fn#mZadUpig$~5IbbuLUhPrYX;9S zG%-w{*w>tdAV-{uK-M+zD?rtC(GcmPWGqyYXp?KtZ7j&1$>KcB?~4`%sd$Z_`>N_N zPQ4}>S>=Gsf7ouDNE+ks#Zj5n_G`SLM;we)0wY@S@KWGb5E)=ylup@B-kgYj)sL*x zQ`KUfd0`l;j`UUYvmhR$tV~JqI814}u$7HNV|&-*@T}$v3lcCMyE_F1mjTqdftpm0 zL_AY!ldKSh5m6GcJG76C)LspFE}{|A?&#F08l}0X%APTO{TNK!HsK)UeghTjSvD!w zPl2d!0y&7ixLdf(#*%!VqGGQtlfZvKx~+LNc*Fy33$pYd!JJLPrS(bCccdLwUBx;H<+mASBvES zB@R=EePA_g8Uv;Kg&305eP}hE623>bq|Za=pFwVaV@a#Grf^RA%T27w;C3pGKWcaC z(%Hbz`3*mP7RPs7DOi)4!_n(h3;KGa=~L%c)@((#daEp0&H2AP|Bg}M4gx;Y=^?2( zegWqcH&7h)E2R*>UIs6XIcKRL!^U8?QRBzc4E%F8hy<`R$P`Z_SF*@VoI5476LS{?>%2Y>qy!;|Q!q2VNHnyN@fc6nPldvmjkTL_e-_LZ_=cl7B!Xp@VC zB=V+=bQz$k&p6jNbWZfZ3U&;NDm7GY-@I=Aud z7}m0N=Wea5H-irM%Q5J)b-#F@ZM3h&rzap8A?U4kSY-)TS>w58)h6!f=>Wab;h^wi zn6ullTC&l-mnwx-x{^3f(b6Ht!4_6kJ9N+|wBUPvu}Xz;E%L`)g#lUfNffE6 zJ*qs8#fdhAezKM{O){=VAJ-l5Pn|ft{I#u$>`8_ZqBerweFlHo05#kwJ^!6HY5GOr zTX*0sfO|~hU=w4RC5gAxx2qCw{e8ffE#dH-&u~P%x-Yo1CLc`H-ggO2rnZRGe7e1* z8ve+`TIhV(M68E>sZFNJQ^RdZ(l^14fxNc>6nk9_vLk6{HFb&djh0OneTWsB`|dSI z@(b5ACkw1pUW`73S{W8^IjFp^kri3&j|oCY$E(H z>q)Y%WBX!*LDjWxK90A41#6LnSd|wUpDzwVt=lLr=&eOI?3BBb>|lLcE(J(>*ZY{Y zwKjIFnOFonN<{xSQY+mw$%CwKpz>pDOXqc9vuw1w)PSsCCmJdF6yzUILGI+`GPxO9 zyGAf{q7^LdyWpZXDo8{HQhWV;f)=T_R`4dPd*~REs@5@VmqU!Q-6yij3a_ki3_D|7tnSL6eq+?oS>1jI;a^|#P&my(rJUpF?Sop>z{37oYx{JTPd6fv%G$dg-%gb8|483y)Q!55>1*i z1Bewb8jNE5%V_y!!H3-Y!-^Y6td2&fRyUiAI!>bwsnKh2h1KCfa+<>h(P^QfF4-|e zToY3i;P|Cb6arp0cgAdtMqLGMd?qYEsih^r-7sLPi8JxFFgGE!>WV$~dWQCjEkF*( 
zb`zrRFKR4@U5A6CL_3g#C%t?Afdr8fA+FYxR{DtyShLVAjn~=G6;Ye6EwXoiYsJox)(R)F}jhZU>G8T0^puUUFVzDCJ+?HIyVT_Hr!=k~H zlHtebgT9u|;1Mn&!Rmb`4vc_xfMzC`b@kL{v-`ZSU`|Gl5tmH;lY?+IL_^~s{u)YT z+TD=jGlTBLQo)&7QPCrFr?tg6C*!l)u)Ng2%_-{}X%p`!HY&+9pmtP4E8~jp7@-9Y z_;QSzfHvDVK$|>#3F{$9S2WUQ2f06q#W^$EvnKBK5N&SN-DoG6N$Q}BP(av&_m?pZ z9z(QNk~&tvAGje}Rf`m_faOSWXZ8!v?1y1+foB=jsDGPNu65ET3H4NVL^%*XDa%ld zGA08F#Cb|Sue&xm*s1?KZ^ z*pJQBHPAM{hyp>L(4KL=lmHe{Lm(mDUQNc}JVYzZj6m@plQ0P`n7fmb2vr?6?qpi` z1IwU0-mn#Bu)sAod0mhI9d_y4;QMzSm!@H{GLz)MQ ztI6KQOFc`0TL-u;CAbF;X_I>2g$O3!wG+T~X{vgxLzInCoc$D{r053{ zPXkjW(Vr`x5hlaflMj6%l1w{>*N)ZRsB7{uiu0&-*0P$eqfHv*3yNXBZx!)@cCxKI+-~2884ph?UrN5b}Pny3o}Z{iwe?W1gx0zp#RckdRxAf$bg*B6F2{mHDKy~x*A;h z3C2MxiW$?6hu(X(rjrkh+e)_JXGrsizd<1K7e01&*32xF^TU@v@~HR6RR|~)%%_|P}vYv zEZvNcUFJmPv*7`zCKkgtv+jQHB=GA4U%JTtEKuVclljX;$KaB5)bCy>%J<6HhCDzC zQ%XMqP*jm9TA~uJ(;s%0VV5od6IpV+Y$3Ck@6k@1jOicC)hU~*u^HNNQk^~QxmCWmf8p(u=@>iHu z`MWBT|E{$NoF9R63khf8m72(tG%pY(NxC`fB*i}me5na&{f6Vi>E=ZI%0&FeBW)aZ z0RJWMrH2QDw`%@+gvMS;A$)hN4-d_5(grSkjk)-fnue-()rcBr2=;8!`z9WJ+zwj~ zweUhDjVKfGtK4tNb>KLnX5qd4%d8rLXcdU0nz;ERucY!TSr+8xGlBc;rc8y7wEoKB zY0u#-4ymQ5O-@f$v1mH+T8x3Ts^IXE1$JiS(ixe!FwiX}m&RQRG|1k3ET@H|ZH#}5 z%#0WbjCXb%1J7W%Fl#7qKlsjcW~K0#>5#WRIw~>n#eFu^bQ{q>E!G~}T{CQ{(#g`? 
zl{2JXz4Y&Gj7a`uXg#&FF8u}0e|uGd$yW~@bK_`b`wB3`zXkbI@GL~uY^_5s^gqSk70Kdrq_%%>^MX5eMpzhqRVAO z6AaXsd$iP@>}i6K6m7vG2b4B3B=yc?{=%6?on>0Y;H zYjZ+oN9ieLwMf)*QRti#5PG9aH8{e4dgK?9T8I*EASV9VTlSwXL;Ov!&^k!^$E{g4 zN~;ctX#8p_1|W?|rqLA$+LWXas zM#Kz4gbv-=U}W_K(+7t+9R56GqNi`k-EEBN*D*&s+(dU>M0bTqnA#U|B4$7)El|~F z+PqfQ^uHm{W(5`*>E1T*8OP5zG_@pqeqa@&bGhjwG}`B-$f_7Iw<(X{{h^=@ogkX) zD-L1?U~IfO_o=a}y0jSH?SI=|2Vbj+Y)GxcIFX{0bjYGn;#qgYC~zKFrbaSm&a~L= zR!ZivW8CLP2WAC915MEwsd@SSEhA*EX}dni)AiMFl9$z0Hq!c!w;MYQA{lo=PX7~mJ9?x^9l}% z-iJJx=Iw3tt3oIVFK|dnQtGpF@~BVnuNQ9>4|L;7$L)TPs^SpkmmV5#8FT!QW;SM| zSXcClMJN@e7tj;6F&CvfqBABI^NRJeG8f3Y8Dd*~CZO&f7)roA$oL5Y4@HL$f)Uxx zo)y<4>joiiO^5j>R&)7;TKd#kR-bgH9yMCL!w~;7)=8`YQARED#!zhhOMSp5*RF!; zla8fIQ{p&O6nU6oyRnfFcMqD`TB5bYg*Oo|a|N(7<+eIIO*t@Yv1!5T!fg7oAn9yjm&H~3CzqfdQe_GC#bl|G3q)#bFzgW&#H*RnHC zXUA=8M$cW_>yv~wHm;<-o9IqT{1417chl!SFvK2)`Rcw>qYh2#e-iH3|>t53|`n3?vUj~&w_)0K#qRD<9%FX@1 zLb6_9Ej3>BGxW(gUD0@necL+sA{8EJDv5HpB>JR4Q*KXcEyG-A5y;SIF$zez9mDG^ z!R@^Y36Zd1$OENa8kp8UH#jf^4A7^^6CYAXPBM67dsxdt&JwF zjIL?4lx~KiMypydv}Mi4+T@vOXcW|~8QC0h8LC44Q(2Q$b`1Al=X-_+QQvp`NsY*< zCHge3vErt9KD~1i$$`tsySqYd;&VaQo@HAx9(zy5_Vb0Y_9L;T`(@V$EHHU6PQqRa zR;1kq!IaJDHX2h~{O=^4F2)Aj57p_8;LdP0o7e`-f?c^7!zuMyEpcVI%~_Te!%Z^p z6iEGBjzem$nbW-tNWx;38;cO8CX1HRg0WE8$;}O@W6FobzqPjD!6_(V?*<7rt)^pJ z2!ki9mwG^`MYpL72&mtR**QnXseD-FKb!*a^((x42*abRgkZ}EkznCjO?`6QMqGUy za-rs4DF)nk_u#d)DY@^at02>Jp+;1M`iG63=+~tiqdC&1SIlHoZls*PR5^W<*q*YZ z;P^emfQJ3NNRwmYd@2Y;Gk&OJ#bG-5av{G{9TiJ)qd*fdl*GK=HzkJ+zjNHQ7t(yXEG&DHog#BM=sRbBtl z>uA}lU^U#7oV}pO;f$$neBfIzFdYu2QrF@1P&E-Zw}KjovCG9Jz zI9|mU`p_#H0ePfebCsp0jJ$h`kJ&e~Ng0+ZliD*ftmSDl? 
zMh0Lbp~TkY9`eeVyuapMxOfrz(FQV0^(3={C@=MYt!->$Lb4pi46=f+ig%_UpeZxb z<^SJ~oG65P#_|@Jfe7v|HgYF-K*WxnIUEKX4_q*i19h71j^v;of2cf}>*?vuvj<7N z!Eim@i=rTB;%O-E>Z9I(*Zl#s)RQC!DUhi!2(r+>JH_ljwAK-iSbYz=qZ;?&VS+~?{L6@i}XxtH=HbeE7T!*{jJQaGRC5pLm>MY zF6R2>a4AtPFLq3w+6kM~P;5X_4YwIZQSAtSA9bw(=1ygH$c9yrsxXejE4@d$GN??A|)BqQq1~^Lcme>YN8abL$e3U+g8Hh zr1O2+d9-ie=4$YKlrYenX8;v%sRr_UE?OZ+rh)739UN3XP0Slx;*8Q}yLoBJ2&IB_ zoh|Cf#YNzC`UnTnS6)foJeetraIp@xpL6VAZ*=B4kD8Wy-;K=~ zFKXKSfP+}(+33BZQ!3Ey(Bnm5Rqz|D)Q6STTMnCc`;yza0`pR4B%$#9SYB9bhQ1-`mnWW&R5UgfF`0vym7*MsoRM80HF?fA zR}ffBvV&>7PM9k;#BUciNIFw@(oC+9bPjrJFV49NYm~0o!An`%bsHP3rIer_ZCdF` z4no?sCmUmz9w7slqp$m*ucaOI^_rTCKFL;zDYZLkg4vH}98^v;wTbZpg_s~lRXPRn zL|Mf*4o4ccHYv8o^uB9oYEF937SGL1X_E2P3X0@R92Ox>Y{^1#-(L+kiTqg|B}wS5 z@+X^T8`zRJNV8_Wu?xW~`7AFbY-CSTK3{Iawrge&_wrBc;SNzqCQC+}cCPV~=OL13s~6eFUV z19b=Zhe*`zQavTvYoY2y8?xdX?72w|D6JD&G9H?w(+-DvdX<9NbT^`=7#{_78U9*J zxR^DnB3dy@<-xk;;!>~+ZmgtvPxZ;KgLbx6Nk=Dtq09XI8O&pap308xxEni%G$DJJ zA#0;!fU;UXQir(+KNzKpqtu7e&JSd??qvNhT%Cd*Wu&6?WK*Ri>&TC3*n)!qO$z3q zZ(VfUs}`YG@Z!j|TPnF1C4Z2WE}0voFlO61Ys4B)m7v*t1+I&b%IkW1WLh%D!MHM8 zdlmJz#I$8Ra06q4z*DjUF6kw%K%f5}lv)x7*)IzX30{QPCw}Fkn;{kPGx_okBA@Fk zgj#IuV%^`h+73R?Fm_Uh-p)><+;Gm=W=s^dF>NTCbSzv1l@EcLKt*LEIC{Xxq4$t& zu@@_z)X8pXoRh?E|y-Y6s=;E>3g;9$@PcWb$24mjIl|e z?v>qfeEhb_a0;i!&5CE^-qY-n4%m;SM&R=?O6qQDS~bjB!Qen9uwr|Xbz7!QN`hfp z(A{@&3}9ZvL zFS*N<7W9hZk>55*ujQzpx?A!miu>%^M6f^vmAPA9B?>$fX=0WW!QzUvzaL(vJ}&SIkTOb&AgFQ(76^h8#2SCp|^E-a-E@6WX!6WqvI? 
zy@`Piok6a?)Q74ElSWr%K^(IMp6c>c_^baD2e%!yX{vMsvhFD+6jj1uXU%l~#$;NVTQlEo@^kQ}{*LGX(S|)9 zi!Ero9!W8V*M(UMmnWI)k+PRo)e@aENNQwu%R}zP_u+PPKagBf28K7iR4FqdKhx8hcoJVy|4!M-K`WcL!3K%MS|{yISfM?vBniQSWZ z3-zs5kax9I&LpcY?zyQWMkc*L`lka2g*aN;F%UaSboaFF^q)OEeQUR zyC!oYTp?DFxKT=h6wAf!=q(m5*}SF^GwgKY8&4rcexaL-bg=n{e77fjgqC6byVzvsck z5;aej2V+EhM(P=|j2qrCJ#bE1RSoHvo)}9Sor5+{Tg^eZe5Riv*U=#nZlbTp|IT#n z@mS<7Tx+J$Sbr$etD=WyewOO%6aCzF-xRCg3dlt0rZxqaM%i zVl)0eO_14gj3Y4@h~qRBzYdZ&b+{u{%iqnPK_{x=vrBN1X-Zvk3z0YH`F=oDR=qhU~r$qx9_CA^9ujWhp47+pgugYLsw$;4*%iz`ctzhuv83 zis~Ao2c*?axxk6w5W$V<{WwO_wP8K1GnrO_$S+K3L=K;XWlcfR$l#V8C=x*pjI_wq za(LEg-Y-+Nm77d52V$E3egSmG9Zm#SiZ)4WCFUzOp9Fr*N;#;R8xnD7RV+p3RjnbR zM2^P2;Zx!Xe;9m|Nv=OC4JlB4^6fuiM$c&zQr#(aTb6XueG~ zBG_s?2_Ounv+SN72PIRZnM(Bv!ByEDY;3fah*E7tt7&wbI#!+*INbgSO;y38FSX1H z9q~~idYKNUnUZwBB=F00@BI6OjDeQYOU-VQmCfE=B=GBiuO-&F71#S^znrqwSQTpb zuXzA??8^mX$v(O8=ki&N6Qb-{Pg1YGsw#0un>qhjcV6ibq%i|(yaRGby`|0iP$FmC zJ;9lH@jSta_2fLGAr9BEww#BBBE4(jIgsP~?X=b-K{rKDjCTZja^z$VDtT$8Lz3-b z@f5D3n+J>&cDw4Y5pMag20xB0Un4EqZ!bBeQZ#9v6e? 
zIvOd>p*it_6U84>C~1y?F_O!*s#%p+=0ZptO;RsF<;I=>5a!`Z9W|QgZZa7D5SM}9 zoT|B={b5su4nxw6r4}(cto2+QUw|B|AcxfW-5yp+V?d^Q(gfw>Vy4l_JfG7vALlX} zEL}HWMKc<5unS&(pf=S>y%FyBz`hLZTS{~=ZW2d@(@Kj3C#YOmUnh=6Umst>LF}!7 zOQXfX=}+6;X{(w_d?p}gt{9n%MhH$8b(F*LOn^4o9;k9=9i#bZhwC~U4UK6THKo{8 z#PONu3O1o_RTwL?*vu;lhqq~1GNr@14_Q6ve8Py36K=;e?Ba#JMURM|H8hMMk%l5L zQ?XYAq&TZh#W;j`WlwZSLNn++-X`#10-m<8lR07W7?0gZItuqq7-~je9bK9xjEgT5=-CEyx&( z36=!tra|zg)*KF-RY7d0i;^t40Z@?*2j!f^{oLC(#B23Y@Kwo%*omMVyJ~*wGTiKw zZE~a_PqHTz51gp4&S~}q)j(%8NQZRpE2fwqZw8QE9SUOhE==+MaAY$l7F|HYYXjcH zg~G8I(VKP1<2EJ%KJYBDv{kzCCeank--9CAk%Osl_S4cV`UHT7Cog68WpSCn73jzy z<3AUm$8ynQO(bfZP*;s_LRJ}w4;T1F%D>P1pZL)}9KUb^Zuk@sF z1kPceMWiB-Dx$s`q(@BR^zcSPbKJsIKTI$pvS9Ig)JKid1`_#BHmF9a-xgQGX$m-N zZ&^!Jda@CRBVdITn~P>T-29+(w@rtVS5!^P@F%tWmxgj8W+8^37rfY9chcY2iW*|Q zvDQnMg?cSHer43lg8Zy}$6!H^jBC#^%iOWwN*DW}w(3em>&;}9m}t2>3X-RFtOhTS z!i66?{!8hT3m=Aa5Q0#6u?8i)y}F?KFoh(v!cC*(Br`4I=PK7B9d9Aoo<-@X64>no z{oCmj2}hG=xSZZ^r)WzA%mIDlMVxi~9900btiJSf`(*=%i9H%1RgGUuIV!WPW+e(% z41dF(FVP3kOG~h{$ve%>JIT-f8 zU>Y=*4E@w~9G_WC)7#5+$?#R8W!Ynil>Cxm@(ArgoQ+=)#hQtixIu!y+<+ z2~|U`V`kihD*tW(KiehI=4vy*v+OPS2jXo3CrRhFKP?hWexr>YdJpClY9LW=aK6nu zZ$_rMiSsw-5{5450&l^yOX7{{sS}|~^Zuo!%>1z=g%d^RuKpr`9ByzB^ElM{Ajj?U zxT|+As^;dj;ufbYw?jG|o=M`bLwM6{*k7MSrd2jlxdqQH;hS~q zBAYa`|CQSlWK0q|s+?gmDM&0LVl}}KWiufQ?&Bm*`}MQsv{lv^a8uk5kgj2~Fgs~$ zvTouISo3gA)2h(kfSV9UZ><{c%j|*j{tueIB75y&4|LhDu24*-p_5t@sYJ4^7da^8 zsNlo8u1JNh^DE`0CiXZU)5i4tYwq?^tiy{j38ibXKL(Y`1+SI`nYa}bmbSy}_E{}s z>;#HWH7D$>FKT=PvUH7L!}#~mY!SHw>-I)iNz&ZG&t4>=Arbe?0U)q>! 
zXq>_cSUT^1He~2z@oWQYvc0z$|1YF@Ln#Z8RX(ITd3-?;q3ebRbB1QOOdW@Om-|41GFXQ09w>r4KgC@b@g=EQ)q0zy5t3h zuLp5Bvquzl#S|w$)15PBqs26wAyo$*-0GqN@Q0By#B3m=%-b%9fKS(OX@fot%U ztZe8kGbB-OMX%JJvvrRC2_hwkT1t9Bh*1k=OV%t0QT~6}Y1QV)G*8v-ji9^*@ucf) zy-{jPJre3wnfI82mNts5214WwNtv3q##PtgOfI6kTEufCmR}Px1#u6DCg?Nu8(14I z*sQ~w>>8QSWvL-#GnG?Td9`+w$;`M;(AgMEGb&alaU^D_um_pWMvCbdA_XOwN z*gvEzgTst!M9e`oI$Crxw5Nv3!`bSZ4M|!{Wcv9O;!&V=QKCPsgUQ4?`RJ__aF=?N zWUwIUU^1CU1>imqjoq;c+u@7OA7LQ)Jxb@2E)q2`!pm?p`-p01s5>ONk-A zu>FQeG$d%78r6Z=#HhIK^Kitu_4EOPEZ7E8FVuzI9KW!ExfA(;Bx}~lpx~oFvtU=% z1+tff|4WzzTv9{QcC{FYTSxssr6(#Oxa>boWc0(o>d0Pj>VS2QgFVm67O@TDc_f_)-$>jb(lNKb2Kk5x;pV3zoiZcu&iy6-(cit;5<* zhcCNAo1c5w;_~6#xTW;%(gduZQuk?_D;%F0M{(D9{GDiiM60&lR)e%jvya#-sF=qz zA?C-s9E8klV{&vp!jII{F{^0MEfd3Hfhrs&UAOp&1v7P;hK?B%H?jNV_#r+q111cp z?ggR@vcf)GG{P)OamKPCVKDwZV_-uzVMelXTC3WtR|p(OXt8vhZhqs!!DD33eb11) zcckCNJeu%O2l$>6-Wop`K5mBL`OXZL4ePOgvfSz}!RW~_^`vTJRzcB4uM?h|)FIFN ziyOo9^QoC>kbj*;&DBd!gM{|MK7`koOv&*aT*Fl7=)I(IC4_ev99||};@2B%6kQ*D zXaF(Nk%Qo6G9n#iupHRIp2F<~zz<=fQP#{0#qRl!5JZ#6bPk5mIGn>_E%EJP*A|q* zt;MLA$LGry5wv6>81_n3gYb^sLc}2G{*)V>_2>VN;q70l##ho99zP?5v}yFo-Bv?m z%F3ESs88ECex+5Ts0uN%R)3`lNsCW0q>Puyr(Y?vF=xRxX5)0ZcjMMul6FL}l%-wg zoE&?oaMDK7WyV;si~{}gLF^-Z+FSJRvbUKv-l{bCOj|l z)oplbTT3{#=rj#I3|WHP@zS}65k9A4ke|cq(|cH(q1n<6wm{Ab@MRget(+s zRfLGUW}A3O@oJ7*O}bj~jG?j3#lX_8WhidcO)O>)8+~7f$0=x|1{sX(cpU6q(J-mg zY)p5Ny`7`TCm*gF`SKoCEVHQc5*XfrsE?(B&PX=qZTI&behSmr8)pQmuc~z)PRSyP z)6U~fBy}WlsuMUT3}kRTYH7v{S2h&Bh{VY+j?sDXA8tXDO|ZGOk}PQN63}d#lXRkqrh-NM zn3{Jg07w0Cq8#w(*i;!hN+o0*H(rFn5>k?v$zq5{lO}#-Oyn)EG=lhr$Hj2x$a_t} z%%NAiF1Hk|%zLnFrWQAib=%5yHIJ? 
zQV2xTjppuxZ;wvZNG)(~mTtu|i&klgeKi1kl6>uIE@4`j(t<7fyz`R*>hh-*zq5r{ zmjS8xjO9daqy(j-qMzGD1eQ(&7mXnwJ{r!cYXS~Vd#rf!&?G!%&Z0c8eNgO|Ef(Vn zI)6+ldEMy>n#7ZiUB9kmR3iVfLbi@0DW9dXWqNz48{TLOgDcJJ@T1OLs*8MA{-)y=1VnPCT#5bljTi4+l z?|t?v(5TP4Mr7x8(V?-rsBFA8oGkrefH+CI#OfYhbMYAF)NC<+G`-V3`C3#k$ED_( zFs8(c4A>(CF-s#lG}1i_$Hw%ynm4fqKgX)POz>qryvyeB@qL&gwop2$W!@c6v>*-0 zjl`X@#3P5vVpQ)HdWyV+84-E+X5Q@NE=4dMyAN!VfKI$qX%C-8Ts zPb*Zu43mab9C?=(*5YeDgYmS78n!zbbnE-TwasrYaUTMore6UniMQrY@UXDY^gI~w z?i{#FB0r9Hsu7Yw&E^ew&9=% z^XjVdV@gqXnMZ?lSpMY8YX%4O3arV1lZN$3Y5-nkS2xJh8hjj{TGOr7lZ>%eIWayl ztt-wug3rSAnis^!RWRI!#9^^*wbv5Eac^4_`J&@g?)+Wm>?>NlPub@0vB~7ZV-i0T zn}2#;a&i(qsL(fe9Dm~C3=@VHCa1WG+L5qzRsgL9E1%OZ$!5> z#Er}X(Z6v1*)4y1qvtZZUFyWRK2J?#Mq`is*3HOn#~=Q0uR_5BH9mW{S&JM?f$>Sz zuLEvd%dRe}247Hl+1dejU}XiV9K`n0{$m7{9+v0c8x@`#$iqlnQEIV@!>c?3o2H3} z`v>&y%(>MZjw3pYQn>ObcbVqXD{XDpa(GxrTRW1EtGaBa7o7oV-WCqROE0*ddse(B zuyiUwE4FhGJ*cODp1`kEFaUf7UkBKHC-!3d#h+@7Opl?z83750t~tkXLJoj z{veXBy~?9%A|**&0(?l``)TXv2~OBJh2J*L7N;0K^AI!<@UG|EeQ@7nYwo@A&(^-X zfIl3ra;cK#p5b+{I7ll~l`X?hJv?KI8GHw8a7b+|nRG>$s^NRv+TuQEwgdv^>IEl>7U?2<&`|G$T>VL!_w@xuIpHb7*nk&f6vL+v@};pUsN|6 zIkWT_l>Q;QS9k3{Gt{s_>fWO7W z>%=Sw-r^hwpO*;I$ z4ovd9#g|-6uyy-j>+UA$|JzD3U=KsXea%kS-VfJR#vA+no=JGor*tje3#X`!HK9XLBEnk z@Rclh+wsN^tF7uwSv3#3`FBJ)snM|Hl$bwOfvdl=mv=dcnHm-;2IBdjO4Feq>Qvkp z>l7^m{@VDpi7~F8Xp{<;gtTGxOE`Yzhaz}F*5h82w&kJU=iArZJ}y1t__+LINg@+f zW^5g#pX(yohFAw*z2R^cz34tu^pZG?gXr(xRs-Q{rbJ81zMr zFFrKBJs z*%@ZqqMb2~<^+C_lP;@Nf#e$ATVYE=)tAL;x{ng!I;E&dl?;k$PQz*gTIx!icZo(p zsd1x(AgupC5bBi9rjN!H)qyw`TI$g!TP(>5m&!vKZtuueT2eT!MbJdoW1|)PfX+Jc z!+;ZGr{aXC#A4+wnKKbioG7hZN+LNT@E308ftt)`lr^x-vAQvzRbvt??QHm6lLU#t_lFw@*6HOq(bI*_Gmpz;|s cYQWgB5goK)eQ9X?l>L-l#KCCZNCE%)Kcq2F0ssI2 diff --git a/parent-pom.xml b/parent-pom.xml index ea1bb19f0..466d4fa8e 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -64,7 +64,7 @@ 1.3.6 2.2.0 4.11.0 - 4.1.100.Final + 4.1.111.Final 9.37.3 0.31.1 1.0-alpha-9-stable-1 diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 
e15a4e3c4..715a42878 100644 --- a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -58,7 +58,7 @@ 2.4.9 1.15.3 2.2.0 - 4.1.100.Final + 4.1.111.Final 9.37.3 UTF-8 UTF-8 From 50b5788911bb500ec53f9bb585b9063ef6a89cb7 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 15 Jul 2024 11:39:02 +0200 Subject: [PATCH 47/54] SNOW-1503887: Limit retry backoff to always be in range (#1822) --- .../snowflake/client/jdbc/RestRequest.java | 70 +++++++++++++------ .../client/jdbc/RestRequestTest.java | 39 +++++++++++ 2 files changed, 89 insertions(+), 20 deletions(-) diff --git a/src/main/java/net/snowflake/client/jdbc/RestRequest.java b/src/main/java/net/snowflake/client/jdbc/RestRequest.java index e2d48fa82..5be46c5de 100644 --- a/src/main/java/net/snowflake/client/jdbc/RestRequest.java +++ b/src/main/java/net/snowflake/client/jdbc/RestRequest.java @@ -274,7 +274,7 @@ public static CloseableHttpResponse execute( // if exception is caused by illegal state, e.g shutdown of http client // because of closing of connection, then fail immediately and stop retrying. 
throw new SnowflakeSQLLoggedException( - null, ErrorCode.INVALID_STATE, ex, /* session = */ ex.getMessage()); + null, ErrorCode.INVALID_STATE, ex, /* session= */ ex.getMessage()); } catch (SSLHandshakeException | SSLKeyException @@ -496,28 +496,18 @@ public static CloseableHttpResponse execute( requestInfoScrubbed, backoffInMilli); Thread.sleep(backoffInMilli); - elapsedMilliForTransientIssues += backoffInMilli; - if (isLoginRequest) { - long jitteredBackoffInMilli = backoff.getJitterForLogin(backoffInMilli); - backoffInMilli = - (long) - backoff.chooseRandom( - jitteredBackoffInMilli + backoffInMilli, - Math.pow(2, retryCount) + jitteredBackoffInMilli); - } else { - backoffInMilli = backoff.nextSleepTime(backoffInMilli); - } - if (retryTimeoutInMilliseconds > 0 - && (elapsedMilliForTransientIssues + backoffInMilli) > retryTimeoutInMilliseconds) { - // If the timeout will be reached before the next backoff, just use the remaining - // time. - backoffInMilli = - Math.min( - backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues); - } } catch (InterruptedException ex1) { logger.debug("{}Backoff sleep before retrying login got interrupted", requestIdStr); } + elapsedMilliForTransientIssues += backoffInMilli; + backoffInMilli = + getNewBackoffInMilli( + backoffInMilli, + isLoginRequest, + backoff, + retryCount, + retryTimeoutInMilliseconds, + elapsedMilliForTransientIssues); } retryCount++; @@ -630,6 +620,46 @@ public static CloseableHttpResponse execute( return response; } + static long getNewBackoffInMilli( + long previousBackoffInMilli, + boolean isLoginRequest, + DecorrelatedJitterBackoff decorrelatedJitterBackoff, + int retryCount, + long retryTimeoutInMilliseconds, + long elapsedMilliForTransientIssues) { + long backoffInMilli; + if (isLoginRequest) { + long jitteredBackoffInMilli = + decorrelatedJitterBackoff.getJitterForLogin(previousBackoffInMilli); + backoffInMilli = + (long) + decorrelatedJitterBackoff.chooseRandom( + 
jitteredBackoffInMilli + previousBackoffInMilli, + Math.pow(2, retryCount) + jitteredBackoffInMilli); + } else { + backoffInMilli = decorrelatedJitterBackoff.nextSleepTime(previousBackoffInMilli); + } + + backoffInMilli = Math.min(maxBackoffInMilli, Math.max(previousBackoffInMilli, backoffInMilli)); + + if (retryTimeoutInMilliseconds > 0 + && (elapsedMilliForTransientIssues + backoffInMilli) > retryTimeoutInMilliseconds) { + // If the timeout will be reached before the next backoff, just use the remaining + // time (but cannot be negative) - this is the only place when backoff is not in range + // min-max. + backoffInMilli = + Math.max( + 0, + Math.min( + backoffInMilli, retryTimeoutInMilliseconds - elapsedMilliForTransientIssues)); + logger.debug( + "We are approaching retry timeout {}ms, setting backoff to {}ms", + retryTimeoutInMilliseconds, + backoffInMilli); + } + return backoffInMilli; + } + static boolean isNonRetryableHTTPCode(CloseableHttpResponse response, boolean retryHTTP403) { return response != null && (response.getStatusLine().getStatusCode() < 500 diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java index 536acbc03..ae56a49f7 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java @@ -5,6 +5,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @@ -23,6 +24,7 @@ import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.util.DecorrelatedJitterBackoff; import org.apache.http.StatusLine; import 
org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; @@ -596,4 +598,41 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl } } } + + @Test + public void shouldGenerateBackoffInRangeExceptTheLastBackoff() { + int minBackoffInMilli = 1000; + int maxBackoffInMilli = 16000; + long backoffInMilli = minBackoffInMilli; + long elapsedMilliForTransientIssues = 0; + DecorrelatedJitterBackoff decorrelatedJitterBackoff = + new DecorrelatedJitterBackoff(minBackoffInMilli, maxBackoffInMilli); + int retryTimeoutInMilli = 5 * 60 * 1000; + while (true) { + backoffInMilli = + RestRequest.getNewBackoffInMilli( + backoffInMilli, + true, + decorrelatedJitterBackoff, + 10, + retryTimeoutInMilli, + elapsedMilliForTransientIssues); + + assertTrue( + "Backoff should be lower or equal to max backoff limit", + backoffInMilli <= maxBackoffInMilli); + if (elapsedMilliForTransientIssues + backoffInMilli >= retryTimeoutInMilli) { + assertEquals( + "Backoff should fill time till retry timeout", + retryTimeoutInMilli - elapsedMilliForTransientIssues, + backoffInMilli); + break; + } else { + assertTrue( + "Backoff should be higher or equal to min backoff limit", + backoffInMilli >= minBackoffInMilli); + } + elapsedMilliForTransientIssues += backoffInMilli; + } + } } From 8ae689702ee0ee52eabfbd30b512af9d9f1f494a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 15 Jul 2024 11:58:57 +0200 Subject: [PATCH 48/54] SNOW-1528799: Disable OOB telemetry (#1823) --- .../net/snowflake/client/core/SFSession.java | 19 ++++--------------- .../snowflake/client/core/SessionUtil.java | 8 +++----- .../client/jdbc/SnowflakeDriver.java | 4 ++++ .../jdbc/telemetryOOB/TelemetryService.java | 7 +++++++ .../client/jdbc/SnowflakeDriverLatestIT.java | 10 ++++++++++ 5 files changed, 28 insertions(+), 20 deletions(-) diff --git 
a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index e79d23b28..2fecc4ef3 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -619,14 +619,9 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK)) : false); - // Enable or disable OOB telemetry based on connection parameter. Default is disabled. - // The value may still change later when session parameters from the server are read. - if (getBooleanValue( - connectionPropertiesMap.get(SFSessionProperty.CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED))) { - TelemetryService.enable(); - } else { - TelemetryService.disable(); - } + // we ignore the parameters CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED and htapOOBTelemetryEnabled + // OOB telemetry is disabled + TelemetryService.disableOOBTelemetry(); // propagate OCSP mode to SFTrustManager. Note OCSP setting is global on JVM. HttpUtil.initHttpClient(httpClientSettingsKey, null); @@ -656,13 +651,7 @@ public synchronized void open() throws SFException, SnowflakeSQLException { // Update common parameter values for this session SessionUtil.updateSfDriverParamValues(loginOutput.getCommonParams(), this); - // Enable or disable HTAP OOB telemetry based on connection parameter. Default is disabled. 
- if (getBooleanValue( - connectionPropertiesMap.get(SFSessionProperty.HTAP_OOB_TELEMETRY_ENABLED))) { - TelemetryService.enableHTAP(); - } else { - TelemetryService.disableHTAP(); - } + String loginDatabaseName = (String) connectionPropertiesMap.get(SFSessionProperty.DATABASE); String loginSchemaName = (String) connectionPropertiesMap.get(SFSessionProperty.SCHEMA); String loginRole = (String) connectionPropertiesMap.get(SFSessionProperty.ROLE); diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index 189b5137f..ad55f79e8 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -1655,11 +1655,9 @@ static void updateSfDriverParamValues(Map parameters, SFBaseSess session.setClientPrefetchThreads((int) entry.getValue()); } } else if (CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED.equalsIgnoreCase(entry.getKey())) { - if ((boolean) entry.getValue()) { - TelemetryService.enable(); - } else { - TelemetryService.disable(); - } + // we ignore the parameter CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED + // OOB telemetry is always disabled + TelemetryService.disableOOBTelemetry(); } else if (CLIENT_VALIDATE_DEFAULT_PARAMETERS.equalsIgnoreCase(entry.getKey())) { if (session != null) { session.setValidateDefaultParameters(SFLoginInput.getBooleanValue(entry.getValue())); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 56da5b258..6d02f333d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -18,6 +18,7 @@ import net.snowflake.client.config.SFConnectionConfigParser; import net.snowflake.client.core.SecurityUtil; import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import 
net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ResourceBundleManager; @@ -64,6 +65,9 @@ public class SnowflakeDriver implements Driver { initializeClientVersionFromManifest(); SecurityUtil.addBouncyCastleProvider(); + + // Telemetry OOB is disabled + TelemetryService.disableOOBTelemetry(); } /** try to initialize Arrow support if fails, JDBC is going to use the legacy format */ diff --git a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java index 7ddb3c7ce..ed360789e 100644 --- a/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java +++ b/src/main/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryService.java @@ -11,6 +11,7 @@ import java.util.concurrent.atomic.AtomicInteger; import net.minidev.json.JSONArray; import net.minidev.json.JSONObject; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.jdbc.SnowflakeConnectString; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -135,6 +136,12 @@ public static void disableHTAP() { } } + @SnowflakeJdbcInternalApi + public static void disableOOBTelemetry() { + disable(); + disableHTAP(); + } + public boolean isEnabled() { synchronized (enableLock) { return enabled; diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index da9847c9b..05df191d5 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -54,6 +54,7 @@ import net.snowflake.client.jdbc.cloud.storage.StageInfo; import net.snowflake.client.jdbc.cloud.storage.StorageClientFactory; import net.snowflake.client.jdbc.cloud.storage.StorageObjectMetadata; +import 
net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.common.core.SqlState; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; @@ -1764,4 +1765,13 @@ public void testS3PutInGS() throws Throwable { } } } + + /** Added in > 3.17.0 */ + @Test + public void shouldLoadDriverWithDisabledTelemetryOob() throws ClassNotFoundException { + Class.forName("net.snowflake.client.jdbc.SnowflakeDriver"); + + assertFalse(TelemetryService.getInstance().isEnabled()); + assertFalse(TelemetryService.getInstance().isHTAPEnabled()); + } } From 72223d02ae444c47f1f20ac0837808b569681e4c Mon Sep 17 00:00:00 2001 From: Dawid Heyman Date: Mon, 15 Jul 2024 12:06:10 +0200 Subject: [PATCH 49/54] SNOW-1530843: Parametrized max lob size tests (#1825) --- .../client/jdbc/LobSizeLatestIT.java | 36 +++++++++++-------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java index 33ab5e772..56f02c6d5 100644 --- a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java @@ -21,6 +21,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; import net.snowflake.client.category.TestCategoryStatement; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.UUIDUtils; @@ -36,29 +38,31 @@ @Category(TestCategoryStatement.class) public class LobSizeLatestIT extends BaseJDBCTest { + private static final Logger logger = Logger.getLogger(SnowflakeDriverIT.class.getName()); + private static final Map LobSizeStringValues = new HashMap<>(); + // Max LOB size is testable from version 3.15.0 and above. 
- private static int maxLobSize = 16 * 1024 * 1024; + private static int maxLobSize = 16 * 1024 * 1024; // default value private static int largeLobSize = maxLobSize / 2; private static int mediumLobSize = largeLobSize / 2; - private static int originLobSize = mediumLobSize / 2; private static int smallLobSize = 16; - - private static Map LobSizeStringValues = - new HashMap() { - { - put(smallLobSize, generateRandomString(smallLobSize)); - put(originLobSize, generateRandomString(originLobSize)); - put(mediumLobSize, generateRandomString(mediumLobSize)); - put(largeLobSize, generateRandomString(largeLobSize)); - put(maxLobSize, generateRandomString(maxLobSize)); - } - }; + private static int originLobSize = 16 * 1024 * 1024; @BeforeClass - public static void setUp() { + public static void setUp() throws SQLException { System.setProperty( // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); + try (Connection con = BaseJDBCTest.getConnection()) { + // get max LOB size from session + maxLobSize = con.getMetaData().getMaxCharLiteralLength(); + logger.log(Level.INFO, "Using max lob size: " + maxLobSize); + LobSizeStringValues.put(smallLobSize, generateRandomString(smallLobSize)); + LobSizeStringValues.put(originLobSize, generateRandomString(originLobSize)); + LobSizeStringValues.put(mediumLobSize, generateRandomString(mediumLobSize)); + LobSizeStringValues.put(largeLobSize, generateRandomString(largeLobSize)); + LobSizeStringValues.put(maxLobSize, generateRandomString(maxLobSize)); + } } @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") @@ -198,7 +202,9 @@ public void testPutAndGet() throws IOException, SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { setResultFormat(stmt, resultFormat); - + if (lobSize > originLobSize) { // for increased LOB size (16MB < lobSize < 128MB) 
+ stmt.execute("alter session set ALLOW_LARGE_LOBS_IN_EXTERNAL_SCAN = true"); + } // Test PUT String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; From c3b9f8b7c8be17a8cb0d7cb7cf1e8085df16b715 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 15 Jul 2024 14:14:02 +0200 Subject: [PATCH 50/54] SNOW-1526507: Add INFO log for connecting to host in China (#1826) --- src/main/java/net/snowflake/client/core/SFSession.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/net/snowflake/client/core/SFSession.java b/src/main/java/net/snowflake/client/core/SFSession.java index 2fecc4ef3..bb0b2b2a8 100644 --- a/src/main/java/net/snowflake/client/core/SFSession.java +++ b/src/main/java/net/snowflake/client/core/SFSession.java @@ -619,6 +619,10 @@ public synchronized void open() throws SFException, SnowflakeSQLException { connectionPropertiesMap.get(SFSessionProperty.DISABLE_SAML_URL_CHECK)) : false); + logger.info( + "Connecting to {} Snowflake domain", + loginInput.getHostFromServerUrl().toLowerCase().endsWith(".cn") ? 
"CHINA" : "GLOBAL"); + // we ignore the parameters CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED and htapOOBTelemetryEnabled // OOB telemetry is disabled TelemetryService.disableOOBTelemetry(); From 579cb8fc62213b568bc1e30549c3ded36dc1ca20 Mon Sep 17 00:00:00 2001 From: Waleed Fateem <72769898+sfc-gh-wfateem@users.noreply.github.com> Date: Mon, 15 Jul 2024 07:41:54 -0500 Subject: [PATCH 51/54] SNOW-1094021: Add Properties setters in SnowflakeBasicDataSource (#1800) SNOW-1094021: Add Properties setters in SnowflakeBasicaDataSource --- .../client/jdbc/SnowflakeBasicDataSource.java | 170 ++++++++++++++++-- .../client/jdbc/ConnectionLatestIT.java | 87 +++++++++ 2 files changed, 242 insertions(+), 15 deletions(-) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java index 074e3f878..354f84c72 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBasicDataSource.java @@ -13,6 +13,7 @@ import java.util.Properties; import java.util.logging.Logger; import javax.sql.DataSource; +import net.snowflake.client.core.SFSessionProperty; import net.snowflake.client.log.ArgSupplier; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -22,6 +23,7 @@ public class SnowflakeBasicDataSource implements DataSource, Serializable { private static final long serialversionUID = 1L; private static final String AUTHENTICATOR_SNOWFLAKE_JWT = "SNOWFLAKE_JWT"; private static final String AUTHENTICATOR_OAUTH = "OAUTH"; + private static final String AUTHENTICATOR_USERNAME_PASSWORD_MFA = "USERNAME_PASSWORD_MFA"; private String url; private String serverName; @@ -88,12 +90,12 @@ public Connection getConnection() throws SQLException { public Connection getConnection(String username, String password) throws SQLException { if (!AUTHENTICATOR_OAUTH.equalsIgnoreCase( authenticator)) { // For 
OAuth, no username is required - properties.put("user", username); + properties.put(SFSessionProperty.USER.getPropertyKey(), username); } // The driver needs password for OAUTH as part of SNOW-533673 feature request. if (!AUTHENTICATOR_SNOWFLAKE_JWT.equalsIgnoreCase(authenticator)) { - properties.put("password", password); + properties.put(SFSessionProperty.PASSWORD.getPropertyKey(), password); } try { @@ -119,7 +121,8 @@ public void setLogWriter(PrintWriter out) throws SQLException { @Override public int getLoginTimeout() throws SQLException { try { - return Integer.parseInt(properties.getProperty("loginTimeout")); + return Integer.parseInt( + properties.getProperty(SFSessionProperty.LOGIN_TIMEOUT.getPropertyKey())); } catch (NumberFormatException e) { return 0; } @@ -127,7 +130,7 @@ public int getLoginTimeout() throws SQLException { @Override public void setLoginTimeout(int seconds) throws SQLException { - properties.put("loginTimeout", Integer.toString(seconds)); + properties.put(SFSessionProperty.LOGIN_TIMEOUT.getPropertyKey(), Integer.toString(seconds)); } @Override @@ -150,19 +153,19 @@ public void setUrl(String url) { } public void setDatabaseName(String databaseName) { - properties.put("db", databaseName); + properties.put(SFSessionProperty.DATABASE.getPropertyKey(), databaseName); } public void setSchema(String schema) { - properties.put("schema", schema); + properties.put(SFSessionProperty.SCHEMA.getPropertyKey(), schema); } public void setWarehouse(String warehouse) { - properties.put("warehouse", warehouse); + properties.put(SFSessionProperty.WAREHOUSE.getPropertyKey(), warehouse); } public void setRole(String role) { - properties.put("role", role); + properties.put(SFSessionProperty.ROLE.getPropertyKey(), role); } public void setUser(String user) { @@ -182,7 +185,7 @@ public void setPortNumber(int portNumber) { } public void setAccount(String account) { - this.properties.put("account", account); + 
this.properties.put(SFSessionProperty.ACCOUNT.getPropertyKey(), account); } public void setSsl(boolean ssl) { @@ -191,12 +194,12 @@ public void setSsl(boolean ssl) { public void setAuthenticator(String authenticator) { this.authenticator = authenticator; - this.properties.put("authenticator", authenticator); + this.properties.put(SFSessionProperty.AUTHENTICATOR.getPropertyKey(), authenticator); } public void setOauthToken(String oauthToken) { this.setAuthenticator(AUTHENTICATOR_OAUTH); - this.properties.put("token", oauthToken); + this.properties.put(SFSessionProperty.TOKEN.getPropertyKey(), oauthToken); } public String getUrl() { @@ -217,18 +220,155 @@ public String getUrl() { public void setPrivateKey(PrivateKey privateKey) { this.setAuthenticator(AUTHENTICATOR_SNOWFLAKE_JWT); - this.properties.put("privateKey", privateKey); + this.properties.put(SFSessionProperty.PRIVATE_KEY.getPropertyKey(), privateKey); } public void setPrivateKeyFile(String location, String password) { this.setAuthenticator(AUTHENTICATOR_SNOWFLAKE_JWT); - this.properties.put("private_key_file", location); + this.properties.put(SFSessionProperty.PRIVATE_KEY_FILE.getPropertyKey(), location); if (!Strings.isNullOrEmpty(password)) { - this.properties.put("private_key_file_pwd", password); + this.properties.put(SFSessionProperty.PRIVATE_KEY_FILE_PWD.getPropertyKey(), password); } } public void setTracing(String tracing) { - this.properties.put("tracing", tracing); + this.properties.put(SFSessionProperty.TRACING.getPropertyKey(), tracing); + } + + protected Properties getProperties() { + return this.properties; + } + + public void setAllowUnderscoresInHost(boolean allowUnderscoresInHost) { + this.properties.put( + SFSessionProperty.ALLOW_UNDERSCORES_IN_HOST.getPropertyKey(), + String.valueOf(allowUnderscoresInHost)); + } + + public void setDisableGcsDefaultCredentials(boolean isGcsDefaultCredentialsDisabled) { + this.properties.put( + 
SFSessionProperty.DISABLE_GCS_DEFAULT_CREDENTIALS.getPropertyKey(), + String.valueOf(isGcsDefaultCredentialsDisabled)); + } + + public void setDisableSamlURLCheck(boolean disableSamlURLCheck) { + this.properties.put( + SFSessionProperty.DISABLE_SAML_URL_CHECK.getPropertyKey(), + String.valueOf(disableSamlURLCheck)); + } + + public void setPasscode(String passcode) { + this.setAuthenticator(AUTHENTICATOR_USERNAME_PASSWORD_MFA); + this.properties.put(SFSessionProperty.PASSCODE.getPropertyKey(), passcode); + } + + public void setPasscodeInPassword(boolean isPasscodeInPassword) { + this.properties.put( + SFSessionProperty.PASSCODE_IN_PASSWORD.getPropertyKey(), + String.valueOf(isPasscodeInPassword)); + if (isPasscodeInPassword) { + this.setAuthenticator(AUTHENTICATOR_USERNAME_PASSWORD_MFA); + } + } + + public void setDisableSocksProxy(boolean ignoreJvmSocksProxy) { + this.properties.put( + SFSessionProperty.DISABLE_SOCKS_PROXY.getPropertyKey(), + String.valueOf(ignoreJvmSocksProxy)); + } + + public void setNonProxyHosts(String nonProxyHosts) { + this.properties.put(SFSessionProperty.NON_PROXY_HOSTS.getPropertyKey(), nonProxyHosts); + } + + public void setProxyHost(String proxyHost) { + this.properties.put(SFSessionProperty.PROXY_HOST.getPropertyKey(), proxyHost); + } + + public void setProxyPassword(String proxyPassword) { + this.properties.put(SFSessionProperty.PROXY_PASSWORD.getPropertyKey(), proxyPassword); + } + + public void setProxyPort(int proxyPort) { + this.properties.put(SFSessionProperty.PROXY_PORT.getPropertyKey(), Integer.toString(proxyPort)); + } + + public void setProxyProtocol(String proxyProtocol) { + this.properties.put(SFSessionProperty.PROXY_PROTOCOL.getPropertyKey(), proxyProtocol); + } + + public void setProxyUser(String proxyUser) { + this.properties.put(SFSessionProperty.PROXY_USER.getPropertyKey(), proxyUser); + } + + public void setUseProxy(boolean useProxy) { + this.properties.put(SFSessionProperty.USE_PROXY.getPropertyKey(), 
String.valueOf(useProxy)); + } + + public void setNetworkTimeout(int networkTimeoutSeconds) { + this.properties.put( + SFSessionProperty.NETWORK_TIMEOUT.getPropertyKey(), + Integer.toString(networkTimeoutSeconds)); + } + + public void setQueryTimeout(int queryTimeoutSeconds) { + this.properties.put( + SFSessionProperty.QUERY_TIMEOUT.getPropertyKey(), Integer.toString(queryTimeoutSeconds)); + } + + public void setApplication(String application) { + this.properties.put(SFSessionProperty.APPLICATION.getPropertyKey(), application); + } + + public void setClientConfigFile(String clientConfigFile) { + this.properties.put(SFSessionProperty.CLIENT_CONFIG_FILE.getPropertyKey(), clientConfigFile); + } + + public void setEnablePatternSearch(boolean enablePatternSearch) { + this.properties.put( + SFSessionProperty.ENABLE_PATTERN_SEARCH.getPropertyKey(), + String.valueOf(enablePatternSearch)); + } + + public void setEnablePutGet(boolean enablePutGet) { + this.properties.put( + SFSessionProperty.ENABLE_PUT_GET.getPropertyKey(), String.valueOf(enablePutGet)); + } + + public void setArrowTreatDecimalAsInt(boolean treatDecimalAsInt) { + this.properties.put( + SFSessionProperty.JDBC_ARROW_TREAT_DECIMAL_AS_INT.getPropertyKey(), + String.valueOf(treatDecimalAsInt)); + } + + public void setMaxHttpRetries(int maxHttpRetries) { + this.properties.put( + SFSessionProperty.MAX_HTTP_RETRIES.getPropertyKey(), Integer.toString(maxHttpRetries)); + } + + public void setOcspFailOpen(boolean ocspFailOpen) { + this.properties.put( + SFSessionProperty.OCSP_FAIL_OPEN.getPropertyKey(), String.valueOf(ocspFailOpen)); + } + + public void setPutGetMaxRetries(int putGetMaxRetries) { + this.properties.put( + SFSessionProperty.PUT_GET_MAX_RETRIES.getPropertyKey(), Integer.toString(putGetMaxRetries)); + } + + public void setStringsQuotedForColumnDef(boolean stringsQuotedForColumnDef) { + this.properties.put( + SFSessionProperty.STRINGS_QUOTED.getPropertyKey(), + String.valueOf(stringsQuotedForColumnDef)); + 
} + + public void setEnableDiagnostics(boolean enableDiagnostics) { + this.properties.put( + SFSessionProperty.ENABLE_DIAGNOSTICS.getPropertyKey(), String.valueOf(enableDiagnostics)); + } + + public void setDiagnosticsAllowlistFile(String diagnosticsAllowlistFile) { + this.properties.put( + SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE.getPropertyKey(), diagnosticsAllowlistFile); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index f4a19bd43..0e7083e7e 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -57,6 +57,7 @@ import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFSession; +import net.snowflake.client.core.SFSessionProperty; import net.snowflake.client.core.SecurityUtil; import net.snowflake.client.core.SessionUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; @@ -1379,6 +1380,92 @@ public void testDataSourceOktaGenerates429StatusCode() throws Exception { } } + /** Test added in JDBC driver version > 3.16.1 */ + @Test + public void testDataSourceSetters() { + Map params = getConnectionParameters(); + SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); + + ds.setTracing("all"); + ds.setApplication("application_name"); + ds.setAccount(params.get("account")); + ds.setAuthenticator("snowflake"); + ds.setArrowTreatDecimalAsInt(true); + ds.setAllowUnderscoresInHost(true); + ds.setClientConfigFile("/some/path/file.json"); + ds.setDisableGcsDefaultCredentials(false); + ds.setDisableSamlURLCheck(false); + ds.setDisableSocksProxy(false); + ds.setEnablePatternSearch(true); + ds.setDatabaseName("DB_NAME"); + ds.setEnablePutGet(false); + ds.setMaxHttpRetries(5); + ds.setNetworkTimeout(10); + ds.setOcspFailOpen(false); + ds.setProxyHost("proxyHost.com"); + 
ds.setProxyPort(8080); + ds.setProxyProtocol("http"); + ds.setProxyUser("proxyUser"); + ds.setProxyPassword("proxyPassword"); + ds.setPutGetMaxRetries(3); + ds.setStringsQuotedForColumnDef(true); + ds.setEnableDiagnostics(true); + ds.setDiagnosticsAllowlistFile("/some/path/allowlist.json"); + + Properties props = ds.getProperties(); + assertEquals(params.get("account"), props.get("account")); + assertEquals("snowflake", props.get("authenticator")); + assertEquals("all", props.get("tracing")); + assertEquals("application_name", props.get(SFSessionProperty.APPLICATION.getPropertyKey())); + assertEquals("snowflake", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + assertEquals( + "true", props.get(SFSessionProperty.JDBC_ARROW_TREAT_DECIMAL_AS_INT.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.ALLOW_UNDERSCORES_IN_HOST.getPropertyKey())); + assertEquals( + "/some/path/file.json", props.get(SFSessionProperty.CLIENT_CONFIG_FILE.getPropertyKey())); + assertEquals( + "false", props.get(SFSessionProperty.DISABLE_GCS_DEFAULT_CREDENTIALS.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.DISABLE_SAML_URL_CHECK.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.DISABLE_SOCKS_PROXY.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.ENABLE_PATTERN_SEARCH.getPropertyKey())); + assertEquals("DB_NAME", props.get(SFSessionProperty.DATABASE.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.ENABLE_PUT_GET.getPropertyKey())); + assertEquals("5", props.get(SFSessionProperty.MAX_HTTP_RETRIES.getPropertyKey())); + assertEquals("10", props.get(SFSessionProperty.NETWORK_TIMEOUT.getPropertyKey())); + assertEquals("false", props.get(SFSessionProperty.OCSP_FAIL_OPEN.getPropertyKey())); + assertEquals("proxyHost.com", props.get(SFSessionProperty.PROXY_HOST.getPropertyKey())); + assertEquals("8080", props.get(SFSessionProperty.PROXY_PORT.getPropertyKey())); + 
assertEquals("http", props.get(SFSessionProperty.PROXY_PROTOCOL.getPropertyKey())); + assertEquals("proxyUser", props.get(SFSessionProperty.PROXY_USER.getPropertyKey())); + assertEquals("proxyPassword", props.get(SFSessionProperty.PROXY_PASSWORD.getPropertyKey())); + assertEquals("3", props.get(SFSessionProperty.PUT_GET_MAX_RETRIES.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.STRINGS_QUOTED.getPropertyKey())); + assertEquals("true", props.get(SFSessionProperty.ENABLE_DIAGNOSTICS.getPropertyKey())); + assertEquals( + "/some/path/allowlist.json", + props.get(SFSessionProperty.DIAGNOSTICS_ALLOWLIST_FILE.getPropertyKey())); + + ds.setOauthToken("a_token"); + assertEquals("OAUTH", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + assertEquals("a_token", props.get(SFSessionProperty.TOKEN.getPropertyKey())); + + ds.setPasscodeInPassword(true); + assertEquals("true", props.get(SFSessionProperty.PASSCODE_IN_PASSWORD.getPropertyKey())); + assertEquals( + "USERNAME_PASSWORD_MFA", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + + ds.setPrivateKeyFile("key.p8", "pwd"); + assertEquals("key.p8", props.get(SFSessionProperty.PRIVATE_KEY_FILE.getPropertyKey())); + assertEquals("pwd", props.get(SFSessionProperty.PRIVATE_KEY_FILE_PWD.getPropertyKey())); + assertEquals("SNOWFLAKE_JWT", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + + ds.setPasscodeInPassword(false); + ds.setPasscode("a_passcode"); + assertEquals("false", props.get(SFSessionProperty.PASSCODE_IN_PASSWORD.getPropertyKey())); + assertEquals( + "USERNAME_PASSWORD_MFA", props.get(SFSessionProperty.AUTHENTICATOR.getPropertyKey())); + assertEquals("a_passcode", props.get(SFSessionProperty.PASSCODE.getPropertyKey())); + } /** * SNOW-1465374: For TIMESTAMP_LTZ we were returning timestamps without timezone when scale was * set e.g. 
to 6 in Arrow format The problem wasn't visible when calling getString, but was From b9dcc47769c58ee198cc508572ebd3620d63b8d1 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 16 Jul 2024 10:42:15 +0200 Subject: [PATCH 52/54] SNOW-1526507: Pick top level domain for Snowflake hosts (#1820) --- .../client/core/PrivateLinkDetector.java | 13 +++ .../snowflake/client/core/SFTrustManager.java | 23 +++-- .../snowflake/client/core/SessionUtil.java | 2 +- .../jdbc/cloud/storage/SnowflakeS3Client.java | 2 +- .../jdbc/diagnostic/SnowflakeEndpoint.java | 11 +-- .../client/core/OCSPCacheServerTest.java | 97 +++++++++++++++++++ .../client/core/PrivateLinkDetectorTest.java | 42 ++++++++ .../client/core/SFTrustManagerTest.java | 38 ++++++++ .../client/core/SessionUtilTest.java | 39 ++++++++ .../cloud/storage/SnowflakeS3ClientTest.java | 2 + .../diagnostic/SnowflakeEndpointTest.java | 28 ++++++ 11 files changed, 278 insertions(+), 19 deletions(-) create mode 100644 src/main/java/net/snowflake/client/core/PrivateLinkDetector.java create mode 100644 src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java create mode 100644 src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java create mode 100644 src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java diff --git a/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java new file mode 100644 index 000000000..8d4a01742 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/PrivateLinkDetector.java @@ -0,0 +1,13 @@ +package net.snowflake.client.core; + +@SnowflakeJdbcInternalApi +public class PrivateLinkDetector { + /** + * We can only tell if private link is enabled for certain hosts when the hostname contains the + * word 'privatelink' but we don't have a good way of telling if a private link connection is + * expected for internal stages for example. 
+ */ + public static boolean isPrivateLink(String host) { + return host.toLowerCase().contains(".privatelink.snowflakecomputing."); + } +} diff --git a/src/main/java/net/snowflake/client/core/SFTrustManager.java b/src/main/java/net/snowflake/client/core/SFTrustManager.java index 171f69e1b..740c70fe3 100644 --- a/src/main/java/net/snowflake/client/core/SFTrustManager.java +++ b/src/main/java/net/snowflake/client/core/SFTrustManager.java @@ -167,8 +167,10 @@ public class SFTrustManager extends X509ExtendedTrustManager { private static final int DEFAULT_OCSP_CACHE_SERVER_CONNECTION_TIMEOUT = 5000; /** Default OCSP responder connection timeout */ private static final int DEFAULT_OCSP_RESPONDER_CONNECTION_TIMEOUT = 10000; + /** Default OCSP Cache server host name prefix */ + private static final String DEFAULT_OCSP_CACHE_HOST_PREFIX = "http://ocsp.snowflakecomputing."; /** Default OCSP Cache server host name */ - private static final String DEFAULT_OCSP_CACHE_HOST = "http://ocsp.snowflakecomputing.com"; + private static final String DEFAULT_OCSP_CACHE_HOST = DEFAULT_OCSP_CACHE_HOST_PREFIX + "com"; /** OCSP response file cache directory */ private static final FileCacheManager fileCacheManager; @@ -200,7 +202,7 @@ public class SFTrustManager extends X509ExtendedTrustManager { /** OCSP Response Cache server Retry URL pattern */ static String SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN; /** OCSP response cache server URL. 
*/ - private static String SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE; + static String SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE; private static JcaX509CertificateConverter CONVERTER_X509 = new JcaX509CertificateConverter(); /** RootCA cache */ @@ -315,7 +317,7 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I return; } SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheServerUrl; - if (!SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE.startsWith(DEFAULT_OCSP_CACHE_HOST)) { + if (!SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE.startsWith(DEFAULT_OCSP_CACHE_HOST_PREFIX)) { URL url = new URL(SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); if (url.getPort() > 0) { SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = @@ -331,7 +333,7 @@ static void resetOCSPResponseCacherServerURL(String ocspCacheServerUrl) throws I } } - private static void setOCSPResponseCacheServerURL() { + private static void setOCSPResponseCacheServerURL(String topLevelDomain) { String ocspCacheUrl = systemGetProperty(SF_OCSP_RESPONSE_CACHE_SERVER_URL); if (ocspCacheUrl != null) { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = ocspCacheUrl; @@ -348,7 +350,7 @@ private static void setOCSPResponseCacheServerURL() { } if (SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE == null) { SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = - String.format("%s/%s", DEFAULT_OCSP_CACHE_HOST, CACHE_FILE_NAME); + String.format("%s%s/%s", DEFAULT_OCSP_CACHE_HOST_PREFIX, topLevelDomain, CACHE_FILE_NAME); } logger.debug("Set OCSP response cache server to: {}", SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); } @@ -791,7 +793,8 @@ void validateRevocationStatus(X509Certificate[] chain, String peerHost) ocspCacheServer.resetOCSPResponseCacheServer(peerHost); } - setOCSPResponseCacheServerURL(); + String topLevelDomain = peerHost.substring(peerHost.lastIndexOf(".") + 1); + setOCSPResponseCacheServerURL(topLevelDomain); boolean isCached = isCached(pairIssuerSubjectList); if (useOCSPResponseCacheServer() && !isCached) { if 
(!ocspCacheServer.new_endpoint_enabled) { @@ -1546,14 +1549,16 @@ static class OCSPCacheServer { void resetOCSPResponseCacheServer(String host) { String ocspCacheServerUrl; - if (host.indexOf(".global.snowflakecomputing.com") > 0) { + if (host.toLowerCase().contains(".global.snowflakecomputing.")) { ocspCacheServerUrl = String.format("https://ocspssd%s/%s", host.substring(host.indexOf('-')), "ocsp"); - } else if (host.indexOf(".snowflakecomputing.com") > 0) { + } else if (host.toLowerCase().contains(".snowflakecomputing.")) { ocspCacheServerUrl = String.format("https://ocspssd%s/%s", host.substring(host.indexOf('.')), "ocsp"); } else { - ocspCacheServerUrl = "https://ocspssd.snowflakecomputing.com/ocsp"; + String topLevelDomain = host.substring(host.lastIndexOf(".") + 1); + ocspCacheServerUrl = + String.format("https://ocspssd.snowflakecomputing.%s/ocsp", topLevelDomain); } SF_OCSP_RESPONSE_CACHE_SERVER = String.format("%s/%s", ocspCacheServerUrl, "fetch"); SF_OCSP_RESPONSE_RETRY_URL = String.format("%s/%s", ocspCacheServerUrl, "retry"); diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index ad55f79e8..63cdbe14c 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -1702,7 +1702,7 @@ enum TokenRequestType { * @param serverUrl The Snowflake URL includes protocol such as "https://" */ public static void resetOCSPUrlIfNecessary(String serverUrl) throws IOException { - if (serverUrl.indexOf(".privatelink.snowflakecomputing.com") > 0) { + if (PrivateLinkDetector.isPrivateLink(serverUrl)) { // Privatelink uses special OCSP Cache server URL url = new URL(serverUrl); String host = url.getHost(); diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java index 15110bfc8..5b405a15f 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3Client.java @@ -237,7 +237,7 @@ private void setupSnowflakeS3Client( } static String getDomainSuffixForRegionalUrl(String regionName) { - return regionName.startsWith("cn-") ? "amazonaws.com.cn" : "amazonaws.com"; + return regionName.toLowerCase().startsWith("cn-") ? "amazonaws.com.cn" : "amazonaws.com"; } // Returns the Max number of retry attempts diff --git a/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java b/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java index 6cecb71d9..2a181c08a 100644 --- a/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java +++ b/src/main/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpoint.java @@ -1,5 +1,7 @@ package net.snowflake.client.jdbc.diagnostic; +import net.snowflake.client.core.PrivateLinkDetector; + /* The SnowflakeEndpoint class represents an endpoint as returned by the System$allowlist() SQL function. Example: @@ -20,10 +22,6 @@ public SnowflakeEndpoint(String type, String host, int port) { this.isSecure = (this.port == 443); } - public SnowflakeEndpoint() { - this(null, null, -1); - } - public String getType() { return this.type; } @@ -40,11 +38,8 @@ public int getPort() { return this.port; } - // We can only tell if private link is enabled for certain hosts when the hostname contains - // the word 'privatelink' but we don't have a good way of telling if a private link connection - // is expected for internal stages for example. 
public boolean isPrivateLink() { - return (host.contains("privatelink.snowflakecomputing.com")); + return PrivateLinkDetector.isPrivateLink(host); } @Override diff --git a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java new file mode 100644 index 000000000..9a5af03b2 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java @@ -0,0 +1,97 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class OCSPCacheServerTest { + + @Parameterized.Parameters( + name = "For host {0} cache server fetch url should be {1} and retry url {2}") + public static Object[][] data() { + return new Object[][] { + { + "bla-12345.global.snowflakecomputing.com", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry" + }, + { + "bla-12345.global.snowflakecomputing.cn", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry" + }, + { + "bla-12345.global.snowflakecomputing.xyz", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry" + }, + { + "bla-12345.GLOBAL.snowflakecomputing.xyz", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry" + }, + { + "bla-12345.snowflakecomputing.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry" + }, + { + "bla-12345.snowflakecomputing.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry" + }, + { + "bla-12345.snowflakecomputing.xyz", + 
"https://ocspssd.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd.snowflakecomputing.xyz/ocsp/retry" + }, + { + "bla-12345.SNOWFLAKEcomputing.xyz", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry" + }, + { + "s3.amazoncomaws.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry" + }, + { + "s3.amazoncomaws.COM", + "https://ocspssd.snowflakecomputing.COM/ocsp/fetch", + "https://ocspssd.snowflakecomputing.COM/ocsp/retry" + }, + { + "s3.amazoncomaws.com.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry" + }, + { + "S3.AMAZONCOMAWS.COM.CN", + "https://ocspssd.snowflakecomputing.CN/ocsp/fetch", + "https://ocspssd.snowflakecomputing.CN/ocsp/retry" + }, + }; + } + + private final String host; + private final String expectedFetchUrl; + private final String expectedRetryUrl; + + public OCSPCacheServerTest(String host, String expectedFetchUrl, String expectedRetryUrl) { + this.host = host; + this.expectedFetchUrl = expectedFetchUrl; + this.expectedRetryUrl = expectedRetryUrl; + } + + @Test + public void shouldChooseOcspCacheServerUrls() { + SFTrustManager.OCSPCacheServer ocspCacheServer = new SFTrustManager.OCSPCacheServer(); + ocspCacheServer.resetOCSPResponseCacheServer(host); + + assertEquals(expectedFetchUrl, ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER); + assertEquals(expectedRetryUrl, ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL); + } +} diff --git a/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java new file mode 100644 index 000000000..b3af68011 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java @@ -0,0 +1,42 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; 
+import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class PrivateLinkDetectorTest { + + @Parameterized.Parameters(name = "Host {0} is private link: {1}") + public static Object[][] data() { + return new Object[][] { + {"snowhouse.snowflakecomputing.com", false}, + {"snowhouse.privatelink.snowflakecomputing.com", true}, + {"snowhouse.PRIVATELINK.snowflakecomputing.com", true}, + {"snowhouse.snowflakecomputing.cn", false}, + {"snowhouse.privatelink.snowflakecomputing.cn", true}, + {"snowhouse.PRIVATELINK.snowflakecomputing.cn", true}, + {"snowhouse.snowflakecomputing.xyz", false}, + {"snowhouse.privatelink.snowflakecomputing.xyz", true}, + {"snowhouse.PRIVATELINK.snowflakecomputing.xyz", true}, + }; + } + + private final String host; + private final boolean expectedToBePrivateLink; + + public PrivateLinkDetectorTest(String host, boolean expectedToBePrivateLink) { + this.host = host; + this.expectedToBePrivateLink = expectedToBePrivateLink; + } + + @Test + public void shouldDetectPrivateLinkHost() { + assertEquals( + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), + expectedToBePrivateLink, + PrivateLinkDetector.isPrivateLink(host)); + } +} diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java index a4326d5bd..6a55b2cd4 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java @@ -46,6 +46,18 @@ public void testBuildRetryURL() throws Exception { SFTrustManager.resetOCSPResponseCacherServerURL( "http://ocsp.snowflakecomputing.com:80/" + SFTrustManager.CACHE_FILE_NAME); assertThat(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN, nullValue()); + + // default OCSP Cache server URL in specific domain without port + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = null; + 
SFTrustManager.resetOCSPResponseCacherServerURL( + "http://ocsp.snowflakecomputing.cn/" + SFTrustManager.CACHE_FILE_NAME); + assertThat(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN, nullValue()); + + // default OCSP Cache server URL in specific domain with port + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = null; + SFTrustManager.resetOCSPResponseCacherServerURL( + "http://ocsp.snowflakecomputing.cn:80/" + SFTrustManager.CACHE_FILE_NAME); + assertThat(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN, nullValue()); } @Test @@ -65,6 +77,14 @@ public void testBuildNewRetryURL() { tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, equalTo("https://ocspssd.snowflakecomputing.com/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer("a1.snowflakecomputing.cn"); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, + equalTo("https://ocspssd.snowflakecomputing.cn/ocsp/fetch")); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, + equalTo("https://ocspssd.snowflakecomputing.cn/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer( "a1-12345.global.snowflakecomputing.com"); assertThat( @@ -74,6 +94,15 @@ public void testBuildNewRetryURL() { tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, equalTo("https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer( + "a1-12345.global.snowflakecomputing.cn"); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, + equalTo("https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch")); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, + equalTo("https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry")); + tManager.ocspCacheServer.resetOCSPResponseCacheServer("okta.snowflake.com"); assertThat( tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, @@ -90,6 +119,15 @@ public void 
testBuildNewRetryURL() { assertThat( tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, equalTo("https://ocspssd.us-east-1.privatelink.snowflakecomputing.com/ocsp/retry")); + + tManager.ocspCacheServer.resetOCSPResponseCacheServer( + "a1.us-east-1.privatelink.snowflakecomputing.cn"); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER, + equalTo("https://ocspssd.us-east-1.privatelink.snowflakecomputing.cn/ocsp/fetch")); + assertThat( + tManager.ocspCacheServer.SF_OCSP_RESPONSE_RETRY_URL, + equalTo("https://ocspssd.us-east-1.privatelink.snowflakecomputing.cn/ocsp/retry")); } finally { System.clearProperty("net.snowflake.jdbc.ocsp_activate_new_endpoint"); } diff --git a/src/test/java/net/snowflake/client/core/SessionUtilTest.java b/src/test/java/net/snowflake/client/core/SessionUtilTest.java index 0b5a542c1..cab5fb68f 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilTest.java @@ -10,6 +10,7 @@ import static org.junit.Assert.assertTrue; import com.fasterxml.jackson.databind.node.BooleanNode; +import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; @@ -129,4 +130,42 @@ public void testIsLoginRequestInvalidURIPath() { } } } + + @Test + public void shouldDerivePrivateLinkOcspCacheServerUrlBasedOnHost() throws IOException { + resetOcspConfiguration(); + + SessionUtil.resetOCSPUrlIfNecessary("https://test.privatelink.snowflakecomputing.com"); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.com/ocsp_response_cache.json", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.com/retry/%s/%s", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); + + resetOcspConfiguration(); + + SessionUtil.resetOCSPUrlIfNecessary("https://test.privatelink.snowflakecomputing.cn"); + assertEquals( + 
"http://ocsp.test.privatelink.snowflakecomputing.cn/ocsp_response_cache.json", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.cn/retry/%s/%s", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); + + resetOcspConfiguration(); + + SessionUtil.resetOCSPUrlIfNecessary("https://test.privatelink.snowflakecomputing.xyz"); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.xyz/ocsp_response_cache.json", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE); + assertEquals( + "http://ocsp.test.privatelink.snowflakecomputing.xyz/retry/%s/%s", + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN); + } + + private void resetOcspConfiguration() { + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = null; + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = null; + } } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java index d9019c8e7..3daddf3df 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java @@ -14,5 +14,7 @@ public void shouldDetermineDomainForRegion() { assertEquals("amazonaws.com", SnowflakeS3Client.getDomainSuffixForRegionalUrl("us-east-1")); assertEquals( "amazonaws.com.cn", SnowflakeS3Client.getDomainSuffixForRegionalUrl("cn-northwest-1")); + assertEquals( + "amazonaws.com.cn", SnowflakeS3Client.getDomainSuffixForRegionalUrl("CN-NORTHWEST-1")); } } diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java new file mode 100644 index 000000000..a926a649e --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java @@ -0,0 +1,28 
@@ +package net.snowflake.client.jdbc.diagnostic; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Test; + +public class SnowflakeEndpointTest { + + @Test + public void shouldDetectPrivateLinkEndpoint() { + Map hostsToPrivateLinks = new HashMap<>(); + hostsToPrivateLinks.put("snowhouse.snowflakecomputing.com", false); + hostsToPrivateLinks.put("snowhouse.privatelink.snowflakecomputing.com", true); + hostsToPrivateLinks.put("snowhouse.snowflakecomputing.cn", false); + hostsToPrivateLinks.put("snowhouse.PRIVATELINK.snowflakecomputing.cn", true); + + hostsToPrivateLinks.forEach( + (host, expectedToBePrivateLink) -> { + SnowflakeEndpoint endpoint = new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", host, 443); + assertEquals( + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), + expectedToBePrivateLink, + endpoint.isPrivateLink()); + }); + } +} From 1c0f40aa36195c0068bf247dc7b7f56456d6b00c Mon Sep 17 00:00:00 2001 From: Dawid Heyman Date: Thu, 18 Jul 2024 09:18:56 +0200 Subject: [PATCH 53/54] SNOW-1479614: Fix conversion of OBJECT column nested fields metadata (#1831) --- .../snowflake/client/jdbc/SnowflakeUtil.java | 8 +++- .../DatabaseMetaDataResultSetLatestIT.java | 26 +++++++++++++ .../client/jdbc/SnowflakeUtilTest.java | 38 ++++++++++++++++++- 3 files changed, 70 insertions(+), 2 deletions(-) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 8c848032e..efe8ffbf8 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -395,7 +395,13 @@ static List createFieldsMetadata( throws SnowflakeSQLLoggedException { List fields = new ArrayList<>(); for (JsonNode node : fieldsJson) { - String colName = node.path("name").asText(); + String colName; + if (!node.path("fieldType").isEmpty()) { + colName = 
node.path("fieldName").asText(); + node = node.path("fieldType"); + } else { + colName = node.path("name").asText(); + } int scale = node.path("scale").asInt(); int precision = node.path("precision").asInt(); String internalColTypeName = node.path("type").asText(); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java index 7bf5872c0..0549a087d 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java @@ -4,8 +4,11 @@ package net.snowflake.client.jdbc; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.sql.Connection; +import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; @@ -32,4 +35,27 @@ public void testGetObjectNotSupported() throws SQLException { } } } + + /** Added in > 3.17.0 */ + @Test + public void testObjectColumn() throws SQLException { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "CREATE OR REPLACE TABLE TABLEWITHOBJECTCOLUMN (" + + " col OBJECT(" + + " str VARCHAR," + + " num NUMBER(38,0)" + + " )" + + " )"); + DatabaseMetaData metaData = connection.getMetaData(); + try (ResultSet resultSet = + metaData.getColumns( + connection.getCatalog(), connection.getSchema(), "TABLEWITHOBJECTCOLUMN", null)) { + assertTrue(resultSet.next()); + assertEquals("OBJECT", resultSet.getObject("TYPE_NAME")); + assertFalse(resultSet.next()); + } + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 1110ce4df..6e61d82dc 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ 
b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -5,7 +5,9 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -33,7 +35,7 @@ public void testCreateMetadata() throws Throwable { fields.add(fieldOne); JsonNode fieldTwo = createFieldNode("name2", 5, 128, 2, "real", true, "collation", 256); fields.add(fieldTwo); - rootNode.put("fields", fields); + rootNode.putIfAbsent("fields", fields); SnowflakeColumnMetadata expectedColumnMetadata = createExpectedMetadata(rootNode, fieldOne, fieldTwo); // when @@ -46,6 +48,40 @@ public void testCreateMetadata() throws Throwable { OBJECT_MAPPER.writeValueAsString(columnMetadata)); } + @Test + public void testCreateFieldsMetadataForObject() throws Throwable { + // given + ObjectNode rootNode = createRootNode(); + ArrayNode fields = OBJECT_MAPPER.createArrayNode(); + fields.add( + OBJECT_MAPPER.readTree( + "{\"fieldName\":\"name1\", \"fieldType\": {\"type\":\"text\",\"precision\":null,\"length\":256,\"scale\":null,\"nullable\":false}}")); + fields.add( + OBJECT_MAPPER.readTree( + "{\"fieldName\":\"name2\", \"fieldType\": {\"type\":\"real\",\"precision\":5,\"length\":128,\"scale\":null,\"nullable\":true}}")); + rootNode.putIfAbsent("fields", fields); + + // when + SnowflakeColumnMetadata columnMetadata = + SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + // then + assertNotNull(columnMetadata); + assertEquals("OBJECT", columnMetadata.getTypeName()); + + FieldMetadata firstField = columnMetadata.getFields().get(0); + assertEquals("name1", firstField.getName()); + assertEquals(SnowflakeType.TEXT, firstField.getBase()); + assertEquals(256, firstField.getByteLength()); + assertFalse(firstField.isNullable()); 
+ + FieldMetadata secondField = columnMetadata.getFields().get(1); + assertEquals("name2", secondField.getName()); + assertEquals(SnowflakeType.REAL, secondField.getBase()); + assertEquals(128, secondField.getByteLength()); + assertEquals(5, secondField.getPrecision()); + assertTrue(secondField.isNullable()); + } + private static SnowflakeColumnMetadata createExpectedMetadata( JsonNode rootNode, JsonNode fieldOne, JsonNode fieldTwo) throws SnowflakeSQLLoggedException { ColumnTypeInfo columnTypeInfo = From 74a37b2e621c77307aef7e81fa5501b72455eaad Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 18 Jul 2024 09:38:22 +0200 Subject: [PATCH 54/54] SNOW-1454348: Remove duplicated consts (#1827) --- .../snowflake/client/core/SessionUtil.java | 24 +++++++------ .../net/snowflake/client/core/StmtUtil.java | 35 +++++++------------ 2 files changed, 26 insertions(+), 33 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index 63cdbe14c..6a9db988f 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -72,12 +72,11 @@ public class SessionUtil { public static final String SF_QUERY_SESSION_DELETE = "delete"; // Headers - public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION; + @Deprecated + public static final String SF_HEADER_AUTHORIZATION = SFSession.SF_HEADER_AUTHORIZATION; // Authentication type private static final String SF_HEADER_BASIC_AUTHTYPE = "Basic"; - private static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake"; - private static final String SF_HEADER_TOKEN_TAG = "Token"; private static final String CLIENT_STORE_TEMPORARY_CREDENTIAL = "CLIENT_STORE_TEMPORARY_CREDENTIAL"; private static final String CLIENT_REQUEST_MFA_TOKEN = "CLIENT_REQUEST_MFA_TOKEN"; @@ -644,7 +643,7 @@ private static SFLoginOutput 
newSession( * HttpClient should take authorization header from char[] instead of * String. */ - postRequest.setHeader(SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE); + postRequest.setHeader(SFSession.SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE); setServiceNameHeader(loginInput, postRequest); @@ -1032,8 +1031,13 @@ private static SFLoginOutput tokenRequest(SFLoginInput loginInput, TokenRequestT postRequest.addHeader("accept", "application/json"); postRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + " " + SF_HEADER_TOKEN_TAG + "=\"" + headerToken + "\""); + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + + " " + + SFSession.SF_HEADER_TOKEN_TAG + + "=\"" + + headerToken + + "\""); setServiceNameHeader(loginInput, postRequest); @@ -1126,10 +1130,10 @@ static void closeSession(SFLoginInput loginInput) throws SFException, SnowflakeS postRequest, loginInput.getAdditionalHttpHeadersForSnowsight()); postRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + loginInput.getSessionToken() + "\""); @@ -1331,7 +1335,7 @@ private static void federatedFlowStep2(SFLoginInput loginInput, String tokenUrl, null, ErrorCode.IDP_CONNECTION_ERROR.getMessageCode(), SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION - /* session = */ ); + /* session= */ ); } } catch (MalformedURLException ex) { handleFederatedFlowError(loginInput, ex); diff --git a/src/main/java/net/snowflake/client/core/StmtUtil.java b/src/main/java/net/snowflake/client/core/StmtUtil.java index 3566ea225..96fefe5dc 100644 --- a/src/main/java/net/snowflake/client/core/StmtUtil.java +++ b/src/main/java/net/snowflake/client/core/StmtUtil.java @@ -23,7 +23,6 @@ import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.client.util.SecretDetector; import 
net.snowflake.common.api.QueryInProgressResponse; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; @@ -43,18 +42,8 @@ public class StmtUtil { private static final String SF_PATH_QUERY_RESULT = "/queries/%s/result"; - static final String SF_QUERY_REQUEST_ID = "requestId"; - private static final String SF_QUERY_COMBINE_DESCRIBE_EXECUTE = "combinedDescribe"; - private static final String SF_QUERY_CONTEXT = "queryContext"; - - private static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION; - - private static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake"; - - private static final String SF_HEADER_TOKEN_TAG = "Token"; - static final String SF_MEDIA_TYPE = "application/snowflake"; // we don't want to retry canceling forever so put a limit which is @@ -315,7 +304,7 @@ public static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec URIBuilder uriBuilder = new URIBuilder(stmtInput.serverUrl); uriBuilder.setPath(SF_PATH_QUERY_V1); - uriBuilder.addParameter(SF_QUERY_REQUEST_ID, stmtInput.requestId); + uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, stmtInput.requestId); if (stmtInput.combineDescribe) { uriBuilder.addParameter(SF_QUERY_COMBINE_DESCRIBE_EXECUTE, Boolean.TRUE.toString()); @@ -376,10 +365,10 @@ public static StmtOutput execute(StmtInput stmtInput, ExecTimeTelemetryData exec httpRequest.addHeader("accept", stmtInput.mediaType); httpRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\""); @@ -613,7 +602,7 @@ protected static String getQueryResult(String getResultPath, StmtInput stmtInput uriBuilder.setPath(getResultPath); - uriBuilder.addParameter(SF_QUERY_REQUEST_ID, 
UUIDUtils.getUUID().toString()); + uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); httpRequest = new HttpGet(uriBuilder.build()); // Add custom headers before adding common headers @@ -623,10 +612,10 @@ protected static String getQueryResult(String getResultPath, StmtInput stmtInput httpRequest.addHeader("accept", stmtInput.mediaType); httpRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\""); @@ -717,7 +706,7 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE uriBuilder.setPath(SF_PATH_ABORT_REQUEST_V1); - uriBuilder.addParameter(SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); + uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID, UUIDUtils.getUUID().toString()); httpRequest = new HttpPost(uriBuilder.build()); // Add custom headers before adding common headers @@ -742,10 +731,10 @@ public static void cancel(StmtInput stmtInput) throws SFException, SnowflakeSQLE httpRequest.addHeader("accept", stmtInput.mediaType); httpRequest.setHeader( - SF_HEADER_AUTHORIZATION, - SF_HEADER_SNOWFLAKE_AUTHTYPE + SFSession.SF_HEADER_AUTHORIZATION, + SFSession.SF_HEADER_SNOWFLAKE_AUTHTYPE + " " - + SF_HEADER_TOKEN_TAG + + SFSession.SF_HEADER_TOKEN_TAG + "=\"" + stmtInput.sessionToken + "\"");