SNOW-1213120: Reuse connections in tests 5 #1818

Merged
merged 8 commits (Jul 30, 2024)
Changes from all commits
src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java (263 changes: 132 additions & 131 deletions)
@@ -3,7 +3,6 @@
  */
 package net.snowflake.client.core;

-import static net.snowflake.client.AbstractDriverIT.getConnection;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -16,7 +15,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
-import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
 import java.time.Instant;
@@ -32,6 +30,7 @@
 import net.snowflake.client.SkipOnThinJar;
 import net.snowflake.client.category.TestCategoryArrow;
 import net.snowflake.client.jdbc.ArrowResultChunk;
+import net.snowflake.client.jdbc.BaseJDBCWithSharedConnectionIT;
 import net.snowflake.client.jdbc.ErrorCode;
 import net.snowflake.client.jdbc.SnowflakeResultChunk;
 import net.snowflake.client.jdbc.SnowflakeResultSet;
@@ -70,7 +69,7 @@
 import org.junit.rules.TemporaryFolder;

 @Category(TestCategoryArrow.class)
-public class SFArrowResultSetIT {
+public class SFArrowResultSetIT extends BaseJDBCWithSharedConnectionIT {

   /** Necessary to conditional ignore tests */
   @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule();
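The `connection` field used by the refactored tests below is inherited from BaseJDBCWithSharedConnectionIT. As a rough sketch, a JUnit 4 base class that opens one connection per test class (rather than one per test method) could look like the following; the names and error handling are illustrative, not necessarily the repository's exact code:

// Sketch of a shared-connection base class for JUnit 4 integration tests.
// Illustrative only: the real BaseJDBCWithSharedConnectionIT may differ.
package net.snowflake.client.jdbc;

import java.sql.Connection;
import java.sql.SQLException;
import net.snowflake.client.AbstractDriverIT;
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class BaseJDBCWithSharedConnectionIT extends AbstractDriverIT {

  // Opened once per test class and shared by every test method in subclasses.
  protected static Connection connection;

  @BeforeClass
  public static void setUpConnection() throws SQLException {
    connection = getConnection();
  }

  @AfterClass
  public static void closeConnection() throws SQLException {
    if (connection != null) {
      connection.close();
    }
  }
}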
@@ -595,155 +594,157 @@ private void writeTimestampStructToField(
   @Test
   @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class)
   public void testSortedResultChunkWithStructVectors() throws Throwable {
-    Connection con = getConnection();
-    Statement statement = con.createStatement();
-    statement.execute("create or replace table teststructtimestamp (t1 timestamp_ltz)");
-    ResultSet rs = statement.executeQuery("select * from teststructtimestamp");
-    List<SnowflakeResultSetSerializable> resultSetSerializables =
-        ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024);
-    SnowflakeResultSetSerializableV1 resultSetSerializable =
-        (SnowflakeResultSetSerializableV1) resultSetSerializables.get(0);
+    try (Statement statement = connection.createStatement()) {
+      statement.execute("create or replace table teststructtimestamp (t1 timestamp_ltz)");
+      try (ResultSet rs = statement.executeQuery("select * from teststructtimestamp")) {
+        List<SnowflakeResultSetSerializable> resultSetSerializables =
+            ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024);
+        SnowflakeResultSetSerializableV1 resultSetSerializable =
+            (SnowflakeResultSetSerializableV1) resultSetSerializables.get(0);

-    Map<String, String> customFieldMeta = new HashMap<>();
-    customFieldMeta.put("logicalType", "TIMESTAMP_LTZ");
-    customFieldMeta.put("scale", "38");
-    // test normal date
-    FieldType fieldType =
-        new FieldType(true, Types.MinorType.BIGINT.getType(), null, customFieldMeta);
-    FieldType fieldType2 =
-        new FieldType(true, Types.MinorType.INT.getType(), null, customFieldMeta);
+        Map<String, String> customFieldMeta = new HashMap<>();
+        customFieldMeta.put("logicalType", "TIMESTAMP_LTZ");
+        customFieldMeta.put("scale", "38");
+        // test normal date
+        FieldType fieldType =
+            new FieldType(true, Types.MinorType.BIGINT.getType(), null, customFieldMeta);
+        FieldType fieldType2 =
+            new FieldType(true, Types.MinorType.INT.getType(), null, customFieldMeta);

-    StructVector structVector = StructVector.empty("testListVector", allocator);
-    List<Field> fieldList = new LinkedList<Field>();
-    Field bigIntField = new Field("epoch", fieldType, null);
+        StructVector structVector = StructVector.empty("testListVector", allocator);
+        List<Field> fieldList = new LinkedList<Field>();
+        Field bigIntField = new Field("epoch", fieldType, null);

-    Field intField = new Field("fraction", fieldType2, null);
+        Field intField = new Field("fraction", fieldType2, null);

-    fieldList.add(bigIntField);
-    fieldList.add(intField);
+        fieldList.add(bigIntField);
+        fieldList.add(intField);

-    FieldType structFieldType =
-        new FieldType(true, Types.MinorType.STRUCT.getType(), null, customFieldMeta);
-    Field structField = new Field("timestamp", structFieldType, fieldList);
+        FieldType structFieldType =
+            new FieldType(true, Types.MinorType.STRUCT.getType(), null, customFieldMeta);
+        Field structField = new Field("timestamp", structFieldType, fieldList);

-    structVector.initializeChildrenFromFields(fieldList);
+        structVector.initializeChildrenFromFields(fieldList);

-    List<Field> fieldListMajor = new LinkedList<Field>();
-    fieldListMajor.add(structField);
-    Schema dataSchema = new Schema(fieldList);
-    Object[][] data = generateData(dataSchema, 1000);
+        List<Field> fieldListMajor = new LinkedList<Field>();
+        fieldListMajor.add(structField);
+        Schema dataSchema = new Schema(fieldList);
+        Object[][] data = generateData(dataSchema, 1000);

-    Schema schema = new Schema(fieldListMajor);
+        Schema schema = new Schema(fieldListMajor);

-    File file = createArrowFile("testTimestamp", schema, data, 10);
+        File file = createArrowFile("testTimestamp", schema, data, 10);

-    int dataSize = (int) file.length();
-    byte[] dataBytes = new byte[dataSize];
+        int dataSize = (int) file.length();
+        byte[] dataBytes = new byte[dataSize];

-    InputStream is = new FileInputStream(file);
-    is.read(dataBytes, 0, dataSize);
+        InputStream is = new FileInputStream(file);
+        is.read(dataBytes, 0, dataSize);

-    resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE));
-    resultSetSerializable.setFirstChunkStringData(Base64.getEncoder().encodeToString(dataBytes));
-    resultSetSerializable.setFirstChunkByteData(dataBytes);
-    resultSetSerializable.setChunkFileCount(0);
+        resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE));
+        resultSetSerializable.setFirstChunkStringData(
+            Base64.getEncoder().encodeToString(dataBytes));
+        resultSetSerializable.setFirstChunkByteData(dataBytes);
+        resultSetSerializable.setChunkFileCount(0);

-    SFArrowResultSet resultSet =
-        new SFArrowResultSet(resultSetSerializable, new NoOpTelemetryClient(), true);
+        SFArrowResultSet resultSet =
+            new SFArrowResultSet(resultSetSerializable, new NoOpTelemetryClient(), true);

-    for (int i = 0; i < 1000; i++) {
-      resultSet.next();
+        for (int i = 0; i < 1000; i++) {
+          resultSet.next();
+        }
+        // We inserted a null row at the beginning so when sorted, the last row should be null
+        assertEquals(null, resultSet.getObject(1));
+        assertFalse(resultSet.next());
+        statement.execute("drop table teststructtimestamp;");
+      }
     }
-    // We inserted a null row at the beginning so when sorted, the last row should be null
-    assertEquals(null, resultSet.getObject(1));
-    assertFalse(resultSet.next());
-    statement.execute("drop table teststructtimestamp;");
-    con.close();
   }

   /** Test that the first chunk can be sorted */
   @Test
   @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class)
   public void testSortedResultChunk() throws Throwable {
-    Connection con = getConnection();
-    Statement statement = con.createStatement();
-    statement.execute(
-        "create or replace table alltypes (i1 int, d1 date, b1 bigint, f1 float, s1 smallint, t1 tinyint, b2 binary, t2 text, b3 boolean, d2 decimal)");
-    ResultSet rs = statement.executeQuery("select * from alltypes");
-    List<SnowflakeResultSetSerializable> resultSetSerializables =
-        ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024);
-    SnowflakeResultSetSerializableV1 resultSetSerializable =
-        (SnowflakeResultSetSerializableV1) resultSetSerializables.get(0);
-
-    List<Field> fieldList = new ArrayList<>();
-    Map<String, String> customFieldMeta = new HashMap<>();
-    customFieldMeta.put("logicalType", "FIXED");
-    customFieldMeta.put("scale", "0");
-    FieldType type = new FieldType(false, Types.MinorType.INT.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "DATE");
-    type = new FieldType(false, Types.MinorType.DATEDAY.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "FIXED");
-    type = new FieldType(false, Types.MinorType.BIGINT.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "REAL");
-    type = new FieldType(false, Types.MinorType.FLOAT8.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "FIXED");
-    type = new FieldType(false, Types.MinorType.SMALLINT.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "FIXED");
-    type = new FieldType(false, Types.MinorType.TINYINT.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "BINARY");
-    type = new FieldType(false, Types.MinorType.VARBINARY.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "TEXT");
-    type = new FieldType(false, Types.MinorType.VARCHAR.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "BOOLEAN");
-    type = new FieldType(false, Types.MinorType.BIT.getType(), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    customFieldMeta.put("logicalType", "REAL");
-    type = new FieldType(false, new ArrowType.Decimal(38, 16, 128), null, customFieldMeta);
-    fieldList.add(new Field("", type, null));
-
-    Schema schema = new Schema(fieldList);
-
-    Object[][] data = generateData(schema, 1000);
-    File file = createArrowFile("testVectorTypes", schema, data, 10);
-
-    int dataSize = (int) file.length();
-    byte[] dataBytes = new byte[dataSize];
-
-    InputStream is = new FileInputStream(file);
-    is.read(dataBytes, 0, dataSize);
-
-    resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE));
-    resultSetSerializable.setFirstChunkStringData(Base64.getEncoder().encodeToString(dataBytes));
-    resultSetSerializable.setFirstChunkByteData(dataBytes);
-    resultSetSerializable.setChunkFileCount(0);
-
-    SFArrowResultSet resultSet =
-        new SFArrowResultSet(resultSetSerializable, new NoOpTelemetryClient(), true);
-
-    for (int i = 0; i < 1000; i++) {
-      resultSet.next();
+    try (Statement statement = connection.createStatement()) {
+      statement.execute(
+          "create or replace table alltypes (i1 int, d1 date, b1 bigint, f1 float, s1 smallint, t1 tinyint, b2 binary, t2 text, b3 boolean, d2 decimal)");
+      try (ResultSet rs = statement.executeQuery("select * from alltypes")) {
+        List<SnowflakeResultSetSerializable> resultSetSerializables =
+            ((SnowflakeResultSet) rs).getResultSetSerializables(100 * 1024 * 1024);
+        SnowflakeResultSetSerializableV1 resultSetSerializable =
+            (SnowflakeResultSetSerializableV1) resultSetSerializables.get(0);
+
+        List<Field> fieldList = new ArrayList<>();
+        Map<String, String> customFieldMeta = new HashMap<>();
+        customFieldMeta.put("logicalType", "FIXED");
+        customFieldMeta.put("scale", "0");
+        FieldType type = new FieldType(false, Types.MinorType.INT.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "DATE");
+        type = new FieldType(false, Types.MinorType.DATEDAY.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "FIXED");
+        type = new FieldType(false, Types.MinorType.BIGINT.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "REAL");
+        type = new FieldType(false, Types.MinorType.FLOAT8.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "FIXED");
+        type = new FieldType(false, Types.MinorType.SMALLINT.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "FIXED");
+        type = new FieldType(false, Types.MinorType.TINYINT.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "BINARY");
+        type = new FieldType(false, Types.MinorType.VARBINARY.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "TEXT");
+        type = new FieldType(false, Types.MinorType.VARCHAR.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "BOOLEAN");
+        type = new FieldType(false, Types.MinorType.BIT.getType(), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        customFieldMeta.put("logicalType", "REAL");
+        type = new FieldType(false, new ArrowType.Decimal(38, 16, 128), null, customFieldMeta);
+        fieldList.add(new Field("", type, null));
+
+        Schema schema = new Schema(fieldList);
+
+        Object[][] data = generateData(schema, 1000);
+        File file = createArrowFile("testVectorTypes", schema, data, 10);
+
+        int dataSize = (int) file.length();
+        byte[] dataBytes = new byte[dataSize];
+
+        InputStream is = new FileInputStream(file);
+        is.read(dataBytes, 0, dataSize);
+
+        resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE));
+        resultSetSerializable.setFirstChunkStringData(
+            Base64.getEncoder().encodeToString(dataBytes));
+        resultSetSerializable.setFirstChunkByteData(dataBytes);
+        resultSetSerializable.setChunkFileCount(0);
+
+        SFArrowResultSet resultSet =
+            new SFArrowResultSet(resultSetSerializable, new NoOpTelemetryClient(), true);
+
+        for (int i = 0; i < 1000; i++) {
+          resultSet.next();
+        }
+        // We inserted a null row at the beginning so when sorted, the last row should be null
+        assertEquals(null, resultSet.getObject(1));
+        assertFalse(resultSet.next());
+        statement.execute("drop table alltypes;");
+      }
     }
-    // We inserted a null row at the beginning so when sorted, the last row should be null
-    assertEquals(null, resultSet.getObject(1));
-    assertFalse(resultSet.next());
-    statement.execute("drop table alltypes;");
-    con.close();
   }
 }
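Both rewritten tests follow the same resource pattern: the class-wide shared connection is reused as-is, while each test scopes its own Statement and ResultSet in try-with-resources so they are closed even if an assertion fails mid-test. Distilled to its shape (the table name t and its schema here are placeholders, not from the PR):

// Shape of the refactored tests; "connection" comes from the shared base class.
try (Statement statement = connection.createStatement()) {
  statement.execute("create or replace table t (c int)"); // per-test setup
  try (ResultSet rs = statement.executeQuery("select * from t")) {
    // ... assertions against rs ...
    statement.execute("drop table t;"); // cleanup while the statement is still open
  } // rs closed here
} // statement closed here; the shared connection stays open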