Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Snow 1259709 array map simple types fixdatetime #1685

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1396,10 +1396,11 @@ public <T> List<T> getList(int columnIndex, Class<T> type) throws SQLException {
}

public <T> T[] getArray(int columnIndex, Class<T> type) throws SQLException {
int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex);
int columnType = ColumnTypeHelper.getColumnType(columnSubType, session);
;
int scale = resultSetMetaData.getScale(columnIndex);
FieldMetadata fieldMetadata =
sfBaseResultSet.getMetaData().getColumnMetadata().get(columnIndex - 1).getFields().get(0);
int columnSubType = fieldMetadata.getType();
int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session);
int scale = fieldMetadata.getScale();
TimeZone tz = sfBaseResultSet.getSessionTimeZone();
Object[] objects = (Object[]) getArray(columnIndex).getArray();
T[] arr = (T[]) java.lang.reflect.Array.newInstance(type, objects.length);
Expand Down Expand Up @@ -1501,29 +1502,29 @@ public <T> T[] getArray(int columnIndex, Class<T> type) throws SQLException {
(T)
sfBaseResultSet
.getConverters()
.getDateTimeConverter()
.getDate(value, columnType, columnSubType, tz, scale));
.dateConverter(session)
.convert((String) value));
} else if (Time.class.isAssignableFrom(type)) {
arr[counter++] =
mapSFExceptionToSQLException(
() ->
(T)
sfBaseResultSet
.getConverters()
.getDateTimeConverter()
.getTime(value, columnType, columnSubType, tz, scale));
.timeConverter(session)
.convert((String) value));
} else if (Timestamp.class.isAssignableFrom(type)) {
mapSFExceptionToSQLException(
() ->
(T)
sfBaseResultSet
.getConverters()
.getDateTimeConverter()
.getTimestamp(value, columnType, columnSubType, tz, scale));
.timestampConverter(columnSubType, columnType, scale, session, null, tz)
.convert((String) value));
} else if (BigDecimal.class.isAssignableFrom(type)) {
arr[counter++] = (T) getBigDecimal(columnIndex);
} else {
logger.warn(
logger.debug(
"Unsupported type passed to getArray(int columnIndex, Class<T> type): "
+ type.getName());
throw new SQLException(
Expand All @@ -1535,9 +1536,11 @@ public <T> T[] getArray(int columnIndex, Class<T> type) throws SQLException {
}

public <T> Map<String, T> getMap(int columnIndex, Class<T> type) throws SQLException {
int columnType = resultSetMetaData.getInternalColumnType(columnIndex);
int columnSubType = resultSetMetaData.getInternalColumnType(columnIndex);
int scale = resultSetMetaData.getScale(columnIndex);
FieldMetadata valueFieldMetadata =
sfBaseResultSet.getMetaData().getColumnMetadata().get(columnIndex - 1).getFields().get(1);
int columnSubType = valueFieldMetadata.getType();
int columnType = ColumnTypeHelper.getColumnType(valueFieldMetadata.getType(), session);
int scale = valueFieldMetadata.getScale();
TimeZone tz = sfBaseResultSet.getSessionTimeZone();
Object object = getObject(columnIndex);
JsonNode jsonNode = ((JsonSqlInput) object).getInput();
Expand Down Expand Up @@ -1659,8 +1662,8 @@ public <T> Map<String, T> getMap(int columnIndex, Class<T> type) throws SQLExcep
(T)
sfBaseResultSet
.getConverters()
.getDateTimeConverter()
.getDate(entry.getValue(), columnType, columnSubType, tz, scale)));
.dateConverter(session)
.convert((String) entry.getValue())));
} else if (Time.class.isAssignableFrom(type)) {
resultMap.put(
entry.getKey(),
Expand All @@ -1669,8 +1672,8 @@ public <T> Map<String, T> getMap(int columnIndex, Class<T> type) throws SQLExcep
(T)
sfBaseResultSet
.getConverters()
.getDateTimeConverter()
.getTime(entry.getValue(), columnType, columnSubType, tz, scale)));
.timeConverter(session)
.convert((String) entry.getValue())));
} else if (Timestamp.class.isAssignableFrom(type)) {
resultMap.put(
entry.getKey(),
Expand All @@ -1679,8 +1682,8 @@ public <T> Map<String, T> getMap(int columnIndex, Class<T> type) throws SQLExcep
(T)
sfBaseResultSet
.getConverters()
.getDateTimeConverter()
.getTimestamp(entry.getValue(), columnType, columnSubType, tz, scale)));
.timestampConverter(columnSubType, columnType, scale, session, null, tz)
.convert((String) entry.getValue())));
} else {
logger.debug(
"Unsupported type passed to getObject(int columnIndex,Class<T> type): "
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ public Connection init() throws SQLException {
try (Statement stmt = conn.createStatement()) {
stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true");
stmt.execute("alter session set IGNORE_CLIENT_VESRION_IN_STRUCTURED_TYPES_RESPONSE = true");
stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'");
stmt.execute(
"alter session set jdbc_query_result_format = '"
+ queryResultFormat.sessionParameterTypeValue
Expand All @@ -68,16 +69,12 @@ public void clean() throws Exception {
SnowflakeObjectTypeFactories.unregister(AllTypesClass.class);
}

// TODO Structured types feature exists only on QA environments
@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapStructToObjectWithFactory() throws SQLException {
testMapJson(true);
}

// TODO Structured types feature exists only on QA environments
@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapStructToObjectWithReflection() throws SQLException {
testMapJson(false);
testMapJson(true);
Expand All @@ -95,7 +92,6 @@ private void testMapJson(boolean registerFactory) throws SQLException {
});
}

// TODO Structured types feature exists only on QA environments
@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapStructAllTypes() throws SQLException {
Expand Down Expand Up @@ -176,7 +172,6 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapJsonToMap() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -189,7 +184,6 @@ public void testMapJsonToMap() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsArrayOfSqlData() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new);
Expand All @@ -205,7 +199,6 @@ public void testReturnAsArrayOfSqlData() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsArrayOfString() throws SQLException {
withFirstRow(
"SELECT ARRAY_CONSTRUCT('one', 'two','three')::ARRAY(VARCHAR)",
Expand All @@ -219,7 +212,6 @@ public void testReturnAsArrayOfString() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsListOfIntegers() throws SQLException {
withFirstRow(
"SELECT ARRAY_CONSTRUCT(1,2,3)::ARRAY(INTEGER)",
Expand All @@ -233,7 +225,6 @@ public void testReturnAsListOfIntegers() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsMap() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new);
Expand All @@ -249,7 +240,6 @@ public void testReturnAsMap() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsMapOfLong() throws SQLException {
withFirstRow(
"SELECT {'x':1, 'y':2, 'z':3}::MAP(VARCHAR, BIGINT)",
Expand All @@ -263,7 +253,42 @@ public void testReturnAsMapOfLong() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsMapOfTimestamp() throws SQLException {
  // Reads a MAP(VARCHAR, TIMESTAMP) column and verifies each entry is converted
  // to the expected java.sql.Timestamp. The '+0100' offsets line up with the
  // session timezone (set to 'Europe/Warsaw' in init(), UTC+1 in December),
  // so the asserted wall-clock values equal the literal ones.
  String query =
      "SELECT {'x':'2021-12-22 09:43:44.000 +0100', 'y':'2021-12-22 10:43:44.000 +0100'}::MAP(VARCHAR, TIMESTAMP)";
  withFirstRow(
      query,
      (resultSet) -> {
        Map<String, Timestamp> actual =
            resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Timestamp.class);
        Timestamp expectedX = Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44));
        Timestamp expectedY = Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 10, 43, 44));
        assertEquals(expectedX, actual.get("x"));
        assertEquals(expectedY, actual.get("y"));
      });
}

@Test
public void testReturnAsMapOfDate() throws SQLException {
  // Reads a MAP(VARCHAR, DATE) column and verifies each entry is converted
  // to the expected java.sql.Date.
  String query = "SELECT {'x':'2023-12-24', 'y':'2023-12-25'}::MAP(VARCHAR, DATE)";
  withFirstRow(
      query,
      (resultSet) -> {
        Map<String, Date> actual =
            resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Date.class);
        Date expectedX = Date.valueOf(LocalDate.of(2023, 12, 24));
        Date expectedY = Date.valueOf(LocalDate.of(2023, 12, 25));
        assertEquals(expectedX, actual.get("x"));
        assertEquals(expectedY, actual.get("y"));
      });
}

@Test
public void testReturnAsMapOfTime() throws SQLException {
  // Reads a MAP(VARCHAR, TIME) column and verifies each entry is converted
  // to the expected java.sql.Time.
  String query = "SELECT {'x':'12:34:56', 'y':'12:34:58'}::MAP(VARCHAR, TIME)";
  withFirstRow(
      query,
      (resultSet) -> {
        Map<String, Time> actual =
            resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Time.class);
        Time expectedX = Time.valueOf(LocalTime.of(12, 34, 56));
        Time expectedY = Time.valueOf(LocalTime.of(12, 34, 58));
        assertEquals(expectedX, actual.get("x"));
        assertEquals(expectedY, actual.get("y"));
      });
}

@Test
public void testReturnAsMapOfBoolean() throws SQLException {
withFirstRow(
"SELECT {'x':'true', 'y':0}::MAP(VARCHAR, BOOLEAN)",
Expand All @@ -276,7 +301,6 @@ public void testReturnAsMapOfBoolean() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testReturnAsList() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new);
Expand All @@ -290,9 +314,7 @@ public void testReturnAsList() throws SQLException {
});
}

// TODO Structured types feature exists only on QA environments
@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapStructsFromChunks() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -306,7 +328,6 @@ public void testMapStructsFromChunks() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapIntegerArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -320,7 +341,6 @@ public void testMapIntegerArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapFixedToLongArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -334,7 +354,6 @@ public void testMapFixedToLongArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapDecimalArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
// when: jdbc_treat_decimal_as_int=true scale=0
Expand Down Expand Up @@ -379,7 +398,6 @@ public void testMapDecimalArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapVarcharArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -394,7 +412,6 @@ public void testMapVarcharArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapDatesArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -407,7 +424,6 @@ public void testMapDatesArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapTimeArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -420,7 +436,6 @@ public void testMapTimeArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapTimestampArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -435,7 +450,6 @@ public void testMapTimestampArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapBooleanArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -448,7 +462,6 @@ public void testMapBooleanArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapBinaryArray() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -461,7 +474,6 @@ public void testMapBinaryArray() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapArrayOfStructToMap() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -474,7 +486,6 @@ public void testMapArrayOfStructToMap() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testMapArrayOfArrays() throws SQLException {
Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW);
withFirstRow(
Expand All @@ -487,7 +498,6 @@ public void testMapArrayOfArrays() throws SQLException {
}

@Test
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class)
public void testColumnTypeWhenStructureTypeIsDisabled() throws Exception {
withStructureTypeTemporaryDisabled(
() -> {
Expand Down
Loading