From 75c57f29ea245a2555a54525ac3d051fe70bbef1 Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Wed, 13 Mar 2024 08:20:21 +0100 Subject: [PATCH] SNOW-974575 Structured type array map (#1654) * SNOW-974575 Implementation of methods: getArray getLists getMap --- .../snowflake/client/core/JsonSqlInput.java | 3 +- .../client/core/SFArrowResultSet.java | 7 + .../client/core/SFBaseResultSet.java | 10 + .../client/core/SFJsonResultSet.java | 221 ++++++++++++-- .../snowflake/client/core/SFResultSet.java | 17 +- .../client/core/SFResultSetMetaData.java | 3 +- .../net/snowflake/client/core/SfSqlArray.java | 77 +++++ .../client/core/json/Converters.java | 155 ++++++++++ .../client/core/json/NumberConverter.java | 2 +- .../core/structs/SQLDataCreationHelper.java | 3 +- .../client/jdbc/SFAsyncResultSet.java | 6 +- .../client/jdbc/SnowflakeBaseResultSet.java | 71 ++++- .../jdbc/SnowflakeResultSetMetaDataV1.java | 3 +- .../client/jdbc/SnowflakeResultSetV1.java | 13 +- .../snowflake/client/jdbc/SnowflakeType.java | 1 + .../snowflake/client/jdbc/SnowflakeUtil.java | 51 +++- .../util/JsonStringToTypeConverter.java | 14 + .../java/net/snowflake/client/TestUtil.java | 6 +- .../snowflake/client/ThrowingConsumer.java | 4 +- .../client/jdbc/MockConnectionTest.java | 16 + .../jdbc/ResultSetFeatureNotSupportedIT.java | 1 - .../ResultSetStructuredTypesLatestIT.java | 281 ++++++++++++++++-- 22 files changed, 866 insertions(+), 99 deletions(-) create mode 100644 src/main/java/net/snowflake/client/core/SfSqlArray.java create mode 100644 src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java diff --git a/src/main/java/net/snowflake/client/core/JsonSqlInput.java b/src/main/java/net/snowflake/client/core/JsonSqlInput.java index 3362f3a25..f82e492a1 100644 --- a/src/main/java/net/snowflake/client/core/JsonSqlInput.java +++ b/src/main/java/net/snowflake/client/core/JsonSqlInput.java @@ -195,7 +195,8 @@ public Timestamp readTimestamp(TimeZone tz) throws SQLException { int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); int columnSubType = fieldMetadata.getType(); int scale = fieldMetadata.getScale(); - Timestamp result = getTimestampFromType(columnSubType, (String) value); + Timestamp result = + SnowflakeUtil.getTimestampFromType(columnSubType, (String) value, session); if (result != null) { return result; } diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index e7c90d3f9..94b640b9a 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -13,6 +13,7 @@ import java.io.IOException; import java.math.BigDecimal; import java.math.RoundingMode; +import java.sql.Array; import java.sql.Date; import java.sql.SQLException; import java.sql.Time; @@ -508,6 +509,12 @@ public Object getObject(int columnIndex) throws SFException { return handleObjectType(columnIndex, obj); } + @Override + public Array getArray(int columnIndex) throws SFException { + // TODO: handleArray SNOW-969794 + throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type ARRAY"); + } + private Object handleObjectType(int columnIndex, Object obj) throws SFException { int columnType = resultSetMetaData.getColumnType(columnIndex); if (columnType == Types.STRUCT diff --git a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java index f34bb2d53..4bbc9bf6c 100644 --- 
a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java @@ -5,6 +5,7 @@ package net.snowflake.client.core; import java.math.BigDecimal; +import java.sql.Array; import java.sql.Date; import java.sql.SQLException; import java.sql.Time; @@ -14,6 +15,7 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; +import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeResultSetSerializable; import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; @@ -93,6 +95,8 @@ public abstract class SFBaseResultSet { public abstract Object getObject(int columnIndex) throws SFException; + public abstract Array getArray(int columnIndex) throws SFException; + public abstract BigDecimal getBigDecimal(int columnIndex) throws SFException; public abstract BigDecimal getBigDecimal(int columnIndex, int scale) throws SFException; @@ -193,4 +197,10 @@ public List getResultSetSerializables(long maxSi throws SQLException { return this.resultSetSerializable.splitBySize(maxSizeInBytes); } + + @SnowflakeJdbcInternalApi + public Converters getConverters() { + logger.debug("Json converters weren't created"); + return null; + } } diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 026d1d240..21cec8a0f 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -7,16 +7,28 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; import java.math.BigDecimal; +import java.sql.Array; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; +import java.util.Iterator; +import java.util.Map; +import java.util.Spliterator; +import java.util.Spliterators; import java.util.TimeZone; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ErrorCode; +import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.jdbc.SnowflakeColumnMetadata; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.client.util.JsonStringToTypeConverter; /** Abstract class used to represent snowflake result set in json format */ public abstract class SFJsonResultSet extends SFBaseResultSet { @@ -83,40 +95,27 @@ public Object getObject(int columnIndex) throws SFException { return getBoolean(columnIndex); case Types.STRUCT: - if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { + if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { return getSqlInput((String) obj, columnIndex); } else { throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } + case Types.ARRAY: + if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { + return getArray(columnIndex); + } else { + throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); + } default: throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } } - private Object getSqlInput(String input, int columnIndex) 
throws SFException { - try { - JsonNode jsonNode = OBJECT_MAPPER.readTree(input); - return new JsonSqlInput( - jsonNode, - session, - converters, - resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields()); - } catch (JsonProcessingException e) { - throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); - } - } - - /** - * Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type - * instead. - * - * @param columnIndex the column index - * @return an object of type long or BigDecimal depending on number size - * @throws SFException - */ - private Object getBigInt(int columnIndex, Object obj) throws SFException { - return converters.getNumberConverter().getBigInt(obj, columnIndex); + @Override + public Array getArray(int columnIndex) throws SFException { + Object obj = getObjectInternal(columnIndex); + return getArrayInternal((String) obj, columnIndex); } @Override @@ -250,4 +249,178 @@ public Date getDate(int columnIndex, TimeZone tz) throws SFException { private Timestamp getTimestamp(int columnIndex) throws SFException { return getTimestamp(columnIndex, TimeZone.getDefault()); } + + @Override + @SnowflakeJdbcInternalApi + public Converters getConverters() { + return converters; + } + + private Object getSqlInput(String input, int columnIndex) throws SFException { + try { + JsonNode jsonNode = OBJECT_MAPPER.readTree(input); + return new JsonSqlInput( + jsonNode, + session, + converters, + resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields()); + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + } + + private SfSqlArray getArrayInternal(String obj, int columnIndex) throws SFException { + try { + SnowflakeColumnMetadata arrayMetadata = + resultSetMetaData.getColumnMetadata().get(columnIndex - 1); + FieldMetadata fieldMetadata = arrayMetadata.getFields().get(0); + + int columnSubType = fieldMetadata.getType(); + int columnType = ColumnTypeHelper.getColumnType(columnSubType, session); + int scale = fieldMetadata.getScale(); + + ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj); + Iterator nodeElements = arrayNode.elements(); + + switch (columnSubType) { + case Types.INTEGER: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.integerConverter(columnType)) + .toArray(Integer[]::new)); + case Types.SMALLINT: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.smallIntConverter(columnType)) + .toArray(Short[]::new)); + case Types.TINYINT: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.tinyIntConverter(columnType)) + .toArray(Byte[]::new)); + case Types.BIGINT: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.bigIntConverter(columnType)).toArray(Long[]::new)); + case Types.DECIMAL: + case Types.NUMERIC: + return new SfSqlArray( + columnSubType, + convertToFixedArray(nodeElements, converters.bigDecimalConverter(columnType))); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGNVARCHAR: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.varcharConverter(columnType, columnSubType, scale)) + .toArray(String[]::new)); + case Types.BINARY: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.bytesConverter(columnType, scale)) + .toArray(Byte[][]::new)); + case Types.FLOAT: + case Types.REAL: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, 
converters.floatConverter(columnType)).toArray(Float[]::new)); + case Types.DOUBLE: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.doubleConverter(columnType)) + .toArray(Double[]::new)); + case Types.DATE: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.dateConverter(session)).toArray(Date[]::new)); + case Types.TIME: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.timeConverter(session)).toArray(Time[]::new)); + case Types.TIMESTAMP: + return new SfSqlArray( + columnSubType, + getStream( + nodeElements, + converters.timestampConverter(columnSubType, columnType, scale, session)) + .toArray(Timestamp[]::new)); + case Types.BOOLEAN: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.booleanConverter(columnType)) + .toArray(Boolean[]::new)); + case Types.STRUCT: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.structConverter(OBJECT_MAPPER)) + .toArray(Map[]::new)); + case Types.ARRAY: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.arrayConverter(OBJECT_MAPPER)) + .toArray(Map[][]::new)); + default: + throw new SFException( + ErrorCode.FEATURE_UNSUPPORTED, + "Can't construct array for data type: " + columnSubType); + } + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + } + + private Object[] convertToFixedArray( + Iterator nodeElements, JsonStringToTypeConverter converter) { + AtomicInteger bigDecimalCount = new AtomicInteger(); + Object[] elements = + getStream(nodeElements, converter) + .peek( + elem -> { + if (elem instanceof BigDecimal) { + bigDecimalCount.incrementAndGet(); + } + }) + .toArray( + size -> { + boolean shouldBeReturnedAsBigDecimal = bigDecimalCount.get() > 0; + Class returnedClass = + shouldBeReturnedAsBigDecimal ? BigDecimal.class : Long.class; + return java.lang.reflect.Array.newInstance(returnedClass, size); + }); + return elements; + } + + private Stream getStream(Iterator nodeElements, JsonStringToTypeConverter converter) { + return StreamSupport.stream( + Spliterators.spliteratorUnknownSize(nodeElements, Spliterator.ORDERED), false) + .map( + elem -> { + try { + return convert(converter, (JsonNode) elem); + } catch (SFException e) { + throw new RuntimeException(e); + } + }); + } + + private static Object convert(JsonStringToTypeConverter converter, JsonNode node) + throws SFException { + if (node.isValueNode()) { + return converter.convert(node.asText()); + } else { + return converter.convert(node.toString()); + } + } + + /** + * Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type + * instead. 
+ * + * @param columnIndex the column index + * @return an object of type long or BigDecimal depending on number size + * @throws SFException + */ + private Object getBigInt(int columnIndex, Object obj) throws SFException { + return converters.getNumberConverter().getBigInt(obj, columnIndex); + } } diff --git a/src/main/java/net/snowflake/client/core/SFResultSet.java b/src/main/java/net/snowflake/client/core/SFResultSet.java index 77f30ddd7..b7698cf5d 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFResultSet.java @@ -155,22 +155,7 @@ public SFResultSet( Telemetry telemetryClient, boolean sortResult) throws SQLException { - super( - resultSetSerializable.getTimeZone(), - new Converters( - resultSetSerializable.getTimeZone(), - session, - resultSetSerializable.getResultVersion(), - resultSetSerializable.isHonorClientTZForTimestampNTZ(), - resultSetSerializable.getTreatNTZAsUTC(), - resultSetSerializable.getUseSessionTimezone(), - resultSetSerializable.getFormatDateWithTimeZone(), - resultSetSerializable.getBinaryFormatter(), - resultSetSerializable.getDateFormatter(), - resultSetSerializable.getTimeFormatter(), - resultSetSerializable.getTimestampNTZFormatter(), - resultSetSerializable.getTimestampLTZFormatter(), - resultSetSerializable.getTimestampTZFormatter())); + super(resultSetSerializable.getTimeZone(), new Converters(session, resultSetSerializable)); this.resultSetSerializable = resultSetSerializable; this.columnCount = 0; this.sortResult = sortResult; diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index dfb621400..ce129788b 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -473,7 +473,8 @@ public List getIsAutoIncrementList() { return isAutoIncrementList; } - List getColumnMetadata() { + @SnowflakeJdbcInternalApi + public List getColumnMetadata() { return columnMetadata; } } diff --git a/src/main/java/net/snowflake/client/core/SfSqlArray.java b/src/main/java/net/snowflake/client/core/SfSqlArray.java new file mode 100644 index 000000000..83270796a --- /dev/null +++ b/src/main/java/net/snowflake/client/core/SfSqlArray.java @@ -0,0 +1,77 @@ +package net.snowflake.client.core; + +import java.sql.Array; +import java.sql.JDBCType; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Map; + +@SnowflakeJdbcInternalApi +public class SfSqlArray implements Array { + + private int baseType; + private Object elements; + + public SfSqlArray(int baseType, Object elements) { + this.baseType = baseType; + this.elements = elements; + } + + @Override + public String getBaseTypeName() throws SQLException { + return JDBCType.valueOf(baseType).getName(); + } + + @Override + public int getBaseType() throws SQLException { + return baseType; + } + + @Override + public Object getArray() throws SQLException { + return elements; + } + + @Override + public Object getArray(Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException("getArray(Map> map)"); + } + + @Override + public Object getArray(long index, int count) throws SQLException { + throw new SQLFeatureNotSupportedException("getArray(long index, int count)"); + } + + @Override + public Object getArray(long index, int count, Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException( 
+ "getArray(long index, int count, Map<String, Class<?>> map)"); + } + + @Override + public ResultSet getResultSet() throws SQLException { + throw new SQLFeatureNotSupportedException( + "getResultSet()"); + } + + @Override + public ResultSet getResultSet(Map<String, Class<?>> map) throws SQLException { + throw new SQLFeatureNotSupportedException("getResultSet(Map<String, Class<?>> map)"); + } + + @Override + public ResultSet getResultSet(long index, int count) throws SQLException { + throw new SQLFeatureNotSupportedException("getResultSet(long index, int count)"); + } + + @Override + public ResultSet getResultSet(long index, int count, Map<String, Class<?>> map) + throws SQLException { + throw new SQLFeatureNotSupportedException( + "getResultSet(long index, int count, Map<String, Class<?>> map)"); + } + + @Override + public void free() throws SQLException {} +} diff --git a/src/main/java/net/snowflake/client/core/json/Converters.java b/src/main/java/net/snowflake/client/core/json/Converters.java index fa3baadb6..c94361f51 100644 --- a/src/main/java/net/snowflake/client/core/json/Converters.java +++ b/src/main/java/net/snowflake/client/core/json/Converters.java @@ -1,8 +1,26 @@ package net.snowflake.client.core.json; +import static net.snowflake.client.jdbc.SnowflakeUtil.getTimestampFromType; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Instant; +import java.time.ZoneOffset; +import java.util.Arrays; +import java.util.Map; import java.util.TimeZone; import net.snowflake.client.core.SFBaseSession; +import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.ErrorCode; +import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; +import net.snowflake.client.util.JsonStringToTypeConverter; import net.snowflake.common.core.SFBinaryFormat; +import net.snowflake.common.core.SFTimestamp; import net.snowflake.common.core.SnowflakeDateTimeFormat; public class Converters { @@ -52,6 +70,24 @@ public Converters( this); } + @SnowflakeJdbcInternalApi + public Converters(SFBaseSession session, SnowflakeResultSetSerializableV1 resultSetSerializable) { + this( + resultSetSerializable.getTimeZone(), + session, + resultSetSerializable.getResultVersion(), + resultSetSerializable.isHonorClientTZForTimestampNTZ(), + resultSetSerializable.getTreatNTZAsUTC(), + resultSetSerializable.getUseSessionTimezone(), + resultSetSerializable.getFormatDateWithTimeZone(), + resultSetSerializable.getBinaryFormatter(), + resultSetSerializable.getDateFormatter(), + resultSetSerializable.getTimeFormatter(), + resultSetSerializable.getTimestampNTZFormatter(), + resultSetSerializable.getTimestampLTZFormatter(), + resultSetSerializable.getTimestampTZFormatter()); + } + public BooleanConverter getBooleanConverter() { return booleanConverter; } @@ -71,4 +107,123 @@ public BytesConverter getBytesConverter() { public StringConverter getStringConverter() { return stringConverter; } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter integerConverter(int columnType) { + return value -> getNumberConverter().getInt(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter smallIntConverter(int columnType) { + return value -> getNumberConverter().getShort(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter tinyIntConverter(int columnType) { + return value -> getNumberConverter().getByte(value); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter bigIntConverter(int columnType) { + return value -> getNumberConverter().getBigInt(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter longConverter(int columnType) { + return value -> getNumberConverter().getLong(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter bigDecimalConverter(int columnType) { + return value -> getNumberConverter().getBigDecimal(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter floatConverter(int columnType) { + return value -> getNumberConverter().getFloat(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter doubleConverter(int columnType) { + return value -> getNumberConverter().getDouble(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter bytesConverter(int columnType, int scale) { + return value -> { + byte[] primitiveArray = getBytesConverter().getBytes(value, columnType, Types.BINARY, scale); + Byte[] newByteArray = new Byte[primitiveArray.length]; + Arrays.setAll(newByteArray, n -> primitiveArray[n]); + return newByteArray; + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter varcharConverter(int columnType, int columnSubType, int scale) { + return value -> getStringConverter().getString(value, columnType, columnSubType, scale); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter booleanConverter(int columnType) { + return value -> getBooleanConverter().getBoolean(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter dateConverter(SFBaseSession session) { + return value -> { + SnowflakeDateTimeFormat formatter = + SnowflakeDateTimeFormat.fromSqlFormat( + (String) session.getCommonParameters().get("DATE_OUTPUT_FORMAT")); + SFTimestamp timestamp = formatter.parse(value); + return Date.valueOf( + Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalDate()); + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter timeConverter(SFBaseSession session) { + return value -> { + SnowflakeDateTimeFormat formatter = + SnowflakeDateTimeFormat.fromSqlFormat( + (String) session.getCommonParameters().get("TIME_OUTPUT_FORMAT")); + SFTimestamp timestamp = formatter.parse((String) value); + return Time.valueOf( + Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalTime()); + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter timestampConverter( + int columnSubType, int columnType, int scale, SFBaseSession session) { + return value -> { + Timestamp result = getTimestampFromType(columnSubType, (String) value, session); + if (result != null) { + return result; + } + return getDateTimeConverter() + .getTimestamp(value, columnType, columnSubType, TimeZone.getDefault(), scale); + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter structConverter(ObjectMapper objectMapper) { + return value -> { + try { + return objectMapper.readValue(value, Map.class); + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter arrayConverter(ObjectMapper objectMapper) { + return value -> { + try { + return objectMapper.readValue(value, Map[].class); + } catch (JsonProcessingException e) { + throw new 
SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + }; + } } diff --git a/src/main/java/net/snowflake/client/core/json/NumberConverter.java b/src/main/java/net/snowflake/client/core/json/NumberConverter.java index 132003359..18f6e96b2 100644 --- a/src/main/java/net/snowflake/client/core/json/NumberConverter.java +++ b/src/main/java/net/snowflake/client/core/json/NumberConverter.java @@ -115,7 +115,7 @@ public BigDecimal getBigDecimal(Object obj, int columnType, Integer scale) throw if (obj == null) { return null; } - BigDecimal value = new BigDecimal(obj.toString()); + BigDecimal value = getBigDecimal(obj.toString(), columnType); value = value.setScale(scale, RoundingMode.HALF_UP); return value; } diff --git a/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java b/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java index 5cfdb5ca2..fd3257e79 100644 --- a/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java +++ b/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java @@ -13,11 +13,10 @@ public class SQLDataCreationHelper { public static T create(Class type) throws SQLException { Optional> typeFactory = SnowflakeObjectTypeFactories.get(type); - SQLData instance = + return (T) typeFactory .map(Supplier::get) .orElseGet(() -> createUsingReflection((Class) type)); - return (T) instance; } private static SQLData createUsingReflection(Class type) { diff --git a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java index bc164de89..c50bf4900 100644 --- a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java @@ -26,7 +26,6 @@ /** SFAsyncResultSet implementation. 
Note: For Snowflake internal use */ public class SFAsyncResultSet extends SnowflakeBaseResultSet implements SnowflakeResultSet, ResultSet { - private final SFBaseResultSet sfBaseResultSet; private ResultSet resultSetForNext = new SnowflakeResultSetV1.EmptyResultSet(); private boolean resultSetForNextInitialized = false; private String queryID; @@ -47,8 +46,8 @@ public class SFAsyncResultSet extends SnowflakeBaseResultSet * @throws SQLException if failed to construct snowflake result set metadata */ SFAsyncResultSet(SFBaseResultSet sfBaseResultSet, Statement statement) throws SQLException { + super(statement); - this.sfBaseResultSet = sfBaseResultSet; this.queryID = sfBaseResultSet.getQueryId(); this.session = sfBaseResultSet.getSession(); this.extraStatement = statement; @@ -70,8 +69,6 @@ public SFAsyncResultSet( throws SQLException { super(resultSetSerializable); this.queryID = sfBaseResultSet.getQueryId(); - this.sfBaseResultSet = sfBaseResultSet; - this.resultSetMetaData = new SnowflakeResultSetMetaDataV1(sfBaseResultSet.getMetaData()); this.resultSetMetaData.setQueryIdForAsyncResults(this.queryID); this.resultSetMetaData.setQueryType(SnowflakeResultSetMetaDataV1.QueryType.ASYNC); @@ -79,7 +76,6 @@ public SFAsyncResultSet( public SFAsyncResultSet(String queryID, Statement statement) throws SQLException { super(statement); - this.sfBaseResultSet = null; queryID.trim(); if (!QueryIdValidator.isValid(queryID)) { throw new SQLException( diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index 77efb4362..7958a5b77 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -4,6 +4,9 @@ package net.snowflake.client.jdbc; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import java.io.InputStream; import java.io.Reader; import java.io.StringReader; @@ -26,14 +29,17 @@ import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; +import java.util.Arrays; import java.util.Calendar; import java.util.HashMap; +import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.TimeZone; -import java.util.function.Supplier; +import net.snowflake.client.core.JsonSqlInput; +import net.snowflake.client.core.ObjectMapperFactory; +import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; -import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import net.snowflake.client.core.structs.SQLDataCreationHelper; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.SqlState; @@ -44,6 +50,7 @@ public abstract class SnowflakeBaseResultSet implements ResultSet { private final int resultSetType; private final int resultSetConcurrency; private final int resultSetHoldability; + protected SFBaseResultSet sfBaseResultSet; // Snowflake supports sessionless result set. For this case, there is no // statement for this result set. 
protected final Statement statement; @@ -51,6 +58,7 @@ public abstract class SnowflakeBaseResultSet implements ResultSet { protected Map<String, Object> parameters = new HashMap<>(); private int fetchSize = 0; protected SFBaseSession session = null; + private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); SnowflakeBaseResultSet(Statement statement) throws SQLException { this.statement = statement; @@ -1339,25 +1347,62 @@ public void updateNClob(String columnLabel, Reader reader) throws SQLException { public <T> T getObject(int columnIndex, Class<T> type) throws SQLException { logger.debug("public <T> T getObject(int columnIndex,Class<T> type)", false); if (SQLData.class.isAssignableFrom(type)) { - Optional<Supplier<SQLData>> typeFactory = SnowflakeObjectTypeFactories.get(type); - SQLData instance = - typeFactory - .map(Supplier::get) - .orElseGet(() -> createUsingReflection((Class<SQLData>) type)); + SQLData instance = (SQLData) SQLDataCreationHelper.create(type); SQLInput sqlInput = (SQLInput) getObject(columnIndex); instance.readSQL(sqlInput, null); return (T) instance; + } else if (Map.class.isAssignableFrom(type)) { + JsonNode jsonNode = ((JsonSqlInput) getObject(columnIndex)).getInput(); + return (T) OBJECT_MAPPER.convertValue(jsonNode, new TypeReference<Map<String, Object>>() {}); } else { return (T) getObject(columnIndex); } } - private SQLData createUsingReflection(Class<? extends SQLData> type) { - try { - return type.newInstance(); - } catch (InstantiationException | IllegalAccessException e) { - throw new RuntimeException(e); + public <T> List<T> getList(int columnIndex, Class<T> type) throws SQLException { + T[] sqlInputs = getArray(columnIndex, type); + return Arrays.asList(sqlInputs); + } + + public <T> T[] getArray(int columnIndex, Class<T> type) throws SQLException { + Map<String, Object>[] jsonMaps = (Map<String, Object>[]) getArray(columnIndex).getArray(); + T[] arr = (T[]) java.lang.reflect.Array.newInstance(type, jsonMaps.length); + int counter = 0; + for (Map<String, Object> map : jsonMaps) { + SQLData instance = (SQLData) SQLDataCreationHelper.create(type); + SQLInput sqlInput = + new JsonSqlInput( + OBJECT_MAPPER.convertValue(map, JsonNode.class), + session, + sfBaseResultSet.getConverters(), + sfBaseResultSet.getMetaData().getColumnMetadata().get(columnIndex - 1).getFields()); + instance.readSQL(sqlInput, null); + arr[counter++] = (T) instance; } + + return arr; + } + + public <T> Map<String, T> getMap(int columnIndex, Class<T> type) throws SQLException { + Object object = getObject(columnIndex); + JsonNode jsonNode = ((JsonSqlInput) object).getInput(); + Map<String, Object> map = + OBJECT_MAPPER.convertValue(jsonNode, new TypeReference<Map<String, Object>>() {}); + Map<String, T> resultMap = new HashMap<>(); + + for (Map.Entry<String, Object> entry : map.entrySet()) { + SQLData instance = (SQLData) SQLDataCreationHelper.create(type); + SQLInput sqlInput = + new JsonSqlInput( + jsonNode.get(entry.getKey()), + session, + sfBaseResultSet.getConverters(), + sfBaseResultSet.getMetaData().getColumnMetadata().get(columnIndex - 1).getFields()); + instance.readSQL(sqlInput, null); + resultMap.put(entry.getKey(), (T) instance); + } + + return resultMap; } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java index 94f4bab11..4bc90bd54 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java @@ -106,10 +106,11 @@ public boolean isCaseSensitive(int column) throws SQLException { int colType = getColumnType(column); switch (colType) { - // Note: SF types 
ARRAY, GEOGRAPHY, GEOMETRY are also represented as VARCHAR. + // Note: SF types GEOGRAPHY, GEOMETRY are also represented as VARCHAR. case Types.VARCHAR: case Types.CHAR: case Types.STRUCT: + case Types.ARRAY: return true; case Types.INTEGER: diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java index 30938b0e4..02909f4ff 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java @@ -35,7 +35,6 @@ /** Snowflake ResultSet implementation */ public class SnowflakeResultSetV1 extends SnowflakeBaseResultSet implements SnowflakeResultSet, ResultSet { - private final SFBaseResultSet sfBaseResultSet; /** * Constructor takes an inputstream from the API response that we get from executing a SQL @@ -50,6 +49,7 @@ public class SnowflakeResultSetV1 extends SnowflakeBaseResultSet */ public SnowflakeResultSetV1(SFBaseResultSet sfBaseResultSet, Statement statement) throws SQLException { + super(statement); this.sfBaseResultSet = sfBaseResultSet; this.resultSetMetaData = new SnowflakeResultSetMetaDataV1(sfBaseResultSet.getMetaData()); @@ -101,7 +101,6 @@ public SnowflakeResultSetV1( SFBaseResultSet sfBaseResultSet, SnowflakeResultSetSerializableV1 resultSetSerializable) throws SQLException { super(resultSetSerializable); - this.sfBaseResultSet = sfBaseResultSet; this.resultSetMetaData = new SnowflakeResultSetMetaDataV1(sfBaseResultSet.getMetaData()); } @@ -272,6 +271,16 @@ public Object getObject(int columnIndex) throws SQLException { } } + public Array getArray(int columnIndex) throws SQLException { + raiseSQLExceptionIfResultSetIsClosed(); + try { + return sfBaseResultSet.getArray(columnIndex); + } catch (SFException ex) { + throw new SnowflakeSQLException( + ex.getCause(), ex.getSqlState(), ex.getVendorCode(), ex.getParams()); + } + } + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { raiseSQLExceptionIfResultSetIsClosed(); try { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java index dacfbbfd5..3d2d36553 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java @@ -29,6 +29,7 @@ public enum SnowflakeType { FIXED, INTEGER, OBJECT, + MAP, REAL, TEXT, TIME, diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 73d07d6c4..6f361b6f1 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -16,6 +16,7 @@ import java.io.StringWriter; import java.lang.reflect.Field; import java.sql.Time; +import java.sql.Timestamp; import java.sql.Types; import java.time.Instant; import java.time.LocalDateTime; @@ -39,6 +40,7 @@ import net.snowflake.client.core.SnowflakeJdbcInternalApi; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; +import net.snowflake.common.core.SnowflakeDateTimeFormat; import net.snowflake.common.core.SqlState; import net.snowflake.common.util.ClassUtil; import net.snowflake.common.util.FixedViewColumn; @@ -182,7 +184,7 @@ public static SnowflakeColumnMetadata extractColumnMetadata( String colSrcDatabase = colNode.path("database").asText(); String colSrcSchema = colNode.path("schema").asText(); String colSrcTable = 
colNode.path("table").asText(); - List<FieldMetadata> fieldsMetadata = getFieldMetadata(fixedColType, colNode); + List<FieldMetadata> fieldsMetadata = getFieldMetadata(jdbcTreatDecimalAsInt, colNode); boolean isAutoIncrement = colNode.path("isAutoIncrement").asBoolean(); @@ -273,10 +275,11 @@ static ColumnTypeInfo getSnowflakeType( case ARRAY: columnTypeInfo = - new ColumnTypeInfo(Types.VARCHAR, defaultIfNull(extColTypeName, "ARRAY"), baseType); + new ColumnTypeInfo(Types.ARRAY, defaultIfNull(extColTypeName, "ARRAY"), baseType); break; case OBJECT: + case MAP: int targetType = "GEOGRAPHY".equals(extColTypeName) || "GEOMETRY".equals(extColTypeName) ? Types.VARCHAR @@ -329,8 +332,8 @@ private static String defaultIfNull(String extColTypeName, String defaultValue) return Optional.ofNullable(extColTypeName).orElse(defaultValue); } - static List<FieldMetadata> createFieldsMetadata(ArrayNode fieldsJson, int fixedColType) - throws SnowflakeSQLLoggedException { + static List<FieldMetadata> createFieldsMetadata( + ArrayNode fieldsJson, boolean jdbcTreatDecimalAsInt) throws SnowflakeSQLLoggedException { List<FieldMetadata> fields = new ArrayList<>(); for (JsonNode node : fieldsJson) { String colName = node.path("name").asText(); @@ -340,7 +343,8 @@ static List createFieldsMetadata(ArrayNode fieldsJson, int fixedC boolean nullable = node.path("nullable").asBoolean(); int length = node.path("length").asInt(); boolean fixed = node.path("fixed").asBoolean(); - List<FieldMetadata> internalFields = getFieldMetadata(fixedColType, node); + int fixedColType = jdbcTreatDecimalAsInt && scale == 0 ? Types.BIGINT : Types.DECIMAL; + List<FieldMetadata> internalFields = getFieldMetadata(jdbcTreatDecimalAsInt, node); JsonNode outputType = node.path("outputType"); JsonNode extColTypeNameNode = node.path("extTypeName"); String extColTypeName = null; @@ -366,11 +370,11 @@ static List createFieldsMetadata(ArrayNode fieldsJson, int fixedC return fields; } - private static List<FieldMetadata> getFieldMetadata(int fixedColType, JsonNode node) + private static List<FieldMetadata> getFieldMetadata(boolean jdbcTreatDecimalAsInt, JsonNode node) throws SnowflakeSQLLoggedException { if (!node.path("fields").isEmpty()) { ArrayNode internalFieldsJson = (ArrayNode) node.path("fields"); - return createFieldsMetadata(internalFieldsJson, fixedColType); + return createFieldsMetadata(internalFieldsJson, jdbcTreatDecimalAsInt); } else { return new ArrayList<>(); } @@ -760,6 +764,39 @@ public static Time getTimeInSessionTimezone(Long time, int nanos) { return ts; } + /** + * Helper function to parse a timestamp string into a Timestamp, using the session timestamp + * output format that matches the given column subtype + * + * @param columnSubType column subtype value + * @param value timestamp string to convert + * @param session session object + * @return converted Timestamp object, or null if the subtype is not a timestamp type + */ + @SnowflakeJdbcInternalApi + public static Timestamp getTimestampFromType( + int columnSubType, String value, SFBaseSession session) { + if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { + return getTimestampFromFormat("TIMESTAMP_LTZ_OUTPUT_FORMAT", value, session); + } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ + || columnSubType == Types.TIMESTAMP) { + return getTimestampFromFormat("TIMESTAMP_NTZ_OUTPUT_FORMAT", value, session); + } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) { + return getTimestampFromFormat("TIMESTAMP_TZ_OUTPUT_FORMAT", value, session); + } else { + return null; + } + } + + private static Timestamp getTimestampFromFormat( + String format, String value, SFBaseSession session) { + String rawFormat = (String) session.getCommonParameters().get(format); + if (rawFormat == null || rawFormat.equals("")) { + 
rawFormat = (String) session.getCommonParameters().get("TIMESTAMP_OUTPUT_FORMAT"); + } + SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat); + return formatter.parse(value).getTimestamp(); + } + /** * Helper function to convert system properties to boolean * diff --git a/src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java b/src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java new file mode 100644 index 000000000..37e6aab3e --- /dev/null +++ b/src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +/** Functional interface used to convert json data to expected type */ +@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface JsonStringToTypeConverter { + T convert(String string) throws SFException; +} diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index fa53f4c58..1f782ec1f 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -116,13 +116,13 @@ public static void withSchema(Statement statement, String schemaName, ThrowingRu * @param action action to execute when schema was created * @throws Exception when any error occurred */ - public static void withRandomSchema(Statement statement, ThrowingConsumer action) - throws Exception { + public static void withRandomSchema( + Statement statement, ThrowingConsumer action) throws Exception { String customSchema = GENERATED_SCHEMA_PREFIX + SnowflakeUtil.randomAlphaNumeric(5).toUpperCase(); try { statement.execute("CREATE OR REPLACE SCHEMA " + customSchema); - action.call(customSchema); + action.accept(customSchema); } finally { statement.execute("DROP SCHEMA " + customSchema); } diff --git a/src/test/java/net/snowflake/client/ThrowingConsumer.java b/src/test/java/net/snowflake/client/ThrowingConsumer.java index 8b6f8c001..d5a47cd5e 100644 --- a/src/test/java/net/snowflake/client/ThrowingConsumer.java +++ b/src/test/java/net/snowflake/client/ThrowingConsumer.java @@ -1,6 +1,6 @@ package net.snowflake.client; @FunctionalInterface -public interface ThrowingConsumer { - void call(T parameter) throws Exception; +public interface ThrowingConsumer { + void accept(A parameter) throws T; } diff --git a/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java b/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java index 65163a938..c763606fe 100644 --- a/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java +++ b/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java @@ -50,6 +50,7 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryData; +import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -881,6 +882,21 @@ public ResultSet createResultSet(String queryID, Statement statement) throws SQL @Override public SnowflakeBaseResultSet createResultSet(SFBaseResultSet resultSet, Statement statement) throws SQLException { + Converters convertes = + new Converters( + null, + new SFSession(), + 0, + false, + false, + false, + false, + 
SFBinaryFormat.BASE64, + null, + null, + null, + null, + null); return new SnowflakeResultSetV1(resultSet, statement); } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java index 539784120..2535a6579 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java @@ -117,7 +117,6 @@ private void checkFeatureNotSupportedException(ResultSet resultSet) throws SQLEx expectFeatureNotSupportedException(() -> resultSet.getObject(1, Collections.emptyMap())); expectFeatureNotSupportedException(() -> resultSet.getRef(1)); expectFeatureNotSupportedException(() -> resultSet.getBlob(1)); - expectFeatureNotSupportedException(() -> resultSet.getArray(1)); expectFeatureNotSupportedException(() -> resultSet.getURL(1)); expectFeatureNotSupportedException(() -> resultSet.getRowId(1)); expectFeatureNotSupportedException(() -> resultSet.getNClob(1)); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java index 2ec486ef6..830f940cb 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java @@ -18,10 +18,14 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.util.List; +import java.util.Map; import net.snowflake.client.ConditionalIgnoreRule; import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.ThrowingConsumer; import net.snowflake.client.category.TestCategoryStructuredType; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -47,6 +51,12 @@ public Connection init() throws SQLException { return conn; } + @Before + public void clean() throws Exception { + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + } + // TODO Structured types feature exists only on QA environments @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) @@ -65,17 +75,13 @@ public void testMapStructToObjectWithReflection() throws SQLException { private void testMapJson(boolean registerFactory) throws SQLException { if (registerFactory) { SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); - } else { - SnowflakeObjectTypeFactories.unregister(SimpleClass.class); - } - try (Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("select {'string':'a'}::OBJECT(string VARCHAR)"); ) { - resultSet.next(); - SimpleClass object = resultSet.getObject(1, SimpleClass.class); - assertEquals("a", object.getString()); } + withFirstRow( + "select {'string':'a'}::OBJECT(string VARCHAR)", + (resultSet) -> { + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("a", object.getString()); + }); } // TODO Structured types feature exists only on QA environments @@ -135,12 +141,12 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); assertEquals("a", object.getString()); - 
assertEquals(1, (long) object.getB()); - assertEquals(2, (long) object.getS()); - assertEquals(3, (long) object.getI()); - assertEquals(4, (long) object.getL()); - assertEquals(1.1, (double) object.getF(), 0.01); - assertEquals(2.2, (double) object.getD(), 0.01); + assertEquals(new Byte("1"), object.getB()); + assertEquals(Short.valueOf("2"), object.getS()); + assertEquals(Integer.valueOf(3), object.getI()); + assertEquals(Long.valueOf(4), object.getL()); + assertEquals(Float.valueOf(1.1f), object.getF(), 0.01); + assertEquals(Double.valueOf(2.2), object.getD(), 0.01); assertEquals(BigDecimal.valueOf(3.3), object.getBd()); assertEquals( Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), object.getTimestampLtz()); @@ -158,19 +164,254 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { } } + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapJsonToMap() throws SQLException { + withFirstRow( + "SELECT OBJECT_CONSTRUCT('string','a','string2',1)", + (resultSet) -> { + Map map = resultSet.getObject(1, Map.class); + assertEquals("a", map.get("string")); + assertEquals(1, map.get("string2")); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnAsArrayOfSqlData() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + withFirstRow( + "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", + (resultSet) -> { + SimpleClass[] resultArray = + resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, SimpleClass.class); + assertEquals("one", resultArray[0].getString()); + assertEquals("two", resultArray[1].getString()); + assertEquals("three", resultArray[2].getString()); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnAsMap() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + withFirstRow( + "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", + (resultSet) -> { + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + assertEquals("one", map.get("x").getString()); + assertEquals("two", map.get("y").getString()); + assertEquals("three", map.get("z").getString()); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnAsList() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + withFirstRow( + "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", + (resultSet) -> { + List map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, SimpleClass.class); + assertEquals("one", map.get(0).getString()); + assertEquals("two", map.get(1).getString()); + }); + } + // TODO Structured types feature exists only on QA environments @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapStructsFromChunks() throws SQLException { + withFirstRow( + "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))", + (resultSet) -> { + while (resultSet.next()) { + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("a", object.getString()); + } + }); + } + + @Test + 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapIntegerArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(INTEGER)", + (resultSet) -> { + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf(10), resultArray[0]); + assertEquals(Long.valueOf(20), resultArray[1]); + assertEquals(Long.valueOf(30), resultArray[2]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapFixedToLongArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(SMALLINT)", + (resultSet) -> { + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf("10"), resultArray[0]); + assertEquals(Long.valueOf("20"), resultArray[1]); + assertEquals(Long.valueOf("30"), resultArray[2]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapDecimalArray() throws SQLException { + // when: jdbc_treat_decimal_as_int=true scale=0 try (Connection connection = init(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery( - "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))"); ) { - while (resultSet.next()) { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); - assertEquals("a", object.getString()); + "SELECT ARRAY_CONSTRUCT(10.2, 20.02, 30)::ARRAY(DECIMAL(20,0))"); ) { + resultSet.next(); + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(resultArray[0], Long.valueOf(10)); + assertEquals(resultArray[1], Long.valueOf(20)); + assertEquals(resultArray[2], Long.valueOf(30)); + } + + // when: jdbc_treat_decimal_as_int=true scale=2 + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT ARRAY_CONSTRUCT(10.2, 20.02, 30)::ARRAY(DECIMAL(20,2))"); ) { + resultSet.next(); + BigDecimal[] resultArray2 = (BigDecimal[]) resultSet.getArray(1).getArray(); + assertEquals(BigDecimal.valueOf(10.2), resultArray2[0]); + assertEquals(BigDecimal.valueOf(20.02), resultArray2[1]); + assertEquals(BigDecimal.valueOf(30), resultArray2[2]); + } + + // when: jdbc_treat_decimal_as_int=false scale=0 + try (Connection connection = init(); + Statement statement = connection.createStatement(); ) { + statement.execute("alter session set jdbc_treat_decimal_as_int = false"); + try (ResultSet resultSet = + statement.executeQuery("SELECT ARRAY_CONSTRUCT(10.2, 20.02, 30)::ARRAY(DECIMAL(20,0))")) { + resultSet.next(); + BigDecimal[] resultArray = (BigDecimal[]) resultSet.getArray(1).getArray(); + assertEquals(BigDecimal.valueOf(10), resultArray[0]); + assertEquals(BigDecimal.valueOf(20), resultArray[1]); + assertEquals(BigDecimal.valueOf(30), resultArray[2]); } } } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapVarcharArray() throws SQLException { + withFirstRow( + "SELECT 'text', ARRAY_CONSTRUCT('10', '20','30')::ARRAY(VARCHAR)", + (resultSet) -> { + String t = resultSet.getString(1); + String[] resultArray = (String[]) resultSet.getArray(2).getArray(); + assertEquals("10", resultArray[0]); + assertEquals("20", resultArray[1]); + assertEquals("30", resultArray[2]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = 
RunningOnGithubAction.class) + public void testMapDatesArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD'))::ARRAY(DATE)", + (resultSet) -> { + Date[] resultArray = (Date[]) resultSet.getArray(1).getArray(); + assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), resultArray[0]); + assertEquals(Date.valueOf(LocalDate.of(2023, 12, 25)), resultArray[1]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapTimeArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(to_time('15:39:20.123'), to_time('15:39:20.123'))::ARRAY(TIME)", + (resultSet) -> { + Time[] resultArray = (Time[]) resultSet.getArray(1).getArray(); + assertEquals(Time.valueOf(LocalTime.of(15, 39, 20)), resultArray[0]); + assertEquals(Time.valueOf(LocalTime.of(15, 39, 20)), resultArray[1]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapTimestampArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), TO_TIMESTAMP_NTZ('2021-12-24 09:55:55'))::ARRAY(TIMESTAMP)", + (resultSet) -> { + Timestamp[] resultArray = (Timestamp[]) resultSet.getArray(1).getArray(); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 10, 44, 44)), resultArray[0]); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 24, 10, 55, 55)), resultArray[1]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapBooleanArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(true,false)::ARRAY(BOOLEAN)", + (resultSet) -> { + Boolean[] resultArray = (Boolean[]) resultSet.getArray(1).getArray(); + assertEquals(true, resultArray[0]); + assertEquals(false, resultArray[1]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapBinaryArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(TO_BINARY('616263', 'HEX'),TO_BINARY('616263', 'HEX'))::ARRAY(BINARY)", + (resultSet) -> { + Byte[][] resultArray = (Byte[][]) resultSet.getArray(1).getArray(); + assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[0]); + assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[1]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapArrayOfStructToMap() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2} )::ARRAY(OBJECT(x VARCHAR, y INTEGER))", + (resultSet) -> { + Map[] resultArray = (Map[]) resultSet.getArray(1).getArray(); + assertEquals("{x=abc, y=1}", resultArray[0].toString()); + assertEquals("{x=def, y=2}", resultArray[1].toString()); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapArrayOfArrays() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2}) )::ARRAY(ARRAY(OBJECT(x VARCHAR, y INTEGER)))", + (resultSet) -> { + Map[][] resultArray = (Map[][]) resultSet.getArray(1).getArray(); + assertEquals("{x=abc, y=1}", resultArray[0][0].toString()); + assertEquals("{x=def, y=2}", resultArray[0][1].toString()); + }); + } + + private void withFirstRow(String sqlText, ThrowingConsumer consumer) + throws 
SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet rs = statement.executeQuery(sqlText); ) { + assertTrue(rs.next()); + consumer.accept(rs); + } + } + ; }
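Usage sketch for the APIs added by this patch: ResultSet.getArray for ARRAY columns plus the getList/getMap/getObject helpers reached through unwrap(SnowflakeBaseResultSet.class). This assumes structured type support is enabled for the session (the JSON path is gated on the JVM property referenced as STRUCTURED_TYPE_ENABLED_PROPERTY_NAME, whose value is not shown in this diff). The connection URL and credentials are placeholders, and SimpleClass below is a stand-in that mirrors the SQLData test fixture used by the tests above (the real class is defined outside this diff).

import java.sql.Array;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLData;
import java.sql.SQLException;
import java.sql.SQLInput;
import java.sql.SQLOutput;
import java.sql.Statement;
import java.util.List;
import java.util.Map;
import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories;
import net.snowflake.client.jdbc.SnowflakeBaseResultSet;

public class StructuredTypesUsageSketch {

  /** Stand-in for the SimpleClass test fixture: a single VARCHAR field named "string". */
  public static class SimpleClass implements SQLData {
    private String string;

    public String getString() {
      return string;
    }

    @Override
    public String getSQLTypeName() {
      return "OBJECT";
    }

    @Override
    public void readSQL(SQLInput stream, String typeName) throws SQLException {
      string = stream.readString();
    }

    @Override
    public void writeSQL(SQLOutput stream) throws SQLException {
      stream.writeString(string);
    }
  }

  public static void main(String[] args) throws SQLException {
    // Registering a factory lets the driver instantiate SimpleClass without reflection.
    SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new);

    // Placeholder connection settings.
    try (Connection connection =
            DriverManager.getConnection(
                "jdbc:snowflake://<account>.snowflakecomputing.com", "<user>", "<password>");
        Statement statement = connection.createStatement()) {

      // ARRAY column: getArray(int) now returns a java.sql.Array backed by Java objects.
      try (ResultSet rs =
          statement.executeQuery("SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(INTEGER)")) {
        rs.next();
        Array array = rs.getArray(1);
        Long[] values = (Long[]) array.getArray(); // fixed-point values come back as Long
        System.out.println(values[0] + values[1] + values[2]);
      }

      // ARRAY of OBJECT mapped to SQLData instances through the new getList helper.
      try (ResultSet rs =
          statement.executeQuery(
              "SELECT [{'string':'one'},{'string':'two'}]::ARRAY(OBJECT(string VARCHAR))")) {
        rs.next();
        List<SimpleClass> list =
            rs.unwrap(SnowflakeBaseResultSet.class).getList(1, SimpleClass.class);
        System.out.println(list.get(0).getString());
      }

      // MAP column mapped to a java.util.Map of SQLData values through the new getMap helper.
      try (ResultSet rs =
          statement.executeQuery(
              "SELECT {'x':{'string':'one'}}::MAP(VARCHAR, OBJECT(string VARCHAR))")) {
        rs.next();
        Map<String, SimpleClass> map =
            rs.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class);
        System.out.println(map.get("x").getString());
      }
    }
  }
}

Note that the Arrow result set path in this patch still throws FEATURE_UNSUPPORTED from getArray (tracked as SNOW-969794), so the sketch assumes results are returned in the JSON format.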