diff --git a/src/main/java/net/snowflake/client/core/ConvertersFactory.java b/src/main/java/net/snowflake/client/core/ConvertersFactory.java deleted file mode 100644 index 542ee8d4c..000000000 --- a/src/main/java/net/snowflake/client/core/ConvertersFactory.java +++ /dev/null @@ -1,25 +0,0 @@ -package net.snowflake.client.core; - -import net.snowflake.client.core.json.Converters; -import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; - -@SnowflakeJdbcInternalApi -public class ConvertersFactory { - public static Converters createJsonConverters( - SFBaseSession session, SnowflakeResultSetSerializableV1 resultSetSerializable) { - return new Converters( - resultSetSerializable.getTimeZone(), - session, - resultSetSerializable.getResultVersion(), - resultSetSerializable.isHonorClientTZForTimestampNTZ(), - resultSetSerializable.getTreatNTZAsUTC(), - resultSetSerializable.getUseSessionTimezone(), - resultSetSerializable.getFormatDateWithTimeZone(), - resultSetSerializable.getBinaryFormatter(), - resultSetSerializable.getDateFormatter(), - resultSetSerializable.getTimeFormatter(), - resultSetSerializable.getTimestampNTZFormatter(), - resultSetSerializable.getTimestampLTZFormatter(), - resultSetSerializable.getTimestampTZFormatter()); - } -} diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index 4cff78198..94b640b9a 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -511,15 +511,8 @@ public Object getObject(int columnIndex) throws SFException { @Override public Array getArray(int columnIndex) throws SFException { - ArrowVectorConverter converter = currentChunkIterator.getCurrentConverter(columnIndex - 1); - int index = currentChunkIterator.getCurrentRowInRecordBatch(); - wasNull = converter.isNull(index); - converter.setTreatNTZAsUTC(treatNTZAsUTC); - converter.setUseSessionTimezone(useSessionTimezone); - converter.setSessionTimeZone(timeZone); - Object obj = converter.toObject(index); // TODO: handleArray SNOW-969794 - return null; + throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type ARRAY"); } private Object handleObjectType(int columnIndex, Object obj) throws SFException { diff --git a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java index f31ffcbc0..4bbc9bf6c 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java @@ -64,7 +64,6 @@ public abstract class SFBaseResultSet { // The serializable object which can serialize the metadata for this // result set protected SnowflakeResultSetSerializableV1 resultSetSerializable; - protected Converters converters; public abstract boolean isLast(); @@ -199,7 +198,9 @@ public List getResultSetSerializables(long maxSi return this.resultSetSerializable.splitBySize(maxSizeInBytes); } + @SnowflakeJdbcInternalApi public Converters getConverters() { - return converters; + logger.debug("Json converters weren't created"); + return null; } } diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 9467111e9..21cec8a0f 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -4,8 +4,6 @@ package net.snowflake.client.core; -import static 
net.snowflake.client.jdbc.SnowflakeUtil.getTimestampFromType; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -16,9 +14,6 @@ import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; -import java.time.Instant; -import java.time.ZoneOffset; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Spliterator; @@ -33,9 +28,7 @@ import net.snowflake.client.jdbc.SnowflakeColumnMetadata; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; -import net.snowflake.client.util.TypeConverter; -import net.snowflake.common.core.SFTimestamp; -import net.snowflake.common.core.SnowflakeDateTimeFormat; +import net.snowflake.client.util.JsonStringToTypeConverter; /** Abstract class used to represent snowflake result set in json format */ public abstract class SFJsonResultSet extends SFBaseResultSet { @@ -102,13 +95,13 @@ public Object getObject(int columnIndex) throws SFException { return getBoolean(columnIndex); case Types.STRUCT: - if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { + if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { return getSqlInput((String) obj, columnIndex); } else { throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } case Types.ARRAY: - if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { + if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { return getArray(columnIndex); } else { throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); @@ -122,7 +115,7 @@ public Object getObject(int columnIndex) throws SFException { @Override public Array getArray(int columnIndex) throws SFException { Object obj = getObjectInternal(columnIndex); - return getArrayInternal((String) obj); + return getArrayInternal((String) obj, columnIndex); } @Override @@ -257,6 +250,8 @@ private Timestamp getTimestamp(int columnIndex) throws SFException { return getTimestamp(columnIndex, TimeZone.getDefault()); } + @Override + @SnowflakeJdbcInternalApi public Converters getConverters() { return converters; } @@ -274,134 +269,116 @@ private Object getSqlInput(String input, int columnIndex) throws SFException { } } - private SfSqlArray getArrayInternal(String obj) throws SFException { + private SfSqlArray getArrayInternal(String obj, int columnIndex) throws SFException { try { - SnowflakeColumnMetadata arrayMetadata = resultSetMetaData.getColumnMetadata().get(0); - FieldMetadata fieldMetadata = arrayMetadata.getField(1); + SnowflakeColumnMetadata arrayMetadata = + resultSetMetaData.getColumnMetadata().get(columnIndex - 1); + FieldMetadata fieldMetadata = arrayMetadata.getFields().get(0); int columnSubType = fieldMetadata.getType(); int columnType = ColumnTypeHelper.getColumnType(columnSubType, session); int scale = fieldMetadata.getScale(); ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj); - - Iterator nodeElements = arrayNode.elements(); + Iterator nodeElements = arrayNode.elements(); switch (columnSubType) { case Types.INTEGER: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.integerConverter(columnType)) + .toArray(Integer[]::new)); case Types.SMALLINT: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.smallIntConverter(columnType)) + .toArray(Short[]::new)); case 
Types.TINYINT: - TypeConverter integerConverter = - value -> converters.getNumberConverter().getInt(value, Types.INTEGER); return new SfSqlArray( - columnSubType, getStream(nodeElements, integerConverter).toArray(Integer[]::new)); + columnSubType, + getStream(nodeElements, converters.tinyIntConverter(columnType)) + .toArray(Byte[]::new)); case Types.BIGINT: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.bigIntConverter(columnType)).toArray(Long[]::new)); case Types.DECIMAL: case Types.NUMERIC: - TypeConverter bigIntConverter = - value -> converters.getNumberConverter().getBigInt(value, Types.BIGINT); return new SfSqlArray( - columnSubType, convertToNumericArray(nodeElements, bigIntConverter)); + columnSubType, + convertToFixedArray(nodeElements, converters.bigDecimalConverter(columnType))); case Types.CHAR: case Types.VARCHAR: case Types.LONGNVARCHAR: - TypeConverter varcharConverter = value -> value.toString(); return new SfSqlArray( - columnSubType, getStream(nodeElements, varcharConverter).toArray(String[]::new)); + columnSubType, + getStream(nodeElements, converters.varcharConverter(columnType, columnSubType, scale)) + .toArray(String[]::new)); case Types.BINARY: - TypeConverter bytesConverter = - value -> - converters.getBytesConverter().getBytes(value, columnType, Types.BINARY, scale); return new SfSqlArray( - columnSubType, getStream(nodeElements, bytesConverter).toArray(Object[]::new)); + columnSubType, + getStream(nodeElements, converters.bytesConverter(columnType, scale)) + .toArray(Byte[][]::new)); case Types.FLOAT: + case Types.REAL: + return new SfSqlArray( + columnSubType, + getStream(nodeElements, converters.floatConverter(columnType)).toArray(Float[]::new)); case Types.DOUBLE: - TypeConverter doubleConverter = - value -> converters.getNumberConverter().getDouble(value, Types.DOUBLE); return new SfSqlArray( - columnSubType, getStream(nodeElements, doubleConverter).toArray(Double[]::new)); + columnSubType, + getStream(nodeElements, converters.doubleConverter(columnType)) + .toArray(Double[]::new)); case Types.DATE: - TypeConverter dateConverter = - value -> { - SnowflakeDateTimeFormat formatter = - SnowflakeDateTimeFormat.fromSqlFormat( - (String) session.getCommonParameters().get("DATE_OUTPUT_FORMAT")); - SFTimestamp timestamp = formatter.parse((String) value); - return Date.valueOf( - Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalDate()); - }; return new SfSqlArray( - columnSubType, getStream(nodeElements, dateConverter).toArray(Date[]::new)); + columnSubType, + getStream(nodeElements, converters.dateConverter(session)).toArray(Date[]::new)); case Types.TIME: - TypeConverter timeConverter = - value -> { - SnowflakeDateTimeFormat formatter = - SnowflakeDateTimeFormat.fromSqlFormat( - (String) session.getCommonParameters().get("TIME_OUTPUT_FORMAT")); - SFTimestamp timestamp = formatter.parse((String) value); - return Time.valueOf( - Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalTime()); - }; return new SfSqlArray( - columnSubType, getStream(nodeElements, timeConverter).toArray(Time[]::new)); + columnSubType, + getStream(nodeElements, converters.timeConverter(session)).toArray(Time[]::new)); case Types.TIMESTAMP: - TypeConverter timestampConverter = - value -> { - Timestamp result = getTimestampFromType(columnSubType, (String) value, session); - if (result != null) { - return result; - } - return converters - .getDateTimeConverter() - .getTimestamp(value, columnType, columnSubType, null, 
scale); - }; return new SfSqlArray( - columnSubType, getStream(nodeElements, timestampConverter).toArray(Timestamp[]::new)); + columnSubType, + getStream( + nodeElements, + converters.timestampConverter(columnSubType, columnType, scale, session)) + .toArray(Timestamp[]::new)); case Types.BOOLEAN: - TypeConverter booleanConverter = - value -> converters.getBooleanConverter().getBoolean(value, columnType); return new SfSqlArray( - columnSubType, getStream(nodeElements, booleanConverter).toArray(Boolean[]::new)); + columnSubType, + getStream(nodeElements, converters.booleanConverter(columnType)) + .toArray(Boolean[]::new)); case Types.STRUCT: - TypeConverter structConverter = - value -> { - try { - return OBJECT_MAPPER.readValue(value, Map.class); - } catch (JsonProcessingException e) { - throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); - } - }; return new SfSqlArray( - columnSubType, getStream(nodeElements, structConverter).toArray(Map[]::new)); + columnSubType, + getStream(nodeElements, converters.structConverter(OBJECT_MAPPER)) + .toArray(Map[]::new)); case Types.ARRAY: - TypeConverter arrayConverter = - value -> { - try { - return OBJECT_MAPPER.readValue(value, HashMap[].class); - } catch (JsonProcessingException e) { - throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); - } - }; return new SfSqlArray( - columnSubType, getStream(nodeElements, arrayConverter).toArray(Map[][]::new)); + columnSubType, + getStream(nodeElements, converters.arrayConverter(OBJECT_MAPPER)) + .toArray(Map[][]::new)); default: - return null; + throw new SFException( + ErrorCode.FEATURE_UNSUPPORTED, + "Can't construct array for data type: " + columnSubType); } } catch (JsonProcessingException e) { throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); } } - private Object[] convertToNumericArray(Iterator nodeElements, TypeConverter bigIntConverter) { + private Object[] convertToFixedArray( + Iterator nodeElements, JsonStringToTypeConverter bigIntConverter) { AtomicInteger bigDecimalCount = new AtomicInteger(); Object[] elements = getStream(nodeElements, bigIntConverter) - .map( + .peek( elem -> { if (elem instanceof BigDecimal) { bigDecimalCount.incrementAndGet(); } - return elem; }) .toArray( size -> { @@ -413,7 +390,7 @@ private Object[] convertToNumericArray(Iterator nodeElements, TypeConverter bigI return elements; } - private Stream getStream(Iterator nodeElements, TypeConverter converter) { + private Stream getStream(Iterator nodeElements, JsonStringToTypeConverter converter) { return StreamSupport.stream( Spliterators.spliteratorUnknownSize(nodeElements, Spliterator.ORDERED), false) .map( @@ -426,8 +403,8 @@ private Stream getStream(Iterator nodeElements, TypeConverter converter) { }); } - private static Object convert(TypeConverter converter, JsonNode elem) throws SFException { - JsonNode node = elem; + private static Object convert(JsonStringToTypeConverter converter, JsonNode node) + throws SFException { if (node.isValueNode()) { return converter.convert(node.asText()); } else { diff --git a/src/main/java/net/snowflake/client/core/SFResultSet.java b/src/main/java/net/snowflake/client/core/SFResultSet.java index b1da1bf51..b7698cf5d 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFResultSet.java @@ -12,6 +12,7 @@ import java.util.Arrays; import java.util.Comparator; import net.snowflake.client.core.BasicEvent.QueryState; +import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ErrorCode; import 
net.snowflake.client.jdbc.JsonResultChunk; import net.snowflake.client.jdbc.SnowflakeResultChunk; @@ -154,9 +155,7 @@ public SFResultSet( Telemetry telemetryClient, boolean sortResult) throws SQLException { - super( - resultSetSerializable.getTimeZone(), - ConvertersFactory.createJsonConverters(session, resultSetSerializable)); + super(resultSetSerializable.getTimeZone(), new Converters(session, resultSetSerializable)); this.resultSetSerializable = resultSetSerializable; this.columnCount = 0; this.sortResult = sortResult; diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index cf7b8273a..ce129788b 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -473,6 +473,7 @@ public List getIsAutoIncrementList() { return isAutoIncrementList; } + @SnowflakeJdbcInternalApi public List getColumnMetadata() { return columnMetadata; } diff --git a/src/main/java/net/snowflake/client/core/SfSqlArray.java b/src/main/java/net/snowflake/client/core/SfSqlArray.java index 7a91b0ca4..83270796a 100644 --- a/src/main/java/net/snowflake/client/core/SfSqlArray.java +++ b/src/main/java/net/snowflake/client/core/SfSqlArray.java @@ -8,24 +8,24 @@ import java.util.Map; @SnowflakeJdbcInternalApi -public class SfSqlArray implements Array { +public class SfSqlArray implements Array { - private int base; + private int baseType; private Object elements; - public SfSqlArray(int base, Object elements) { - this.base = base; + public SfSqlArray(int baseType, Object elements) { + this.baseType = baseType; this.elements = elements; } @Override public String getBaseTypeName() throws SQLException { - return JDBCType.valueOf(base).getName(); + return JDBCType.valueOf(baseType).getName(); } @Override public int getBaseType() throws SQLException { - return base; + return baseType; } @Override diff --git a/src/main/java/net/snowflake/client/core/json/Converters.java b/src/main/java/net/snowflake/client/core/json/Converters.java index fa3baadb6..c94361f51 100644 --- a/src/main/java/net/snowflake/client/core/json/Converters.java +++ b/src/main/java/net/snowflake/client/core/json/Converters.java @@ -1,8 +1,26 @@ package net.snowflake.client.core.json; +import static net.snowflake.client.jdbc.SnowflakeUtil.getTimestampFromType; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Instant; +import java.time.ZoneOffset; +import java.util.Arrays; +import java.util.Map; import java.util.TimeZone; import net.snowflake.client.core.SFBaseSession; +import net.snowflake.client.core.SFException; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.ErrorCode; +import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; +import net.snowflake.client.util.JsonStringToTypeConverter; import net.snowflake.common.core.SFBinaryFormat; +import net.snowflake.common.core.SFTimestamp; import net.snowflake.common.core.SnowflakeDateTimeFormat; public class Converters { @@ -52,6 +70,24 @@ public Converters( this); } + @SnowflakeJdbcInternalApi + public Converters(SFBaseSession session, SnowflakeResultSetSerializableV1 resultSetSerializable) { + this( + resultSetSerializable.getTimeZone(), + session, + resultSetSerializable.getResultVersion(), + 
resultSetSerializable.isHonorClientTZForTimestampNTZ(), + resultSetSerializable.getTreatNTZAsUTC(), + resultSetSerializable.getUseSessionTimezone(), + resultSetSerializable.getFormatDateWithTimeZone(), + resultSetSerializable.getBinaryFormatter(), + resultSetSerializable.getDateFormatter(), + resultSetSerializable.getTimeFormatter(), + resultSetSerializable.getTimestampNTZFormatter(), + resultSetSerializable.getTimestampLTZFormatter(), + resultSetSerializable.getTimestampTZFormatter()); + } + public BooleanConverter getBooleanConverter() { return booleanConverter; } @@ -71,4 +107,123 @@ public BytesConverter getBytesConverter() { public StringConverter getStringConverter() { return stringConverter; } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter integerConverter(int columnType) { + return value -> getNumberConverter().getInt(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter smallIntConverter(int columnType) { + return value -> getNumberConverter().getShort(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter tinyIntConverter(int columnType) { + return value -> getNumberConverter().getByte(value); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter bigIntConverter(int columnType) { + return value -> getNumberConverter().getBigInt(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter longConverter(int columnType) { + return value -> getNumberConverter().getLong(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter bigDecimalConverter(int columnType) { + return value -> getNumberConverter().getBigDecimal(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter floatConverter(int columnType) { + return value -> getNumberConverter().getBigDecimal(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter doubleConverter(int columnType) { + return value -> getNumberConverter().getBigDecimal(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter bytesConverter(int columnType, int scale) { + return value -> { + byte[] primitiveArray = getBytesConverter().getBytes(value, columnType, Types.BINARY, scale); + Byte[] newByteArray = new Byte[primitiveArray.length]; + Arrays.setAll(newByteArray, n -> primitiveArray[n]); + return newByteArray; + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter varcharConverter(int columnType, int columnSubType, int scale) { + return value -> getStringConverter().getString(value, columnType, columnSubType, scale); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter booleanConverter(int columnType) { + return value -> getBooleanConverter().getBoolean(value, columnType); + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter dateConverter(SFBaseSession session) { + return value -> { + SnowflakeDateTimeFormat formatter = + SnowflakeDateTimeFormat.fromSqlFormat( + (String) session.getCommonParameters().get("DATE_OUTPUT_FORMAT")); + SFTimestamp timestamp = formatter.parse(value); + return Date.valueOf( + Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalDate()); + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter timeConverter(SFBaseSession session) { + return value -> { + SnowflakeDateTimeFormat formatter = + SnowflakeDateTimeFormat.fromSqlFormat( + (String) 
session.getCommonParameters().get("TIME_OUTPUT_FORMAT")); + SFTimestamp timestamp = formatter.parse((String) value); + return Time.valueOf( + Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalTime()); + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter timestampConverter( + int columnSubType, int columnType, int scale, SFBaseSession session) { + return value -> { + Timestamp result = getTimestampFromType(columnSubType, (String) value, session); + if (result != null) { + return result; + } + return getDateTimeConverter() + .getTimestamp(value, columnType, columnSubType, TimeZone.getDefault(), scale); + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter structConverter(ObjectMapper objectMapper) { + return value -> { + try { + return objectMapper.readValue(value, Map.class); + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + }; + } + + @SnowflakeJdbcInternalApi + public JsonStringToTypeConverter arrayConverter(ObjectMapper objectMapper) { + return value -> { + try { + return objectMapper.readValue(value, Map[].class); + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + }; + } } diff --git a/src/main/java/net/snowflake/client/core/json/NumberConverter.java b/src/main/java/net/snowflake/client/core/json/NumberConverter.java index 132003359..18f6e96b2 100644 --- a/src/main/java/net/snowflake/client/core/json/NumberConverter.java +++ b/src/main/java/net/snowflake/client/core/json/NumberConverter.java @@ -115,7 +115,7 @@ public BigDecimal getBigDecimal(Object obj, int columnType, Integer scale) throw if (obj == null) { return null; } - BigDecimal value = new BigDecimal(obj.toString()); + BigDecimal value = getBigDecimal(obj.toString(), columnType); value = value.setScale(scale, RoundingMode.HALF_UP); return value; } diff --git a/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java b/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java index 5cfdb5ca2..fd3257e79 100644 --- a/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java +++ b/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java @@ -13,11 +13,10 @@ public class SQLDataCreationHelper { public static T create(Class type) throws SQLException { Optional> typeFactory = SnowflakeObjectTypeFactories.get(type); - SQLData instance = + return (T) typeFactory .map(Supplier::get) .orElseGet(() -> createUsingReflection((Class) type)); - return (T) instance; } private static SQLData createUsingReflection(Class type) { diff --git a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java index e35c3c4cb..c50bf4900 100644 --- a/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SFAsyncResultSet.java @@ -17,7 +17,6 @@ import java.sql.Timestamp; import java.util.List; import java.util.TimeZone; -import net.snowflake.client.core.ConvertersFactory; import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; @@ -73,7 +72,6 @@ public SFAsyncResultSet( this.resultSetMetaData = new SnowflakeResultSetMetaDataV1(sfBaseResultSet.getMetaData()); this.resultSetMetaData.setQueryIdForAsyncResults(this.queryID); this.resultSetMetaData.setQueryType(SnowflakeResultSetMetaDataV1.QueryType.ASYNC); - this.converters 
= ConvertersFactory.createJsonConverters(session, resultSetSerializable); } public SFAsyncResultSet(String queryID, Statement statement) throws SQLException { diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index 407eec4c4..7958a5b77 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -29,23 +29,17 @@ import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; -import java.util.AbstractMap; import java.util.Arrays; import java.util.Calendar; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.TimeZone; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; -import java.util.stream.Collectors; import net.snowflake.client.core.JsonSqlInput; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFBaseSession; -import net.snowflake.client.core.json.Converters; -import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import net.snowflake.client.core.structs.SQLDataCreationHelper; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.SqlState; @@ -57,7 +51,6 @@ public abstract class SnowflakeBaseResultSet implements ResultSet { private final int resultSetConcurrency; private final int resultSetHoldability; protected SFBaseResultSet sfBaseResultSet; - protected Converters converters; // Snowflake supports sessionless result set. For this case, there is no // statement for this result set. 
protected final Statement statement; @@ -1354,11 +1347,7 @@ public void updateNClob(String columnLabel, Reader reader) throws SQLException { public T getObject(int columnIndex, Class type) throws SQLException { logger.debug("public T getObject(int columnIndex,Class type)", false); if (SQLData.class.isAssignableFrom(type)) { - Optional> typeFactory = SnowflakeObjectTypeFactories.get(type); - SQLData instance = - typeFactory - .map(Supplier::get) - .orElseGet(() -> createUsingReflection((Class) type)); + SQLData instance = (SQLData) SQLDataCreationHelper.create(type); SQLInput sqlInput = (SQLInput) getObject(columnIndex); instance.readSQL(sqlInput, null); return (T) instance; @@ -1376,80 +1365,44 @@ public List getList(int columnIndex, Class type) throws SQLException { } public T[] getArray(int columnIndex, Class type) throws SQLException { - Optional> typeFactory = SnowflakeObjectTypeFactories.get(type); - Map[] sqlInputs = (Map[]) getArray(columnIndex).getArray(); - T[] arr = (T[]) java.lang.reflect.Array.newInstance(type, sqlInputs.length); - AtomicInteger counter = new AtomicInteger(0); - Arrays.stream(sqlInputs) - .map( - map -> - new JsonSqlInput( - OBJECT_MAPPER.convertValue(map, JsonNode.class), - session, - converters, - sfBaseResultSet - .getMetaData() - .getColumnMetadata() - .get(columnIndex - 1) - .getFields())) - .forEach( - i -> { - SQLData instance = - typeFactory - .map(Supplier::get) - .orElseGet(() -> createUsingReflection((Class) type)); - try { - instance.readSQL(i, null); - } catch (SQLException e) { - throw new RuntimeException(e); - } - arr[counter.getAndIncrement()] = (T) instance; - }); + Map[] jsonMaps = (Map[]) getArray(columnIndex).getArray(); + T[] arr = (T[]) java.lang.reflect.Array.newInstance(type, jsonMaps.length); + int counter = 0; + for (Map map : jsonMaps) { + SQLData instance = (SQLData) SQLDataCreationHelper.create(type); + SQLInput sqlInput = + new JsonSqlInput( + OBJECT_MAPPER.convertValue(map, JsonNode.class), + session, + sfBaseResultSet.getConverters(), + sfBaseResultSet.getMetaData().getColumnMetadata().get(columnIndex - 1).getFields()); + instance.readSQL(sqlInput, null); + arr[counter++] = (T) instance; + } + return arr; } public Map getMap(int columnIndex, Class type) throws SQLException { - Optional> typeFactory = SnowflakeObjectTypeFactories.get(type); - // TODO: structuredType how to get raw json object not as SqlInput Object object = getObject(columnIndex); JsonNode jsonNode = ((JsonSqlInput) object).getInput(); Map map = OBJECT_MAPPER.convertValue(jsonNode, new TypeReference>() {}); - Map collect = - map.entrySet().stream() - .map( - e -> { - SQLData instance = - typeFactory - .map(Supplier::get) - .orElseGet(() -> createUsingReflection((Class) type)); - try { - SQLInput sqlInput = - new JsonSqlInput( - jsonNode.get(e.getKey()), - session, - converters, - sfBaseResultSet - .getMetaData() - .getColumnMetadata() - .get(columnIndex - 1) - .getFields()); // TODO structuredType - instance.readSQL(sqlInput, null); - } catch (SQLException ex) { - throw new RuntimeException(ex); - } - return new AbstractMap.SimpleEntry<>(e.getKey(), (T) instance); - }) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - return collect; - } - - private SQLData createUsingReflection(Class type) { - try { - return type.newInstance(); - } catch (InstantiationException | IllegalAccessException e) { - throw new RuntimeException(e); + Map resultMap = new HashMap<>(); + + for (Map.Entry entry : map.entrySet()) { + SQLData instance = (SQLData) 
SQLDataCreationHelper.create(type); + SQLInput sqlInput = + new JsonSqlInput( + jsonNode.get(entry.getKey()), + session, + sfBaseResultSet.getConverters(), + sfBaseResultSet.getMetaData().getColumnMetadata().get(columnIndex - 1).getFields()); + instance.readSQL(sqlInput, null); + resultMap.put(entry.getKey(), (T) instance); } + + return resultMap; } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java index 58c359960..9f182772e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java @@ -212,8 +212,4 @@ public String toString() { return sBuilder.toString(); } - - public FieldMetadata getField(int index) { - return fields.get(index - 1); - } } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java index 164bef81b..02909f4ff 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetV1.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; -import net.snowflake.client.core.ConvertersFactory; import net.snowflake.client.core.QueryStatus; import net.snowflake.client.core.SFBaseResultSet; import net.snowflake.client.core.SFException; @@ -54,7 +53,6 @@ public SnowflakeResultSetV1(SFBaseResultSet sfBaseResultSet, Statement statement super(statement); this.sfBaseResultSet = sfBaseResultSet; this.resultSetMetaData = new SnowflakeResultSetMetaDataV1(sfBaseResultSet.getMetaData()); - this.converters = sfBaseResultSet.getConverters(); } /** @@ -104,7 +102,6 @@ public SnowflakeResultSetV1( throws SQLException { super(resultSetSerializable); this.sfBaseResultSet = sfBaseResultSet; - this.converters = ConvertersFactory.createJsonConverters(session, resultSetSerializable); this.resultSetMetaData = new SnowflakeResultSetMetaDataV1(sfBaseResultSet.getMetaData()); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 5af355bf8..6f361b6f1 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -184,7 +184,7 @@ public static SnowflakeColumnMetadata extractColumnMetadata( String colSrcDatabase = colNode.path("database").asText(); String colSrcSchema = colNode.path("schema").asText(); String colSrcTable = colNode.path("table").asText(); - List fieldsMetadata = getFieldMetadata(fixedColType, colNode); + List fieldsMetadata = getFieldMetadata(jdbcTreatDecimalAsInt, colNode); boolean isAutoIncrement = colNode.path("isAutoIncrement").asBoolean(); @@ -332,8 +332,8 @@ private static String defaultIfNull(String extColTypeName, String defaultValue) return Optional.ofNullable(extColTypeName).orElse(defaultValue); } - static List createFieldsMetadata(ArrayNode fieldsJson, int fixedColType) - throws SnowflakeSQLLoggedException { + static List createFieldsMetadata( + ArrayNode fieldsJson, boolean jdbcTreatDecimalAsInt) throws SnowflakeSQLLoggedException { List fields = new ArrayList<>(); for (JsonNode node : fieldsJson) { String colName = node.path("name").asText(); @@ -343,7 +343,8 @@ static List createFieldsMetadata(ArrayNode fieldsJson, int fixedC boolean nullable = node.path("nullable").asBoolean(); int length = 
node.path("length").asInt(); boolean fixed = node.path("fixed").asBoolean(); - List internalFields = getFieldMetadata(fixedColType, node); + int fixedColType = jdbcTreatDecimalAsInt && scale == 0 ? Types.BIGINT : Types.DECIMAL; + List internalFields = getFieldMetadata(jdbcTreatDecimalAsInt, node); JsonNode outputType = node.path("outputType"); JsonNode extColTypeNameNode = node.path("extTypeName"); String extColTypeName = null; @@ -369,11 +370,11 @@ static List createFieldsMetadata(ArrayNode fieldsJson, int fixedC return fields; } - private static List getFieldMetadata(int fixedColType, JsonNode node) + private static List getFieldMetadata(boolean jdbcTreatDecimalAsInt, JsonNode node) throws SnowflakeSQLLoggedException { if (!node.path("fields").isEmpty()) { ArrayNode internalFieldsJson = (ArrayNode) node.path("fields"); - return createFieldsMetadata(internalFieldsJson, fixedColType); + return createFieldsMetadata(internalFieldsJson, jdbcTreatDecimalAsInt); } else { return new ArrayList<>(); } @@ -763,8 +764,17 @@ public static Time getTimeInSessionTimezone(Long time, int nanos) { return ts; } - @SnowflakeJdbcInternalApi - public static Timestamp getTimestampFromType(int columnSubType, String value, SFBaseSession session) { + /** + * Helper function to convert system properties to boolean + * + * @param columnSubType column subtype value + * @param value value to convert + * @param session session object + * @return converted Timestamp object + */ + @SnowflakeJdbcInternalApi + public static Timestamp getTimestampFromType( + int columnSubType, String value, SFBaseSession session) { if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { return getTimestampFromFormat("TIMESTAMP_LTZ_OUTPUT_FORMAT", value, session); } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ diff --git a/src/main/java/net/snowflake/client/util/TypeConverter.java b/src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java similarity index 61% rename from src/main/java/net/snowflake/client/util/TypeConverter.java rename to src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java index ae1c71a23..37e6aab3e 100644 --- a/src/main/java/net/snowflake/client/util/TypeConverter.java +++ b/src/main/java/net/snowflake/client/util/JsonStringToTypeConverter.java @@ -6,8 +6,9 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.core.SnowflakeJdbcInternalApi; +/** Functional interface used to convert json data to expected type */ @SnowflakeJdbcInternalApi @FunctionalInterface -public interface TypeConverter { - Object convert(String string) throws SFException; +public interface JsonStringToTypeConverter { + T convert(String string) throws SFException; } diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index fa53f4c58..1f782ec1f 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -116,13 +116,13 @@ public static void withSchema(Statement statement, String schemaName, ThrowingRu * @param action action to execute when schema was created * @throws Exception when any error occurred */ - public static void withRandomSchema(Statement statement, ThrowingConsumer action) - throws Exception { + public static void withRandomSchema( + Statement statement, ThrowingConsumer action) throws Exception { String customSchema = GENERATED_SCHEMA_PREFIX + SnowflakeUtil.randomAlphaNumeric(5).toUpperCase(); try { statement.execute("CREATE OR REPLACE 
SCHEMA " + customSchema); - action.call(customSchema); + action.accept(customSchema); } finally { statement.execute("DROP SCHEMA " + customSchema); } diff --git a/src/test/java/net/snowflake/client/ThrowingConsumer.java b/src/test/java/net/snowflake/client/ThrowingConsumer.java index 8b6f8c001..d5a47cd5e 100644 --- a/src/test/java/net/snowflake/client/ThrowingConsumer.java +++ b/src/test/java/net/snowflake/client/ThrowingConsumer.java @@ -1,6 +1,6 @@ package net.snowflake.client; @FunctionalInterface -public interface ThrowingConsumer { - void call(T parameter) throws Exception; +public interface ThrowingConsumer { + void accept(A parameter) throws T; } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java index 9a28eea75..830f940cb 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java @@ -22,8 +22,10 @@ import java.util.Map; import net.snowflake.client.ConditionalIgnoreRule; import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.ThrowingConsumer; import net.snowflake.client.category.TestCategoryStructuredType; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -49,6 +51,12 @@ public Connection init() throws SQLException { return conn; } + @Before + public void clean() throws Exception { + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + } + // TODO Structured types feature exists only on QA environments @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) @@ -67,17 +75,13 @@ public void testMapStructToObjectWithReflection() throws SQLException { private void testMapJson(boolean registerFactory) throws SQLException { if (registerFactory) { SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); - } else { - SnowflakeObjectTypeFactories.unregister(SimpleClass.class); - } - try (Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("select {'string':'a'}::OBJECT(string VARCHAR)"); ) { - resultSet.next(); - SimpleClass object = resultSet.getObject(1, SimpleClass.class); - assertEquals("a", object.getString()); } + withFirstRow( + "select {'string':'a'}::OBJECT(string VARCHAR)", + (resultSet) -> { + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("a", object.getString()); + }); } // TODO Structured types feature exists only on QA environments @@ -137,12 +141,12 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { resultSet.next(); AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); assertEquals("a", object.getString()); - assertEquals(1, (long) object.getB()); - assertEquals(2, (long) object.getS()); - assertEquals(3, (long) object.getI()); - assertEquals(4, (long) object.getL()); - assertEquals(1.1, (double) object.getF(), 0.01); - assertEquals(2.2, (double) object.getD(), 0.01); + assertEquals(new Byte("1"), object.getB()); + assertEquals(Short.valueOf("2"), object.getS()); + assertEquals(Integer.valueOf(3), object.getI()); + assertEquals(Long.valueOf(4), object.getL()); + assertEquals(Float.valueOf(1.1f), object.getF(), 0.01); + 
assertEquals(Double.valueOf(2.2), object.getD(), 0.01); assertEquals(BigDecimal.valueOf(3.3), object.getBd()); assertEquals( Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), object.getTimestampLtz()); @@ -163,212 +167,251 @@ private void testMapAllTypes(boolean registerFactory) throws SQLException { @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapJsonToMap() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("SELECT OBJECT_CONSTRUCT('string','a','string2','s')"); - resultSet.next(); - Map map = resultSet.getObject(1, Map.class); - assertEquals("a", map.get("string")); - assertEquals("s", map.get("string2")); - statement.close(); - connection.close(); + withFirstRow( + "SELECT OBJECT_CONSTRUCT('string','a','string2',1)", + (resultSet) -> { + Map map = resultSet.getObject(1, Map.class); + assertEquals("a", map.get("string")); + assertEquals(1, map.get("string2")); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testReturnAsArrayOfSqlData() throws SQLException { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + withFirstRow( + "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", + (resultSet) -> { + SimpleClass[] resultArray = + resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, SimpleClass.class); + assertEquals("one", resultArray[0].getString()); + assertEquals("two", resultArray[1].getString()); + assertEquals("three", resultArray[2].getString()); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsMap() throws SQLException { SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));"); - resultSet.next(); - Map map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); - assertEquals(map.get("x").getString(), "one"); - assertEquals(map.get("y").getString(), "two"); - assertEquals(map.get("z").getString(), "three"); - statement.close(); - connection.close(); + withFirstRow( + "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", + (resultSet) -> { + Map map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, SimpleClass.class); + assertEquals("one", map.get("x").getString()); + assertEquals("two", map.get("y").getString()); + assertEquals("three", map.get("z").getString()); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testReturnAsList() throws SQLException { SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))"); - resultSet.next(); - List map = - resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, SimpleClass.class); - assertEquals(map.get(0).getString(), "one"); - assertEquals(map.get(1).getString(), "two"); - 
statement.close(); - connection.close(); + withFirstRow( + "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", + (resultSet) -> { + List map = + resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, SimpleClass.class); + assertEquals("one", map.get(0).getString()); + assertEquals("two", map.get(1).getString()); + }); } // TODO Structured types feature exists only on QA environments @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapStructsFromChunks() throws SQLException { + withFirstRow( + "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))", + (resultSet) -> { + while (resultSet.next()) { + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("a", object.getString()); + } + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapIntegerArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(INTEGER)", + (resultSet) -> { + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf(10), resultArray[0]); + assertEquals(Long.valueOf(20), resultArray[1]); + assertEquals(Long.valueOf(30), resultArray[2]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapFixedToLongArray() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(SMALLINT)", + (resultSet) -> { + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(Long.valueOf("10"), resultArray[0]); + assertEquals(Long.valueOf("20"), resultArray[1]); + assertEquals(Long.valueOf("30"), resultArray[2]); + }); + } + + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapDecimalArray() throws SQLException { + // when: jdbc_treat_decimal_as_int=true scale=0 try (Connection connection = init(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery( - "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))"); ) { - while (resultSet.next()) { - SimpleClass object = resultSet.getObject(1, SimpleClass.class); - assertEquals("a", object.getString()); - } + "SELECT ARRAY_CONSTRUCT(10.2, 20.02, 30)::ARRAY(DECIMAL(20,0))"); ) { + resultSet.next(); + Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); + assertEquals(resultArray[0], Long.valueOf(10)); + assertEquals(resultArray[1], Long.valueOf(20)); + assertEquals(resultArray[2], Long.valueOf(30)); } - } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapIntegerArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(INTEGER)"); - resultSet.next(); - Long[] resultArray = (Long[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0], Long.valueOf(10)); - assertEquals(resultArray[1], Long.valueOf(20)); - assertEquals(resultArray[2], Long.valueOf(30)); - statement.close(); - connection.close(); + // when: jdbc_treat_decimal_as_int=true scale=2 + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT ARRAY_CONSTRUCT(10.2, 20.02, 
30)::ARRAY(DECIMAL(20,2))"); ) { + resultSet.next(); + BigDecimal[] resultArray2 = (BigDecimal[]) resultSet.getArray(1).getArray(); + assertEquals(BigDecimal.valueOf(10.2), resultArray2[0]); + assertEquals(BigDecimal.valueOf(20.02), resultArray2[1]); + assertEquals(BigDecimal.valueOf(30), resultArray2[2]); + } + + // when: jdbc_treat_decimal_as_int=false scale=0 + try (Connection connection = init(); + Statement statement = connection.createStatement(); ) { + statement.execute("alter session set jdbc_treat_decimal_as_int = false"); + try (ResultSet resultSet = + statement.executeQuery("SELECT ARRAY_CONSTRUCT(10.2, 20.02, 30)::ARRAY(DECIMAL(20,0))")) { + resultSet.next(); + BigDecimal[] resultArray = (BigDecimal[]) resultSet.getArray(1).getArray(); + assertEquals(BigDecimal.valueOf(10), resultArray[0]); + assertEquals(BigDecimal.valueOf(20), resultArray[1]); + assertEquals(BigDecimal.valueOf(30), resultArray[2]); + } + } } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapVarcharArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("SELECT ARRAY_CONSTRUCT('10', '20','30')::ARRAY(VARCHAR)"); - resultSet.next(); - String[] resultArray = (String[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0], "10"); - assertEquals(resultArray[1], "20"); - assertEquals(resultArray[2], "30"); - statement.close(); - connection.close(); + withFirstRow( + "SELECT 'text', ARRAY_CONSTRUCT('10', '20','30')::ARRAY(VARCHAR)", + (resultSet) -> { + String t = resultSet.getString(1); + String[] resultArray = (String[]) resultSet.getArray(2).getArray(); + assertEquals("10", resultArray[0]); + assertEquals("20", resultArray[1]); + assertEquals("30", resultArray[2]); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapDatesArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "SELECT ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD'))::ARRAY(DATE)"); - resultSet.next(); - Date[] resultArray = (Date[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0], Date.valueOf(LocalDate.of(2023, 12, 24))); - assertEquals(resultArray[1], Date.valueOf(LocalDate.of(2023, 12, 25))); - statement.close(); - connection.close(); + withFirstRow( + "SELECT ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD'))::ARRAY(DATE)", + (resultSet) -> { + Date[] resultArray = (Date[]) resultSet.getArray(1).getArray(); + assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), resultArray[0]); + assertEquals(Date.valueOf(LocalDate.of(2023, 12, 25)), resultArray[1]); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapTimeArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "SELECT ARRAY_CONSTRUCT(to_time('15:39:20.123'), to_time('15:39:20.123'))::ARRAY(TIME)"); - resultSet.next(); - Time[] resultArray = (Time[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0], Time.valueOf(LocalTime.of(15, 39, 20))); - assertEquals(resultArray[1], Time.valueOf(LocalTime.of(15, 39, 20))); - statement.close(); - connection.close(); + 
withFirstRow( + "SELECT ARRAY_CONSTRUCT(to_time('15:39:20.123'), to_time('15:39:20.123'))::ARRAY(TIME)", + (resultSet) -> { + Time[] resultArray = (Time[]) resultSet.getArray(1).getArray(); + assertEquals(Time.valueOf(LocalTime.of(15, 39, 20)), resultArray[0]); + assertEquals(Time.valueOf(LocalTime.of(15, 39, 20)), resultArray[1]); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapTimestampArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "SELECT ARRAY_CONSTRUCT(TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), TO_TIMESTAMP_NTZ('2021-12-24 09:55:55'))::ARRAY(TIMESTAMP)"); - resultSet.next(); - Timestamp[] resultArray = (Timestamp[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0], Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 10, 44, 44))); - assertEquals(resultArray[1], Timestamp.valueOf(LocalDateTime.of(2021, 12, 24, 10, 55, 55))); - // TODO test timestamps LTZ and TZ - statement.close(); - connection.close(); + withFirstRow( + "SELECT ARRAY_CONSTRUCT(TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), TO_TIMESTAMP_NTZ('2021-12-24 09:55:55'))::ARRAY(TIMESTAMP)", + (resultSet) -> { + Timestamp[] resultArray = (Timestamp[]) resultSet.getArray(1).getArray(); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 10, 44, 44)), resultArray[0]); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 24, 10, 55, 55)), resultArray[1]); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapBooleanArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery("SELECT ARRAY_CONSTRUCT(true,false)::ARRAY(BOOLEAN)"); - resultSet.next(); - Boolean[] resultArray = (Boolean[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0], true); - assertEquals(resultArray[1], false); - statement.close(); - connection.close(); + withFirstRow( + "SELECT ARRAY_CONSTRUCT(true,false)::ARRAY(BOOLEAN)", + (resultSet) -> { + Boolean[] resultArray = (Boolean[]) resultSet.getArray(1).getArray(); + assertEquals(true, resultArray[0]); + assertEquals(false, resultArray[1]); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapBinaryArray() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "SELECT ARRAY_CONSTRUCT(TO_BINARY('616263', 'HEX'),TO_BINARY('616263', 'HEX'))::ARRAY(BINARY)"); - resultSet.next(); - Object[] resultArray = (Object[]) resultSet.getArray(1).getArray(); - assertArrayEquals((byte[]) resultArray[0], new byte[] {'a', 'b', 'c'}); - assertArrayEquals((byte[]) resultArray[1], new byte[] {'a', 'b', 'c'}); - statement.close(); - connection.close(); + withFirstRow( + "SELECT ARRAY_CONSTRUCT(TO_BINARY('616263', 'HEX'),TO_BINARY('616263', 'HEX'))::ARRAY(BINARY)", + (resultSet) -> { + Byte[][] resultArray = (Byte[][]) resultSet.getArray(1).getArray(); + assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[0]); + assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[1]); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructArray() throws SQLException { - Connection connection = init(); - Statement 
statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "SELECT ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2} )::ARRAY(OBJECT(x VARCHAR, y Integer))"); - resultSet.next(); - Map[] resultArray = (Map[]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0].toString(), "{x=abc, y=1}"); - assertEquals(resultArray[1].toString(), "{x=def, y=2}"); - statement.close(); - connection.close(); + public void testMapArrayOfStructToMap() throws SQLException { + withFirstRow( + "SELECT ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2} )::ARRAY(OBJECT(x VARCHAR, y INTEGER))", + (resultSet) -> { + Map[] resultArray = (Map[]) resultSet.getArray(1).getArray(); + assertEquals("{x=abc, y=1}", resultArray[0].toString()); + assertEquals("{x=def, y=2}", resultArray[1].toString()); + }); } @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testMapArrayOfArrays() throws SQLException { - Connection connection = init(); - Statement statement = connection.createStatement(); - ResultSet resultSet = - statement.executeQuery( - "SELECT ARRAY_CONSTRUCT(ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2}) )::ARRAY(ARRAY(OBJECT(x VARCHAR, y Integer)))"); - resultSet.next(); - Map[][] resultArray = (Map[][]) resultSet.getArray(1).getArray(); - assertEquals(resultArray[0][0].toString(), "{x=abc, y=1}"); - assertEquals(resultArray[0][1].toString(), "{x=def, y=2}"); - statement.close(); - connection.close(); + withFirstRow( + "SELECT ARRAY_CONSTRUCT(ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2}) )::ARRAY(ARRAY(OBJECT(x VARCHAR, y INTEGER)))", + (resultSet) -> { + Map[][] resultArray = (Map[][]) resultSet.getArray(1).getArray(); + assertEquals("{x=abc, y=1}", resultArray[0][0].toString()); + assertEquals("{x=def, y=2}", resultArray[0][1].toString()); + }); + } + + private void withFirstRow(String sqlText, ThrowingConsumer consumer) + throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet rs = statement.executeQuery(sqlText); ) { + assertTrue(rs.next()); + consumer.accept(rs); + } } + ; }