diff --git a/src/main/java/net/snowflake/client/core/ColumnTypeHelper.java b/src/main/java/net/snowflake/client/core/ColumnTypeHelper.java new file mode 100644 index 000000000..0683663c9 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/ColumnTypeHelper.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.core; + +import java.sql.Types; +import net.snowflake.client.jdbc.SnowflakeUtil; + +@SnowflakeJdbcInternalApi +public class ColumnTypeHelper { + public static int getColumnType(int internalColumnType, SFBaseSession session) { + int externalColumnType = internalColumnType; + + if (internalColumnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { + externalColumnType = Types.TIMESTAMP; + } else if (internalColumnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) { + externalColumnType = + session == null + ? Types.TIMESTAMP_WITH_TIMEZONE + : session.getEnableReturnTimestampWithTimeZone() + ? Types.TIMESTAMP_WITH_TIMEZONE + : Types.TIMESTAMP; + } + return externalColumnType; + } +} diff --git a/src/main/java/net/snowflake/client/core/JsonSqlInput.java b/src/main/java/net/snowflake/client/core/JsonSqlInput.java new file mode 100644 index 000000000..3362f3a25 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/JsonSqlInput.java @@ -0,0 +1,345 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.core; + +import com.fasterxml.jackson.databind.JsonNode; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLData; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Instant; +import java.time.ZoneOffset; +import java.util.Iterator; +import java.util.List; +import java.util.TimeZone; +import net.snowflake.client.core.json.Converters; +import net.snowflake.client.core.structs.SQLDataCreationHelper; +import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.jdbc.SnowflakeLoggedFeatureNotSupportedException; +import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.util.ThrowingCallable; +import net.snowflake.client.util.ThrowingTriFunction; +import net.snowflake.common.core.SFTimestamp; +import net.snowflake.common.core.SnowflakeDateTimeFormat; + +@SnowflakeJdbcInternalApi +public class JsonSqlInput implements SFSqlInput { + private final JsonNode input; + private final Iterator elements; + private final SFBaseSession session; + private final Converters converters; + private final List fields; + private int currentIndex = 0; + + public JsonSqlInput( + JsonNode input, SFBaseSession session, Converters converters, List fields) { + this.input = input; + this.elements = input.elements(); + this.session = session; + this.converters = converters; + this.fields = fields; + } + + public JsonNode getInput() { + return input; + } + + @Override + public String readString() throws SQLException { + return withNextValue( + ((value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + int columnSubType = fieldMetadata.getType(); + int scale = fieldMetadata.getScale(); + return mapExceptions( + () -> + converters + .getStringConverter() + 
.getString(value, columnType, columnSubType, scale)); + })); + } + + @Override + public boolean readBoolean() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions( + () -> converters.getBooleanConverter().getBoolean(value, columnType)); + }); + } + + @Override + public byte readByte() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> + mapExceptions(() -> converters.getNumberConverter().getByte(value))); + } + + @Override + public short readShort() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions(() -> converters.getNumberConverter().getShort(value, columnType)); + }); + } + + @Override + public int readInt() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions(() -> converters.getNumberConverter().getInt(value, columnType)); + }); + } + + @Override + public long readLong() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions(() -> converters.getNumberConverter().getLong(value, columnType)); + }); + } + + @Override + public float readFloat() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions(() -> converters.getNumberConverter().getFloat(value, columnType)); + }); + } + + @Override + public double readDouble() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions(() -> converters.getNumberConverter().getDouble(value, columnType)); + }); + } + + @Override + public BigDecimal readBigDecimal() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + return mapExceptions( + () -> converters.getNumberConverter().getBigDecimal(value, columnType)); + }); + } + + @Override + public byte[] readBytes() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + int columnSubType = fieldMetadata.getType(); + int scale = fieldMetadata.getScale(); + return mapExceptions( + () -> + converters.getBytesConverter().getBytes(value, columnType, columnSubType, scale)); + }); + } + + @Override + public Date readDate() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + SnowflakeDateTimeFormat formatter = getFormat(session, "DATE_OUTPUT_FORMAT"); + SFTimestamp timestamp = formatter.parse((String) value); + return Date.valueOf( + Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalDate()); + }); + } + + @Override + public Time readTime() throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + SnowflakeDateTimeFormat formatter = getFormat(session, "TIME_OUTPUT_FORMAT"); + SFTimestamp timestamp = formatter.parse((String) value); + 
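/*
 * Editor's illustrative sketch, not part of this patch: each readXxx() call above consumes the next
 * attribute of the OBJECT value in declaration order, so an SQLData implementation mirrors the
 * OBJECT's field list. PersonClass and its fields are hypothetical; the pattern follows the
 * SimpleClass/AllTypesClass test classes added later in this diff.
 */
class PersonClass implements java.sql.SQLData {
  private String name;            // maps to OBJECT(name VARCHAR, birthday DATE)
  private java.sql.Date birthday;

  @Override
  public String getSQLTypeName() {
    return null;
  }

  @Override
  public void readSQL(java.sql.SQLInput in, String typeName) throws java.sql.SQLException {
    name = in.readString();       // first attribute of the struct
    birthday = in.readDate();     // second attribute of the struct
  }

  @Override
  public void writeSQL(java.sql.SQLOutput out) throws java.sql.SQLException {}
}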
return Time.valueOf( + Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalTime()); + }); + } + + @Override + public Timestamp readTimestamp() throws SQLException { + return readTimestamp(null); + } + + @Override + public Timestamp readTimestamp(TimeZone tz) throws SQLException { + return withNextValue( + (value, jsonNode, fieldMetadata) -> { + if (value == null) { + return null; + } + int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session); + int columnSubType = fieldMetadata.getType(); + int scale = fieldMetadata.getScale(); + Timestamp result = getTimestampFromType(columnSubType, (String) value); + if (result != null) { + return result; + } + return mapExceptions( + () -> + converters + .getDateTimeConverter() + .getTimestamp(value, columnType, columnSubType, tz, scale)); + }); + } + + private Timestamp getTimestampFromType(int columnSubType, String value) { + if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { + return getTimestampFromFormat("TIMESTAMP_LTZ_OUTPUT_FORMAT", value); + } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_NTZ + || columnSubType == Types.TIMESTAMP) { + return getTimestampFromFormat("TIMESTAMP_NTZ_OUTPUT_FORMAT", value); + } else if (columnSubType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) { + return getTimestampFromFormat("TIMESTAMP_TZ_OUTPUT_FORMAT", value); + } else { + return null; + } + } + + private Timestamp getTimestampFromFormat(String format, String value) { + String rawFormat = (String) session.getCommonParameters().get(format); + if (rawFormat == null || rawFormat.isEmpty()) { + rawFormat = (String) session.getCommonParameters().get("TIMESTAMP_OUTPUT_FORMAT"); + } + SnowflakeDateTimeFormat formatter = SnowflakeDateTimeFormat.fromSqlFormat(rawFormat); + return formatter.parse(value).getTimestamp(); + } + + @Override + public Reader readCharacterStream() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readCharacterStream"); + } + + @Override + public InputStream readAsciiStream() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readAsciiStream"); + } + + @Override + public InputStream readBinaryStream() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readBinaryStream"); + } + + @Override + public Object readObject() throws SQLException { + // TODO structuredType return map - SNOW-974575 + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readObject"); + } + + @Override + public Ref readRef() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readRef"); + } + + @Override + public Blob readBlob() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readBlob"); + } + + @Override + public Clob readClob() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readClob"); + } + + @Override + public Array readArray() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readArray"); + } + + @Override + public boolean wasNull() throws SQLException { + return false; // nulls are not allowed in structure types + } + + @Override + public URL readURL() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readURL"); + } + + @Override + public NClob readNClob() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readNClob"); + } + + @Override + public String 
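/*
 * Editor's illustrative sketch, not part of this patch: getTimestampFromFormat above falls back to
 * the session-wide TIMESTAMP_OUTPUT_FORMAT when the variant-specific parameter is empty. The helper
 * and parameter key below are assumptions for illustration only.
 */
static String resolveTimestampFormat(java.util.Map<String, Object> sessionParams, String key) {
  String raw = (String) sessionParams.get(key);                   // e.g. "TIMESTAMP_NTZ_OUTPUT_FORMAT"
  if (raw == null || raw.isEmpty()) {
    raw = (String) sessionParams.get("TIMESTAMP_OUTPUT_FORMAT");  // session-wide fallback
  }
  return raw;
}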
readNString() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readNString"); + } + + @Override + public SQLXML readSQLXML() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readSQLXML"); + } + + @Override + public RowId readRowId() throws SQLException { + throw new SnowflakeLoggedFeatureNotSupportedException(session, "readRowId"); + } + + @Override + public T readObject(Class type) throws SQLException { + return withNextValue( + (__, jsonNode, fieldMetadata) -> { + SQLData instance = (SQLData) SQLDataCreationHelper.create(type); + instance.readSQL( + new JsonSqlInput(jsonNode, session, converters, fieldMetadata.getFields()), null); + return (T) instance; + }); + } + + private T withNextValue( + ThrowingTriFunction action) + throws SQLException { + JsonNode jsonNode = elements.next(); + Object value = getValue(jsonNode); + return action.apply(value, jsonNode, fields.get(currentIndex++)); + } + + private Object getValue(JsonNode jsonNode) { + if (jsonNode.isTextual()) { + return jsonNode.textValue(); + } else if (jsonNode.isBoolean()) { + return jsonNode.booleanValue(); + } else if (jsonNode.isNumber()) { + return jsonNode.numberValue(); + } + return null; + } + + private T mapExceptions(ThrowingCallable action) throws SQLException { + try { + return action.call(); + } catch (SFException e) { + throw new SQLException(e); + } + } + + private static SnowflakeDateTimeFormat getFormat(SFBaseSession session, String format) { + return SnowflakeDateTimeFormat.fromSqlFormat( + (String) session.getCommonParameters().get(format)); + } +} diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index ce83befd0..e7c90d3f9 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -6,6 +6,9 @@ import static net.snowflake.client.core.StmtUtil.eventHandler; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import java.io.ByteArrayInputStream; import java.io.IOException; import java.math.BigDecimal; @@ -14,8 +17,10 @@ import java.sql.SQLException; import java.sql.Time; import java.sql.Timestamp; +import java.sql.Types; import java.util.TimeZone; import net.snowflake.client.core.arrow.ArrowVectorConverter; +import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ArrowResultChunk; import net.snowflake.client.jdbc.ArrowResultChunk.ArrowChunkIterator; import net.snowflake.client.jdbc.ErrorCode; @@ -35,7 +40,8 @@ /** Arrow result set implementation */ public class SFArrowResultSet extends SFBaseResultSet implements DataConversionContext { - static final SFLogger logger = SFLoggerFactory.getLogger(SFArrowResultSet.class); + private static final SFLogger logger = SFLoggerFactory.getLogger(SFArrowResultSet.class); + private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); /** iterator over current arrow result chunk */ private ArrowChunkIterator currentChunkIterator; @@ -96,6 +102,8 @@ public class SFArrowResultSet extends SFBaseResultSet implements DataConversionC */ private boolean formatDateWithTimezone; + @SnowflakeJdbcInternalApi protected Converters jsonConverters; + /** * Constructor takes a result from the API response that 
we get from executing a SQL statement. * @@ -114,6 +122,21 @@ public SFArrowResultSet( boolean sortResult) throws SQLException { this(resultSetSerializable, session.getTelemetryClient(), sortResult); + this.jsonConverters = + new Converters( + resultSetSerializable.getTimeZone(), + session, + resultSetSerializable.getResultVersion(), + resultSetSerializable.isHonorClientTZForTimestampNTZ(), + resultSetSerializable.getTreatNTZAsUTC(), + resultSetSerializable.getUseSessionTimezone(), + resultSetSerializable.getFormatDateWithTimeZone(), + resultSetSerializable.getBinaryFormatter(), + resultSetSerializable.getDateFormatter(), + resultSetSerializable.getTimeFormatter(), + resultSetSerializable.getTimestampNTZFormatter(), + resultSetSerializable.getTimestampLTZFormatter(), + resultSetSerializable.getTimestampTZFormatter()); // update the session db/schema/wh/role etc this.statement = statement; @@ -481,7 +504,26 @@ public Object getObject(int columnIndex) throws SFException { converter.setTreatNTZAsUTC(treatNTZAsUTC); converter.setUseSessionTimezone(useSessionTimezone); converter.setSessionTimeZone(timeZone); - return converter.toObject(index); + Object obj = converter.toObject(index); + return handleObjectType(columnIndex, obj); + } + + private Object handleObjectType(int columnIndex, Object obj) throws SFException { + int columnType = resultSetMetaData.getColumnType(columnIndex); + if (columnType == Types.STRUCT + && Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { + try { + JsonNode jsonNode = OBJECT_MAPPER.readTree((String) obj); + return new JsonSqlInput( + jsonNode, + session, + jsonConverters, + resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields()); + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + } + return obj; } @Override diff --git a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java index 8901598b8..f34bb2d53 100644 --- a/src/main/java/net/snowflake/client/core/SFBaseResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFBaseResultSet.java @@ -26,6 +26,7 @@ /** Base class for query result set and metadata result set */ public abstract class SFBaseResultSet { private static final SFLogger logger = SFLoggerFactory.getLogger(SFBaseResultSet.class); + static final String STRUCTURED_TYPE_ENABLED_PROPERTY_NAME = "STRUCTURED_TYPE_ENABLED"; boolean wasNull = false; diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 95a220560..026d1d240 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -4,6 +4,9 @@ package net.snowflake.client.core; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import java.math.BigDecimal; import java.sql.Date; import java.sql.Time; @@ -18,6 +21,7 @@ /** Abstract class used to represent snowflake result set in json format */ public abstract class SFJsonResultSet extends SFBaseResultSet { private static final SFLogger logger = SFLoggerFactory.getLogger(SFJsonResultSet.class); + private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); protected final TimeZone sessionTimeZone; protected final Converters converters; @@ -78,11 +82,31 @@ public Object getObject(int 
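/*
 * Editor's illustrative sketch, not part of this patch: the Types.STRUCT branches in SFArrowResultSet
 * above and in SFJsonResultSet here are gated by the STRUCTURED_TYPE_ENABLED JVM property; the session
 * flag below is the one used by the integration tests added later in this diff. jdbcUrl and props are
 * assumed inputs.
 */
static java.sql.Connection openStructEnabledConnection(String jdbcUrl, java.util.Properties props)
    throws java.sql.SQLException {
  System.setProperty("STRUCTURED_TYPE_ENABLED", "true"); // checked via Boolean.valueOf(System.getProperty(...))
  java.sql.Connection conn = java.sql.DriverManager.getConnection(jdbcUrl, props);
  try (java.sql.Statement stmt = conn.createStatement()) {
    stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true");
  }
  return conn;
}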
columnIndex) throws SFException { case Types.BOOLEAN: return getBoolean(columnIndex); + case Types.STRUCT: + if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) { + return getSqlInput((String) obj, columnIndex); + } else { + throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); + } + default: throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type); } } + private Object getSqlInput(String input, int columnIndex) throws SFException { + try { + JsonNode jsonNode = OBJECT_MAPPER.readTree(input); + return new JsonSqlInput( + jsonNode, + session, + converters, + resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields()); + } catch (JsonProcessingException e) { + throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA); + } + } + /** * Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type * instead. diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index 2976473ac..dfb621400 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -48,6 +48,7 @@ public class SFResultSetMetaData { private List columnDisplaySizes; + private List columnMetadata = new ArrayList<>(); private String queryId; private Map columnNamePositionMap = new HashMap<>(); @@ -128,6 +129,7 @@ public SFResultSetMetaData( SnowflakeDateTimeFormat dateFormatter, SnowflakeDateTimeFormat timeFormatter) { this.columnCount = columnMetadata.size(); + this.columnMetadata = columnMetadata; this.queryId = queryId; this.timestampNTZFormatter = timestampNTZFormatter; this.timestampLTZFormatter = timestampLTZFormatter; @@ -354,22 +356,7 @@ public int getColumnCount() { } public int getColumnType(int column) throws SFException { - int internalColumnType = getInternalColumnType(column); - - int externalColumnType = internalColumnType; - - if (internalColumnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_LTZ) { - externalColumnType = Types.TIMESTAMP; - } - if (internalColumnType == SnowflakeUtil.EXTRA_TYPES_TIMESTAMP_TZ) { - externalColumnType = - session == null - ? Types.TIMESTAMP_WITH_TIMEZONE - : session.getEnableReturnTimestampWithTimeZone() - ? Types.TIMESTAMP_WITH_TIMEZONE - : Types.TIMESTAMP; - } - return externalColumnType; + return ColumnTypeHelper.getColumnType(getInternalColumnType(column), session); } public int getInternalColumnType(int column) throws SFException { @@ -485,4 +472,8 @@ public boolean getIsAutoIncrement(int column) { public List getIsAutoIncrementList() { return isAutoIncrementList; } + + List getColumnMetadata() { + return columnMetadata; + } } diff --git a/src/main/java/net/snowflake/client/core/SFSqlInput.java b/src/main/java/net/snowflake/client/core/SFSqlInput.java new file mode 100644 index 000000000..ea75fe251 --- /dev/null +++ b/src/main/java/net/snowflake/client/core/SFSqlInput.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.core; + +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLInput; +import java.util.TimeZone; + +/** This interface extends the standard {@link SQLInput} interface to provide additional methods. */ +@SnowflakeJdbcInternalApi +public interface SFSqlInput extends SQLInput { + + /** + * Method unwrapping object of class SQLInput to object of class SfSqlInput. 
+ *
+ * @param sqlInput SQLInput to consider.
+ * @return Object unwrapped to SFSqlInput class.
+ */
+  static SFSqlInput unwrap(SQLInput sqlInput) {
+    return (SFSqlInput) sqlInput;
+  }
+
+  /**
+   * Reads the next attribute in the stream and returns it as a java.sql.Timestamp object.
+   *
+   * @param tz timezone to consider.
+   * @return the attribute; if the value is SQL NULL, returns null
+   * @exception SQLException if a database access error occurs
+   * @exception SQLFeatureNotSupportedException if the JDBC driver does not support this method
+   * @since 1.2
+   */
+  java.sql.Timestamp readTimestamp(TimeZone tz) throws SQLException;
+}
diff --git a/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java b/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java
new file mode 100644
index 000000000..5cfdb5ca2
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/structs/SQLDataCreationHelper.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
+ */
+package net.snowflake.client.core.structs;
+
+import java.sql.SQLData;
+import java.sql.SQLException;
+import java.util.Optional;
+import java.util.function.Supplier;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+
+@SnowflakeJdbcInternalApi
+public class SQLDataCreationHelper {
+  public static <T> T create(Class<T> type) throws SQLException {
+    Optional<Supplier<SQLData>> typeFactory = SnowflakeObjectTypeFactories.get(type);
+    SQLData instance =
+        typeFactory
+            .map(Supplier::get)
+            .orElseGet(() -> createUsingReflection((Class<SQLData>) type));
+    return (T) instance;
+  }
+
+  private static SQLData createUsingReflection(Class<SQLData> type) {
+    try {
+      return type.newInstance();
+    } catch (InstantiationException | IllegalAccessException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
diff --git a/src/main/java/net/snowflake/client/core/structs/SnowflakeObjectTypeFactories.java b/src/main/java/net/snowflake/client/core/structs/SnowflakeObjectTypeFactories.java
new file mode 100644
index 000000000..2fbb6695a
--- /dev/null
+++ b/src/main/java/net/snowflake/client/core/structs/SnowflakeObjectTypeFactories.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
+ */
+package net.snowflake.client.core.structs;
+
+import java.sql.SQLData;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Supplier;
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+
+@SnowflakeJdbcInternalApi
+public class SnowflakeObjectTypeFactories {
+  private static final Map<Class<?>, Supplier<SQLData>> factories = new ConcurrentHashMap<>();
+
+  public static void register(Class<?> type, Supplier<SQLData> factory) {
+    Objects.requireNonNull((Object) type, "type cannot be null");
+    Objects.requireNonNull((Object) factory, "factory cannot be null");
+    factories.put(type, factory);
+  }
+
+  public static void unregister(Class<?> type) {
+    Objects.requireNonNull((Object) type, "type cannot be null");
+    factories.remove(type);
+  }
+
+  public static Optional<Supplier<SQLData>> get(Class<?> type) {
+    Objects.requireNonNull((Object) type, "type cannot be null");
+    return Optional.ofNullable(factories.get(type));
+  }
+}
diff --git a/src/main/java/net/snowflake/client/jdbc/ColumnTypeInfo.java b/src/main/java/net/snowflake/client/jdbc/ColumnTypeInfo.java
new file mode 100644
index 000000000..24d832528
--- /dev/null
+++ b/src/main/java/net/snowflake/client/jdbc/ColumnTypeInfo.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
+ */
+package net.snowflake.client.jdbc;
+
+import net.snowflake.client.core.SnowflakeJdbcInternalApi;
+
+@SnowflakeJdbcInternalApi
+public class ColumnTypeInfo {
+  private int columnType;
+  private String extColTypeName;
+  private SnowflakeType snowflakeType;
+
+  public ColumnTypeInfo(int columnType, String extColTypeName, SnowflakeType snowflakeType) {
+    this.columnType = columnType;
+    this.extColTypeName = extColTypeName;
+    this.snowflakeType = snowflakeType;
+  }
+
+  public int getColumnType() {
+    return columnType;
+  }
+
+  public String getExtColTypeName() {
+    return extColTypeName;
+  }
+
+  public SnowflakeType getSnowflakeType() {
+    return snowflakeType;
+  }
+}
diff --git a/src/main/java/net/snowflake/client/jdbc/ErrorCode.java b/src/main/java/net/snowflake/client/jdbc/ErrorCode.java
index e0cb6d414..b9cc71491 100644
--- a/src/main/java/net/snowflake/client/jdbc/ErrorCode.java
+++ b/src/main/java/net/snowflake/client/jdbc/ErrorCode.java
@@ -82,7 +82,8 @@ public enum ErrorCode {
   INVALID_CONNECT_STRING(200059, SqlState.CONNECTION_EXCEPTION),
   INVALID_OKTA_USERNAME(200060, SqlState.CONNECTION_EXCEPTION),
   GCP_SERVICE_ERROR(200061, SqlState.SYSTEM_ERROR),
-  AUTHENTICATOR_REQUEST_TIMEOUT(200062, SqlState.CONNECTION_EXCEPTION);
+  AUTHENTICATOR_REQUEST_TIMEOUT(200062, SqlState.CONNECTION_EXCEPTION),
+  INVALID_STRUCT_DATA(200063, SqlState.DATA_EXCEPTION);
 
   public static final String errorMessageResource =
       "net.snowflake.client.jdbc.jdbc_error_messages";
diff --git a/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java
new file mode 100644
index 000000000..55979cd83
--- /dev/null
+++ b/src/main/java/net/snowflake/client/jdbc/FieldMetadata.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved.
+ */ +package net.snowflake.client.jdbc; + +import java.util.List; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +public class FieldMetadata { + + private String name; + private String typeName; + private int type; + private boolean nullable; + + private int byteLength; + + private int precision; + private int scale; + private boolean fixed; + private SnowflakeType base; + private List fields; + + public FieldMetadata( + String name, + String typeName, + int type, + boolean nullable, + int byteLength, + int precision, + int scale, + boolean fixed, + SnowflakeType base, + List fields) { + this.name = name; + this.typeName = typeName; + this.type = type; + this.nullable = nullable; + this.byteLength = byteLength; + this.precision = precision; + this.scale = scale; + this.fixed = fixed; + this.base = base; + this.fields = fields; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getTypeName() { + return typeName; + } + + public void setTypeName(String typeName) { + this.typeName = typeName; + } + + public int getType() { + return type; + } + + public void setType(int type) { + this.type = type; + } + + public boolean isNullable() { + return nullable; + } + + public void setNullable(boolean nullable) { + this.nullable = nullable; + } + + public int getByteLength() { + return byteLength; + } + + public void setByteLength(int byteLength) { + this.byteLength = byteLength; + } + + public int getPrecision() { + return precision; + } + + public void setPrecision(int precision) { + this.precision = precision; + } + + public int getScale() { + return scale; + } + + public void setScale(int scale) { + this.scale = scale; + } + + public boolean isFixed() { + return fixed; + } + + public void setFixed(boolean fixed) { + this.fixed = fixed; + } + + public SnowflakeType getBase() { + return base; + } + + public void setBase(SnowflakeType base) { + this.base = base; + } + + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java index 0b5d7fe61..77efb4362 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeBaseResultSet.java @@ -18,7 +18,9 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.RowId; +import java.sql.SQLData; import java.sql.SQLException; +import java.sql.SQLInput; import java.sql.SQLWarning; import java.sql.SQLXML; import java.sql.Statement; @@ -27,8 +29,11 @@ import java.util.Calendar; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.TimeZone; +import java.util.function.Supplier; import net.snowflake.client.core.SFBaseSession; +import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.SqlState; @@ -1330,18 +1335,35 @@ public void updateNClob(String columnLabel, Reader reader) throws SQLException { throw new SnowflakeLoggedFeatureNotSupportedException(session); } - // @Override + @Override public T getObject(int columnIndex, Class type) throws SQLException { logger.debug("public T getObject(int columnIndex,Class type)", false); + if 
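/*
 * Editor's illustrative sketch, not part of this patch: caller-side view of the branch that follows.
 * The application may register a factory for the target class, or rely on the reflective fallback
 * defined below. The query and the SimpleClass type are taken from the tests added later in this diff.
 */
static SimpleClass readSimpleStruct(java.sql.Statement stmt) throws java.sql.SQLException {
  SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); // optional; otherwise newInstance() is used
  try (java.sql.ResultSet rs = stmt.executeQuery("select {'string':'a'}::OBJECT(string VARCHAR)")) {
    rs.next();
    return rs.getObject(1, SimpleClass.class); // dispatches into the SQLData path added here
  }
}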
(SQLData.class.isAssignableFrom(type)) { + Optional> typeFactory = SnowflakeObjectTypeFactories.get(type); + SQLData instance = + typeFactory + .map(Supplier::get) + .orElseGet(() -> createUsingReflection((Class) type)); + SQLInput sqlInput = (SQLInput) getObject(columnIndex); + instance.readSQL(sqlInput, null); + return (T) instance; + } else { + return (T) getObject(columnIndex); + } + } - throw new SnowflakeLoggedFeatureNotSupportedException(session); + private SQLData createUsingReflection(Class type) { + try { + return type.newInstance(); + } catch (InstantiationException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - // @Override + @Override public T getObject(String columnLabel, Class type) throws SQLException { logger.debug("public T getObject(String columnLabel,Class type)", false); - - throw new SnowflakeLoggedFeatureNotSupportedException(session); + return getObject(findColumn(columnLabel), type); } @SuppressWarnings("unchecked") diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java index 56cc8c6ae..9f182772e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java @@ -5,6 +5,8 @@ package net.snowflake.client.jdbc; import java.io.Serializable; +import java.util.List; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; /** * @author jhuang @@ -20,12 +22,51 @@ public class SnowflakeColumnMetadata implements Serializable { private int scale; private boolean fixed; private SnowflakeType base; + private List fields; private String columnSrcTable; private String columnSrcSchema; private String columnSrcDatabase; private boolean isAutoIncrement; + @SnowflakeJdbcInternalApi + public SnowflakeColumnMetadata( + String name, + int type, + boolean nullable, + int length, + int precision, + int scale, + String typeName, + boolean fixed, + SnowflakeType base, + List fields, + String columnSrcDatabase, + String columnSrcSchema, + String columnSrcTable, + boolean isAutoIncrement) { + this.name = name; + this.type = type; + this.nullable = nullable; + this.length = length; + this.precision = precision; + this.scale = scale; + this.typeName = typeName; + this.fixed = fixed; + this.base = base; + this.fields = fields; + this.columnSrcDatabase = columnSrcDatabase; + this.columnSrcSchema = columnSrcSchema; + this.columnSrcTable = columnSrcTable; + this.isAutoIncrement = isAutoIncrement; + } + + /** + * @deprecated Use {@link SnowflakeColumnMetadata#SnowflakeColumnMetadata(String, int, boolean, + * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean)} + * instead + */ + @Deprecated public SnowflakeColumnMetadata( String name, int type, @@ -123,6 +164,16 @@ public SnowflakeType getBase() { return this.base; } + @SnowflakeJdbcInternalApi + public List getFields() { + return fields; + } + + @SnowflakeJdbcInternalApi + public void setFields(List fields) { + this.fields = fields; + } + public String getColumnSrcTable() { return this.columnSrcTable; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java index 03f59d6b6..94f4bab11 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java @@ -106,10 +106,10 @@ public boolean 
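/*
 * Editor's illustrative sketch, not part of this patch: the new SnowflakeColumnMetadata constructor
 * above threads the nested FieldMetadata list through the column metadata, e.g. for a column declared
 * OBJECT(string VARCHAR). All literal values here are illustrative assumptions.
 */
static SnowflakeColumnMetadata exampleObjectColumnMetadata() {
  FieldMetadata stringField =
      new FieldMetadata(
          "string", "VARCHAR", java.sql.Types.VARCHAR, true, 256, 0, 0, false,
          SnowflakeType.TEXT, new java.util.ArrayList<>());
  return new SnowflakeColumnMetadata(
      "OBJ_COL",                                        // name
      java.sql.Types.STRUCT,                            // type now reported for OBJECT columns
      true,                                             // nullable
      256,                                              // length
      0,                                                // precision
      0,                                                // scale
      "OBJECT",                                         // typeName
      false,                                            // fixed
      SnowflakeType.OBJECT,                             // base
      java.util.Collections.singletonList(stringField), // nested fields
      "DB", "SCHEMA", "TABLE",                          // source database/schema/table (illustrative)
      false);                                           // isAutoIncrement
}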
isCaseSensitive(int column) throws SQLException { int colType = getColumnType(column); switch (colType) { - // Note: SF types ARRAY, OBJECT, GEOGRAPHY, GEOMETRY are also represented as - // VARCHAR. + // Note: SF types ARRAY, GEOGRAPHY, GEOMETRY are also represented as VARCHAR. case Types.VARCHAR: case Types.CHAR: + case Types.STRUCT: return true; case Types.INTEGER: diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java index d7f30d614..dacfbbfd5 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeType.java @@ -75,11 +75,10 @@ public static JavaDataType getJavaType(SnowflakeType type) { case ARRAY: case VARIANT: return JavaDataType.JAVA_STRING; - case OBJECT: - return JavaDataType.JAVA_STRING; case BINARY: return JavaDataType.JAVA_BYTES; case ANY: + case OBJECT: return JavaDataType.JAVA_OBJECT; default: // Those are not supported, but no reason to panic @@ -424,6 +423,9 @@ public static SnowflakeType javaTypeToSFType(int javaType, SFBaseSession session case Types.BOOLEAN: return BOOLEAN; + case Types.STRUCT: + return OBJECT; + case Types.NULL: return ANY; @@ -440,6 +442,8 @@ public static String javaTypeToClassName(int type) throws SQLException { switch (type) { case Types.VARCHAR: case Types.CHAR: + case Types.STRUCT: + case Types.ARRAY: return String.class.getName(); case Types.BINARY: diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 2856e15e2..247831e0e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -7,6 +7,7 @@ import static net.snowflake.client.jdbc.SnowflakeType.GEOGRAPHY; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; import com.google.common.base.Strings; import java.io.BufferedReader; import java.io.IOException; @@ -24,6 +25,7 @@ import java.util.Calendar; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Properties; import java.util.Random; import java.util.concurrent.Executors; @@ -164,94 +166,135 @@ public static SnowflakeColumnMetadata extractColumnMetadata( int scale = colNode.path("scale").asInt(); int length = colNode.path("length").asInt(); boolean fixed = colNode.path("fixed").asBoolean(); - String extColTypeName; + JsonNode udtOutputType = colNode.path("outputType"); + JsonNode extColTypeNameNode = colNode.path("extTypeName"); + String extColTypeName = null; + if (!extColTypeNameNode.isMissingNode() + && !Strings.isNullOrEmpty(extColTypeNameNode.asText())) { + extColTypeName = extColTypeNameNode.asText(); + } + + int fixedColType = jdbcTreatDecimalAsInt && scale == 0 ? 
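/*
 * Editor's illustrative sketch, not part of this patch: the net effect of the SnowflakeType changes
 * above on ResultSetMetaData for an OBJECT column; expected values match the driver tests further
 * down in this diff.
 */
static void describeObjectColumn(java.sql.ResultSet rs) throws java.sql.SQLException {
  java.sql.ResultSetMetaData meta = rs.getMetaData();
  System.out.println(meta.getColumnType(1) == java.sql.Types.STRUCT); // true; previously reported as VARCHAR
  System.out.println(meta.getColumnClassName(1));                     // "java.lang.String" per javaTypeToClassName
}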
Types.BIGINT : Types.DECIMAL; + ColumnTypeInfo columnTypeInfo = + getSnowflakeType(internalColTypeName, extColTypeName, udtOutputType, session, fixedColType); + + String colSrcDatabase = colNode.path("database").asText(); + String colSrcSchema = colNode.path("schema").asText(); + String colSrcTable = colNode.path("table").asText(); + List fieldsMetadata = getFieldMetadata(fixedColType, colNode); - int colType; + boolean isAutoIncrement = colNode.path("isAutoIncrement").asBoolean(); + return new SnowflakeColumnMetadata( + colName, + columnTypeInfo.getColumnType(), + nullable, + length, + precision, + scale, + columnTypeInfo.getExtColTypeName(), + fixed, + columnTypeInfo.getSnowflakeType(), + fieldsMetadata, + colSrcDatabase, + colSrcSchema, + colSrcTable, + isAutoIncrement); + } + + static ColumnTypeInfo getSnowflakeType( + String internalColTypeName, + String extColTypeName, + JsonNode udtOutputType, + SFBaseSession session, + int fixedColType) + throws SnowflakeSQLLoggedException { SnowflakeType baseType = SnowflakeType.fromString(internalColTypeName); + ColumnTypeInfo columnTypeInfo = null; switch (baseType) { case TEXT: - colType = Types.VARCHAR; - extColTypeName = "VARCHAR"; + columnTypeInfo = + new ColumnTypeInfo(Types.VARCHAR, defaultIfNull(extColTypeName, "VARCHAR"), baseType); break; - case CHAR: - colType = Types.CHAR; - extColTypeName = "CHAR"; + columnTypeInfo = + new ColumnTypeInfo(Types.CHAR, defaultIfNull(extColTypeName, "CHAR"), baseType); break; - case INTEGER: - colType = Types.INTEGER; - extColTypeName = "INTEGER"; + columnTypeInfo = + new ColumnTypeInfo(Types.INTEGER, defaultIfNull(extColTypeName, "INTEGER"), baseType); break; - case FIXED: - colType = jdbcTreatDecimalAsInt && scale == 0 ? Types.BIGINT : Types.DECIMAL; - extColTypeName = "NUMBER"; + columnTypeInfo = + new ColumnTypeInfo(fixedColType, defaultIfNull(extColTypeName, "NUMBER"), baseType); break; case REAL: - colType = Types.DOUBLE; - extColTypeName = "DOUBLE"; + columnTypeInfo = + new ColumnTypeInfo(Types.DOUBLE, defaultIfNull(extColTypeName, "DOUBLE"), baseType); break; case TIMESTAMP: case TIMESTAMP_LTZ: - colType = EXTRA_TYPES_TIMESTAMP_LTZ; - extColTypeName = "TIMESTAMPLTZ"; + columnTypeInfo = + new ColumnTypeInfo( + EXTRA_TYPES_TIMESTAMP_LTZ, defaultIfNull(extColTypeName, "TIMESTAMPLTZ"), baseType); break; case TIMESTAMP_NTZ: - colType = Types.TIMESTAMP; - extColTypeName = "TIMESTAMPNTZ"; + // if the column type is changed to EXTRA_TYPES_TIMESTAMP_NTZ, update also JsonSqlInput + columnTypeInfo = + new ColumnTypeInfo( + Types.TIMESTAMP, defaultIfNull(extColTypeName, "TIMESTAMPNTZ"), baseType); break; case TIMESTAMP_TZ: - colType = EXTRA_TYPES_TIMESTAMP_TZ; - extColTypeName = "TIMESTAMPTZ"; + columnTypeInfo = + new ColumnTypeInfo( + EXTRA_TYPES_TIMESTAMP_TZ, defaultIfNull(extColTypeName, "TIMESTAMPTZ"), baseType); break; case DATE: - colType = Types.DATE; - extColTypeName = "DATE"; + columnTypeInfo = + new ColumnTypeInfo(Types.DATE, defaultIfNull(extColTypeName, "DATE"), baseType); break; case TIME: - colType = Types.TIME; - extColTypeName = "TIME"; + columnTypeInfo = + new ColumnTypeInfo(Types.TIME, defaultIfNull(extColTypeName, "TIME"), baseType); break; case BOOLEAN: - colType = Types.BOOLEAN; - extColTypeName = "BOOLEAN"; + columnTypeInfo = + new ColumnTypeInfo(Types.BOOLEAN, defaultIfNull(extColTypeName, "BOOLEAN"), baseType); break; case ARRAY: - colType = Types.VARCHAR; - extColTypeName = "ARRAY"; + columnTypeInfo = + new ColumnTypeInfo(Types.VARCHAR, defaultIfNull(extColTypeName, "ARRAY"), 
baseType); break; case OBJECT: - colType = Types.VARCHAR; - extColTypeName = "OBJECT"; + columnTypeInfo = + new ColumnTypeInfo(Types.STRUCT, defaultIfNull(extColTypeName, "OBJECT"), baseType); break; case VARIANT: - colType = Types.VARCHAR; - extColTypeName = "VARIANT"; + columnTypeInfo = + new ColumnTypeInfo(Types.VARCHAR, defaultIfNull(extColTypeName, "VARIANT"), baseType); break; case BINARY: - colType = Types.BINARY; - extColTypeName = "BINARY"; + columnTypeInfo = + new ColumnTypeInfo(Types.BINARY, defaultIfNull(extColTypeName, "BINARY"), baseType); break; case GEOGRAPHY: case GEOMETRY: - colType = Types.VARCHAR; + int colType = Types.VARCHAR; extColTypeName = (baseType == GEOGRAPHY) ? "GEOGRAPHY" : "GEOMETRY"; - JsonNode udtOutputType = colNode.path("outputType"); + if (!udtOutputType.isMissingNode()) { SnowflakeType outputType = SnowflakeType.fromString(udtOutputType.asText()); switch (outputType) { @@ -263,6 +306,7 @@ public static SnowflakeColumnMetadata extractColumnMetadata( colType = Types.BINARY; } } + columnTypeInfo = new ColumnTypeInfo(colType, extColTypeName, baseType); break; default: @@ -273,32 +317,58 @@ public static SnowflakeColumnMetadata extractColumnMetadata( "Unknown column type: " + internalColTypeName); } - JsonNode extColTypeNameNode = colNode.path("extTypeName"); - if (!extColTypeNameNode.isMissingNode() - && !Strings.isNullOrEmpty(extColTypeNameNode.asText())) { - extColTypeName = extColTypeNameNode.asText(); - } + return columnTypeInfo; + } - String colSrcDatabase = colNode.path("database").asText(); - String colSrcSchema = colNode.path("schema").asText(); - String colSrcTable = colNode.path("table").asText(); + private static String defaultIfNull(String extColTypeName, String defaultValue) { + return Optional.ofNullable(extColTypeName).orElse(defaultValue); + } - boolean isAutoIncrement = colNode.path("isAutoIncrement").asBoolean(); + static List createFieldsMetadata(ArrayNode fieldsJson, int fixedColType) + throws SnowflakeSQLLoggedException { + List fields = new ArrayList<>(); + for (JsonNode node : fieldsJson) { + String colName = node.path("name").asText(); + int scale = node.path("scale").asInt(); + int precision = node.path("precision").asInt(); + String internalColTypeName = node.path("type").asText(); + boolean nullable = node.path("nullable").asBoolean(); + int length = node.path("length").asInt(); + boolean fixed = node.path("fixed").asBoolean(); + List internalFields = getFieldMetadata(fixedColType, node); + JsonNode outputType = node.path("outputType"); + JsonNode extColTypeNameNode = node.path("extTypeName"); + String extColTypeName = null; + if (!extColTypeNameNode.isMissingNode() + && !Strings.isNullOrEmpty(extColTypeNameNode.asText())) { + extColTypeName = extColTypeNameNode.asText(); + } + ColumnTypeInfo columnTypeInfo = + getSnowflakeType(internalColTypeName, extColTypeName, outputType, null, fixedColType); + fields.add( + new FieldMetadata( + colName, + columnTypeInfo.getExtColTypeName(), + columnTypeInfo.getColumnType(), + nullable, + length, + precision, + scale, + fixed, + columnTypeInfo.getSnowflakeType(), + internalFields)); + } + return fields; + } - return new SnowflakeColumnMetadata( - colName, - colType, - nullable, - length, - precision, - scale, - extColTypeName, - fixed, - baseType, - colSrcDatabase, - colSrcSchema, - colSrcTable, - isAutoIncrement); + private static List getFieldMetadata(int fixedColType, JsonNode node) + throws SnowflakeSQLLoggedException { + if (!node.path("fields").isEmpty()) { + ArrayNode internalFieldsJson 
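/*
 * Editor's illustrative sketch, not part of this patch: the rowtype JSON walked by
 * createFieldsMetadata/getFieldMetadata carries a nested "fields" array, roughly like the fixture
 * built in SnowflakeUtilTest at the end of this diff. Values below are illustrative assumptions.
 */
static com.fasterxml.jackson.databind.node.ObjectNode exampleObjectColumnNode() {
  com.fasterxml.jackson.databind.ObjectMapper mapper = new com.fasterxml.jackson.databind.ObjectMapper();
  com.fasterxml.jackson.databind.node.ObjectNode col = mapper.createObjectNode();
  col.put("name", "OBJ_COL");
  col.put("type", "object");   // internal type name mapped to Types.STRUCT above
  col.put("nullable", true);
  col.put("length", 256);
  com.fasterxml.jackson.databind.node.ObjectNode field = col.putArray("fields").addObject();
  field.put("name", "string");
  field.put("type", "text");   // nested field parsed into a FieldMetadata entry
  field.put("nullable", true);
  field.put("length", 256);
  return col;
}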
= (ArrayNode) node.path("fields"); + return createFieldsMetadata(internalFieldsJson, fixedColType); + } else { + return new ArrayList<>(); + } } static String javaTypeToSFTypeString(int javaType, SFBaseSession session) @@ -413,6 +483,7 @@ static List describeFixedViewColumns( typeName, // type name true, stype, // fixed + new ArrayList<>(), "", // database "", // schema "", diff --git a/src/main/java/net/snowflake/client/util/ThrowingCallable.java b/src/main/java/net/snowflake/client/util/ThrowingCallable.java new file mode 100644 index 000000000..93434cc88 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/ThrowingCallable.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface ThrowingCallable { + A call() throws T; +} diff --git a/src/main/java/net/snowflake/client/util/ThrowingTriFunction.java b/src/main/java/net/snowflake/client/util/ThrowingTriFunction.java new file mode 100644 index 000000000..941c74d37 --- /dev/null +++ b/src/main/java/net/snowflake/client/util/ThrowingTriFunction.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.util; + +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +@FunctionalInterface +public interface ThrowingTriFunction { + R apply(A a, B b, C c) throws T; +} diff --git a/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties b/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties index 61d0fb4f1..17dee4af3 100644 --- a/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties +++ b/src/main/resources/net/snowflake/client/jdbc/jdbc_error_messages.properties @@ -80,4 +80,5 @@ Error message={3}, Extended error info={4} 200058=Value is too large to be stored as integer at batch index {0}. Use executeLargeBatch() instead. 200059=Invalid Connect String: {0}. 200061=GCS operation failed: Operation={0}, Error code={1}, Message={2}, Reason={3} +200063= Invalid json - Cannot be parsed and converted to structured type. diff --git a/src/test/java/net/snowflake/client/category/TestCategoryStructuredType.java b/src/test/java/net/snowflake/client/category/TestCategoryStructuredType.java new file mode 100644 index 000000000..053ad8362 --- /dev/null +++ b/src/test/java/net/snowflake/client/category/TestCategoryStructuredType.java @@ -0,0 +1,6 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.category; + +public interface TestCategoryStructuredType {} diff --git a/src/test/java/net/snowflake/client/jdbc/AllTypesClass.java b/src/test/java/net/snowflake/client/jdbc/AllTypesClass.java new file mode 100644 index 000000000..cc9d6b322 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/AllTypesClass.java @@ -0,0 +1,121 @@ +package net.snowflake.client.jdbc; + +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.SQLData; +import java.sql.SQLException; +import java.sql.SQLInput; +import java.sql.SQLOutput; +import java.sql.Time; +import java.sql.Timestamp; + +public class AllTypesClass implements SQLData { + private String string; + private Byte b; + private Short s; + private Integer i; + private Long l; + private Float f; + private Double d; + private BigDecimal bd; + private Boolean bool; + private Timestamp timestampLtz; + private Timestamp timestampNtz; + private Timestamp timestampTz; + private Date date; + private Time time; + private byte[] binary; + private SimpleClass simpleClass; + + @Override + public String getSQLTypeName() throws SQLException { + return null; + } + + @Override + public void readSQL(SQLInput sqlInput, String typeName) throws SQLException { + string = sqlInput.readString(); + b = sqlInput.readByte(); + s = sqlInput.readShort(); + i = sqlInput.readInt(); + l = sqlInput.readLong(); + f = sqlInput.readFloat(); + d = sqlInput.readDouble(); + bd = sqlInput.readBigDecimal(); + bool = sqlInput.readBoolean(); + timestampLtz = sqlInput.readTimestamp(); + timestampNtz = sqlInput.readTimestamp(); + timestampTz = sqlInput.readTimestamp(); + date = sqlInput.readDate(); + time = sqlInput.readTime(); + binary = sqlInput.readBytes(); + simpleClass = sqlInput.readObject(SimpleClass.class); + } + + @Override + public void writeSQL(SQLOutput stream) throws SQLException {} + + public String getString() { + return string; + } + + public Byte getB() { + return b; + } + + public Short getS() { + return s; + } + + public Integer getI() { + return i; + } + + public Long getL() { + return l; + } + + public Float getF() { + return f; + } + + public Double getD() { + return d; + } + + public BigDecimal getBd() { + return bd; + } + + public Boolean getBool() { + return bool; + } + + public Timestamp getTimestampLtz() { + return timestampLtz; + } + + public Timestamp getTimestampNtz() { + return timestampNtz; + } + + public Timestamp getTimestampTz() { + return timestampTz; + } + + public Date getDate() { + return date; + } + + public Time getTime() { + return time; + } + + public byte[] getBinary() { + return binary; + } + + public SimpleClass getSimpleClass() { + return simpleClass; + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ArrowResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ArrowResultSetStructuredTypesLatestIT.java new file mode 100644 index 000000000..291bb9e34 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/ArrowResultSetStructuredTypesLatestIT.java @@ -0,0 +1,10 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +public class ArrowResultSetStructuredTypesLatestIT extends ResultSetStructuredTypesLatestIT { + public ArrowResultSetStructuredTypesLatestIT() { + super("ARROW"); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java index 832b353a2..6e881615c 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java @@ -232,6 +232,7 @@ public void testGetMethods() throws Throwable { Clob clob = connection.createClob(); clob.setString(1, "hello world"); Statement statement = connection.createStatement(); + // TODO structuredType - add to test when WRITE is ready - SNOW-1157904 statement.execute( "create or replace table test_get(colA integer, colB number, colC number, colD string, colE double, colF float, colG boolean, colH text, colI binary(3), colJ number(38,9), colK int, colL date, colM time, colN timestamp_ltz)"); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java index 95e04c8e0..539784120 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java @@ -125,7 +125,6 @@ private void checkFeatureNotSupportedException(ResultSet resultSet) throws SQLEx expectFeatureNotSupportedException(() -> resultSet.getNString(1)); expectFeatureNotSupportedException(() -> resultSet.getNCharacterStream(1)); expectFeatureNotSupportedException(() -> resultSet.getNClob(1)); - expectFeatureNotSupportedException(() -> resultSet.getObject(1, String.class)); expectFeatureNotSupportedException(() -> resultSet.updateRef(1, new FakeRef())); expectFeatureNotSupportedException(() -> resultSet.updateBlob(1, new FakeBlob())); @@ -166,7 +165,6 @@ private void checkFeatureNotSupportedException(ResultSet resultSet) throws SQLEx expectFeatureNotSupportedException(() -> resultSet.getNString("col2")); expectFeatureNotSupportedException(() -> resultSet.getNCharacterStream("col2")); expectFeatureNotSupportedException(() -> resultSet.getNClob("col2")); - expectFeatureNotSupportedException(() -> resultSet.getObject("col2", String.class)); expectFeatureNotSupportedException(() -> resultSet.updateRef("col2", new FakeRef())); expectFeatureNotSupportedException(() -> resultSet.updateBlob("col2", new FakeBlob())); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java new file mode 100644 index 000000000..2ec486ef6 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetStructuredTypesLatestIT.java @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.math.BigDecimal; +import java.sql.Connection; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.category.TestCategoryStructuredType; +import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(TestCategoryStructuredType.class) +public class ResultSetStructuredTypesLatestIT extends BaseJDBCTest { + private final String queryResultFormat; + + public ResultSetStructuredTypesLatestIT() { + this("JSON"); + } + + protected ResultSetStructuredTypesLatestIT(String queryResultFormat) { + this.queryResultFormat = queryResultFormat; + } + + public Connection init() throws SQLException { + Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); + stmt.execute("alter session set IGNORE_CLIENT_VESRION_IN_STRUCTURED_TYPES_RESPONSE = true"); + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } + return conn; + } + + // TODO Structured types feature exists only on QA environments + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapStructToObjectWithFactory() throws SQLException { + testMapJson(true); + } + + // TODO Structured types feature exists only on QA environments + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapStructToObjectWithReflection() throws SQLException { + testMapJson(false); + testMapJson(true); + } + + private void testMapJson(boolean registerFactory) throws SQLException { + if (registerFactory) { + SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); + } else { + SnowflakeObjectTypeFactories.unregister(SimpleClass.class); + } + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery("select {'string':'a'}::OBJECT(string VARCHAR)"); ) { + resultSet.next(); + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("a", object.getString()); + } + } + + // TODO Structured types feature exists only on QA environments + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapStructAllTypes() throws SQLException { + testMapAllTypes(false); + testMapAllTypes(true); + } + + private void testMapAllTypes(boolean registerFactory) throws SQLException { + if (registerFactory) { + SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); + } else { + SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); + } + try (Connection connection = init(); + Statement statement = connection.createStatement()) { + statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); + try (ResultSet resultSet = + statement.executeQuery( + "select {" + + 
"'string': 'a', " + + "'b': 1, " + + "'s': 2, " + + "'i': 3, " + + "'l': 4, " + + "'f': 1.1, " + + "'d': 2.2, " + + "'bd': 3.3, " + + "'bool': true, " + + "'timestamp_ltz': '2021-12-22 09:43:44'::TIMESTAMP_LTZ, " + + "'timestamp_ntz': '2021-12-23 09:44:44'::TIMESTAMP_NTZ, " + + "'timestamp_tz': '2021-12-24 09:45:45 +0800'::TIMESTAMP_TZ, " + + "'date': '2023-12-24'::DATE, " + + "'time': '12:34:56'::TIME, " + + "'binary': TO_BINARY('616263', 'HEX'), " + + "'simpleClass': {'string': 'b'}" + + "}::OBJECT(" + + "string VARCHAR, " + + "b TINYINT, " + + "s SMALLINT, " + + "i INTEGER, " + + "l BIGINT, " + + "f FLOAT, " + + "d DOUBLE, " + + "bd DOUBLE, " + + "bool BOOLEAN, " + + "timestamp_ltz TIMESTAMP_LTZ, " + + "timestamp_ntz TIMESTAMP_NTZ, " + + "timestamp_tz TIMESTAMP_TZ, " + + "date DATE, " + + "time TIME, " + + "binary BINARY, " + + "simpleClass OBJECT(string VARCHAR)" + + ")"); ) { + resultSet.next(); + AllTypesClass object = resultSet.getObject(1, AllTypesClass.class); + assertEquals("a", object.getString()); + assertEquals(1, (long) object.getB()); + assertEquals(2, (long) object.getS()); + assertEquals(3, (long) object.getI()); + assertEquals(4, (long) object.getL()); + assertEquals(1.1, (double) object.getF(), 0.01); + assertEquals(2.2, (double) object.getD(), 0.01); + assertEquals(BigDecimal.valueOf(3.3), object.getBd()); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 22, 9, 43, 44)), object.getTimestampLtz()); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 23, 10, 44, 44)), + object.getTimestampNtz()); + assertEquals( + Timestamp.valueOf(LocalDateTime.of(2021, 12, 24, 2, 45, 45)), object.getTimestampTz()); + assertEquals(Date.valueOf(LocalDate.of(2023, 12, 24)), object.getDate()); + assertEquals(Time.valueOf(LocalTime.of(12, 34, 56)), object.getTime()); + assertArrayEquals(new byte[] {'a', 'b', 'c'}, object.getBinary()); + assertTrue(object.getBool()); + assertEquals("b", object.getSimpleClass().getString()); + } + } + } + + // TODO Structured types feature exists only on QA environments + @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testMapStructsFromChunks() throws SQLException { + try (Connection connection = init(); + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))"); ) { + while (resultSet.next()) { + SimpleClass object = resultSet.getObject(1, SimpleClass.class); + assertEquals("a", object.getString()); + } + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/SimpleClass.java b/src/test/java/net/snowflake/client/jdbc/SimpleClass.java new file mode 100644 index 000000000..ee6745323 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/SimpleClass.java @@ -0,0 +1,31 @@ +package net.snowflake.client.jdbc; + +import java.sql.SQLData; +import java.sql.SQLException; +import java.sql.SQLInput; +import java.sql.SQLOutput; + +public class SimpleClass implements SQLData { + public String getString() { + return string; + } + + private String string; + + public SimpleClass() {} + + @Override + public String getSQLTypeName() throws SQLException { + return null; + } + + @Override + public void readSQL(SQLInput stream, String typeName) throws SQLException { + string = stream.readString(); + } + + @Override + public void writeSQL(SQLOutput stream) throws SQLException { + stream.writeString(string); + } +} diff --git 
a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index 1a307ffb3..89d5c0c06 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -835,10 +835,10 @@ public void testGeoOutputTypes() throws Throwable { regularStatement.execute("insert into t_geo values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); testGeoOutputTypeSingle( - regularStatement, false, "geoJson", "OBJECT", "java.lang.String", Types.VARCHAR); + regularStatement, false, "geoJson", "OBJECT", "java.lang.String", Types.STRUCT); testGeoOutputTypeSingle( - regularStatement, true, "geoJson", "GEOGRAPHY", "java.lang.String", Types.VARCHAR); + regularStatement, true, "geoJson", "GEOGRAPHY", "java.lang.String", Types.STRUCT); testGeoOutputTypeSingle( regularStatement, false, "wkt", "VARCHAR", "java.lang.String", Types.VARCHAR); @@ -986,10 +986,10 @@ public void testGeometryOutputTypes() throws Throwable { "insert into t_geo2 values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)')"); testGeometryOutputTypeSingle( - regularStatement, true, "geoJson", "GEOMETRY", "java.lang.String", Types.VARCHAR); + regularStatement, true, "geoJson", "GEOMETRY", "java.lang.String", Types.STRUCT); testGeometryOutputTypeSingle( - regularStatement, true, "wkt", "GEOMETRY", "java.lang.String", Types.VARCHAR); + regularStatement, true, "wkt", "GEOMETRY", "java.lang.String", Types.STRUCT); } finally { if (regularStatement != null) { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java new file mode 100644 index 000000000..29a5c167e --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.core.ObjectMapperFactory; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(TestCategoryCore.class) +public class SnowflakeUtilTest extends BaseJDBCTest { + + private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); + + @Test + public void testCreateMetadata() throws Throwable { + // given + ObjectNode rootNode = createRootNode(); + ArrayNode fields = OBJECT_MAPPER.createArrayNode(); + JsonNode fieldOne = createFieldNode("name1", null, 256, null, "text", false, "collation", 256); + fields.add(fieldOne); + JsonNode fieldTwo = createFieldNode("name2", 5, 128, 2, "real", true, "collation", 256); + fields.add(fieldTwo); + rootNode.put("fields", fields); + SnowflakeColumnMetadata expectedColumnMetadata = + createExpectedMetadata(rootNode, fieldOne, fieldTwo); + // when + SnowflakeColumnMetadata columnMetadata = + SnowflakeUtil.extractColumnMetadata(rootNode, false, null); + // then + assertNotNull(columnMetadata); + assertEquals( + OBJECT_MAPPER.writeValueAsString(expectedColumnMetadata), + OBJECT_MAPPER.writeValueAsString(columnMetadata)); + } + + private static SnowflakeColumnMetadata createExpectedMetadata( + JsonNode rootNode, JsonNode fieldOne, JsonNode fieldTwo) throws SnowflakeSQLLoggedException { + ColumnTypeInfo columnTypeInfo = + getSnowflakeType(rootNode.path("type").asText(), null, null, null, 0); + ColumnTypeInfo columnTypeInfoNodeOne = + getSnowflakeType(fieldOne.path("type").asText(), null, null, null, Types.BIGINT); + ColumnTypeInfo columnTypeInfoNodeTwo = + getSnowflakeType(fieldTwo.path("type").asText(), null, null, null, Types.DECIMAL); + SnowflakeColumnMetadata expectedColumnMetadata = + new SnowflakeColumnMetadata( + rootNode.path("name").asText(), + columnTypeInfo.getColumnType(), + rootNode.path("nullable").asBoolean(), + rootNode.path("length").asInt(), + rootNode.path("precision").asInt(), + rootNode.path("scale").asInt(), + columnTypeInfo.getExtColTypeName(), + false, + columnTypeInfo.getSnowflakeType(), + Arrays.asList( + new FieldMetadata( + fieldOne.path("name").asText(), + columnTypeInfoNodeOne.getExtColTypeName(), + columnTypeInfoNodeOne.getColumnType(), + fieldOne.path("nullable").asBoolean(), + fieldOne.path("length").asInt(), + fieldOne.path("precision").asInt(), + fieldOne.path("scale").asInt(), + fieldOne.path("fixed").asBoolean(), + columnTypeInfoNodeOne.getSnowflakeType(), + new ArrayList<>()), + new FieldMetadata( + fieldTwo.path("name").asText(), + columnTypeInfoNodeTwo.getExtColTypeName(), + columnTypeInfoNodeTwo.getColumnType(), + fieldTwo.path("nullable").asBoolean(), + fieldTwo.path("length").asInt(), + fieldTwo.path("precision").asInt(), + fieldTwo.path("scale").asInt(), + fieldTwo.path("fixed").asBoolean(), + columnTypeInfoNodeTwo.getSnowflakeType(), + new ArrayList<>())), + rootNode.path("database").asText(), + rootNode.path("schema").asText(), + rootNode.path("table").asText(), + false); + return 
expectedColumnMetadata; + } + + private static ObjectNode createRootNode() { + ObjectNode rootNode = createFieldNode("STRUCT", 2, 128, 8, "object", false, null, 42); + rootNode.put("database", "databaseName"); + rootNode.put("schema", "schemaName"); + rootNode.put("table", "tableName"); + return rootNode; + } + + private static ObjectNode createFieldNode( + String name, + Integer precision, + Integer byteLength, + Integer scale, + String type, + boolean nullable, + String collation, + Integer length) { + ObjectNode fieldNode = OBJECT_MAPPER.createObjectNode(); + fieldNode.put("name", name); + fieldNode.put("type", type); + fieldNode.put("precision", precision); + fieldNode.put("byteLength", byteLength); + fieldNode.put("scale", scale); + fieldNode.put("type", type); + fieldNode.put("nullable", nullable); + fieldNode.put("collation", collation); + fieldNode.put("length", length); + return fieldNode; + } +}
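
For readers skimming the diff, the new structured-type tests boil down to one usage pattern: a target class implements java.sql.SQLData (like SimpleClass above), optionally registers a factory, and the OBJECT column is fetched with getObject(int, Class). The following is a minimal sketch, not part of the PR, assuming the caller already holds a connection configured like init() above (ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE enabled).

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories;
import net.snowflake.client.jdbc.SimpleClass;

public class StructMappingSketch {
  // Reads {'string':'a'}::OBJECT(string VARCHAR) into SimpleClass, mirroring testMapJson(...).
  static String readSimpleObject(Connection connection) throws SQLException {
    // Optional: register a factory so the driver can instantiate the target type
    // without relying on reflection (the tests above exercise both paths).
    SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new);
    try (Statement statement = connection.createStatement();
        ResultSet resultSet =
            statement.executeQuery("select {'string':'a'}::OBJECT(string VARCHAR)")) {
      resultSet.next();
      // The driver feeds the OBJECT's attributes through SimpleClass.readSQL(...).
      SimpleClass object = resultSet.getObject(1, SimpleClass.class);
      return object.getString(); // "a"
    }
  }
}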
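
testMapStructAllTypes relies on an AllTypesClass whose source is not included in this excerpt. The sketch below is a hypothetical reconstruction of such a class: the field names and types are inferred from the getters asserted above, and readSQL consumes the attributes in the same order as the OBJECT(...) definition in the query. It is illustrative only; the nested-OBJECT read in particular is an assumption.

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.SQLData;
import java.sql.SQLException;
import java.sql.SQLInput;
import java.sql.SQLOutput;
import java.sql.Time;
import java.sql.Timestamp;
import net.snowflake.client.jdbc.SimpleClass;

public class AllTypesSketch implements SQLData {
  private String string;
  private Byte b;
  private Short s;
  private Integer i;
  private Long l;
  private Float f;
  private Double d;
  private BigDecimal bd;
  private Boolean bool;
  private Timestamp timestampLtz;
  private Timestamp timestampNtz;
  private Timestamp timestampTz;
  private Date date;
  private Time time;
  private byte[] binary;
  private SimpleClass simpleClass;

  @Override
  public String getSQLTypeName() {
    return null;
  }

  @Override
  public void readSQL(SQLInput stream, String typeName) throws SQLException {
    // Attributes must be read in declaration order of the OBJECT type.
    string = stream.readString();
    b = stream.readByte();
    s = stream.readShort();
    i = stream.readInt();
    l = stream.readLong();
    f = stream.readFloat();
    d = stream.readDouble();
    bd = stream.readBigDecimal();
    bool = stream.readBoolean();
    timestampLtz = stream.readTimestamp();
    timestampNtz = stream.readTimestamp();
    timestampTz = stream.readTimestamp();
    date = stream.readDate();
    time = stream.readTime();
    binary = stream.readBytes();
    // Assumption: a nested OBJECT attribute surfaces as another SQLData instance.
    simpleClass = (SimpleClass) stream.readObject();
  }

  @Override
  public void writeSQL(SQLOutput stream) throws SQLException {
    // Writing structured values is not exercised by the tests above.
  }

  // Getters (getString(), getB(), ..., getSimpleClass()) omitted for brevity.
}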
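
The SnowflakeDriverLatestIT changes flip the expected JDBC type for GEOGRAPHY and GEOMETRY columns rendered as GeoJSON from Types.VARCHAR to Types.STRUCT. The snippet below is a hedged illustration of how application code would observe that through ResultSetMetaData; the column name "g" and the GEOGRAPHY_OUTPUT_FORMAT session setting are assumptions here, since the t_geo table definition is not part of this excerpt.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class GeoTypeSketch {
  static boolean geoJsonColumnReportsStruct(Connection connection) throws SQLException {
    try (Statement statement = connection.createStatement()) {
      // Assumed session setting; the tests drive the output format through a helper instead.
      statement.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT = 'GeoJSON'");
      try (ResultSet rs = statement.executeQuery("select g from t_geo")) {
        ResultSetMetaData metaData = rs.getMetaData();
        // Expected to be Types.STRUCT after this change (previously Types.VARCHAR).
        return metaData.getColumnType(1) == Types.STRUCT;
      }
    }
  }
}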