Skip to content

Commit

Permalink
SNOW-974575 Structured type array map (#1654)
Browse files Browse the repository at this point in the history
* SNOW-974575 Implementation of methods: getArray getLists getMap
  • Loading branch information
sfc-gh-pmotacki authored Mar 13, 2024
1 parent 7d7678a commit 75c57f2
Show file tree
Hide file tree
Showing 22 changed files with 866 additions and 99 deletions.
3 changes: 2 additions & 1 deletion src/main/java/net/snowflake/client/core/JsonSqlInput.java
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,8 @@ public Timestamp readTimestamp(TimeZone tz) throws SQLException {
int columnType = ColumnTypeHelper.getColumnType(fieldMetadata.getType(), session);
int columnSubType = fieldMetadata.getType();
int scale = fieldMetadata.getScale();
Timestamp result = getTimestampFromType(columnSubType, (String) value);
Timestamp result =
SnowflakeUtil.getTimestampFromType(columnSubType, (String) value, session);
if (result != null) {
return result;
}
Expand Down
7 changes: 7 additions & 0 deletions src/main/java/net/snowflake/client/core/SFArrowResultSet.java
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.Array;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.Time;
Expand Down Expand Up @@ -508,6 +509,12 @@ public Object getObject(int columnIndex) throws SFException {
return handleObjectType(columnIndex, obj);
}

/**
 * Structured ARRAY retrieval is not yet implemented for Arrow result sets.
 *
 * @param columnIndex 1-based column index
 * @return never returns normally
 * @throws SFException always, with {@code FEATURE_UNSUPPORTED}
 */
@Override
public Array getArray(int columnIndex) throws SFException {
  // TODO: handleArray SNOW-969794
  throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type ARRAY");
}

private Object handleObjectType(int columnIndex, Object obj) throws SFException {
int columnType = resultSetMetaData.getColumnType(columnIndex);
if (columnType == Types.STRUCT
Expand Down
10 changes: 10 additions & 0 deletions src/main/java/net/snowflake/client/core/SFBaseResultSet.java
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
package net.snowflake.client.core;

import java.math.BigDecimal;
import java.sql.Array;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.Time;
Expand All @@ -14,6 +15,7 @@
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import net.snowflake.client.core.json.Converters;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeResultSetSerializable;
import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1;
Expand Down Expand Up @@ -93,6 +95,8 @@ public abstract class SFBaseResultSet {

public abstract Object getObject(int columnIndex) throws SFException;

public abstract Array getArray(int columnIndex) throws SFException;

public abstract BigDecimal getBigDecimal(int columnIndex) throws SFException;

public abstract BigDecimal getBigDecimal(int columnIndex, int scale) throws SFException;
Expand Down Expand Up @@ -193,4 +197,10 @@ public List<SnowflakeResultSetSerializable> getResultSetSerializables(long maxSi
throws SQLException {
return this.resultSetSerializable.splitBySize(maxSizeInBytes);
}

/**
 * Returns the converters backing this result set, or {@code null} when none were created.
 *
 * <p>NOTE(review): base implementation always returns null — subclasses that hold converters
 * (e.g. the JSON result set) override this. Callers must null-check the result.
 */
@SnowflakeJdbcInternalApi
public Converters getConverters() {
  logger.debug("Json converters weren't created");
  return null;
}
}
221 changes: 197 additions & 24 deletions src/main/java/net/snowflake/client/core/SFJsonResultSet.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,28 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.math.BigDecimal;
import java.sql.Array;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.Iterator;
import java.util.Map;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import net.snowflake.client.core.json.Converters;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.FieldMetadata;
import net.snowflake.client.jdbc.SnowflakeColumnMetadata;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.client.util.JsonStringToTypeConverter;

/** Abstract class used to represent snowflake result set in json format */
public abstract class SFJsonResultSet extends SFBaseResultSet {
Expand Down Expand Up @@ -83,40 +95,27 @@ public Object getObject(int columnIndex) throws SFException {
return getBoolean(columnIndex);

case Types.STRUCT:
if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
return getSqlInput((String) obj, columnIndex);
} else {
throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
}
case Types.ARRAY:
if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
return getArray(columnIndex);
} else {
throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
}

default:
throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
}
}

private Object getSqlInput(String input, int columnIndex) throws SFException {
try {
JsonNode jsonNode = OBJECT_MAPPER.readTree(input);
return new JsonSqlInput(
jsonNode,
session,
converters,
resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields());
} catch (JsonProcessingException e) {
throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
}
}

/**
* Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type
* instead.
*
* @param columnIndex the column index
* @return an object of type long or BigDecimal depending on number size
* @throws SFException
*/
private Object getBigInt(int columnIndex, Object obj) throws SFException {
return converters.getNumberConverter().getBigInt(obj, columnIndex);
/**
 * Reads an ARRAY column as a {@link java.sql.Array}.
 *
 * @param columnIndex 1-based column index
 * @throws SFException if the raw value cannot be parsed into an array
 */
@Override
public Array getArray(int columnIndex) throws SFException {
  String rawArrayJson = (String) getObjectInternal(columnIndex);
  return getArrayInternal(rawArrayJson, columnIndex);
}

@Override
Expand Down Expand Up @@ -250,4 +249,178 @@ public Date getDate(int columnIndex, TimeZone tz) throws SFException {
private Timestamp getTimestamp(int columnIndex) throws SFException {
return getTimestamp(columnIndex, TimeZone.getDefault());
}

/** Exposes the JSON value converters used by this result set (internal API). */
@Override
@SnowflakeJdbcInternalApi
public Converters getConverters() {
  return converters;
}

/**
 * Parses the raw JSON text of a structured OBJECT column into a {@link JsonSqlInput}.
 *
 * @param input raw JSON text of the column value
 * @param columnIndex 1-based column index, used to look up the column's field metadata
 * @throws SFException wrapping any JSON parse failure as {@code INVALID_STRUCT_DATA}
 */
private Object getSqlInput(String input, int columnIndex) throws SFException {
  try {
    SnowflakeColumnMetadata columnMetadata =
        resultSetMetaData.getColumnMetadata().get(columnIndex - 1);
    return new JsonSqlInput(
        OBJECT_MAPPER.readTree(input), session, converters, columnMetadata.getFields());
  } catch (JsonProcessingException e) {
    throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
  }
}

/**
 * Builds a {@link SfSqlArray} from the raw JSON text of an ARRAY column, converting each
 * element to the Java type matching the array's declared element SQL type.
 *
 * @param obj raw JSON array text of the column value
 * @param columnIndex 1-based column index, used to look up element-type metadata
 * @return a typed SQL array wrapper
 * @throws SFException if the JSON cannot be parsed ({@code INVALID_STRUCT_DATA}) or the
 *     element type is not supported ({@code FEATURE_UNSUPPORTED})
 */
private SfSqlArray getArrayInternal(String obj, int columnIndex) throws SFException {
  try {
    SnowflakeColumnMetadata arrayMetadata =
        resultSetMetaData.getColumnMetadata().get(columnIndex - 1);
    // NOTE(review): assumes an ARRAY column always carries exactly one field entry describing
    // the element type — get(0) throws if fields is empty; confirm against metadata producer.
    FieldMetadata fieldMetadata = arrayMetadata.getFields().get(0);

    int columnSubType = fieldMetadata.getType();
    int columnType = ColumnTypeHelper.getColumnType(columnSubType, session);
    int scale = fieldMetadata.getScale();

    ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);
    Iterator<JsonNode> nodeElements = arrayNode.elements();

    // Dispatch on the element's SQL type to pick both the per-element converter and the
    // component type of the resulting Java array.
    switch (columnSubType) {
      case Types.INTEGER:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.integerConverter(columnType))
                .toArray(Integer[]::new));
      case Types.SMALLINT:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.smallIntConverter(columnType))
                .toArray(Short[]::new));
      case Types.TINYINT:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.tinyIntConverter(columnType))
                .toArray(Byte[]::new));
      case Types.BIGINT:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.bigIntConverter(columnType)).toArray(Long[]::new));
      case Types.DECIMAL:
      case Types.NUMERIC:
        // Fixed-point numbers may overflow Long; helper picks Long[] or BigDecimal[].
        return new SfSqlArray(
            columnSubType,
            convertToFixedArray(nodeElements, converters.bigDecimalConverter(columnType)));
      case Types.CHAR:
      case Types.VARCHAR:
      case Types.LONGNVARCHAR:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.varcharConverter(columnType, columnSubType, scale))
                .toArray(String[]::new));
      case Types.BINARY:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.bytesConverter(columnType, scale))
                .toArray(Byte[][]::new));
      case Types.FLOAT:
      case Types.REAL:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.floatConverter(columnType)).toArray(Float[]::new));
      case Types.DOUBLE:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.doubleConverter(columnType))
                .toArray(Double[]::new));
      case Types.DATE:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.dateConverter(session)).toArray(Date[]::new));
      case Types.TIME:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.timeConverter(session)).toArray(Time[]::new));
      case Types.TIMESTAMP:
        return new SfSqlArray(
            columnSubType,
            getStream(
                    nodeElements,
                    converters.timestampConverter(columnSubType, columnType, scale, session))
                .toArray(Timestamp[]::new));
      case Types.BOOLEAN:
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.booleanConverter(columnType))
                .toArray(Boolean[]::new));
      case Types.STRUCT:
        // Nested structs are surfaced as Map per element.
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.structConverter(OBJECT_MAPPER))
                .toArray(Map[]::new));
      case Types.ARRAY:
        // Nested arrays are surfaced as Map[] per element.
        return new SfSqlArray(
            columnSubType,
            getStream(nodeElements, converters.arrayConverter(OBJECT_MAPPER))
                .toArray(Map[][]::new));
      default:
        throw new SFException(
            ErrorCode.FEATURE_UNSUPPORTED,
            "Can't construct array for data type: " + columnSubType);
    }
  } catch (JsonProcessingException e) {
    throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
  }
}

/**
 * Materializes a FIXED (DECIMAL/NUMERIC) array column. Elements normally convert to
 * {@link Long}; values that overflow Long convert to {@link BigDecimal}. If any element is a
 * BigDecimal the whole array is allocated as BigDecimal[] so the component type is uniform,
 * otherwise Long[].
 *
 * @param nodeElements JSON array elements to convert
 * @param fixedNumberConverter converter producing a Long or BigDecimal per element
 * @return Long[] when no element overflowed, otherwise BigDecimal[]
 */
private Object[] convertToFixedArray(
    Iterator nodeElements, JsonStringToTypeConverter fixedNumberConverter) {
  // Counted in a side channel because the component type is only known once all elements
  // have been converted, at the point toArray asks for the backing array.
  AtomicInteger bigDecimalCount = new AtomicInteger();
  return getStream(nodeElements, fixedNumberConverter)
      .peek(
          elem -> {
            if (elem instanceof BigDecimal) {
              bigDecimalCount.incrementAndGet();
            }
          })
      .toArray(
          size -> {
            // Renamed from the original typo "shouldbbeReturnAsBigDecimal".
            boolean shouldBeReturnedAsBigDecimal = bigDecimalCount.get() > 0;
            Class<?> returnedClass = shouldBeReturnedAsBigDecimal ? BigDecimal.class : Long.class;
            return java.lang.reflect.Array.newInstance(returnedClass, size);
          });
}

/**
 * Lazily maps each JSON array element through the given converter.
 *
 * <p>Checked {@link SFException}s thrown during conversion are rethrown wrapped in
 * {@link RuntimeException}, since stream pipelines cannot propagate checked exceptions.
 */
private Stream getStream(Iterator nodeElements, JsonStringToTypeConverter converter) {
  Spliterator elementSpliterator =
      Spliterators.spliteratorUnknownSize(nodeElements, Spliterator.ORDERED);
  return StreamSupport.stream(elementSpliterator, false)
      .map(
          elem -> {
            try {
              return convert(converter, (JsonNode) elem);
            } catch (SFException e) {
              throw new RuntimeException(e);
            }
          });
}

/**
 * Converts a single JSON node: scalar nodes pass their text value to the converter,
 * container nodes (objects/arrays) pass their serialized JSON text.
 */
private static Object convert(JsonStringToTypeConverter converter, JsonNode node)
    throws SFException {
  String rawValue = node.isValueNode() ? node.asText() : node.toString();
  return converter.convert(rawValue);
}

/**
 * Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type
 * instead.
 *
 * @param columnIndex the column index
 * @param obj the raw column value to convert
 * @return an object of type long or BigDecimal depending on number size
 * @throws SFException if the value cannot be converted to a number
 */
private Object getBigInt(int columnIndex, Object obj) throws SFException {
  return converters.getNumberConverter().getBigInt(obj, columnIndex);
}
}
17 changes: 1 addition & 16 deletions src/main/java/net/snowflake/client/core/SFResultSet.java
Original file line number Diff line number Diff line change
Expand Up @@ -155,22 +155,7 @@ public SFResultSet(
Telemetry telemetryClient,
boolean sortResult)
throws SQLException {
super(
resultSetSerializable.getTimeZone(),
new Converters(
resultSetSerializable.getTimeZone(),
session,
resultSetSerializable.getResultVersion(),
resultSetSerializable.isHonorClientTZForTimestampNTZ(),
resultSetSerializable.getTreatNTZAsUTC(),
resultSetSerializable.getUseSessionTimezone(),
resultSetSerializable.getFormatDateWithTimeZone(),
resultSetSerializable.getBinaryFormatter(),
resultSetSerializable.getDateFormatter(),
resultSetSerializable.getTimeFormatter(),
resultSetSerializable.getTimestampNTZFormatter(),
resultSetSerializable.getTimestampLTZFormatter(),
resultSetSerializable.getTimestampTZFormatter()));
super(resultSetSerializable.getTimeZone(), new Converters(session, resultSetSerializable));
this.resultSetSerializable = resultSetSerializable;
this.columnCount = 0;
this.sortResult = sortResult;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,8 @@ public List<Boolean> getIsAutoIncrementList() {
return isAutoIncrementList;
}

List<SnowflakeColumnMetadata> getColumnMetadata() {
/** Returns the per-column metadata of this result set (internal API). */
@SnowflakeJdbcInternalApi
public List<SnowflakeColumnMetadata> getColumnMetadata() {
  return columnMetadata;
}
}
Loading

0 comments on commit 75c57f2

Please sign in to comment.