SNOW-974575 Fixes after code review
sfc-gh-pmotacki committed Mar 12, 2024
1 parent 20987ed commit 2c611c7
Showing 19 changed files with 497 additions and 399 deletions.
25 changes: 0 additions & 25 deletions src/main/java/net/snowflake/client/core/ConvertersFactory.java

This file was deleted.

@@ -511,15 +511,8 @@ public Object getObject(int columnIndex) throws SFException {

@Override
public Array getArray(int columnIndex) throws SFException {
ArrowVectorConverter converter = currentChunkIterator.getCurrentConverter(columnIndex - 1);
int index = currentChunkIterator.getCurrentRowInRecordBatch();
wasNull = converter.isNull(index);
converter.setTreatNTZAsUTC(treatNTZAsUTC);
converter.setUseSessionTimezone(useSessionTimezone);
converter.setSessionTimeZone(timeZone);
Object obj = converter.toObject(index);
// TODO: handleArray SNOW-969794
return null;
throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type ARRAY");
}

private Object handleObjectType(int columnIndex, Object obj) throws SFException {
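The change above replaces the silent `return null` (with its TODO) by an explicit unsupported-feature error on the Arrow path. Below is a minimal, self-contained sketch of that fail-fast pattern; the class name is hypothetical and SQLFeatureNotSupportedException stands in for SFException with ErrorCode.FEATURE_UNSUPPORTED only so the sketch compiles on its own.

import java.sql.Array;
import java.sql.SQLFeatureNotSupportedException;

class ArrowArrayAccessSketch {
  // Fail fast: callers get a clear "unsupported" error for ARRAY columns
  // instead of a null that is indistinguishable from a SQL NULL value.
  Array getArray(int columnIndex) throws SQLFeatureNotSupportedException {
    throw new SQLFeatureNotSupportedException("data type ARRAY");
  }
}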
5 changes: 3 additions & 2 deletions src/main/java/net/snowflake/client/core/SFBaseResultSet.java
@@ -64,7 +64,6 @@ public abstract class SFBaseResultSet {
// The serializable object which can serialize the metadata for this
// result set
protected SnowflakeResultSetSerializableV1 resultSetSerializable;
protected Converters converters;

public abstract boolean isLast();

@@ -199,7 +198,9 @@ public List<SnowflakeResultSetSerializable> getResultSetSerializables(long maxSi
return this.resultSetSerializable.splitBySize(maxSizeInBytes);
}

@SnowflakeJdbcInternalApi
public Converters getConverters() {
return converters;
logger.debug("Json converters weren't created");
return null;
}
}
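With the `converters` field removed from the base class, `getConverters()` becomes an internal-API hook: the base implementation logs and returns null, and JSON-backed result sets override it with real converters (see SFJsonResultSet below). A simplified, self-contained sketch of that shape, using stand-in classes rather than the actual driver types:

class ConvertersOverrideSketch {

  static class Converters {} // hypothetical placeholder for the real converters

  abstract static class BaseResultSet {
    Converters getConverters() {
      // result formats without JSON converters (e.g. the Arrow path) end up here
      return null;
    }
  }

  static class JsonResultSet extends BaseResultSet {
    private final Converters converters = new Converters();

    @Override
    Converters getConverters() {
      return converters;
    }
  }
}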
157 changes: 67 additions & 90 deletions src/main/java/net/snowflake/client/core/SFJsonResultSet.java
@@ -4,8 +4,6 @@

package net.snowflake.client.core;

import static net.snowflake.client.jdbc.SnowflakeUtil.getTimestampFromType;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -16,9 +14,6 @@
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Spliterator;
@@ -33,9 +28,7 @@
import net.snowflake.client.jdbc.SnowflakeColumnMetadata;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.client.util.TypeConverter;
import net.snowflake.common.core.SFTimestamp;
import net.snowflake.common.core.SnowflakeDateTimeFormat;
import net.snowflake.client.util.JsonStringToTypeConverter;

/** Abstract class used to represent snowflake result set in json format */
public abstract class SFJsonResultSet extends SFBaseResultSet {
@@ -102,13 +95,13 @@ public Object getObject(int columnIndex) throws SFException {
return getBoolean(columnIndex);

case Types.STRUCT:
if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
return getSqlInput((String) obj, columnIndex);
} else {
throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
}
case Types.ARRAY:
if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
return getArray(columnIndex);
} else {
throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
@@ -122,7 +115,7 @@ public Object getObject(int columnIndex) throws SFException {
@Override
public Array getArray(int columnIndex) throws SFException {
Object obj = getObjectInternal(columnIndex);
return getArrayInternal((String) obj);
return getArrayInternal((String) obj, columnIndex);
}

@Override
@@ -257,6 +250,8 @@ private Timestamp getTimestamp(int columnIndex) throws SFException {
return getTimestamp(columnIndex, TimeZone.getDefault());
}

@Override
@SnowflakeJdbcInternalApi
public Converters getConverters() {
return converters;
}
@@ -274,134 +269,116 @@ private Object getSqlInput(String input, int columnIndex) throws SFException {
}
}

private SfSqlArray getArrayInternal(String obj) throws SFException {
private SfSqlArray getArrayInternal(String obj, int columnIndex) throws SFException {
try {
SnowflakeColumnMetadata arrayMetadata = resultSetMetaData.getColumnMetadata().get(0);
FieldMetadata fieldMetadata = arrayMetadata.getField(1);
SnowflakeColumnMetadata arrayMetadata =
resultSetMetaData.getColumnMetadata().get(columnIndex - 1);
FieldMetadata fieldMetadata = arrayMetadata.getFields().get(0);

int columnSubType = fieldMetadata.getType();
int columnType = ColumnTypeHelper.getColumnType(columnSubType, session);
int scale = fieldMetadata.getScale();

ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);

Iterator nodeElements = arrayNode.elements();
Iterator<JsonNode> nodeElements = arrayNode.elements();

switch (columnSubType) {
case Types.INTEGER:
return new SfSqlArray(
columnSubType,
getStream(nodeElements, converters.integerConverter(columnType))
.toArray(Integer[]::new));
case Types.SMALLINT:
return new SfSqlArray(
columnSubType,
getStream(nodeElements, converters.smallIntConverter(columnType))
.toArray(Short[]::new));
case Types.TINYINT:
TypeConverter integerConverter =
value -> converters.getNumberConverter().getInt(value, Types.INTEGER);
return new SfSqlArray(
columnSubType, getStream(nodeElements, integerConverter).toArray(Integer[]::new));
columnSubType,
getStream(nodeElements, converters.tinyIntConverter(columnType))
.toArray(Byte[]::new));
case Types.BIGINT:
return new SfSqlArray(
columnSubType,
getStream(nodeElements, converters.bigIntConverter(columnType)).toArray(Long[]::new));
case Types.DECIMAL:
case Types.NUMERIC:
TypeConverter bigIntConverter =
value -> converters.getNumberConverter().getBigInt(value, Types.BIGINT);
return new SfSqlArray(
columnSubType, convertToNumericArray(nodeElements, bigIntConverter));
columnSubType,
convertToFixedArray(nodeElements, converters.bigDecimalConverter(columnType)));
case Types.CHAR:
case Types.VARCHAR:
case Types.LONGNVARCHAR:
TypeConverter varcharConverter = value -> value.toString();
return new SfSqlArray(
columnSubType, getStream(nodeElements, varcharConverter).toArray(String[]::new));
columnSubType,
getStream(nodeElements, converters.varcharConverter(columnType, columnSubType, scale))
.toArray(String[]::new));
case Types.BINARY:
TypeConverter bytesConverter =
value ->
converters.getBytesConverter().getBytes(value, columnType, Types.BINARY, scale);
return new SfSqlArray(
columnSubType, getStream(nodeElements, bytesConverter).toArray(Object[]::new));
columnSubType,
getStream(nodeElements, converters.bytesConverter(columnType, scale))
.toArray(Byte[][]::new));
case Types.FLOAT:
case Types.REAL:
return new SfSqlArray(
columnSubType,
getStream(nodeElements, converters.floatConverter(columnType)).toArray(Float[]::new));
case Types.DOUBLE:
TypeConverter doubleConverter =
value -> converters.getNumberConverter().getDouble(value, Types.DOUBLE);
return new SfSqlArray(
columnSubType, getStream(nodeElements, doubleConverter).toArray(Double[]::new));
columnSubType,
getStream(nodeElements, converters.doubleConverter(columnType))
.toArray(Double[]::new));
case Types.DATE:
TypeConverter dateConverter =
value -> {
SnowflakeDateTimeFormat formatter =
SnowflakeDateTimeFormat.fromSqlFormat(
(String) session.getCommonParameters().get("DATE_OUTPUT_FORMAT"));
SFTimestamp timestamp = formatter.parse((String) value);
return Date.valueOf(
Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalDate());
};
return new SfSqlArray(
columnSubType, getStream(nodeElements, dateConverter).toArray(Date[]::new));
columnSubType,
getStream(nodeElements, converters.dateConverter(session)).toArray(Date[]::new));
case Types.TIME:
TypeConverter timeConverter =
value -> {
SnowflakeDateTimeFormat formatter =
SnowflakeDateTimeFormat.fromSqlFormat(
(String) session.getCommonParameters().get("TIME_OUTPUT_FORMAT"));
SFTimestamp timestamp = formatter.parse((String) value);
return Time.valueOf(
Instant.ofEpochMilli(timestamp.getTime()).atZone(ZoneOffset.UTC).toLocalTime());
};
return new SfSqlArray(
columnSubType, getStream(nodeElements, timeConverter).toArray(Time[]::new));
columnSubType,
getStream(nodeElements, converters.timeConverter(session)).toArray(Time[]::new));
case Types.TIMESTAMP:
TypeConverter timestampConverter =
value -> {
Timestamp result = getTimestampFromType(columnSubType, (String) value, session);
if (result != null) {
return result;
}
return converters
.getDateTimeConverter()
.getTimestamp(value, columnType, columnSubType, null, scale);
};
return new SfSqlArray(
columnSubType, getStream(nodeElements, timestampConverter).toArray(Timestamp[]::new));
columnSubType,
getStream(
nodeElements,
converters.timestampConverter(columnSubType, columnType, scale, session))
.toArray(Timestamp[]::new));
case Types.BOOLEAN:
TypeConverter booleanConverter =
value -> converters.getBooleanConverter().getBoolean(value, columnType);
return new SfSqlArray(
columnSubType, getStream(nodeElements, booleanConverter).toArray(Boolean[]::new));
columnSubType,
getStream(nodeElements, converters.booleanConverter(columnType))
.toArray(Boolean[]::new));
case Types.STRUCT:
TypeConverter structConverter =
value -> {
try {
return OBJECT_MAPPER.readValue(value, Map.class);
} catch (JsonProcessingException e) {
throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
}
};
return new SfSqlArray(
columnSubType, getStream(nodeElements, structConverter).toArray(Map[]::new));
columnSubType,
getStream(nodeElements, converters.structConverter(OBJECT_MAPPER))
.toArray(Map[]::new));
case Types.ARRAY:
TypeConverter arrayConverter =
value -> {
try {
return OBJECT_MAPPER.readValue(value, HashMap[].class);
} catch (JsonProcessingException e) {
throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
}
};
return new SfSqlArray(
columnSubType, getStream(nodeElements, arrayConverter).toArray(Map[][]::new));
columnSubType,
getStream(nodeElements, converters.arrayConverter(OBJECT_MAPPER))
.toArray(Map[][]::new));
default:
return null;
throw new SFException(
ErrorCode.FEATURE_UNSUPPORTED,
"Can't construct array for data type: " + columnSubType);
}
} catch (JsonProcessingException e) {
throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
}
}

private Object[] convertToNumericArray(Iterator nodeElements, TypeConverter bigIntConverter) {
private Object[] convertToFixedArray(
Iterator nodeElements, JsonStringToTypeConverter bigIntConverter) {
AtomicInteger bigDecimalCount = new AtomicInteger();
Object[] elements =
getStream(nodeElements, bigIntConverter)
.map(
.peek(
elem -> {
if (elem instanceof BigDecimal) {
bigDecimalCount.incrementAndGet();
}
return elem;
})
.toArray(
size -> {
@@ -413,7 +390,7 @@ private Object[] convertToNumericArray(Iterator nodeElements, TypeConverter bigI
return elements;
}

private Stream getStream(Iterator nodeElements, TypeConverter converter) {
private Stream getStream(Iterator nodeElements, JsonStringToTypeConverter converter) {
return StreamSupport.stream(
Spliterators.spliteratorUnknownSize(nodeElements, Spliterator.ORDERED), false)
.map(
@@ -426,8 +403,8 @@ private Stream getStream(Iterator nodeElements, TypeConverter converter) {
});
}

private static Object convert(TypeConverter converter, JsonNode elem) throws SFException {
JsonNode node = elem;
private static Object convert(JsonStringToTypeConverter converter, JsonNode node)
throws SFException {
if (node.isValueNode()) {
return converter.convert(node.asText());
} else {
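The rewritten `getArrayInternal` builds each `SfSqlArray` by streaming the Jackson `ArrayNode` elements through a per-type `JsonStringToTypeConverter`. A minimal, self-contained sketch of that plumbing follows; the `StringToTypeConverter` interface and the class names below are illustrative assumptions, not the driver's actual API.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class JsonArrayStreamSketch {

  // stand-in for JsonStringToTypeConverter: convert one element's JSON text
  // into a typed Java value
  @FunctionalInterface
  interface StringToTypeConverter {
    Object convert(String value);
  }

  // wrap Jackson's Iterator<JsonNode> in a Stream and convert each value node
  static Stream<Object> toStream(Iterator<JsonNode> elements, StringToTypeConverter converter) {
    return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(elements, Spliterator.ORDERED), false)
        .map(node -> node.isNull() ? null : converter.convert(node.asText()));
  }

  public static void main(String[] args) throws Exception {
    JsonNode array = new ObjectMapper().readTree("[1, 2, 3]");
    Long[] longs = toStream(array.elements(), Long::parseLong).toArray(Long[]::new);
    System.out.println(longs[2]); // prints 3
  }
}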
5 changes: 2 additions & 3 deletions src/main/java/net/snowflake/client/core/SFResultSet.java
@@ -12,6 +12,7 @@
import java.util.Arrays;
import java.util.Comparator;
import net.snowflake.client.core.BasicEvent.QueryState;
import net.snowflake.client.core.json.Converters;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.JsonResultChunk;
import net.snowflake.client.jdbc.SnowflakeResultChunk;
@@ -154,9 +155,7 @@ public SFResultSet(
Telemetry telemetryClient,
boolean sortResult)
throws SQLException {
super(
resultSetSerializable.getTimeZone(),
ConvertersFactory.createJsonConverters(session, resultSetSerializable));
super(resultSetSerializable.getTimeZone(), new Converters(session, resultSetSerializable));
this.resultSetSerializable = resultSetSerializable;
this.columnCount = 0;
this.sortResult = sortResult;
@@ -473,6 +473,7 @@ public List<Boolean> getIsAutoIncrementList() {
return isAutoIncrementList;
}

@SnowflakeJdbcInternalApi
public List<SnowflakeColumnMetadata> getColumnMetadata() {
return columnMetadata;
}
12 changes: 6 additions & 6 deletions src/main/java/net/snowflake/client/core/SfSqlArray.java
@@ -8,24 +8,24 @@
import java.util.Map;

@SnowflakeJdbcInternalApi
public class SfSqlArray<T> implements Array {
public class SfSqlArray implements Array {

private int base;
private int baseType;
private Object elements;

public SfSqlArray(int base, Object elements) {
this.base = base;
public SfSqlArray(int baseType, Object elements) {
this.baseType = baseType;
this.elements = elements;
}

@Override
public String getBaseTypeName() throws SQLException {
return JDBCType.valueOf(base).getName();
return JDBCType.valueOf(baseType).getName();
}

@Override
public int getBaseType() throws SQLException {
return base;
return baseType;
}

@Override
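The rename from `base` to `baseType` makes it clearer that the field holds a `java.sql.Types` code, which `getBaseTypeName()` maps to the JDBC type name via `JDBCType.valueOf(...)`. A small standalone example of that mapping (not driver code):

import java.sql.JDBCType;
import java.sql.Types;

public class BaseTypeNameDemo {
  public static void main(String[] args) {
    int baseType = Types.INTEGER; // hypothetical element type of an array column
    // JDBCType.valueOf maps the java.sql.Types code to its JDBC name
    System.out.println(JDBCType.valueOf(baseType).getName()); // prints INTEGER
  }
}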
(Diffs for the remaining changed files are not shown.)
