Merge fixes and closing in finally blocks
sfc-gh-astachowski committed Sep 24, 2024
1 parent 113af82 commit 44f9c15
Showing 8 changed files with 192 additions and 154 deletions.
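The diff threads a TimeZone argument through the nested full-vector converters (struct, list, map, and fixed-size list) and moves each converter's vector.close() into a finally block, so the source vector is released even when conversion fails. Below is a minimal sketch of that close-in-finally pattern using plain Arrow vectors; the class and method names are illustrative and not taken from this commit.

import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.BigIntVector;
import org.apache.arrow.vector.IntVector;

public class CloseInFinallySketch {
  // Widen an IntVector into a BigIntVector. The source vector is closed in a
  // finally block so its buffers are released on both the success and failure
  // paths, mirroring the pattern adopted by the converters below.
  static BigIntVector widen(RootAllocator allocator, IntVector source) {
    try {
      BigIntVector converted = new BigIntVector(source.getName(), allocator);
      converted.allocateNew(source.getValueCount());
      for (int i = 0; i < source.getValueCount(); i++) {
        if (!source.isNull(i)) {
          converted.setSafe(i, source.get(i));
        }
      }
      converted.setValueCount(source.getValueCount());
      return converted;
    } finally {
      source.close();
    }
  }

  public static void main(String[] args) {
    try (RootAllocator allocator = new RootAllocator()) {
      IntVector source = new IntVector("ints", allocator);
      source.allocateNew(3);
      source.setSafe(0, 1);
      source.setSafe(1, 2);
      source.setSafe(2, 3);
      source.setValueCount(3);
      try (BigIntVector widened = widen(allocator, source)) {
        System.out.println(widened.getValueCount()); // 3
      }
    }
  }
}

Returning from inside the try block is fine here: the finally clause still runs before the caller receives the converted vector.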
ArrowFullVectorConverterUtil.java
@@ -4,7 +4,6 @@

import java.util.Map;
import java.util.TimeZone;

import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFException;
@@ -21,7 +20,8 @@
public class ArrowFullVectorConverterUtil {
private ArrowFullVectorConverterUtil() {}

static Types.MinorType deduceType(ValueVector vector, SFBaseSession session) throws SnowflakeSQLLoggedException {
static Types.MinorType deduceType(ValueVector vector, SFBaseSession session)
throws SnowflakeSQLLoggedException {
Types.MinorType type = Types.getMinorTypeForArrowType(vector.getField().getType());
// each column's metadata
Map<String, String> customMeta = vector.getField().getMetadata();
@@ -42,22 +42,22 @@ static Types.MinorType deduceType(ValueVector vector, SFBaseSession session) thr
case VECTOR:
return Types.MinorType.FIXED_SIZE_LIST;
case TIME:
{
String scaleStr = vector.getField().getMetadata().get("scale");
int sfScale = Integer.parseInt(scaleStr);
if (sfScale == 0) {
return Types.MinorType.TIMESEC;
}
if (sfScale <= 3) {
return Types.MinorType.TIMEMILLI;
}
if (sfScale <= 6) {
return Types.MinorType.TIMEMICRO;
}
if (sfScale <= 9) {
return Types.MinorType.TIMENANO;
{
String scaleStr = vector.getField().getMetadata().get("scale");
int sfScale = Integer.parseInt(scaleStr);
if (sfScale == 0) {
return Types.MinorType.TIMESEC;
}
if (sfScale <= 3) {
return Types.MinorType.TIMEMILLI;
}
if (sfScale <= 6) {
return Types.MinorType.TIMEMICRO;
}
if (sfScale <= 9) {
return Types.MinorType.TIMENANO;
}
}
}
case TIMESTAMP_NTZ:
return Types.MinorType.TIMESTAMPNANO;
case TIMESTAMP_LTZ:
@@ -101,7 +101,7 @@ public static FieldVector convert(
return new BinaryVectorConverter(allocator, vector, context, session, idx).convert();
case DATEDAY:
return new DateVectorConverter(allocator, vector, context, session, idx, timeZoneToUse)
.convert();
.convert();
case TIMESEC:
return new TimeSecVectorConverter(allocator, vector).convert();
case TIMEMILLI:
@@ -111,18 +111,23 @@
case TIMENANO:
return new TimeNanoVectorConverter(allocator, vector).convert();
case STRUCT:
return new StructVectorConverter(allocator, vector, context, session, idx, null)
.convert();
return new StructVectorConverter(
allocator, vector, context, session, timeZoneToUse, idx, null)
.convert();
case LIST:
return new ListVectorConverter(allocator, vector, context, session, idx, null)
.convert();
return new ListVectorConverter(
allocator, vector, context, session, timeZoneToUse, idx, null)
.convert();
case VARCHAR:
return new VarCharVectorConverter(allocator, vector, context, session, idx).convert();
case MAP:
return new MapVectorConverter(allocator, vector, context, session, idx, null).convert();
return new MapVectorConverter(
allocator, vector, context, session, timeZoneToUse, idx, null)
.convert();
case FIXED_SIZE_LIST:
return new FixedSizeListVectorConverter(allocator, vector, context, session, idx, null)
.convert();
return new FixedSizeListVectorConverter(
allocator, vector, context, session, timeZoneToUse, idx, null)
.convert();
default:
throw new SnowflakeSQLLoggedException(
session,
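For reference, the TIME branch of deduceType above chooses the smallest Arrow time type whose resolution covers the column's scale. A standalone sketch of that mapping follows; the helper name is hypothetical, and scales above 9 are not handled here (in the original switch they fall through to the next case).

import org.apache.arrow.vector.types.Types;

public class TimeTypeSketch {
  // Mirrors the scale-to-type mapping in deduceType's TIME branch:
  // scale 0 -> TIMESEC, <= 3 -> TIMEMILLI, <= 6 -> TIMEMICRO, <= 9 -> TIMENANO.
  static Types.MinorType timeTypeForScale(int sfScale) {
    if (sfScale == 0) {
      return Types.MinorType.TIMESEC;
    }
    if (sfScale <= 3) {
      return Types.MinorType.TIMEMILLI;
    }
    if (sfScale <= 6) {
      return Types.MinorType.TIMEMICRO;
    }
    return Types.MinorType.TIMENANO; // assumes scale <= 9
  }

  public static void main(String[] args) {
    System.out.println(timeTypeForScale(6)); // TIMEMICRO
  }
}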
FixedSizeListVectorConverter.java
@@ -1,6 +1,7 @@
package net.snowflake.client.core.arrow.fullvectorconverters;

import java.util.ArrayList;
import java.util.TimeZone;
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFException;
@@ -21,43 +22,48 @@ public class FixedSizeListVectorConverter implements ArrowFullVectorConverter {
protected SFBaseSession session;
protected int idx;
protected Object valueTargetType;
private TimeZone timeZoneToUse;

FixedSizeListVectorConverter(
RootAllocator allocator,
ValueVector vector,
DataConversionContext context,
SFBaseSession session,
TimeZone timeZoneToUse,
int idx,
Object valueTargetType) {
this.allocator = allocator;
this.vector = vector;
this.context = context;
this.session = session;
this.timeZoneToUse = timeZoneToUse;
this.idx = idx;
this.valueTargetType = valueTargetType;
}

@Override
public FieldVector convert() throws SFException, SnowflakeSQLException {
FixedSizeListVector listVector = (FixedSizeListVector) vector;
FieldVector dataVector = listVector.getDataVector();
FieldVector convertedDataVector =
ArrowFullVectorConverterUtil.convert(
allocator, dataVector, context, session, 0, valueTargetType);
FixedSizeListVector convertedListVector =
FixedSizeListVector.empty(listVector.getName(), listVector.getListSize(), allocator);
ArrayList<Field> fields = new ArrayList<>();
fields.add(convertedDataVector.getField());
convertedListVector.initializeChildrenFromFields(fields);
convertedListVector.allocateNew();
convertedListVector.setValueCount(listVector.getValueCount());
ArrowBuf validityBuffer = listVector.getValidityBuffer();
convertedListVector
.getValidityBuffer()
.setBytes(0L, validityBuffer, 0L, validityBuffer.capacity());
convertedDataVector.makeTransferPair(convertedListVector.getDataVector()).transfer();

vector.close();
return convertedListVector;
try {
FixedSizeListVector listVector = (FixedSizeListVector) vector;
FieldVector dataVector = listVector.getDataVector();
FieldVector convertedDataVector =
ArrowFullVectorConverterUtil.convert(
allocator, dataVector, context, session, timeZoneToUse, 0, valueTargetType);
FixedSizeListVector convertedListVector =
FixedSizeListVector.empty(listVector.getName(), listVector.getListSize(), allocator);
ArrayList<Field> fields = new ArrayList<>();
fields.add(convertedDataVector.getField());
convertedListVector.initializeChildrenFromFields(fields);
convertedListVector.allocateNew();
convertedListVector.setValueCount(listVector.getValueCount());
ArrowBuf validityBuffer = listVector.getValidityBuffer();
convertedListVector
.getValidityBuffer()
.setBytes(0L, validityBuffer, 0L, validityBuffer.capacity());
convertedDataVector.makeTransferPair(convertedListVector.getDataVector()).transfer();
return convertedListVector;
} finally {
vector.close();
}
}
}
ListVectorConverter.java
@@ -1,6 +1,7 @@
package net.snowflake.client.core.arrow.fullvectorconverters;

import java.util.ArrayList;
import java.util.TimeZone;
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFException;
@@ -21,18 +22,21 @@ public class ListVectorConverter implements ArrowFullVectorConverter {
protected SFBaseSession session;
protected int idx;
protected Object valueTargetType;
private TimeZone timeZoneToUse;

ListVectorConverter(
RootAllocator allocator,
ValueVector vector,
DataConversionContext context,
SFBaseSession session,
TimeZone timeZoneToUse,
int idx,
Object valueTargetType) {
this.allocator = allocator;
this.vector = vector;
this.context = context;
this.session = session;
this.timeZoneToUse = timeZoneToUse;
this.idx = idx;
this.valueTargetType = valueTargetType;
}
@@ -47,23 +51,26 @@ protected ListVector initVector(String name, Field field) {

@Override
public FieldVector convert() throws SFException, SnowflakeSQLException {
ListVector listVector = (ListVector) vector;
FieldVector dataVector = listVector.getDataVector();
FieldVector convertedDataVector =
ArrowFullVectorConverterUtil.convert(
allocator, dataVector, context, session, 0, valueTargetType);
ListVector convertedListVector = initVector(vector.getName(), dataVector.getField());
convertedListVector.allocateNew();
convertedListVector.setValueCount(listVector.getValueCount());
convertedListVector.getOffsetBuffer().setBytes(0, listVector.getOffsetBuffer());
ArrowBuf validityBuffer = listVector.getValidityBuffer();
convertedListVector
.getValidityBuffer()
.setBytes(0L, validityBuffer, 0L, validityBuffer.capacity());
convertedListVector.setLastSet(listVector.getLastSet());
convertedDataVector.makeTransferPair(convertedListVector.getDataVector()).transfer();

vector.close();
return convertedListVector;
try {
ListVector listVector = (ListVector) vector;
FieldVector dataVector = listVector.getDataVector();
FieldVector convertedDataVector =
ArrowFullVectorConverterUtil.convert(
allocator, dataVector, context, session, timeZoneToUse, 0, valueTargetType);
// TODO: change to convertedDataVector and make all necessary changes to make it work
ListVector convertedListVector = initVector(vector.getName(), dataVector.getField());
convertedListVector.allocateNew();
convertedListVector.setValueCount(listVector.getValueCount());
convertedListVector.getOffsetBuffer().setBytes(0, listVector.getOffsetBuffer());
ArrowBuf validityBuffer = listVector.getValidityBuffer();
convertedListVector
.getValidityBuffer()
.setBytes(0L, validityBuffer, 0L, validityBuffer.capacity());
convertedListVector.setLastSet(listVector.getLastSet());
convertedDataVector.makeTransferPair(convertedListVector.getDataVector()).transfer();
return convertedListVector;
} finally {
vector.close();
}
}
}
MapVectorConverter.java
@@ -1,6 +1,7 @@
package net.snowflake.client.core.arrow.fullvectorconverters;

import java.util.ArrayList;
import java.util.TimeZone;
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SnowflakeJdbcInternalApi;
@@ -18,9 +19,10 @@ public class MapVectorConverter extends ListVectorConverter {
ValueVector vector,
DataConversionContext context,
SFBaseSession session,
TimeZone timeZoneToUse,
int idx,
Object valueTargetType) {
super(allocator, vector, context, session, idx, valueTargetType);
super(allocator, vector, context, session, timeZoneToUse, idx, valueTargetType);
}

@Override
StructVectorConverter.java
@@ -3,6 +3,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.stream.Collectors;
import net.snowflake.client.core.DataConversionContext;
import net.snowflake.client.core.SFBaseSession;
@@ -25,51 +26,57 @@ public class StructVectorConverter implements ArrowFullVectorConverter {
protected SFBaseSession session;
protected int idx;
protected Map<String, Object> targetTypes;
private TimeZone timeZoneToUse;

StructVectorConverter(
RootAllocator allocator,
ValueVector vector,
DataConversionContext context,
SFBaseSession session,
TimeZone timeZoneToUse,
int idx,
Map<String, Object> targetTypes) {
this.allocator = allocator;
this.vector = vector;
this.context = context;
this.session = session;
this.timeZoneToUse = timeZoneToUse;
this.idx = idx;
this.targetTypes = targetTypes;
}

public FieldVector convert() throws SFException, SnowflakeSQLException {
StructVector structVector = (StructVector) vector;
List<FieldVector> childVectors = structVector.getChildrenFromFields();
List<FieldVector> convertedVectors = new ArrayList<>();
for (FieldVector childVector : childVectors) {
Object targetType = null;
if (targetTypes != null) {
targetType = targetTypes.get(childVector.getName());
try {
StructVector structVector = (StructVector) vector;
List<FieldVector> childVectors = structVector.getChildrenFromFields();
List<FieldVector> convertedVectors = new ArrayList<>();
for (FieldVector childVector : childVectors) {
Object targetType = null;
if (targetTypes != null) {
targetType = targetTypes.get(childVector.getName());
}
convertedVectors.add(
ArrowFullVectorConverterUtil.convert(
allocator, childVector, context, session, timeZoneToUse, idx, targetType));
}
convertedVectors.add(
ArrowFullVectorConverterUtil.convert(
allocator, childVector, context, session, idx, targetType));
}

List<Field> convertedFields =
convertedVectors.stream().map(ValueVector::getField).collect(Collectors.toList());
StructVector converted = StructVector.empty(vector.getName(), allocator);
converted.allocateNew();
converted.initializeChildrenFromFields(convertedFields);
for (FieldVector convertedVector : convertedVectors) {
TransferPair transferPair =
convertedVector.makeTransferPair(converted.getChild(convertedVector.getName()));
transferPair.transfer();
}
ArrowBuf validityBuffer = structVector.getValidityBuffer();
converted.getValidityBuffer().setBytes(0L, validityBuffer, 0L, validityBuffer.capacity());
converted.setValueCount(vector.getValueCount());
List<Field> convertedFields =
convertedVectors.stream().map(ValueVector::getField).collect(Collectors.toList());
StructVector converted = StructVector.empty(vector.getName(), allocator);
converted.allocateNew();
converted.initializeChildrenFromFields(convertedFields);
for (FieldVector convertedVector : convertedVectors) {
TransferPair transferPair =
convertedVector.makeTransferPair(converted.getChild(convertedVector.getName()));
transferPair.transfer();
}
ArrowBuf validityBuffer = structVector.getValidityBuffer();
converted.getValidityBuffer().setBytes(0L, validityBuffer, 0L, validityBuffer.capacity());
converted.setValueCount(vector.getValueCount());

vector.close();
return converted;
return converted;
} finally {
vector.close();
}
}
}
TimeVectorConverter.java
@@ -29,17 +29,20 @@ public TimeVectorConverter(RootAllocator allocator, ValueVector vector) {

@Override
public FieldVector convert() throws SFException, SnowflakeSQLException {
int size = vector.getValueCount();
T converted = initVector();
converted.allocateNew(size);
BaseIntVector srcVector = (BaseIntVector) vector;
int scale = Integer.parseInt(vector.getField().getMetadata().get("scale"));
long scalingFactor = ArrowResultUtil.powerOfTen(targetScale() - scale);
for (int i = 0; i < size; i++) {
convertValue(converted, i, srcVector.getValueAsLong(i) * scalingFactor);
try {
int size = vector.getValueCount();
T converted = initVector();
converted.allocateNew(size);
BaseIntVector srcVector = (BaseIntVector) vector;
int scale = Integer.parseInt(vector.getField().getMetadata().get("scale"));
long scalingFactor = ArrowResultUtil.powerOfTen(targetScale() - scale);
for (int i = 0; i < size; i++) {
convertValue(converted, i, srcVector.getValueAsLong(i) * scalingFactor);
}
converted.setValueCount(size);
return converted;
} finally {
vector.close();
}
converted.setValueCount(size);
vector.close();
return converted;
}
}
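The conversion loop above rescales each raw value from the column's scale to the target vector's scale by multiplying by a power of ten. A self-contained sketch of that arithmetic, with powerOfTen re-implemented locally so the example does not depend on ArrowResultUtil:

public class TimeScaleSketch {
  // Same adjustment as TimeVectorConverter: value * 10^(targetScale - sourceScale).
  static long powerOfTen(int exponent) {
    long result = 1L;
    for (int i = 0; i < exponent; i++) {
      result *= 10L;
    }
    return result;
  }

  public static void main(String[] args) {
    long milliTime = 12_345L;  // 12.345 s stored at scale 3 (TIMEMILLI)
    int sourceScale = 3;
    int targetScale = 9;       // converting into a TIMENANO vector
    long nanoTime = milliTime * powerOfTen(targetScale - sourceScale);
    System.out.println(nanoTime); // 12345000000
  }
}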