Commit 453e7fb: fix formatting issues

sfc-gh-gdoci committed Sep 16, 2024
1 parent c764a65

Showing 4 changed files with 9 additions and 22 deletions.
File 1 of 4

@@ -4,7 +4,6 @@
 
 package net.snowflake.ingest.streaming.internal;
 
-import java.io.ByteArrayOutputStream;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -21,9 +20,7 @@ public class ParquetChunkData {
    * @param rows buffered row data as a list
    * @param metadata chunk metadata
    */
-  public ParquetChunkData(
-      List<List<Object>> rows,
-      Map<String, String> metadata) {
+  public ParquetChunkData(List<List<Object>> rows, Map<String, String> metadata) {
     this.rows = rows;
     // create a defensive copy of the parameter map because the argument map passed here
     // may currently be shared across multiple threads.
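The inline comment in this hunk carries the real reasoning: the metadata map passed to the constructor may still be shared, and mutated, by another thread, so the constructor copies it before use. A minimal sketch of the hazard the copy guards against, using a hypothetical MetadataHolder class that is not part of this repository:

import java.util.HashMap;
import java.util.Map;

class MetadataHolder {
  private final Map<String, String> metadata;

  MetadataHolder(Map<String, String> metadata) {
    // Defensive copy: later mutations of the caller's map (possibly from
    // another thread) cannot change this object's view or trigger a
    // ConcurrentModificationException while this map is being iterated.
    this.metadata = new HashMap<>(metadata);
  }

  Map<String, String> get() {
    return metadata;
  }
}

Without the new HashMap<>(metadata) copy, a caller that keeps writing to the same map while a flush thread iterates it could fail intermittently, which is exactly the sharing the comment warns about.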
File 2 of 4

@@ -29,9 +29,7 @@ public class ParquetFlusher implements Flusher<ParquetChunkData> {
 
   private final Constants.BdecParquetCompression bdecParquetCompression;
 
-  /**
-   * Construct parquet flusher from its schema.
-   */
+  /** Construct parquet flusher from its schema. */
   public ParquetFlusher(
       MessageType schema,
       long maxChunkSizeInBytes,
@@ -43,7 +41,7 @@ public ParquetFlusher(
 
   @Override
   public SerializationResult serialize(
-        List<ChannelData<ParquetChunkData>> channelsDataPerTable, String filePath)
+      List<ChannelData<ParquetChunkData>> channelsDataPerTable, String filePath)
       throws IOException {
     return serializeFromJavaObjects(channelsDataPerTable, filePath);
   }
File 3 of 4

@@ -4,8 +4,6 @@
 
 package net.snowflake.ingest.streaming.internal;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.charset.StandardCharsets;
@@ -24,7 +22,6 @@
 import net.snowflake.ingest.streaming.OpenChannelRequest;
 import net.snowflake.ingest.utils.ErrorCode;
 import net.snowflake.ingest.utils.SFException;
-import org.apache.parquet.hadoop.BdecParquetWriter;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.PrimitiveType;
 import org.apache.parquet.schema.Type;
@@ -126,7 +123,7 @@ float addRow(
   }
 
   void writeRow(List<Object> row) {
-      data.add(row);
+    data.add(row);
   }
 
   @Override
@@ -231,7 +228,7 @@ boolean hasColumns() {
   @Override
   Optional<ParquetChunkData> getSnapshot() {
     List<List<Object>> oldData = new ArrayList<>();
-      data.forEach(r -> oldData.add(new ArrayList<>(r)));
+    data.forEach(r -> oldData.add(new ArrayList<>(r)));
     return bufferedRowCount <= 0
         ? Optional.empty()
         : Optional.of(new ParquetChunkData(oldData, metadata));
File 4 of 4

@@ -62,16 +62,12 @@ public void testSerializationErrors() throws Exception {
    * Creates a channel data configurable number of rows in metadata and 1 physical row (using both
    * with and without internal buffering optimization)
    */
-  private List<ChannelData<ParquetChunkData>> createChannelDataPerTable(
-      int metadataRowCount) throws IOException {
+  private List<ChannelData<ParquetChunkData>> createChannelDataPerTable(int metadataRowCount)
+      throws IOException {
     String columnName = "C1";
     ChannelData<ParquetChunkData> channelData = Mockito.spy(new ChannelData<>());
     MessageType schema = createSchema(columnName);
-    Mockito.doReturn(
-            new ParquetFlusher(
-                schema,
-                100L,
-                Constants.BdecParquetCompression.GZIP))
+    Mockito.doReturn(new ParquetFlusher(schema, 100L, Constants.BdecParquetCompression.GZIP))
        .when(channelData)
        .createFlusher();
 
@@ -88,8 +84,7 @@ private List<ChannelData<ParquetChunkData>> createChannelDataPerTable(
     bdecParquetWriter.writeRow(Collections.singletonList("1"));
     channelData.setVectors(
         new ParquetChunkData(
-            Collections.singletonList(Collections.singletonList("A")),
-            new HashMap<>()));
+            Collections.singletonList(Collections.singletonList("A")), new HashMap<>()));
     channelData.setColumnEps(new HashMap<>());
     channelData.setRowCount(metadataRowCount);
     channelData.setMinMaxInsertTimeInMs(new Pair<>(2L, 3L));
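The test helper in this last hunk leans on a standard Mockito idiom: spy a real object, then stub a single method so the test controls which flusher is returned. A minimal, self-contained sketch of that idiom, using hypothetical Channel and Flusher classes rather than the repository's types, and assuming Mockito is on the classpath:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;

class Flusher {}

class Channel {
  // In production this might build an expensive, fully configured flusher.
  Flusher createFlusher() {
    return new Flusher();
  }
}

public class SpyExample {
  public static void main(String[] args) {
    Channel channel = spy(new Channel());
    Flusher canned = new Flusher();
    // doReturn(...).when(spy).method() stubs without calling the real method,
    // which is the safe form for spies whose real method has side effects.
    doReturn(canned).when(channel).createFlusher();
    System.out.println(channel.createFlusher() == canned); // prints true
  }
}

The doReturn-first form matters with spies: when(channel.createFlusher()).thenReturn(canned) would invoke the real createFlusher() once during stubbing.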
