Skip to content

Commit

Permalink
SNOW-983635 Print supported compression algorithms
Browse files Browse the repository at this point in the history
Description

Testing
  • Loading branch information
sfc-gh-lthiede committed Dec 21, 2023
1 parent 8e1fc99 commit 05ff7d9
Showing 1 changed file with 3 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import org.apache.parquet.column.values.factory.DefaultV1ValuesWriterFactory;
import org.apache.parquet.crypto.FileEncryptionProperties;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.io.DelegatingPositionOutputStream;
import org.apache.parquet.io.OutputFile;
import org.apache.parquet.io.ParquetEncodingException;
Expand Down Expand Up @@ -85,6 +86,7 @@ public BdecParquetWriter(
To get code access to this internal initialisation, we have to move the BdecParquetWriter class in the parquet.hadoop package.
*/
codecFactory = new CodecFactory(conf, ParquetWriter.DEFAULT_PAGE_SIZE);
System.out.println(java.util.Arrays.asList(CompressionCodecName.values()));
@SuppressWarnings("deprecation") // Parquet does not support the new one now
CodecFactory.BytesCompressor compressor =
codecFactory.getCompressor(bdecParquetCompression.getCompressionCodec());
Expand All @@ -111,6 +113,7 @@ public void writeRow(List<Object> row) {
@Override
public void close() throws IOException {
try {
System.out.println(java.util.Arrays.asList(CompressionCodecName.values()));
writer.close();
} catch (InterruptedException e) {
throw new IOException(e);
Expand Down

0 comments on commit 05ff7d9

Please sign in to comment.