diff --git a/src/main/java/net/snowflake/ingest/streaming/example/SnowflakeStreamingIngestLargeLobExample.java b/src/main/java/net/snowflake/ingest/streaming/example/SnowflakeStreamingIngestLargeLobExample.java
new file mode 100644
index 000000000..2f5952f0d
--- /dev/null
+++ b/src/main/java/net/snowflake/ingest/streaming/example/SnowflakeStreamingIngestLargeLobExample.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2021 Snowflake Computing Inc. All rights reserved.
+ */
+
+package net.snowflake.ingest.streaming.example;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+import net.snowflake.client.jdbc.internal.apache.tika.utils.StringUtils;
+import net.snowflake.ingest.streaming.InsertValidationResponse;
+import net.snowflake.ingest.streaming.OpenChannelRequest;
+import net.snowflake.ingest.streaming.SnowflakeStreamingIngestChannel;
+import net.snowflake.ingest.streaming.SnowflakeStreamingIngestClient;
+import net.snowflake.ingest.streaming.SnowflakeStreamingIngestClientFactory;
+
+/**
+ * Example on how to use the Streaming Ingest client APIs to ingest large LOB values.
+ *
+ * <p>Please read the README.md file for detailed steps
+ */
+public class SnowflakeStreamingIngestLargeLobExample {
+ // Please follow the example in profile_streaming.json.example to see the required properties, or
+ // if you have already set up profile.json with Snowpipe before, all you need is to add the "role"
+ // property. If the "role" is not specified, the default user role will be applied.
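+  // An illustrative profile.json (see profile_streaming.json.example for the full list of
+  // supported properties):
+  // {"user": "MY_USER", "url": "https://<account>.snowflakecomputing.com:443",
+  //  "private_key": "<pkcs8_private_key>", "role": "MY_ROLE"}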
+ private static String PROFILE_PATH = "profile.json";
+ private static final ObjectMapper mapper = new ObjectMapper();
+
+ public static void main(String[] args) throws Exception {
+ Properties props = new Properties();
+    Iterator<Map.Entry<String, JsonNode>> propIt =
+ mapper.readTree(new String(Files.readAllBytes(Paths.get(PROFILE_PATH)))).fields();
+ while (propIt.hasNext()) {
+      Map.Entry<String, JsonNode> prop = propIt.next();
+ props.put(prop.getKey(), prop.getValue().asText());
+ }
+
+ // Create a streaming ingest client
+ try (SnowflakeStreamingIngestClient client =
+ SnowflakeStreamingIngestClientFactory.builder("MY_CLIENT").setProperties(props).build()) {
+
+ // Create an open channel request on table MY_TABLE, note that the corresponding
+ // db/schema/table needs to be present
+      // Example: create or replace table MY_TABLE(c1 varchar, c2 binary, c3 variant, c4 variant);
+ OpenChannelRequest request1 =
+ OpenChannelRequest.builder("MY_CHANNEL")
+ .setDBName("MY_DATABASE")
+ .setSchemaName("MY_SCHEMA")
+ .setTableName("MY_TABLE")
+ .setOnErrorOption(
+ OpenChannelRequest.OnErrorOption.CONTINUE) // Another ON_ERROR option is ABORT
+ .build();
+
+ // Open a streaming ingest channel from the given client
+ SnowflakeStreamingIngestChannel channel1 = client.openChannel(request1);
+
+ // Insert rows into the channel (Using insertRows API)
+ final int totalRowsInTable = 10;
+ for (int val = 0; val < totalRowsInTable; val++) {
+        Map<String, Object> row = new HashMap<>();
+
+        // The keys below must match the large LOB column names in the table
+ row.put("c1", StringUtils.repeat("a", 127 * 1024 * 1024) + val);
+ row.put("c2", new byte[60 * 1024 * 1024]);
+ row.put("c3", "{\"a\":\"" + StringUtils.repeat("a", 127 * 1024 * 1024) + "\"}");
+ row.put("c4", "{\"a\":\"" + StringUtils.repeat("a", 127 * 1024 * 1024) + "\"}");
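+        // Each value stays within the raised caps: c1, c3 and c4 are just under the 128 MB
+        // string/semi-structured limits, while c2 is under the 64 MB binary limit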
+
+ // Insert the row with the current offset_token
+ InsertValidationResponse response = channel1.insertRow(row, String.valueOf(val));
+ if (response.hasErrors()) {
+ // Simply throw if there is an exception, or you can do whatever you want with the
+ // erroneous row
+ throw response.getInsertErrors().get(0).getException();
+ }
+ }
+
+ // If needed, you can check the offset_token registered in Snowflake to make sure everything
+ // is committed
+ final int expectedOffsetTokenInSnowflake = totalRowsInTable - 1; // 0 based offset_token
+ final int maxRetries = 10;
+ int retryCount = 0;
+
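+      // Poll the committed offset token; a real application would typically wait between attempts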
+ do {
+ String offsetTokenFromSnowflake = channel1.getLatestCommittedOffsetToken();
+ if (offsetTokenFromSnowflake != null
+ && offsetTokenFromSnowflake.equals(String.valueOf(expectedOffsetTokenInSnowflake))) {
+ System.out.println("SUCCESSFULLY inserted " + totalRowsInTable + " rows");
+ break;
+ }
+ retryCount++;
+ } while (retryCount < maxRetries);
+
+ // Close the channel, the function internally will make sure everything is committed (or throw
+ // an exception if there is any issue)
+ channel1.close().get();
+ }
+ }
+}
diff --git a/src/main/java/net/snowflake/ingest/streaming/internal/DataValidationUtil.java b/src/main/java/net/snowflake/ingest/streaming/internal/DataValidationUtil.java
index 035a88373..b43a44f46 100644
--- a/src/main/java/net/snowflake/ingest/streaming/internal/DataValidationUtil.java
+++ b/src/main/java/net/snowflake/ingest/streaming/internal/DataValidationUtil.java
@@ -12,6 +12,7 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
@@ -69,17 +70,23 @@ class DataValidationUtil {
*/
private static final long MICROSECONDS_LIMIT_FOR_EPOCH = SECONDS_LIMIT_FOR_EPOCH * 1000000L;
- public static final int BYTES_8_MB = 8 * 1024 * 1024;
- public static final int BYTES_16_MB = 2 * BYTES_8_MB;
+ public static final int BYTES_64_MB = 64 * 1024 * 1024;
+ public static final int BYTES_128_MB = 2 * BYTES_64_MB;
// TODO SNOW-664249: There is a few-byte mismatch between the value sent by the user and its
// server-side representation. Validation leaves a small buffer for this difference.
- static final int MAX_SEMI_STRUCTURED_LENGTH = BYTES_16_MB - 64;
+ static final int MAX_SEMI_STRUCTURED_LENGTH = BYTES_128_MB - 64;
private static final ObjectMapper objectMapper = new ObjectMapper();
private static final JsonFactory factory = new JsonFactory();
+  // Jackson 2.15+ enforces a default max string length via StreamReadConstraints; raise it to
+  // 128 MB so large semi-structured values can still be parsed
+ static {
+ factory.setStreamReadConstraints(
+ StreamReadConstraints.builder().maxStringLength(BYTES_128_MB).build());
+ }
+
// The version of Jackson we are using does not support serialization of date objects from the
// java.time package. Here we define a module with custom java.time serializers. Additionally, we
// define custom serializer for byte[] because the Jackson default is to serialize it as
@@ -670,13 +677,14 @@ static String validateAndParseString(
}
byte[] utf8Bytes = output.getBytes(StandardCharsets.UTF_8);
- // Strings can never be larger than 16MB
- if (utf8Bytes.length > BYTES_16_MB) {
+ // Strings can never be larger than 128MB
+ if (utf8Bytes.length > BYTES_128_MB) {
throw valueFormatNotAllowedException(
columnName,
"STRING",
String.format(
- "String too long: length=%d bytes maxLength=%d bytes", utf8Bytes.length, BYTES_16_MB),
+ "String too long: length=%d bytes maxLength=%d bytes",
+ utf8Bytes.length, BYTES_128_MB),
insertRowIndex);
}
@@ -815,7 +823,7 @@ static byte[] validateAndParseBinary(
insertRowIndex);
}
- int maxLength = maxLengthOptional.orElse(BYTES_8_MB);
+ int maxLength = maxLengthOptional.orElse(BYTES_64_MB);
if (output.length > maxLength) {
throw valueFormatNotAllowedException(
columnName,
diff --git a/src/main/java/net/snowflake/ingest/utils/Constants.java b/src/main/java/net/snowflake/ingest/utils/Constants.java
index a5e04e21e..1b16d3792 100644
--- a/src/main/java/net/snowflake/ingest/utils/Constants.java
+++ b/src/main/java/net/snowflake/ingest/utils/Constants.java
@@ -64,8 +64,8 @@ public class Constants {
public static final long EP_NDV_UNKNOWN = -1L;
public static final long EP_NV_UNKNOWN = -1L;
public static final int MAX_OAUTH_REFRESH_TOKEN_RETRY = 3;
- public static final int BINARY_COLUMN_MAX_SIZE = 8 * 1024 * 1024;
- public static final int VARCHAR_COLUMN_MAX_SIZE = 16 * 1024 * 1024;
+ public static final int BINARY_COLUMN_MAX_SIZE = 64 * 1024 * 1024;
+ public static final int VARCHAR_COLUMN_MAX_SIZE = 128 * 1024 * 1024;
// Channel level constants
public static final String CHANNEL_STATUS_ENDPOINT = "/v1/streaming/channels/status/";
diff --git a/src/main/java/net/snowflake/ingest/utils/ParameterProvider.java b/src/main/java/net/snowflake/ingest/utils/ParameterProvider.java
index b86e59525..6c73027b1 100644
--- a/src/main/java/net/snowflake/ingest/utils/ParameterProvider.java
+++ b/src/main/java/net/snowflake/ingest/utils/ParameterProvider.java
@@ -230,6 +230,13 @@ private void setParameterMap(
props,
false /* enforceDefault */);
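+    // Register MAX_ALLOWED_ROW_SIZE_IN_BYTES as a user-overridable parameter; a row holding
+    // several 128 MB values can exceed the default cap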
+ this.checkAndUpdate(
+ MAX_ALLOWED_ROW_SIZE_IN_BYTES,
+ MAX_ALLOWED_ROW_SIZE_IN_BYTES_DEFAULT,
+ parameterOverrides,
+ props,
+ false /* enforceDefault */);
+
this.checkAndUpdate(
MAX_CLIENT_LAG,
isIcebergMode ? MAX_CLIENT_LAG_ICEBERG_MODE_DEFAULT : MAX_CLIENT_LAG_DEFAULT,
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/DataValidationUtilTest.java b/src/test/java/net/snowflake/ingest/streaming/internal/DataValidationUtilTest.java
index 4d0c51596..759345650 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/DataValidationUtilTest.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/DataValidationUtilTest.java
@@ -6,8 +6,8 @@
import static java.time.ZoneOffset.UTC;
import static net.snowflake.ingest.TestUtils.buildString;
-import static net.snowflake.ingest.streaming.internal.DataValidationUtil.BYTES_16_MB;
-import static net.snowflake.ingest.streaming.internal.DataValidationUtil.BYTES_8_MB;
+import static net.snowflake.ingest.streaming.internal.DataValidationUtil.BYTES_128_MB;
+import static net.snowflake.ingest.streaming.internal.DataValidationUtil.BYTES_64_MB;
import static net.snowflake.ingest.streaming.internal.DataValidationUtil.isAllowedSemiStructuredType;
import static net.snowflake.ingest.streaming.internal.DataValidationUtil.validateAndParseArray;
import static net.snowflake.ingest.streaming.internal.DataValidationUtil.validateAndParseArrayNew;
@@ -451,11 +451,11 @@ public void testValidateAndParseString() {
assertEquals("honk", validateAndParseString("COL", "honk", Optional.empty(), 0));
// Check max byte length
- String maxString = buildString("a", BYTES_16_MB);
+ String maxString = buildString("a", BYTES_128_MB);
assertEquals(maxString, validateAndParseString("COL", maxString, Optional.empty(), 0));
// max byte length - 1 should also succeed
- String maxStringMinusOne = buildString("a", BYTES_16_MB - 1);
+ String maxStringMinusOne = buildString("a", BYTES_128_MB - 1);
assertEquals(
maxStringMinusOne, validateAndParseString("COL", maxStringMinusOne, Optional.empty(), 0));
@@ -763,7 +763,7 @@ public void testValidateAndParseObject() throws Exception {
final String tooLargeObject =
objectMapper.writeValueAsString(
- Collections.singletonMap("key", StringUtils.repeat('a', 20000000)));
+ Collections.singletonMap("key", StringUtils.repeat('a', 128 * 1024 * 1024)));
expectError(
ErrorCode.INVALID_VALUE_ROW, () -> validateAndParseObject("COL", tooLargeObject, 0));
expectError(
@@ -858,7 +858,7 @@ public void testValidateAndParseObject() throws Exception {
@Test
public void testTooLargeVariant() {
- char[] stringContent = new char[16 * 1024 * 1024 - 16]; // {"a":"11","b":""}
+ char[] stringContent = new char[128 * 1024 * 1024 - 16]; // {"a":"11","b":""}
Arrays.fill(stringContent, 'c');
// {"a":"11","b":""}
@@ -873,7 +873,7 @@ public void testTooLargeVariant() {
@Test
public void testTooLargeMultiByteSemiStructuredValues() {
// Variant max size is not in characters, but in bytes
- char[] stringContent = new char[9 * 1024 * 1024]; // 8MB < value < 16MB
+    char[] stringContent = new char[90 * 1024 * 1024]; // char count < 128M, but 2-byte UTF-8 chars push the byte size past 128MB
Arrays.fill(stringContent, 'Č');
Map<String, Object> m = new HashMap<>();
@@ -882,19 +882,19 @@ public void testTooLargeMultiByteSemiStructuredValues() {
ErrorCode.INVALID_VALUE_ROW,
"The given row cannot be converted to the internal format due to invalid value: Value"
+ " cannot be ingested into Snowflake column COL of type VARIANT, rowIndex:0, reason:"
- + " Variant too long: length=18874376 maxLength=16777152",
+ + " Variant too long: length=188743688 maxLength=134217664",
() -> validateAndParseVariant("COL", m, 0));
expectErrorCodeAndMessage(
ErrorCode.INVALID_VALUE_ROW,
"The given row cannot be converted to the internal format due to invalid value: Value"
+ " cannot be ingested into Snowflake column COL of type ARRAY, rowIndex:0, reason:"
- + " Array too large. length=18874378 maxLength=16777152",
+ + " Array too large. length=188743690 maxLength=134217664",
() -> validateAndParseArray("COL", m, 0));
expectErrorCodeAndMessage(
ErrorCode.INVALID_VALUE_ROW,
"The given row cannot be converted to the internal format due to invalid value: Value"
+ " cannot be ingested into Snowflake column COL of type OBJECT, rowIndex:0, reason:"
- + " Object too large. length=18874376 maxLength=16777152",
+ + " Object too large. length=188743688 maxLength=134217664",
() -> validateAndParseObject("COL", m, 0));
}
@@ -1056,8 +1056,8 @@ public void testValidVariantType() {
@Test
public void testValidateAndParseBinary() throws DecoderException {
- byte[] maxAllowedArray = new byte[BYTES_8_MB];
- byte[] maxAllowedArrayMinusOne = new byte[BYTES_8_MB - 1];
+ byte[] maxAllowedArray = new byte[BYTES_64_MB];
+ byte[] maxAllowedArrayMinusOne = new byte[BYTES_64_MB - 1];
assertArrayEquals(
"honk".getBytes(StandardCharsets.UTF_8),
@@ -1094,7 +1094,7 @@ public void testValidateAndParseBinary() throws DecoderException {
() -> validateAndParseBinary("COL", new byte[1], Optional.of(0), 0));
expectError(
ErrorCode.INVALID_VALUE_ROW,
- () -> validateAndParseBinary("COL", new byte[BYTES_8_MB + 1], Optional.empty(), 0));
+ () -> validateAndParseBinary("COL", new byte[BYTES_64_MB + 1], Optional.empty(), 0));
expectError(
ErrorCode.INVALID_VALUE_ROW,
() -> validateAndParseBinary("COL", new byte[8], Optional.of(7), 0));
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/RowBufferTest.java b/src/test/java/net/snowflake/ingest/streaming/internal/RowBufferTest.java
index 042834ce7..b056afcf0 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/RowBufferTest.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/RowBufferTest.java
@@ -5,6 +5,7 @@
package net.snowflake.ingest.streaming.internal;
import static java.time.ZoneOffset.UTC;
+import static net.snowflake.ingest.streaming.internal.DataValidationUtil.BYTES_128_MB;
import static net.snowflake.ingest.utils.Constants.EP_NV_UNKNOWN;
import static net.snowflake.ingest.utils.ParameterProvider.ENABLE_NEW_JSON_PARSING_LOGIC_DEFAULT;
import static net.snowflake.ingest.utils.ParameterProvider.MAX_ALLOWED_ROW_SIZE_IN_BYTES_DEFAULT;
@@ -395,7 +396,7 @@ public void testRowIndexWithMultipleRowsWithErrorHelper(AbstractRowBuffer<?> row
rows.add(row);
row = new HashMap<>();
- row.put("colChar", StringUtils.repeat('1', 16777217)); // too big
+ row.put("colChar", StringUtils.repeat('1', BYTES_128_MB + 1)); // too big
rows.add(row);
row = new HashMap<>();
@@ -403,7 +404,7 @@ public void testRowIndexWithMultipleRowsWithErrorHelper(AbstractRowBuffer<?> row
rows.add(row);
row = new HashMap<>();
- row.put("colChar", StringUtils.repeat('1', 16777217)); // too big
+ row.put("colChar", StringUtils.repeat('1', BYTES_128_MB + 1)); // too big
rows.add(row);
InsertValidationResponse response = rowBuffer.insertRows(rows, null, null);
@@ -435,8 +436,8 @@ public void testRowIndexWithMultipleRowsWithErrorHelper(AbstractRowBuffer<?> row
.equalsIgnoreCase(
"The given row cannot be converted to the internal format due to invalid value:"
+ " Value cannot be ingested into Snowflake column COLCHAR of type STRING,"
- + " rowIndex:1, reason: String too long: length=16777217 bytes"
- + " maxLength=16777216 bytes"));
+ + " rowIndex:1, reason: String too long: length=134217729 bytes"
+ + " maxLength=134217728 bytes"));
Assert.assertTrue(
response
.getInsertErrors()
@@ -446,8 +447,8 @@ public void testRowIndexWithMultipleRowsWithErrorHelper(AbstractRowBuffer<?> row
.equalsIgnoreCase(
"The given row cannot be converted to the internal format due to invalid value:"
+ " Value cannot be ingested into Snowflake column COLCHAR of type STRING,"
- + " rowIndex:3, reason: String too long: length=16777217 bytes"
- + " maxLength=16777216 bytes"));
+ + " rowIndex:3, reason: String too long: length=134217729 bytes"
+ + " maxLength=134217728 bytes"));
}
private void testStringLengthHelper(AbstractRowBuffer<?> rowBuffer) {
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/AbstractDataTypeTest.java b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/AbstractDataTypeTest.java
index f2c89f164..4d8103f30 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/AbstractDataTypeTest.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/AbstractDataTypeTest.java
@@ -42,6 +42,9 @@
@RunWith(Parameterized.class)
public abstract class AbstractDataTypeTest {
+ protected static final int MB_64 = 64 * 1024 * 1024;
+ protected static final int MB_128 = 128 * 1024 * 1024;
+
private static final String SOURCE_COLUMN_NAME = "source";
private static final String VALUE_COLUMN_NAME = "value";
@@ -101,6 +104,15 @@ protected void setUp(
conn.createStatement().execute(String.format("use database %s;", databaseName));
conn.createStatement().execute(String.format("use schema %s;", schemaName));
+    // Set up the session parameters required for the large LOB sizes
+    conn.createStatement()
+        .execute(
+            "alter session set FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY = enabled,"
+                + " FEATURE_INCREASED_MAX_LOB_SIZE_PERSISTED = enabled,"
+                + " ENABLE_ISMAXLENGTH_FIELD_IN_DATATYPE = true,"
+                + " ENABLE_DEFAULT_VARCHAR_AND_BINARY_LENGTH = true,"
+                + " DEFAULT_VARCHAR_LENGTH = 134217728;");
+
if (isIceberg) {
switch (serializationPolicy) {
case COMPATIBLE:
@@ -131,6 +143,7 @@ protected void setUp(
// Override Iceberg mode client lag to 1 second for faster test execution
Map<String, Object> parameterMap = new HashMap<>();
parameterMap.put(ParameterProvider.MAX_CLIENT_LAG, 1000L);
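+    // A single test row can hold several 128 MB values, so raise the per-row cap to 512 MB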
+ parameterMap.put(ParameterProvider.MAX_ALLOWED_ROW_SIZE_IN_BYTES, 4L * MB_128);
Properties prop = Utils.createProperties(props);
SnowflakeURL accountURL = new SnowflakeURL(prop.getProperty(Constants.ACCOUNT_URL));
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/BinaryIT.java b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/BinaryIT.java
index 9f3f69751..68f9ec37b 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/BinaryIT.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/BinaryIT.java
@@ -71,7 +71,7 @@ public void testBinaryComparison() throws Exception {
@Test
public void testMaxBinary() throws Exception {
- byte[] arr = new byte[8 * 1024 * 1024];
+ byte[] arr = new byte[MB_64];
testJdbcTypeCompatibility("BINARY", arr, new ByteArrayProvider());
}
}
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/IcebergStringIT.java b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/IcebergStringIT.java
index 515a0305c..580950630 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/IcebergStringIT.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/IcebergStringIT.java
@@ -40,7 +40,7 @@ public void testString() throws Exception {
testIcebergIngestion("string", true, "true", new StringProvider());
testIcebergIngestion(
"string", new BigDecimal("123456.789"), "123456.789", new StringProvider());
- testIcebergIngestion("string", StringUtils.repeat("a", 16 * 1024 * 1024), new StringProvider());
+ testIcebergIngestion("string", StringUtils.repeat("a", MB_128), new StringProvider());
testIcebergIngestion("string", "❄️", new StringProvider());
testIcebergIngestion("string", null, new StringProvider());
@@ -57,7 +57,7 @@ public void testString() throws Exception {
SFException.class,
() ->
testIcebergIngestion(
- "string", StringUtils.repeat("a", 16 * 1024 * 1024 + 1), new StringProvider()));
+ "string", StringUtils.repeat("a", MB_128 + 1), new StringProvider()));
Assertions.assertThat(ex)
.extracting(SFException::getVendorCode)
.isEqualTo(ErrorCode.INVALID_VALUE_ROW.getMessageCode());
@@ -83,9 +83,9 @@ public void testStringAndQueries() throws Exception {
"select COUNT(*) from {tableName} where {columnName} is null", Arrays.asList(4L));
testIcebergIngestAndQuery(
"string",
- Arrays.asList(StringUtils.repeat("a", 16 * 1024 * 1024), null, null, null, "aaa"),
+ Arrays.asList(StringUtils.repeat("a", MB_128), null, null, null, "aaa"),
"select MAX({columnName}) from {tableName}",
- Arrays.asList(StringUtils.repeat("a", 16 * 1024 * 1024)));
+ Arrays.asList(StringUtils.repeat("a", MB_128)));
testIcebergIngestAndQuery(
"string",
Arrays.asList(StringUtils.repeat("a", 33), StringUtils.repeat("*", 3), null, ""),
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/SemiStructuredIT.java b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/SemiStructuredIT.java
index 5905f92d9..030297101 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/SemiStructuredIT.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/SemiStructuredIT.java
@@ -27,7 +27,7 @@ public void before() throws Exception {
// TODO SNOW-664249: There is a few-byte mismatch between the value sent by the user and its
// server-side representation. Validation leaves a small buffer for this difference.
- private static final int MAX_ALLOWED_LENGTH = 16 * 1024 * 1024 - 64;
+ private static final int MAX_ALLOWED_LENGTH = 128 * 1024 * 1024 - 64;
@Test
public void testVariant() throws Exception {
diff --git a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/StringsIT.java b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/StringsIT.java
index e11b82005..a2027c8cc 100644
--- a/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/StringsIT.java
+++ b/src/test/java/net/snowflake/ingest/streaming/internal/datatypes/StringsIT.java
@@ -15,8 +15,6 @@
public class StringsIT extends AbstractDataTypeTest {
- private static final int MB_16 = 16 * 1024 * 1024;
-
@Before
public void before() throws Exception {
super.before();
@@ -94,23 +92,23 @@ public void testStringCreatedFromInvalidBytes() throws Exception {
@Test
public void testMaxAllowedString() throws Exception {
// 1-byte chars
- String maxString = buildString("a", MB_16);
+ String maxString = buildString("a", MB_128);
testIngestion("VARCHAR", maxString, new StringProvider());
expectNotSupported("VARCHAR", maxString + "a");
// 2-byte chars
- maxString = buildString("š", MB_16 / 2);
+ maxString = buildString("š", MB_128 / 2);
testIngestion("VARCHAR", maxString, new StringProvider());
expectNotSupported("VARCHAR", maxString + "a");
// 3-byte chars
- maxString = buildString("❄", MB_16 / 3);
+ maxString = buildString("❄", MB_128 / 3);
testIngestion("VARCHAR", maxString, new StringProvider());
expectNotSupported("VARCHAR", maxString + "aa");
// 4-byte chars
- maxString = buildString("🍞", MB_16 / 4);
+ maxString = buildString("🍞", MB_128 / 4);
testIngestion("VARCHAR", maxString, new StringProvider());
expectNotSupported("VARCHAR", maxString + "a");
}