From 4642ee3aac59d24aaf33135bcf207f3f5cd54582 Mon Sep 17 00:00:00 2001
From: Sergey Nuyanzin
Date: Tue, 26 Mar 2024 23:34:47 +0100
Subject: [PATCH] [FLINK-34942] Add support for Flink 1.19, 1.20-SNAPSHOT

---
 .github/workflows/push_pr.yml                 |  11 +-
 .github/workflows/weekly.yml                  |  16 +-
 .../core/execution/CheckpointingMode.java     |  44 ++++
 .../tests/OpensearchSinkE2ECase.java          |  28 ++-
 .../sink/OpensearchSinkBuilderTest.java       |  90 ++------
 .../sink/OpensearchWriterITCase.java          |  19 +-
 .../sink/TestingSinkWriterMetricGroup.java    | 198 ++++++++++++++++++
 flink-sql-connector-opensearch/pom.xml        |   6 +
 .../src/main/resources/META-INF/NOTICE        |   8 +-
 pom.xml                                       |  40 +++-
 10 files changed, 367 insertions(+), 93 deletions(-)
 create mode 100644 flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/core/execution/CheckpointingMode.java
 create mode 100644 flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/TestingSinkWriterMetricGroup.java

diff --git a/.github/workflows/push_pr.yml b/.github/workflows/push_pr.yml
index 6c2dc05..d0e7fe5 100644
--- a/.github/workflows/push_pr.yml
+++ b/.github/workflows/push_pr.yml
@@ -25,7 +25,16 @@ jobs:
   compile_and_test:
     strategy:
       matrix:
-        flink: [1.17.1, 1.18-SNAPSHOT]
+        flink: [ 1.17-SNAPSHOT ]
+        jdk: [ '8, 11' ]
+        include:
+          - flink: 1.18-SNAPSHOT
+            jdk: '8, 11, 17'
+          - flink: 1.19-SNAPSHOT
+            jdk: '8, 11, 17, 21'
+          - flink: 1.20-SNAPSHOT
+            jdk: '8, 11, 17, 21'
     uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
     with:
       flink_version: ${{ matrix.flink }}
+      jdk_version: ${{ matrix.jdk || '8, 11' }}
diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml
index 937446e..cefd3a2 100644
--- a/.github/workflows/weekly.yml
+++ b/.github/workflows/weekly.yml
@@ -26,21 +26,31 @@ jobs:
     if: github.repository_owner == 'apache'
     strategy:
       matrix:
-        flink_branches: [{
+        flink_branches: [ {
           flink: 1.17-SNAPSHOT,
           branch: main
         }, {
           flink: 1.18-SNAPSHOT,
+          jdk: '8, 11, 17',
           branch: main
         }, {
-          flink: 1.16.2,
+          flink: 1.19-SNAPSHOT,
+          jdk: '8, 11, 17, 21',
+          branch: main
+        }, {
+          flink: 1.20-SNAPSHOT,
+          jdk: '8, 11, 17, 21',
+          branch: main
+        }, {
+          flink: 1.17.2,
           branch: v1.0
         }, {
-          flink: 1.17.1,
+          flink: 1.18.1,
           branch: v1.0
         }]
     uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
     with:
       flink_version: ${{ matrix.flink_branches.flink }}
+      jdk_version: ${{ matrix.flink_branches.jdk || '8, 11' }}
       connector_branch: ${{ matrix.flink_branches.branch }}
       run_dependency_convergence: false
diff --git a/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/core/execution/CheckpointingMode.java b/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/core/execution/CheckpointingMode.java
new file mode 100644
index 0000000..ac0d674
--- /dev/null
+++ b/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/core/execution/CheckpointingMode.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.core.execution; + +import org.apache.flink.streaming.tests.OpensearchSinkE2ECase; + +/** + * This is a copy of {@link CheckpointingMode} from flink-core module introduced in Flink 1.20. We + * need it here to make {@link OpensearchSinkE2ECase} compatible with earlier releases. Could be + * removed together with dropping support of Flink 1.19. + */ +public enum CheckpointingMode { + EXACTLY_ONCE, + AT_LEAST_ONCE; + + private CheckpointingMode() {} + + public static org.apache.flink.streaming.api.CheckpointingMode convertFromCheckpointingMode( + org.apache.flink.core.execution.CheckpointingMode semantic) { + switch (semantic) { + case EXACTLY_ONCE: + return org.apache.flink.streaming.api.CheckpointingMode.EXACTLY_ONCE; + case AT_LEAST_ONCE: + return org.apache.flink.streaming.api.CheckpointingMode.AT_LEAST_ONCE; + default: + throw new IllegalArgumentException("Unsupported semantic: " + semantic); + } + } +} diff --git a/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkE2ECase.java b/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkE2ECase.java index 6281e08..3961be7 100644 --- a/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkE2ECase.java +++ b/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkE2ECase.java @@ -40,6 +40,7 @@ import java.util.List; import static org.apache.flink.connector.testframe.utils.CollectIteratorAssertions.assertThat; +import static org.apache.flink.core.execution.CheckpointingMode.convertFromCheckpointingMode; import static org.apache.flink.runtime.testutils.CommonTestUtils.waitUntilCondition; /** End to end test for OpensearchSink based on connector testing framework. */ @@ -85,7 +86,8 @@ public OpensearchSinkE2ECase() throws Exception {} .toUri() .toURL())); - @Override + /** Could be removed together with dropping support of Flink 1.19. 
*/ + @Deprecated protected void checkResultWithSemantic( ExternalSystemDataReader> reader, List> testData, @@ -109,6 +111,30 @@ protected void checkResultWithSemantic( READER_RETRY_ATTEMPTS); } + protected void checkResultWithSemantic( + ExternalSystemDataReader> reader, + List> testData, + org.apache.flink.core.execution.CheckpointingMode semantic) + throws Exception { + waitUntilCondition( + () -> { + try { + List> result = + reader.poll(Duration.ofMillis(READER_TIMEOUT)); + assertThat(sort(result).iterator()) + .matchesRecordsFromSource( + Collections.singletonList(sort(testData)), + convertFromCheckpointingMode(semantic)); + return true; + } catch (Throwable t) { + LOG.warn("Polled results not as expected", t); + return false; + } + }, + 5000, + READER_RETRY_ATTEMPTS); + } + private static > List sort(List list) { Collections.sort(list); return list; diff --git a/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchSinkBuilderTest.java b/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchSinkBuilderTest.java index 693ae44..ce4278b 100644 --- a/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchSinkBuilderTest.java +++ b/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchSinkBuilderTest.java @@ -17,25 +17,17 @@ package org.apache.flink.connector.opensearch.sink; -import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.operators.MailboxExecutor; -import org.apache.flink.api.common.operators.ProcessingTimeService; -import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.api.common.typeutils.TypeSerializer; -import org.apache.flink.api.connector.sink2.Sink; import org.apache.flink.api.connector.sink2.Sink.InitContext; import org.apache.flink.connector.base.DeliveryGuarantee; import org.apache.flink.connector.opensearch.sink.BulkResponseInspector.BulkResponseInspectorFactory; import org.apache.flink.connector.opensearch.sink.OpensearchWriter.DefaultBulkResponseInspector; import org.apache.flink.metrics.MetricGroup; -import org.apache.flink.metrics.groups.SinkWriterMetricGroup; import org.apache.flink.metrics.groups.UnregisteredMetricsGroup; -import org.apache.flink.runtime.metrics.groups.InternalSinkWriterMetricGroup; import org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService; import org.apache.flink.util.FlinkRuntimeException; import org.apache.flink.util.SimpleUserCodeClassLoader; import org.apache.flink.util.TestLoggerExtension; -import org.apache.flink.util.UserCodeClassLoader; import org.apache.flink.util.function.ThrowingRunnable; import org.apache.http.HttpHost; @@ -44,8 +36,8 @@ import org.junit.jupiter.api.TestFactory; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; -import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Stream; @@ -138,9 +130,12 @@ void testOverrideFailureHandler() { final OpensearchSink sink = createMinimalBuilder().setFailureHandler(failureHandler).build(); - final InitContext sinkInitContext = new MockInitContext(); final BulkResponseInspector bulkResponseInspector = - sink.getBulkResponseInspectorFactory().apply(sinkInitContext::metricGroup); + sink.getBulkResponseInspectorFactory() + .apply( + () -> + TestingSinkWriterMetricGroup.getSinkWriterMetricGroup( + new UnregisteredMetricsGroup())); 
assertThat(bulkResponseInspector) .isInstanceOf(DefaultBulkResponseInspector.class) .extracting( @@ -163,7 +158,20 @@ void testOverrideBulkResponseInspectorFactory() { .setBulkResponseInspectorFactory(bulkResponseInspectorFactory) .build(); - final InitContext sinkInitContext = new MockInitContext(); + final InitContext sinkInitContext = Mockito.mock(InitContext.class); + Mockito.when(sinkInitContext.metricGroup()) + .thenReturn( + TestingSinkWriterMetricGroup.getSinkWriterMetricGroup( + new UnregisteredMetricsGroup())); + + Mockito.when(sinkInitContext.getMailboxExecutor()) + .thenReturn(new OpensearchSinkBuilderTest.DummyMailboxExecutor()); + Mockito.when(sinkInitContext.getProcessingTimeService()) + .thenReturn(new TestProcessingTimeService()); + Mockito.when(sinkInitContext.getUserCodeClassLoader()) + .thenReturn( + SimpleUserCodeClassLoader.create( + OpensearchSinkBuilderTest.class.getClassLoader())); assertThatCode(() -> sink.createWriter(sinkInitContext)).doesNotThrowAnyException(); assertThat(called).isTrue(); @@ -184,64 +192,6 @@ public boolean tryYield() throws FlinkRuntimeException { } } - private static class MockInitContext - implements Sink.InitContext, SerializationSchema.InitializationContext { - - public UserCodeClassLoader getUserCodeClassLoader() { - return SimpleUserCodeClassLoader.create( - OpensearchSinkBuilderTest.class.getClassLoader()); - } - - public MailboxExecutor getMailboxExecutor() { - return new OpensearchSinkBuilderTest.DummyMailboxExecutor(); - } - - public ProcessingTimeService getProcessingTimeService() { - return new TestProcessingTimeService(); - } - - public int getSubtaskId() { - return 0; - } - - public int getNumberOfParallelSubtasks() { - return 0; - } - - public int getAttemptNumber() { - return 0; - } - - public SinkWriterMetricGroup metricGroup() { - return InternalSinkWriterMetricGroup.mock(new UnregisteredMetricsGroup()); - } - - public MetricGroup getMetricGroup() { - return this.metricGroup(); - } - - public OptionalLong getRestoredCheckpointId() { - return OptionalLong.empty(); - } - - public SerializationSchema.InitializationContext - asSerializationSchemaInitializationContext() { - return this; - } - - public boolean isObjectReuseEnabled() { - return false; - } - - public TypeSerializer createInputSerializer() { - throw new UnsupportedOperationException(); - } - - public JobID getJobId() { - throw new UnsupportedOperationException(); - } - } - private OpensearchSinkBuilder createEmptyBuilder() { return new OpensearchSinkBuilder<>(); } diff --git a/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchWriterITCase.java b/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchWriterITCase.java index fc083a4..838c6bd 100644 --- a/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchWriterITCase.java +++ b/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchWriterITCase.java @@ -30,7 +30,6 @@ import org.apache.flink.metrics.groups.SinkWriterMetricGroup; import org.apache.flink.metrics.testutils.MetricListener; import org.apache.flink.runtime.metrics.MetricNames; -import org.apache.flink.runtime.metrics.groups.InternalSinkWriterMetricGroup; import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups; import org.apache.flink.util.FlinkRuntimeException; import org.apache.flink.util.TestLoggerExtension; @@ -168,17 +167,19 @@ void testIncrementByteOutMetric() throws Exception 
{ final String index = "test-inc-byte-out"; final OperatorIOMetricGroup operatorIOMetricGroup = UnregisteredMetricGroups.createUnregisteredOperatorMetricGroup().getIOMetricGroup(); - final InternalSinkWriterMetricGroup metricGroup = - InternalSinkWriterMetricGroup.mock( - metricListener.getMetricGroup(), operatorIOMetricGroup); final int flushAfterNActions = 2; final BulkProcessorConfig bulkProcessorConfig = new BulkProcessorConfig(flushAfterNActions, -1, -1, FlushBackoffType.NONE, 0, 0); try (final OpensearchWriter> writer = - createWriter(index, false, bulkProcessorConfig, metricGroup)) { + createWriter( + index, + false, + bulkProcessorConfig, + TestingSinkWriterMetricGroup.getSinkWriterMetricGroup( + operatorIOMetricGroup, metricListener.getMetricGroup()))) { final Counter numBytesOut = operatorIOMetricGroup.getNumBytesOutCounter(); - assertThat(numBytesOut.getCount()).isEqualTo(0); + assertThat(numBytesOut.getCount()).isZero(); writer.write(Tuple2.of(1, buildMessage(1)), null); writer.write(Tuple2.of(2, buildMessage(2)), null); @@ -280,7 +281,8 @@ private OpensearchWriter> createWriter( index, flushOnCheckpoint, bulkProcessorConfig, - InternalSinkWriterMetricGroup.mock(metricListener.getMetricGroup()), + TestingSinkWriterMetricGroup.getSinkWriterMetricGroup( + metricListener.getMetricGroup()), new DefaultFailureHandler()); } @@ -293,7 +295,8 @@ private OpensearchWriter> createWriter( index, flushOnCheckpoint, bulkProcessorConfig, - InternalSinkWriterMetricGroup.mock(metricListener.getMetricGroup()), + TestingSinkWriterMetricGroup.getSinkWriterMetricGroup( + metricListener.getMetricGroup()), failureHandler); } diff --git a/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/TestingSinkWriterMetricGroup.java b/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/TestingSinkWriterMetricGroup.java new file mode 100644 index 0000000..0ad609f --- /dev/null +++ b/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/TestingSinkWriterMetricGroup.java @@ -0,0 +1,198 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.opensearch.sink; + +import org.apache.flink.metrics.Counter; +import org.apache.flink.metrics.Gauge; +import org.apache.flink.metrics.MetricGroup; +import org.apache.flink.metrics.groups.OperatorIOMetricGroup; +import org.apache.flink.metrics.groups.SinkWriterMetricGroup; +import org.apache.flink.runtime.metrics.MetricNames; +import org.apache.flink.runtime.metrics.groups.ProxyMetricGroup; +import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups; + +import java.util.function.Consumer; +import java.util.function.Supplier; + +/** Testing implementation for {@link SinkWriterMetricGroup}. 
*/ +public class TestingSinkWriterMetricGroup extends ProxyMetricGroup + implements SinkWriterMetricGroup { + + private final Supplier numRecordsOutErrorsCounterSupplier; + + private final Supplier numRecordsSendErrorsCounterSupplier; + + private final Supplier numRecordsSendCounterSupplier; + + private final Supplier numBytesSendCounterSupplier; + + private final Consumer> currentSendTimeGaugeConsumer; + + private final Supplier ioMetricGroupSupplier; + + public TestingSinkWriterMetricGroup( + MetricGroup parentMetricGroup, + Supplier numRecordsOutErrorsCounterSupplier, + Supplier numRecordsSendErrorsCounterSupplier, + Supplier numRecordsSendCounterSupplier, + Supplier numBytesSendCounterSupplier, + Consumer> currentSendTimeGaugeConsumer, + Supplier ioMetricGroupSupplier) { + super(parentMetricGroup); + this.numRecordsOutErrorsCounterSupplier = numRecordsOutErrorsCounterSupplier; + this.numRecordsSendErrorsCounterSupplier = numRecordsSendErrorsCounterSupplier; + this.numRecordsSendCounterSupplier = numRecordsSendCounterSupplier; + this.numBytesSendCounterSupplier = numBytesSendCounterSupplier; + this.currentSendTimeGaugeConsumer = currentSendTimeGaugeConsumer; + this.ioMetricGroupSupplier = ioMetricGroupSupplier; + } + + @Override + public Counter getNumRecordsOutErrorsCounter() { + return numRecordsOutErrorsCounterSupplier.get(); + } + + @Override + public Counter getNumRecordsSendErrorsCounter() { + return numRecordsSendErrorsCounterSupplier.get(); + } + + @Override + public Counter getNumRecordsSendCounter() { + return numRecordsSendCounterSupplier.get(); + } + + @Override + public Counter getNumBytesSendCounter() { + return numBytesSendCounterSupplier.get(); + } + + @Override + public void setCurrentSendTimeGauge(Gauge gauge) { + currentSendTimeGaugeConsumer.accept(gauge); + } + + @Override + public OperatorIOMetricGroup getIOMetricGroup() { + return ioMetricGroupSupplier.get(); + } + + static TestingSinkWriterMetricGroup getSinkWriterMetricGroup(MetricGroup parentMetricGroup) { + final OperatorIOMetricGroup operatorIOMetricGroup = + UnregisteredMetricGroups.createUnregisteredOperatorMetricGroup().getIOMetricGroup(); + return getSinkWriterMetricGroup(operatorIOMetricGroup, parentMetricGroup); + } + + static TestingSinkWriterMetricGroup getSinkWriterMetricGroup( + OperatorIOMetricGroup operatorIOMetricGroup, MetricGroup parentMetricGroup) { + Counter numRecordsOutErrors = parentMetricGroup.counter(MetricNames.NUM_RECORDS_OUT_ERRORS); + Counter numRecordsSendErrors = + parentMetricGroup.counter(MetricNames.NUM_RECORDS_SEND_ERRORS, numRecordsOutErrors); + Counter numRecordsWritten = + parentMetricGroup.counter( + MetricNames.NUM_RECORDS_SEND, + operatorIOMetricGroup.getNumRecordsOutCounter()); + Counter numBytesWritten = + parentMetricGroup.counter( + MetricNames.NUM_BYTES_SEND, operatorIOMetricGroup.getNumBytesOutCounter()); + Consumer> currentSendTimeGaugeConsumer = + currentSendTimeGauge -> + parentMetricGroup.gauge( + MetricNames.CURRENT_SEND_TIME, currentSendTimeGauge); + return new TestingSinkWriterMetricGroup.Builder() + .setParentMetricGroup(parentMetricGroup) + .setIoMetricGroupSupplier(() -> operatorIOMetricGroup) + .setNumRecordsOutErrorsCounterSupplier(() -> numRecordsOutErrors) + .setNumRecordsSendErrorsCounterSupplier(() -> numRecordsSendErrors) + .setNumRecordsSendCounterSupplier(() -> numRecordsWritten) + .setNumBytesSendCounterSupplier(() -> numBytesWritten) + .setCurrentSendTimeGaugeConsumer(currentSendTimeGaugeConsumer) + .build(); + } + + /** Builder for {@link 
TestingSinkWriterMetricGroup}. */ + public static class Builder { + + private MetricGroup parentMetricGroup = null; + + private Supplier numRecordsOutErrorsCounterSupplier = () -> null; + + private Supplier numRecordsSendErrorsCounterSupplier = () -> null; + + private Supplier numRecordsSendCounterSupplier = () -> null; + + private Supplier numBytesSendCounterSupplier = () -> null; + + private Consumer> currentSendTimeGaugeConsumer = counter -> {}; + + private Supplier ioMetricGroupSupplier = () -> null; + + public Builder setParentMetricGroup(MetricGroup parentMetricGroup) { + this.parentMetricGroup = parentMetricGroup; + return this; + } + + public Builder setNumRecordsOutErrorsCounterSupplier( + Supplier numRecordsOutErrorsCounterSupplier) { + this.numRecordsOutErrorsCounterSupplier = numRecordsOutErrorsCounterSupplier; + return this; + } + + public Builder setNumRecordsSendErrorsCounterSupplier( + Supplier numRecordsSendErrorsCounterSupplier) { + this.numRecordsSendErrorsCounterSupplier = numRecordsSendErrorsCounterSupplier; + return this; + } + + public Builder setNumRecordsSendCounterSupplier( + Supplier numRecordsSendCounterSupplier) { + this.numRecordsSendCounterSupplier = numRecordsSendCounterSupplier; + return this; + } + + public Builder setNumBytesSendCounterSupplier( + Supplier numBytesSendCounterSupplier) { + this.numBytesSendCounterSupplier = numBytesSendCounterSupplier; + return this; + } + + public Builder setCurrentSendTimeGaugeConsumer( + Consumer> currentSendTimeGaugeConsumer) { + this.currentSendTimeGaugeConsumer = currentSendTimeGaugeConsumer; + return this; + } + + public Builder setIoMetricGroupSupplier( + Supplier ioMetricGroupSupplier) { + this.ioMetricGroupSupplier = ioMetricGroupSupplier; + return this; + } + + public TestingSinkWriterMetricGroup build() { + return new TestingSinkWriterMetricGroup( + parentMetricGroup, + numRecordsOutErrorsCounterSupplier, + numRecordsSendErrorsCounterSupplier, + numRecordsSendCounterSupplier, + numBytesSendCounterSupplier, + currentSendTimeGaugeConsumer, + ioMetricGroupSupplier); + } + } +} diff --git a/flink-sql-connector-opensearch/pom.xml b/flink-sql-connector-opensearch/pom.xml index e3f4cd4..8f3cdc1 100644 --- a/flink-sql-connector-opensearch/pom.xml +++ b/flink-sql-connector-opensearch/pom.xml @@ -40,6 +40,12 @@ under the License. flink-connector-opensearch ${project.version} + + org.apache.flink + flink-test-utils + ${flink.version} + test + diff --git a/flink-sql-connector-opensearch/src/main/resources/META-INF/NOTICE b/flink-sql-connector-opensearch/src/main/resources/META-INF/NOTICE index 79e4365..f897f59 100644 --- a/flink-sql-connector-opensearch/src/main/resources/META-INF/NOTICE +++ b/flink-sql-connector-opensearch/src/main/resources/META-INF/NOTICE @@ -7,10 +7,10 @@ The Apache Software Foundation (http://www.apache.org/). This project bundles the following dependencies under the Apache Software License 2.0. 
(http://www.apache.org/licenses/LICENSE-2.0.txt) - com.carrotsearch:hppc:0.8.1 -- com.fasterxml.jackson.core:jackson-core:2.13.4 -- com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.13.4 -- com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.4 -- com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.4 +- com.fasterxml.jackson.core:jackson-core:2.15.3 +- com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.15.3 +- com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.15.3 +- com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.15.3 - com.github.spullara.mustache.java:compiler:0.9.10 - commons-codec:commons-codec:1.15 - commons-logging:commons-logging:1.1.3 diff --git a/pom.xml b/pom.xml index d6dbd4c..5b9a767 100644 --- a/pom.xml +++ b/pom.xml @@ -59,12 +59,12 @@ under the License. 1.17.1 - 1.23.0 - 2.13.4.20221013 - 5.9.2 - 3.21.0 - 1.18.3 - 2.21.0 + 1.26.1 + 2.15.3 + 5.10.2 + 3.25.3 + 1.19.7 + 3.12.4 false 1.0.0-1.16 @@ -279,6 +279,27 @@ under the License. ${commons-compress.version} + + + commons-io + commons-io + 2.15.1 + + + + + org.apache.commons + commons-lang3 + 3.14.0 + + + + + net.bytebuddy + byte-buddy + 1.14.13 + + com.fasterxml.jackson @@ -297,6 +318,13 @@ under the License. import + + + org.xerial.snappy + snappy-java + 1.1.10.5 + + org.assertj assertj-core
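The CheckpointingMode enum copied into org.apache.flink.core.execution above exists so that the e2e test keeps compiling against Flink versions that predate 1.20, where flink-core does not yet ship that enum; the E2E case then carries both the pre-1.20 and the 1.20 variant of checkResultWithSemantic, so whichever signature the test framework of the running Flink version dispatches to is present. Below is a minimal sketch of the conversion helper in use, assuming the copied enum from this patch is the one resolved on the test classpath; the class name CheckpointingModeShimExample is illustrative only and not part of the patch.

import org.apache.flink.core.execution.CheckpointingMode;

// Illustrative sketch: exercises the shim enum added by this patch and its
// convertFromCheckpointingMode(...) helper, which maps the flink-core style
// enum back onto the legacy org.apache.flink.streaming.api.CheckpointingMode.
public class CheckpointingModeShimExample {

    public static void main(String[] args) {
        for (CheckpointingMode mode : CheckpointingMode.values()) {
            org.apache.flink.streaming.api.CheckpointingMode legacy =
                    CheckpointingMode.convertFromCheckpointingMode(mode);
            System.out.println(mode + " -> " + legacy);
        }
    }
}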
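The tests previously built their metric group through InternalSinkWriterMetricGroup.mock(...), an internal flink-runtime helper whose shape changed between Flink releases; TestingSinkWriterMetricGroup above removes that dependency. A sketch of how a test in the same package can obtain a SinkWriterMetricGroup from it, either through the package-private factory used throughout the patch or through the public Builder; the class name MetricGroupUsageExample is illustrative only.

package org.apache.flink.connector.opensearch.sink;

import org.apache.flink.metrics.SimpleCounter;
import org.apache.flink.metrics.groups.SinkWriterMetricGroup;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;

// Illustrative sketch: the two ways this patch exposes a test SinkWriterMetricGroup.
class MetricGroupUsageExample {

    static SinkWriterMetricGroup viaFactory() {
        // Package-private factory used by OpensearchSinkBuilderTest and
        // OpensearchWriterITCase: registers the send/error counters on the parent group.
        return TestingSinkWriterMetricGroup.getSinkWriterMetricGroup(
                new UnregisteredMetricsGroup());
    }

    static SinkWriterMetricGroup viaBuilder() {
        // Public builder for tests that want to supply individual metrics themselves.
        return new TestingSinkWriterMetricGroup.Builder()
                .setParentMetricGroup(new UnregisteredMetricsGroup())
                .setNumRecordsSendCounterSupplier(SimpleCounter::new)
                .setNumBytesSendCounterSupplier(SimpleCounter::new)
                .build();
    }
}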
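Along the same lines, the hand-rolled MockInitContext was dropped because implementing Sink.InitContext by hand has to track every method the interface adds or changes across Flink versions; the builder test now stubs only what createWriter(...) touches via Mockito. A sketch of that pattern, assuming the metric group, processing-time service and user-code class loader (plus a mailbox executor) are the only calls made during writer creation; the helper name minimalInitContext is illustrative only.

package org.apache.flink.connector.opensearch.sink;

import org.apache.flink.api.connector.sink2.Sink.InitContext;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
import org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService;
import org.apache.flink.util.SimpleUserCodeClassLoader;

import org.mockito.Mockito;

// Illustrative sketch: a Mockito-backed InitContext stubbing just the methods the
// OpenSearch writer needs, mirroring testOverrideBulkResponseInspectorFactory above.
final class InitContextMockExample {

    static InitContext minimalInitContext() {
        InitContext context = Mockito.mock(InitContext.class);
        Mockito.when(context.metricGroup())
                .thenReturn(
                        TestingSinkWriterMetricGroup.getSinkWriterMetricGroup(
                                new UnregisteredMetricsGroup()));
        Mockito.when(context.getProcessingTimeService())
                .thenReturn(new TestProcessingTimeService());
        Mockito.when(context.getUserCodeClassLoader())
                .thenReturn(
                        SimpleUserCodeClassLoader.create(
                                InitContextMockExample.class.getClassLoader()));
        // getMailboxExecutor() would be stubbed the same way, e.g. with the test's
        // DummyMailboxExecutor, before handing the context to sink.createWriter(...).
        return context;
    }
}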