From 98050a1b966ec08f9e8c285d5f295c274587fd09 Mon Sep 17 00:00:00 2001 From: Varun Jain Date: Thu, 25 Jan 2024 14:33:18 -0800 Subject: [PATCH] Addressing martin and hemmin comments Signed-off-by: Varun Jain --- qa/restart-upgrade/build.gradle | 2 + .../AbstractRestartUpgradeRestTestCase.java | 44 ++++++++- .../neuralsearch/bwc/HybridSearchIT.java | 99 ++++++++----------- .../neuralsearch/bwc/MultiModalSearchIT.java | 45 +++------ .../bwc/NeuralSparseSearchIT.java | 59 ++++------- .../neuralsearch/bwc/SemanticSearchIT.java | 40 ++------ qa/rolling-upgrade/build.gradle | 4 + .../bwc/AbstractRollingUpgradeTestCase.java | 52 +++++++++- .../neuralsearch/bwc/HybridSearchIT.java | 56 ++++------- .../neuralsearch/bwc/MultiModalSearchIT.java | 58 ++++------- .../bwc/NeuralSparseSearchIT.java | 72 +++++--------- .../neuralsearch/bwc/SemanticSearchIT.java | 53 +++------- .../neuralsearch/BaseNeuralSearchIT.java | 16 +++ .../opensearch/neuralsearch/TestUtils.java | 7 +- 14 files changed, 274 insertions(+), 333 deletions(-) diff --git a/qa/restart-upgrade/build.gradle b/qa/restart-upgrade/build.gradle index 29617e1b7..a396fff29 100644 --- a/qa/restart-upgrade/build.gradle +++ b/qa/restart-upgrade/build.gradle @@ -35,6 +35,7 @@ task testAgainstOldCluster(type: StandaloneRestIntegTestTask) { systemProperty 'tests.skip_delete_model_index', 'true' systemProperty 'tests.plugin_bwc_version', neural_search_bwc_version + //Excluding MultiModalSearchIT, HybridSearchIT, NeuralSparseSearchIT tests from neural search version 2.9 and 2.10 because these features were released in 2.11 version. 
if (neural_search_bwc_version.startsWith("2.9") || neural_search_bwc_version.startsWith("2.10")){ filter { excludeTestsMatching "org.opensearch.neuralsearch.bwc.MultiModalSearchIT.*" @@ -61,6 +62,7 @@ task testAgainstNewCluster(type: StandaloneRestIntegTestTask) { systemProperty 'tests.is_old_cluster', 'false' systemProperty 'tests.plugin_bwc_version', neural_search_bwc_version + //Excluding MultiModalSearchIT, HybridSearchIT, NeuralSparseSearchIT tests from neural search version 2.9 and 2.10 because these features were released in 2.11 version. if (neural_search_bwc_version.startsWith("2.9") || neural_search_bwc_version.startsWith("2.10")){ filter { excludeTestsMatching "org.opensearch.neuralsearch.bwc.MultiModalSearchIT.*" diff --git a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRestartUpgradeRestTestCase.java b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRestartUpgradeRestTestCase.java index cf985d759..c2d2657f4 100644 --- a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRestartUpgradeRestTestCase.java +++ b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRestartUpgradeRestTestCase.java @@ -4,15 +4,18 @@ */ package org.opensearch.neuralsearch.bwc; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Locale; import java.util.Optional; import org.junit.Before; import org.opensearch.common.settings.Settings; import org.opensearch.neuralsearch.BaseNeuralSearchIT; +import static org.opensearch.neuralsearch.TestUtils.NEURAL_SEARCH_BWC_PREFIX; import static org.opensearch.neuralsearch.TestUtils.CLIENT_TIMEOUT_VALUE; import static org.opensearch.neuralsearch.TestUtils.RESTART_UPGRADE_OLD_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.BWC_VERSION; -import static org.opensearch.neuralsearch.TestUtils.NEURAL_SEARCH_BWC_PREFIX; +import static org.opensearch.neuralsearch.TestUtils.generateModelId; import 
org.opensearch.test.rest.OpenSearchRestTestCase; public abstract class AbstractRestartUpgradeRestTestCase extends BaseNeuralSearchIT { @@ -57,4 +60,43 @@ protected static final boolean isRunningAgainstOldCluster() { protected final Optional getBWCVersion() { return Optional.ofNullable(System.getProperty(BWC_VERSION, null)); } + + protected String uploadTextEmbeddingModel() throws Exception { + String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); + return registerModelGroupAndGetModelId(requestBody); + } + + protected String registerModelGroupAndGetModelId(final String requestBody) throws Exception { + String modelGroupRegisterRequestBody = Files.readString( + Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) + ); + String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); + return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); + } + + protected void createPipelineProcessor(final String modelId, final String pipelineName) throws Exception { + String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI())); + createPipelineProcessor(requestBody, pipelineName, modelId); + } + + protected String uploadSparseEncodingModel() throws Exception { + String requestBody = Files.readString( + Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI()) + ); + return registerModelGroupAndGetModelId(requestBody); + } + + protected void createPipelineForTextImageProcessor(final String modelId, final String pipelineName) throws Exception { + String requestBody = Files.readString( + Path.of(classLoader.getResource("processor/PipelineForTextImageProcessorConfiguration.json").toURI()) + ); + createPipelineProcessor(requestBody, pipelineName, modelId); + } + + protected void createPipelineForSparseEncodingProcessor(final 
String modelId, final String pipelineName) throws Exception { + String requestBody = Files.readString( + Path.of(classLoader.getResource("processor/PipelineForSparseEncodingProcessorConfiguration.json").toURI()) + ); + createPipelineProcessor(requestBody, pipelineName, modelId); + } } diff --git a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java index 3df5740ec..c23913259 100644 --- a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java +++ b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java @@ -4,6 +4,7 @@ */ package org.opensearch.neuralsearch.bwc; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -11,7 +12,6 @@ import java.util.Map; import org.opensearch.index.query.MatchQueryBuilder; import static org.opensearch.neuralsearch.TestUtils.getModelId; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.PARAM_NAME_WEIGHTS; import static org.opensearch.neuralsearch.TestUtils.TEXT_EMBEDDING_PROCESSOR; @@ -48,28 +48,18 @@ public void testNormalizationProcessor_whenIndexWithMultipleShards_E2EFlow() thr Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())), PIPELINE_NAME ); - addDocument(getIndexNameForTest(), "0", TEST_FIELD, TEXT_1, null, null); - addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_2, null, null); - addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_3, null, null); - addDocument(getIndexNameForTest(), "3", TEST_FIELD, TEXT_4, null, null); - addDocument(getIndexNameForTest(), "4", TEST_FIELD, TEXT_5, null, null); - createSearchPipeline( - SEARCH_PIPELINE_NAME, - DEFAULT_NORMALIZATION_METHOD, - 
DEFAULT_COMBINATION_METHOD, - Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.3f, 0.7f })) - ); + addDocuments(getIndexNameForTest(), true); + createSearchPipeline(SEARCH_PIPELINE_NAME); } else { - Map pipeline = getIngestionPipeline(PIPELINE_NAME); - assertNotNull(pipeline); - String modelId = getModelId(pipeline, TEXT_EMBEDDING_PROCESSOR); - loadModel(modelId); - addDocument(getIndexNameForTest(), "5", TEST_FIELD, TEXT_6, null, null); - validateTestIndex(modelId, getIndexNameForTest(), SEARCH_PIPELINE_NAME); - deleteSearchPipeline(SEARCH_PIPELINE_NAME); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + String modelId = null; + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR); + loadModel(modelId); + addDocuments(getIndexNameForTest(), false); + validateTestIndex(modelId, getIndexNameForTest(), SEARCH_PIPELINE_NAME); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, SEARCH_PIPELINE_NAME); + } } } @@ -87,50 +77,43 @@ public void testNormalizationProcessor_whenIndexWithSingleShard_E2EFlow() throws Files.readString(Path.of(classLoader.getResource("processor/IndexMappingSingleShard.json").toURI())), PIPELINE1_NAME ); - addDocument(getIndexNameForTest(), "0", TEST_FIELD, TEXT_1, null, null); - addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_2, null, null); - addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_3, null, null); - addDocument(getIndexNameForTest(), "3", TEST_FIELD, TEXT_4, null, null); - addDocument(getIndexNameForTest(), "4", TEST_FIELD, TEXT_5, null, null); - createSearchPipeline( - SEARCH_PIPELINE1_NAME, - DEFAULT_NORMALIZATION_METHOD, - DEFAULT_COMBINATION_METHOD, - Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.3f, 0.7f })) - ); + addDocuments(getIndexNameForTest(), true); + createSearchPipeline(SEARCH_PIPELINE1_NAME); } else { - Map pipeline = getIngestionPipeline(PIPELINE1_NAME); - 
assertNotNull(pipeline); - String modelId = getModelId(pipeline, TEXT_EMBEDDING_PROCESSOR); - loadModel(modelId); - addDocument(getIndexNameForTest(), "5", TEST_FIELD, TEXT_6, null, null); - validateTestIndex(modelId, getIndexNameForTest(), SEARCH_PIPELINE1_NAME); - deleteSearchPipeline(SEARCH_PIPELINE1_NAME); - deletePipeline(PIPELINE1_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + String modelId = null; + try { + modelId = getModelId(getIngestionPipeline(PIPELINE1_NAME), TEXT_EMBEDDING_PROCESSOR); + loadModel(modelId); + addDocuments(getIndexNameForTest(), false); + validateTestIndex(modelId, getIndexNameForTest(), SEARCH_PIPELINE1_NAME); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE1_NAME, modelId, SEARCH_PIPELINE1_NAME); + } } } - private String uploadTextEmbeddingModel() throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); - return registerModelGroupAndGetModelId(requestBody); + private void addDocuments(final String indexName, boolean isRunningAgainstOldCluster) throws IOException { + if (isRunningAgainstOldCluster) { + addDocument(indexName, "0", TEST_FIELD, TEXT_1, null, null); + addDocument(indexName, "1", TEST_FIELD, TEXT_2, null, null); + addDocument(indexName, "2", TEST_FIELD, TEXT_3, null, null); + addDocument(indexName, "3", TEST_FIELD, TEXT_4, null, null); + addDocument(indexName, "4", TEST_FIELD, TEXT_5, null, null); + } else { + addDocument(indexName, "5", TEST_FIELD, TEXT_6, null, null); + } } - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) + private void createSearchPipeline(final String pipelineName) { + createSearchPipeline( + pipelineName, + DEFAULT_NORMALIZATION_METHOD, + DEFAULT_COMBINATION_METHOD, + 
Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.3f, 0.7f })) ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - private void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI())); - createPipelineProcessor(requestBody, pipelineName, modelId); } - private void validateTestIndex(String modelId, String index, String searchPipeline) throws Exception { + private void validateTestIndex(final String modelId, final String index, final String searchPipeline) throws Exception { int docCount = getDocCount(index); assertEquals(6, docCount); HybridQueryBuilder hybridQueryBuilder = getQueryBuilder(modelId); @@ -144,7 +127,7 @@ private void validateTestIndex(String modelId, String index, String searchPipeli } } - public HybridQueryBuilder getQueryBuilder(String modelId) { + public HybridQueryBuilder getQueryBuilder(final String modelId) { NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(); neuralQueryBuilder.fieldName("passage_embedding"); neuralQueryBuilder.modelId(modelId); diff --git a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java index cc661ef86..e6749d778 100644 --- a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java +++ b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java @@ -10,7 +10,6 @@ import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.TEXT_IMAGE_EMBEDDING_PROCESSOR; import static org.opensearch.neuralsearch.TestUtils.getModelId; -import static 
org.opensearch.neuralsearch.TestUtils.generateModelId; import org.opensearch.neuralsearch.query.NeuralQueryBuilder; public class MultiModalSearchIT extends AbstractRestartUpgradeRestTestCase { @@ -29,9 +28,9 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception { waitForClusterHealthGreen(NODES_BWC_CLUSTER); if (isRunningAgainstOldCluster()) { - String modelId = uploadTextImageEmbeddingModel(); + String modelId = uploadTextEmbeddingModel(); loadModel(modelId); - createPipelineProcessor(modelId, PIPELINE_NAME); + createPipelineForTextImageProcessor(modelId, PIPELINE_NAME); createIndexWithConfiguration( getIndexNameForTest(), Files.readString(Path.of(classLoader.getResource("processor/IndexMappingMultipleShard.json").toURI())), @@ -39,19 +38,19 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception { ); addDocument(getIndexNameForTest(), "0", TEST_FIELD, TEXT, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT); } else { - Map pipeline = getIngestionPipeline(PIPELINE_NAME); - assertNotNull(pipeline); - String modelId = getModelId(pipeline, TEXT_IMAGE_EMBEDDING_PROCESSOR); - loadModel(modelId); - addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT_1); - validateTestIndex(modelId); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + String modelId = null; + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_IMAGE_EMBEDDING_PROCESSOR); + loadModel(modelId); + addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT_1); + validateTestIndex(modelId); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null); + } } } - private void validateTestIndex(String modelId) throws Exception { + private void validateTestIndex(final String modelId) throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(2, docCount); NeuralQueryBuilder neuralQueryBuilder = new 
NeuralQueryBuilder("passage_embedding", TEXT, TEST_IMAGE_TEXT, modelId, 1, null, null); @@ -59,24 +58,4 @@ private void validateTestIndex(String modelId) throws Exception { assertNotNull(response); } - private String uploadTextImageEmbeddingModel() throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); - return registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - protected void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString( - Path.of(classLoader.getResource("processor/PipelineForTextImageProcessorConfiguration.json").toURI()) - ); - createPipelineProcessor(requestBody, pipelineName, modelId); - } - } diff --git a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java index 28bb940b8..22bd4a281 100644 --- a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java +++ b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java @@ -14,7 +14,6 @@ import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.SPARSE_ENCODING_PROCESSOR; import static org.opensearch.neuralsearch.TestUtils.objectToFloat; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; import 
org.opensearch.neuralsearch.query.NeuralSparseQueryBuilder; public class NeuralSparseSearchIT extends AbstractRestartUpgradeRestTestCase { @@ -36,7 +35,7 @@ public void testSparseEncodingProcessor_E2EFlow() throws Exception { if (isRunningAgainstOldCluster()) { String modelId = uploadSparseEncodingModel(); loadModel(modelId); - createPipelineProcessor(modelId, PIPELINE_NAME); + createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME); createIndexWithConfiguration( getIndexNameForTest(), Files.readString(Path.of(classLoader.getResource("processor/SparseIndexMappings.json").toURI())), @@ -52,27 +51,27 @@ public void testSparseEncodingProcessor_E2EFlow() throws Exception { List.of(TEXT_1) ); } else { - Map pipeline = getIngestionPipeline(PIPELINE_NAME); - assertNotNull(pipeline); - String modelId = TestUtils.getModelId(pipeline, SPARSE_ENCODING_PROCESSOR); - loadModel(modelId); - addSparseEncodingDoc( - getIndexNameForTest(), - "1", - List.of(TEST_SPARSE_ENCODING_FIELD), - List.of(testRankFeaturesDoc2), - List.of(TEST_TEXT_FIELD), - List.of(TEXT_2) - ); - validateTestIndex(modelId); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + String modelId = null; + try { + modelId = TestUtils.getModelId(getIngestionPipeline(PIPELINE_NAME), SPARSE_ENCODING_PROCESSOR); + loadModel(modelId); + addSparseEncodingDoc( + getIndexNameForTest(), + "1", + List.of(TEST_SPARSE_ENCODING_FIELD), + List.of(testRankFeaturesDoc2), + List.of(TEST_TEXT_FIELD), + List.of(TEXT_2) + ); + validateTestIndex(modelId); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null); + } } } - private void validateTestIndex(String modelId) throws Exception { + private void validateTestIndex(final String modelId) throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(2, docCount); BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); @@ -88,26 +87,4 @@ private void validateTestIndex(String 
modelId) throws Exception { float minExpectedScore = computeExpectedScore(modelId, testRankFeaturesDoc1, TEXT_1); assertTrue(minExpectedScore < objectToFloat(firstInnerHit.get("_score"))); } - - private String uploadSparseEncodingModel() throws Exception { - String requestBody = Files.readString( - Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI()) - ); - return registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - private void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString( - Path.of(classLoader.getResource("processor/PipelineForSparseEncodingProcessorConfiguration.json").toURI()) - ); - createPipelineProcessor(requestBody, pipelineName, modelId); - } } diff --git a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java index 911b24456..ec5938cd9 100644 --- a/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java +++ b/qa/restart-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java @@ -9,7 +9,6 @@ import java.util.Map; import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.getModelId; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; import static org.opensearch.neuralsearch.TestUtils.TEXT_EMBEDDING_PROCESSOR; import 
org.opensearch.neuralsearch.query.NeuralQueryBuilder; @@ -37,19 +36,19 @@ public void testTextEmbeddingProcessor_E2EFlow() throws Exception { ); addDocument(getIndexNameForTest(), "0", TEST_FIELD, TEXT, null, null); } else { - Map pipeline = getIngestionPipeline(PIPELINE_NAME); - assertNotNull(pipeline); - String modelId = getModelId(pipeline, TEXT_EMBEDDING_PROCESSOR); - loadModel(modelId); - addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, null, null); - validateTestIndex(modelId); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + String modelId = null; + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR); + loadModel(modelId); + addDocument(getIndexNameForTest(), "1", TEST_FIELD, TEXT_1, null, null); + validateTestIndex(modelId); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null); + } } } - private void validateTestIndex(String modelId) throws Exception { + private void validateTestIndex(final String modelId) throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(2, docCount); NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(); @@ -60,23 +59,4 @@ private void validateTestIndex(String modelId) throws Exception { Map response = search(getIndexNameForTest(), neuralQueryBuilder, 1); assertNotNull(response); } - - private String uploadTextEmbeddingModel() throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); - return registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, 
generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - private void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI())); - createPipelineProcessor(requestBody, pipelineName, modelId); - } - } diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 7356f4401..6aff5b499 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -35,6 +35,7 @@ task testAgainstOldCluster(type: StandaloneRestIntegTestTask) { systemProperty 'tests.plugin_bwc_version', neural_search_bwc_version systemProperty 'tests.skip_delete_model_index', 'true' + //Excluding MultiModalSearchIT, HybridSearchIT, NeuralSparseSearchIT tests from neural search version 2.9 and 2.10 because these features were released in 2.11 version. if (neural_search_bwc_version.startsWith("2.9") || neural_search_bwc_version.startsWith("2.10")){ filter { excludeTestsMatching "org.opensearch.neuralsearch.bwc.MultiModalSearchIT.*" @@ -62,6 +63,7 @@ task testAgainstOneThirdUpgradedCluster(type: StandaloneRestIntegTestTask) { systemProperty 'tests.skip_delete_model_index', 'true' systemProperty 'tests.plugin_bwc_version', neural_search_bwc_version + //Excluding MultiModalSearchIT, HybridSearchIT, NeuralSparseSearchIT tests from neural search version 2.9 and 2.10 because these features were released in 2.11 version. 
if (neural_search_bwc_version.startsWith("2.9") || neural_search_bwc_version.startsWith("2.10")){ filter { excludeTestsMatching "org.opensearch.neuralsearch.bwc.MultiModalSearchIT.*" @@ -88,6 +90,7 @@ task testAgainstTwoThirdsUpgradedCluster(type: StandaloneRestIntegTestTask) { systemProperty 'tests.skip_delete_model_index', 'true' systemProperty 'tests.plugin_bwc_version', neural_search_bwc_version + //Excluding MultiModalSearchIT, HybridSearchIT, NeuralSparseSearchIT tests from neural search version 2.9 and 2.10 because these features were released in 2.11 version. if (neural_search_bwc_version.startsWith("2.9") || neural_search_bwc_version.startsWith("2.10")){ filter { excludeTestsMatching "org.opensearch.neuralsearch.bwc.MultiModalSearchIT.*" @@ -114,6 +117,7 @@ task testRollingUpgrade(type: StandaloneRestIntegTestTask) { systemProperty 'tests.skip_delete_model_index', 'true' systemProperty 'tests.plugin_bwc_version', neural_search_bwc_version + //Excluding MultiModalSearchIT, HybridSearchIT, NeuralSparseSearchIT tests from neural search version 2.9 and 2.10 because these features were released in 2.11 version. 
if (neural_search_bwc_version.startsWith("2.9") || neural_search_bwc_version.startsWith("2.10")){ filter { excludeTestsMatching "org.opensearch.neuralsearch.bwc.MultiModalSearchIT.*" diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRollingUpgradeTestCase.java index 98ce95b72..16ed2d229 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/AbstractRollingUpgradeTestCase.java @@ -4,19 +4,22 @@ */ package org.opensearch.neuralsearch.bwc; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Locale; import java.util.Optional; import org.junit.Before; import org.opensearch.common.settings.Settings; import org.opensearch.neuralsearch.BaseNeuralSearchIT; -import org.opensearch.test.rest.OpenSearchRestTestCase; +import static org.opensearch.neuralsearch.TestUtils.NEURAL_SEARCH_BWC_PREFIX; import static org.opensearch.neuralsearch.TestUtils.OLD_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.MIXED_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.UPGRADED_CLUSTER; -import static org.opensearch.neuralsearch.TestUtils.BWC_VERSION; import static org.opensearch.neuralsearch.TestUtils.ROLLING_UPGRADE_FIRST_ROUND; import static org.opensearch.neuralsearch.TestUtils.BWCSUITE_CLUSTER; -import static org.opensearch.neuralsearch.TestUtils.NEURAL_SEARCH_BWC_PREFIX; +import static org.opensearch.neuralsearch.TestUtils.BWC_VERSION; +import static org.opensearch.neuralsearch.TestUtils.generateModelId; +import org.opensearch.test.rest.OpenSearchRestTestCase; public abstract class AbstractRollingUpgradeTestCase extends BaseNeuralSearchIT { @@ -84,4 +87,47 @@ protected final Optional getBWCVersion() { return Optional.ofNullable(System.getProperty(BWC_VERSION, null)); } + protected 
String uploadTextEmbeddingModel() throws Exception { + String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); + return registerModelGroupAndGetModelId(requestBody); + } + + protected String registerModelGroupAndGetModelId(String requestBody) throws Exception { + String modelGroupRegisterRequestBody = Files.readString( + Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) + ); + String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); + return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); + } + + protected void createPipelineProcessor(String modelId, String pipelineName) throws Exception { + String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI())); + createPipelineProcessor(requestBody, pipelineName, modelId); + } + + protected String uploadTextImageEmbeddingModel() throws Exception { + String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); + return registerModelGroupAndGetModelId(requestBody); + } + + protected void createPipelineForTextImageProcessor(String modelId, String pipelineName) throws Exception { + String requestBody = Files.readString( + Path.of(classLoader.getResource("processor/PipelineForTextImageProcessorConfiguration.json").toURI()) + ); + createPipelineProcessor(requestBody, pipelineName, modelId); + } + + protected String uploadSparseEncodingModel() throws Exception { + String requestBody = Files.readString( + Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI()) + ); + return registerModelGroupAndGetModelId(requestBody); + } + + protected void createPipelineForSparseEncodingProcessor(String modelId, String pipelineName) throws Exception { + String requestBody = Files.readString( + 
Path.of(classLoader.getResource("processor/PipelineForSparseEncodingProcessorConfiguration.json").toURI()) + ); + createPipelineProcessor(requestBody, pipelineName, modelId); + } } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java index 068dffc5f..1e1669c9b 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/HybridSearchIT.java @@ -10,13 +10,12 @@ import java.util.List; import java.util.Map; import org.opensearch.index.query.MatchQueryBuilder; -import org.opensearch.neuralsearch.TestUtils; import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.PARAM_NAME_WEIGHTS; import static org.opensearch.neuralsearch.TestUtils.TEXT_EMBEDDING_PROCESSOR; import static org.opensearch.neuralsearch.TestUtils.DEFAULT_NORMALIZATION_METHOD; import static org.opensearch.neuralsearch.TestUtils.DEFAULT_COMBINATION_METHOD; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; +import static org.opensearch.neuralsearch.TestUtils.getModelId; import org.opensearch.neuralsearch.query.HybridQueryBuilder; import org.opensearch.neuralsearch.query.NeuralQueryBuilder; @@ -30,6 +29,7 @@ public class HybridSearchIT extends AbstractRollingUpgradeTestCase { private static final String TEXT_UPGRADED = "Hi earth"; private static final String QUERY = "Hi world"; private static final int NUM_DOCS_PER_ROUND = 1; + private static String modelId = ""; // Test rolling-upgrade normalization processor when index with multiple shards // Create Text Embedding Processor, Ingestion Pipeline, add document and search pipeline with noramlization processor @@ -38,7 +38,7 @@ public void testNormalizationProcessor_whenIndexWithMultipleShards_E2EFlow() thr 
waitForClusterHealthGreen(NODES_BWC_CLUSTER); switch (getClusterType()) { case OLD: - String modelId = uploadTextEmbeddingModel(); + modelId = uploadTextEmbeddingModel(); loadModel(modelId); createPipelineProcessor(modelId, PIPELINE_NAME); createIndexWithConfiguration( @@ -55,7 +55,7 @@ public void testNormalizationProcessor_whenIndexWithMultipleShards_E2EFlow() thr ); break; case MIXED: - modelId = getModelId(PIPELINE_NAME); + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR); int totalDocsCountMixed; if (isFirstMixedRound()) { totalDocsCountMixed = NUM_DOCS_PER_ROUND; @@ -67,20 +67,22 @@ public void testNormalizationProcessor_whenIndexWithMultipleShards_E2EFlow() thr } break; case UPGRADED: - modelId = getModelId(PIPELINE_NAME); - int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; - loadModel(modelId); - addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_UPGRADED, null, null); - validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId); - deleteSearchPipeline(SEARCH_PIPELINE_NAME); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR); + int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; + loadModel(modelId); + addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_UPGRADED, null, null); + validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, SEARCH_PIPELINE_NAME); + } break; + default: + throw new IllegalStateException("Unexpected value: " + getClusterType()); } } - private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId) throws Exception { + private void validateTestIndexOnUpgrade(final int numberOfDocs, final String modelId) throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(numberOfDocs, docCount); loadModel(modelId); @@ -101,25 +103,7 @@ 
private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId) throws } } - private String uploadTextEmbeddingModel() throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); - return registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - private void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI())); - createPipelineProcessor(requestBody, pipelineName, modelId); - } - - public HybridQueryBuilder getQueryBuilder(String modelId) { + public HybridQueryBuilder getQueryBuilder(final String modelId) { NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(); neuralQueryBuilder.fieldName("passage_embedding"); neuralQueryBuilder.modelId(modelId); @@ -134,10 +118,4 @@ public HybridQueryBuilder getQueryBuilder(String modelId) { return hybridQueryBuilder; } - - private String getModelId(String pipelineName) { - Map pipeline = getIngestionPipeline(pipelineName); - assertNotNull(pipeline); - return TestUtils.getModelId(pipeline, TEXT_EMBEDDING_PROCESSOR); - } } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java index a8570d56f..b91ec1322 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java +++ 
b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/MultiModalSearchIT.java @@ -7,10 +7,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; -import org.opensearch.neuralsearch.TestUtils; import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.TEXT_IMAGE_EMBEDDING_PROCESSOR; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; +import static org.opensearch.neuralsearch.TestUtils.getModelId; import org.opensearch.neuralsearch.query.NeuralQueryBuilder; public class MultiModalSearchIT extends AbstractRollingUpgradeTestCase { @@ -25,6 +24,7 @@ public class MultiModalSearchIT extends AbstractRollingUpgradeTestCase { private static final String TEST_IMAGE_TEXT_UPGRADED = "/9j/4AAQSkZJR8eydhgfwceocvlk"; private static final int NUM_DOCS_PER_ROUND = 1; + private static String modelId = ""; // Test rolling-upgrade test image embedding processor // Create Text Image Embedding Processor, Ingestion Pipeline and add document @@ -33,9 +33,9 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception { waitForClusterHealthGreen(NODES_BWC_CLUSTER); switch (getClusterType()) { case OLD: - String modelId = uploadTextImageEmbeddingModel(); + modelId = uploadTextImageEmbeddingModel(); loadModel(modelId); - createPipelineProcessor(modelId, PIPELINE_NAME); + createPipelineForTextImageProcessor(modelId, PIPELINE_NAME); createIndexWithConfiguration( getIndexNameForTest(), Files.readString(Path.of(classLoader.getResource("processor/IndexMappings.json").toURI())), @@ -44,7 +44,7 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception { addDocument(getIndexNameForTest(), "0", TEST_FIELD, TEXT, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT); break; case MIXED: - modelId = getModelId(PIPELINE_NAME); + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_IMAGE_EMBEDDING_PROCESSOR); int totalDocsCountMixed; if (isFirstMixedRound()) { 
totalDocsCountMixed = NUM_DOCS_PER_ROUND; @@ -56,19 +56,23 @@ public void testTextImageEmbeddingProcessor_E2EFlow() throws Exception { } break; case UPGRADED: - modelId = getModelId(PIPELINE_NAME); - int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; - loadModel(modelId); - addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_UPGRADED, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT_UPGRADED); - validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId, TEXT_UPGRADED, TEST_IMAGE_TEXT_UPGRADED); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_IMAGE_EMBEDDING_PROCESSOR); + int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; + loadModel(modelId); + addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_UPGRADED, TEST_IMAGE_FIELD, TEST_IMAGE_TEXT_UPGRADED); + validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId, TEXT_UPGRADED, TEST_IMAGE_TEXT_UPGRADED); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null); + } break; + default: + throw new IllegalStateException("Unexpected value: " + getClusterType()); } } - private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId, String text, String imageText) throws Exception { + private void validateTestIndexOnUpgrade(final int numberOfDocs, final String modelId, final String text, final String imageText) + throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(numberOfDocs, docCount); loadModel(modelId); @@ -76,30 +80,4 @@ private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId, String Map response = search(getIndexNameForTest(), neuralQueryBuilder, 1); assertNotNull(response); } - - private String uploadTextImageEmbeddingModel() throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); - return 
registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - protected void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString( - Path.of(classLoader.getResource("processor/PipelineForTextImageProcessorConfiguration.json").toURI()) - ); - createPipelineProcessor(requestBody, pipelineName, modelId); - } - - private String getModelId(String pipelineName) { - Map pipeline = getIngestionPipeline(pipelineName); - assertNotNull(pipeline); - return TestUtils.getModelId(pipeline, TEXT_IMAGE_EMBEDDING_PROCESSOR); - } } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java index d13a85752..70513686b 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/NeuralSparseSearchIT.java @@ -14,7 +14,7 @@ import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.SPARSE_ENCODING_PROCESSOR; import static org.opensearch.neuralsearch.TestUtils.objectToFloat; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; +import static org.opensearch.neuralsearch.TestUtils.getModelId; import org.opensearch.neuralsearch.query.NeuralSparseQueryBuilder; public class NeuralSparseSearchIT extends AbstractRollingUpgradeTestCase { @@ -32,6 +32,7 @@ public class 
NeuralSparseSearchIT extends AbstractRollingUpgradeTestCase { private final Map testRankFeaturesDoc2 = TestUtils.createRandomTokenWeightMap(TEST_TOKENS_2); private final Map testRankFeaturesDoc3 = TestUtils.createRandomTokenWeightMap(TEST_TOKENS_3); private static final int NUM_DOCS_PER_ROUND = 1; + private static String modelId = ""; // Test rolling-upgrade test sparse embedding processor // Create Sparse Encoding Processor, Ingestion Pipeline and add document @@ -40,9 +41,9 @@ public void testSparseEncodingProcessor_E2EFlow() throws Exception { waitForClusterHealthGreen(NODES_BWC_CLUSTER); switch (getClusterType()) { case OLD: - String modelId = uploadSparseEncodingModel(); + modelId = uploadSparseEncodingModel(); loadModel(modelId); - createPipelineProcessor(modelId, PIPELINE_NAME); + createPipelineForSparseEncodingProcessor(modelId, PIPELINE_NAME); createIndexWithConfiguration( getIndexNameForTest(), Files.readString(Path.of(classLoader.getResource("processor/SparseIndexMappings.json").toURI())), @@ -58,7 +59,7 @@ public void testSparseEncodingProcessor_E2EFlow() throws Exception { ); break; case MIXED: - modelId = getModelId(PIPELINE_NAME); + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), SPARSE_ENCODING_PROCESSOR); int totalDocsCountMixed; if (isFirstMixedRound()) { totalDocsCountMixed = NUM_DOCS_PER_ROUND; @@ -77,26 +78,29 @@ public void testSparseEncodingProcessor_E2EFlow() throws Exception { } break; case UPGRADED: - modelId = getModelId(PIPELINE_NAME); - int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; - loadModel(modelId); - addSparseEncodingDoc( - getIndexNameForTest(), - "2", - List.of(TEST_SPARSE_ENCODING_FIELD), - List.of(testRankFeaturesDoc3), - List.of(TEST_TEXT_FIELD), - List.of(TEXT_UPGRADED) - ); - validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), 
SPARSE_ENCODING_PROCESSOR); + int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; + loadModel(modelId); + addSparseEncodingDoc( + getIndexNameForTest(), + "2", + List.of(TEST_SPARSE_ENCODING_FIELD), + List.of(testRankFeaturesDoc3), + List.of(TEST_TEXT_FIELD), + List.of(TEXT_UPGRADED) + ); + validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null); + } break; + default: + throw new IllegalStateException("Unexpected value: " + getClusterType()); } } - private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId) throws Exception { + private void validateTestIndexOnUpgrade(final int numberOfDocs, final String modelId) throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(numberOfDocs, docCount); loadModel(modelId); @@ -113,32 +117,4 @@ private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId) throws float minExpectedScore = computeExpectedScore(modelId, testRankFeaturesDoc1, TEXT); assertTrue(minExpectedScore < objectToFloat(firstInnerHit.get("_score"))); } - - private String uploadSparseEncodingModel() throws Exception { - String requestBody = Files.readString( - Path.of(classLoader.getResource("processor/UploadSparseEncodingModelRequestBody.json").toURI()) - ); - return registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - private void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString( - 
Path.of(classLoader.getResource("processor/PipelineForSparseEncodingProcessorConfiguration.json").toURI()) - ); - createPipelineProcessor(requestBody, pipelineName, modelId); - } - - private String getModelId(String pipelineName) { - Map pipeline = getIngestionPipeline(pipelineName); - assertNotNull(pipeline); - return TestUtils.getModelId(pipeline, SPARSE_ENCODING_PROCESSOR); - } } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java index 54f0bfb71..51e548474 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/neuralsearch/bwc/SemanticSearchIT.java @@ -7,10 +7,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; -import org.opensearch.neuralsearch.TestUtils; import static org.opensearch.neuralsearch.TestUtils.NODES_BWC_CLUSTER; import static org.opensearch.neuralsearch.TestUtils.TEXT_EMBEDDING_PROCESSOR; -import static org.opensearch.neuralsearch.TestUtils.generateModelId; +import static org.opensearch.neuralsearch.TestUtils.getModelId; import org.opensearch.neuralsearch.query.NeuralQueryBuilder; public class SemanticSearchIT extends AbstractRollingUpgradeTestCase { @@ -20,6 +19,7 @@ public class SemanticSearchIT extends AbstractRollingUpgradeTestCase { private static final String TEXT_MIXED = "Hello world mixed"; private static final String TEXT_UPGRADED = "Hello world upgraded"; private static final int NUM_DOCS_PER_ROUND = 1; + private static String modelId = ""; // Test rolling-upgrade Semantic Search // Create Text Embedding Processor, Ingestion Pipeline and add document @@ -28,7 +28,7 @@ public void testSemanticSearch_E2EFlow() throws Exception { waitForClusterHealthGreen(NODES_BWC_CLUSTER); switch (getClusterType()) { case OLD: - String modelId = uploadTextEmbeddingModel(); + modelId = 
uploadTextEmbeddingModel(); loadModel(modelId); createPipelineProcessor(modelId, PIPELINE_NAME); createIndexWithConfiguration( @@ -39,7 +39,7 @@ public void testSemanticSearch_E2EFlow() throws Exception { addDocument(getIndexNameForTest(), "0", TEST_FIELD, TEXT, null, null); break; case MIXED: - modelId = getModelId(PIPELINE_NAME); + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR); int totalDocsCountMixed; if (isFirstMixedRound()) { totalDocsCountMixed = NUM_DOCS_PER_ROUND; @@ -51,20 +51,23 @@ public void testSemanticSearch_E2EFlow() throws Exception { } break; case UPGRADED: - modelId = getModelId(PIPELINE_NAME); - int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; - loadModel(modelId); - addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_UPGRADED, null, null); - validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId, TEXT_UPGRADED); - deletePipeline(PIPELINE_NAME); - deleteModel(modelId); - deleteIndex(getIndexNameForTest()); + try { + modelId = getModelId(getIngestionPipeline(PIPELINE_NAME), TEXT_EMBEDDING_PROCESSOR); + int totalDocsCountUpgraded = 3 * NUM_DOCS_PER_ROUND; + loadModel(modelId); + addDocument(getIndexNameForTest(), "2", TEST_FIELD, TEXT_UPGRADED, null, null); + validateTestIndexOnUpgrade(totalDocsCountUpgraded, modelId, TEXT_UPGRADED); + } finally { + wipeOfTestResources(getIndexNameForTest(), PIPELINE_NAME, modelId, null); + } break; + default: + throw new IllegalStateException("Unexpected value: " + getClusterType()); } } - private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId, String text) throws Exception { + private void validateTestIndexOnUpgrade(final int numberOfDocs, final String modelId, final String text) throws Exception { int docCount = getDocCount(getIndexNameForTest()); assertEquals(numberOfDocs, docCount); loadModel(modelId); @@ -76,28 +79,4 @@ private void validateTestIndexOnUpgrade(int numberOfDocs, String modelId, String Map response = 
search(getIndexNameForTest(), neuralQueryBuilder, 1); assertNotNull(response); } - - private String uploadTextEmbeddingModel() throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/UploadModelRequestBody.json").toURI())); - return registerModelGroupAndGetModelId(requestBody); - } - - private String registerModelGroupAndGetModelId(String requestBody) throws Exception { - String modelGroupRegisterRequestBody = Files.readString( - Path.of(classLoader.getResource("processor/CreateModelGroupRequestBody.json").toURI()) - ); - String modelGroupId = registerModelGroup(String.format(LOCALE, modelGroupRegisterRequestBody, generateModelId())); - return uploadModel(String.format(LOCALE, requestBody, modelGroupId)); - } - - protected void createPipelineProcessor(String modelId, String pipelineName) throws Exception { - String requestBody = Files.readString(Path.of(classLoader.getResource("processor/PipelineConfiguration.json").toURI())); - createPipelineProcessor(requestBody, pipelineName, modelId); - } - - private String getModelId(String pipelineName) { - Map pipeline = getIngestionPipeline(pipelineName); - assertNotNull(pipeline); - return TestUtils.getModelId(pipeline, TEXT_EMBEDDING_PROCESSOR); - } } diff --git a/src/testFixtures/java/org/opensearch/neuralsearch/BaseNeuralSearchIT.java b/src/testFixtures/java/org/opensearch/neuralsearch/BaseNeuralSearchIT.java index 3d3da5cc3..e062e439e 100644 --- a/src/testFixtures/java/org/opensearch/neuralsearch/BaseNeuralSearchIT.java +++ b/src/testFixtures/java/org/opensearch/neuralsearch/BaseNeuralSearchIT.java @@ -1056,6 +1056,22 @@ protected Float getFeatureFieldCompressedNumber(Float originNumber) { return Float.intBitsToFloat(freqBits); } + // Clean up all the test resources after execution of the tests.
+ protected void wipeOfTestResources(String indexName, String ingestPipeline, String modelId, String searchPipeline) throws IOException { + if (ingestPipeline != null) { + deletePipeline(ingestPipeline); + } + if (searchPipeline != null) { + deleteSearchPipeline(searchPipeline); + } + if (modelId != null) { + deleteModel(modelId); + } + if (indexName != null) { + deleteIndex(indexName); + } + } + /** * Enumeration for types of pipeline processors, used to lookup resources like create * processor request as those are type specific diff --git a/src/testFixtures/java/org/opensearch/neuralsearch/TestUtils.java b/src/testFixtures/java/org/opensearch/neuralsearch/TestUtils.java index 1b5edcd76..a6f4a3e0f 100644 --- a/src/testFixtures/java/org/opensearch/neuralsearch/TestUtils.java +++ b/src/testFixtures/java/org/opensearch/neuralsearch/TestUtils.java @@ -315,11 +315,12 @@ private static Optional getMaxScore(Map searchResponseAsM } public static String getModelId(Map pipeline, String processor) { + assertNotNull(pipeline); ArrayList> processors = (ArrayList>) pipeline.get("processors"); - Map textEmbeddingProcessor = (Map) processors.get(0).get(processor); - - return (String) textEmbeddingProcessor.get("model_id"); + String modelId = (String) textEmbeddingProcessor.get("model_id"); + assertNotNull(modelId); + return modelId; } public static String generateModelId() {