Fix for Flaky test for issue 384 #559

Merged: 11 commits, Jan 30, 2024
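
The flakiness tracked in #384 traces back to shared test state: several IT classes reused the same index names and relied on class-level @After teardown to undeploy models and delete pipelines, so resources left behind by one test could interfere with the next (for example by keeping the memory circuit breaker open, as the removed SparseEncodingProcessIT comment below explains). This change makes every test own its resources: index names become class-specific, each test deploys its own model with prepareModel(), initializeIndexIfNotExist(...) takes the index name explicitly, and cleanup happens inline through wipeOfTestResources(...). The sketch below illustrates that per-test lifecycle; the helper calls are the ones used in the hunks that follow, but the test body itself is a hypothetical example, not code from this PR.

```java
// Minimal sketch of the per-test resource lifecycle adopted in this PR.
// Assumes a test class extending BaseNeuralSearchIT with the usual constants
// (index, ingest_pipeline, search_pipeline, TEST_KNN_VECTOR_FIELD_NAME_1, TEST_DOC_TEXT1).
@SneakyThrows
public void testExample_whenModelPreparedPerTest_thenResourcesWipedInline() {
    initializeIndexIfNotExist(index);      // index name is now passed explicitly
    String modelId = prepareModel();       // each test deploys (and later removes) its own model

    createSearchRequestProcessor(modelId, search_pipeline);
    updateIndexSettings(index, Settings.builder().put("index.search.default_pipeline", search_pipeline));

    NeuralQueryBuilder neuralQueryBuilder =
        new NeuralQueryBuilder(TEST_KNN_VECTOR_FIELD_NAME_1, TEST_DOC_TEXT1, "", modelId, 5, null, null);
    Map<String, Object> response = search(index, neuralQueryBuilder, 2);
    assertFalse(response.isEmpty());

    // Inline cleanup replaces the class-level @After teardown methods removed in this PR.
    wipeOfTestResources(index, ingest_pipeline, modelId, search_pipeline);
}
```
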
CHANGELOG.md (1 change: 1 addition & 0 deletions)
@@ -23,6 +23,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fixing multiple issues reported in #497 ([#524](https://github.com/opensearch-project/neural-search/pull/524))
- Fix Flaky test reported in #433 ([#533](https://github.com/opensearch-project/neural-search/pull/533))
- Enable support for default model id on HybridQueryBuilder ([#541](https://github.com/opensearch-project/neural-search/pull/541))
- Fix Flaky test reported in #384 ([#559](https://github.com/opensearch-project/neural-search/pull/559))
### Infrastructure
- BWC tests for Neural Search ([#515](https://github.com/opensearch-project/neural-search/pull/515))
- Github action to run integ tests in secure opensearch cluster ([#535](https://github.com/opensearch-project/neural-search/pull/535))
NeuralQueryEnricherProcessorIT.java
@@ -11,7 +11,6 @@
import java.util.Collections;
import java.util.Map;

import org.junit.After;
import org.junit.Before;
import org.opensearch.common.settings.Settings;
import org.opensearch.neuralsearch.BaseNeuralSearchIT;
@@ -34,22 +33,12 @@ public class NeuralQueryEnricherProcessorIT extends BaseNeuralSearchIT {
public void setUp() throws Exception {
super.setUp();
updateClusterSettings();
prepareModel();
}

@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(search_pipeline);
findDeployedModels().forEach(this::deleteModel);
deleteIndex(index);
}

@SneakyThrows
public void testNeuralQueryEnricherProcessor_whenNoModelIdPassed_thenSuccess() {
initializeIndexIfNotExist();
String modelId = getDeployedModelId();
initializeIndexIfNotExist(index);
String modelId = prepareModel();
createSearchRequestProcessor(modelId, search_pipeline);
createPipelineProcessor(modelId, ingest_pipeline, ProcessorType.TEXT_EMBEDDING);
updateIndexSettings(index, Settings.builder().put("index.search.default_pipeline", search_pipeline));
@@ -60,13 +49,13 @@ public void testNeuralQueryEnricherProcessor_whenNoModelIdPassed_thenSuccess() {
Map<String, Object> response = search(index, neuralQueryBuilder, 2);

assertFalse(response.isEmpty());

wipeOfTestResources(index, ingest_pipeline, modelId, search_pipeline);
}

@SneakyThrows
public void testNeuralQueryEnricherProcessor_whenHybridQueryBuilderAndNoModelIdPassed_thenSuccess() {
initializeIndexIfNotExist();
String modelId = getDeployedModelId();
initializeIndexIfNotExist(index);
String modelId = prepareModel();
createSearchRequestProcessor(modelId, search_pipeline);
createPipelineProcessor(modelId, ingest_pipeline, ProcessorType.TEXT_EMBEDDING);
updateIndexSettings(index, Settings.builder().put("index.search.default_pipeline", search_pipeline));
@@ -79,23 +68,24 @@ public void testNeuralQueryEnricherProcessor_whenHybridQueryBuilderAndNoModelIdP
Map<String, Object> response = search(index, hybridQueryBuilder, 2);

assertFalse(response.isEmpty());
wipeOfTestResources(index, ingest_pipeline, modelId, search_pipeline);

}

@SneakyThrows
private void initializeIndexIfNotExist() {
if (index.equals(NeuralQueryEnricherProcessorIT.index) && !indexExists(index)) {
private void initializeIndexIfNotExist(String indexName) {
if (indexName.equals(NeuralQueryEnricherProcessorIT.index) && !indexExists(indexName)) {
prepareKnnIndex(
index,
indexName,
Collections.singletonList(new KNNFieldConfig(TEST_KNN_VECTOR_FIELD_NAME_1, TEST_DIMENSION, TEST_SPACE_TYPE))
);
addKnnDoc(
index,
indexName,
"1",
Collections.singletonList(TEST_KNN_VECTOR_FIELD_NAME_1),
Collections.singletonList(Floats.asList(testVector).toArray())
);
assertEquals(1, getDocCount(index));
assertEquals(1, getDocCount(indexName));
}
}
}
NormalizationProcessorIT.java
@@ -19,7 +19,6 @@
import java.util.stream.IntStream;

import org.apache.commons.lang3.Range;
import org.junit.After;
import org.junit.Before;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.TermQueryBuilder;
@@ -57,15 +56,7 @@ public class NormalizationProcessorIT extends BaseNeuralSearchIT {
@Before
public void setUp() throws Exception {
super.setUp();
prepareModel();
}

@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(SEARCH_PIPELINE);
findDeployedModels().forEach(this::deleteModel);
updateClusterSettings();
}

/**
@@ -89,8 +80,8 @@ public void tearDown() {
@SneakyThrows
public void testResultProcessor_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipelineWithResultsPostProcessor(SEARCH_PIPELINE);
String modelId = getDeployedModelId();

NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(
TEST_KNN_VECTOR_FIELD_NAME_1,
Expand All @@ -115,6 +106,7 @@ public void testResultProcessor_whenOneShardAndQueryMatches_thenSuccessful() {
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 5, false);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

/**
@@ -132,8 +124,8 @@ public void testResultProcessor_whenOneShardAndQueryMatches_thenSuccessful() {
@SneakyThrows
public void testResultProcessor_whenDefaultProcessorConfigAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipelineWithDefaultResultsPostProcessor(SEARCH_PIPELINE);
String modelId = getDeployedModelId();

NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(
TEST_KNN_VECTOR_FIELD_NAME_1,
@@ -158,13 +150,14 @@ public void testResultProcessor_whenDefaultProcessorConfigAndQueryMatches_thenSu
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 5, false);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

@SneakyThrows
public void testResultProcessor_whenMultipleShardsAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME);
String modelId = prepareModel();
createSearchPipelineWithResultsPostProcessor(SEARCH_PIPELINE);
String modelId = getDeployedModelId();
int totalExpectedDocQty = 6;

NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(
@@ -218,6 +211,7 @@ public void testResultProcessor_whenMultipleShardsAndQueryMatches_thenSuccessful

// verify that all ids are unique
assertEquals(Set.copyOf(ids).size(), ids.size());
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, modelId, SEARCH_PIPELINE);
}

@SneakyThrows
@@ -237,6 +231,7 @@ public void testResultProcessor_whenMultipleShardsAndNoMatches_thenSuccessful()
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 0, true);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, null, SEARCH_PIPELINE);
}

@SneakyThrows
@@ -257,6 +252,7 @@ public void testResultProcessor_whenMultipleShardsAndPartialMatches_thenSuccessf
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 4, true, Range.between(0.33f, 1.0f));
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, null, SEARCH_PIPELINE);
}

private void initializeIndexIfNotExist(String indexName) throws IOException {
ScoreCombinationIT.java
@@ -17,7 +17,6 @@
import java.util.Collections;
import java.util.Map;

import org.junit.After;
import org.junit.Before;
import org.opensearch.client.ResponseException;
import org.opensearch.index.query.QueryBuilders;
@@ -33,8 +32,8 @@
import lombok.SneakyThrows;

public class ScoreCombinationIT extends BaseNeuralSearchIT {
private static final String TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME = "test-neural-multi-doc-one-shard-index";
private static final String TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME = "test-neural-multi-doc-three-shards-index";
private static final String TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME = "test-score-combination-neural-multi-doc-one-shard-index";
private static final String TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME = "test-score-combination-neural-multi-doc-three-shards-index";
private static final String TEST_QUERY_TEXT3 = "hello";
private static final String TEST_QUERY_TEXT4 = "place";
private static final String TEST_QUERY_TEXT7 = "notexistingwordtwo";
@@ -58,15 +57,7 @@ public class ScoreCombinationIT extends BaseNeuralSearchIT {
@Before
public void setUp() throws Exception {
super.setUp();
prepareModel();
}

@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(SEARCH_PIPELINE);
findDeployedModels().forEach(this::deleteModel);
updateClusterSettings();
}

/**
@@ -182,6 +173,7 @@ public void testArithmeticWeightedMean_whenWeightsPassed_thenSuccessful() {
containsString("in hybrid query")
)
);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, null, SEARCH_PIPELINE);
}

/**
@@ -205,13 +197,13 @@ public void testArithmeticWeightedMean_whenWeightsPassed_thenSuccessful() {
@SneakyThrows
public void testHarmonicMeanCombination_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
DEFAULT_NORMALIZATION_METHOD,
HARMONIC_MEAN_COMBINATION_METHOD,
Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.533f, 0.466f }))
);
String modelId = getDeployedModelId();

HybridQueryBuilder hybridQueryBuilderDefaultNorm = new HybridQueryBuilder();
hybridQueryBuilderDefaultNorm.add(new NeuralQueryBuilder(TEST_KNN_VECTOR_FIELD_NAME_1, TEST_DOC_TEXT1, "", modelId, 5, null, null));
@@ -248,6 +240,7 @@ public void testHarmonicMeanCombination_whenOneShardAndQueryMatches_thenSuccessf
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapL2Norm, 5, new float[] { 0.5f, 1.0f });
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

/**
@@ -271,13 +264,13 @@ public void testHarmonicMeanCombination_whenOneShardAndQueryMatches_thenSuccessf
@SneakyThrows
public void testGeometricMeanCombination_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
DEFAULT_NORMALIZATION_METHOD,
GEOMETRIC_MEAN_COMBINATION_METHOD,
Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.533f, 0.466f }))
);
String modelId = getDeployedModelId();

HybridQueryBuilder hybridQueryBuilderDefaultNorm = new HybridQueryBuilder();
hybridQueryBuilderDefaultNorm.add(new NeuralQueryBuilder(TEST_KNN_VECTOR_FIELD_NAME_1, TEST_DOC_TEXT1, "", modelId, 5, null, null));
@@ -314,6 +307,7 @@ public void testGeometricMeanCombination_whenOneShardAndQueryMatches_thenSuccess
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapL2Norm, 5, new float[] { 0.5f, 1.0f });
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

private void initializeIndexIfNotExist(String indexName) throws IOException {
ScoreNormalizationIT.java
@@ -14,7 +14,6 @@
import java.util.Collections;
import java.util.Map;

import org.junit.After;
import org.junit.Before;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.neuralsearch.BaseNeuralSearchIT;
@@ -29,7 +28,7 @@
import lombok.SneakyThrows;

public class ScoreNormalizationIT extends BaseNeuralSearchIT {
private static final String TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME = "test-neural-multi-doc-one-shard-index";
private static final String TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME = "test-score-normalization-neural-multi-doc-one-shard-index";
private static final String TEST_QUERY_TEXT3 = "hello";
private static final String TEST_DOC_TEXT1 = "Hello world";
private static final String TEST_DOC_TEXT2 = "Hi to this place";
@@ -51,15 +50,6 @@ public class ScoreNormalizationIT extends BaseNeuralSearchIT {
public void setUp() throws Exception {
super.setUp();
updateClusterSettings();
prepareModel();
}

@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(SEARCH_PIPELINE);
findDeployedModels().forEach(this::deleteModel);
}

@Override
@@ -88,13 +78,13 @@ public boolean isUpdateClusterSettings() {
@SneakyThrows
public void testL2Norm_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
L2_NORMALIZATION_METHOD,
DEFAULT_COMBINATION_METHOD,
Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.533f, 0.466f }))
);
String modelId = getDeployedModelId();

HybridQueryBuilder hybridQueryBuilderArithmeticMean = new HybridQueryBuilder();
hybridQueryBuilderArithmeticMean.add(
@@ -160,6 +150,7 @@ public void testL2Norm_whenOneShardAndQueryMatches_thenSuccessful() {
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapGeometricMean, 5, new float[] { 0.5f, 1.0f });
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

/**
@@ -183,13 +174,13 @@ public void testL2Norm_whenOneShardAndQueryMatches_thenSuccessful() {
@SneakyThrows
public void testMinMaxNorm_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
DEFAULT_NORMALIZATION_METHOD,
DEFAULT_COMBINATION_METHOD,
Map.of(PARAM_NAME_WEIGHTS, Arrays.toString(new float[] { 0.533f, 0.466f }))
);
String modelId = getDeployedModelId();

HybridQueryBuilder hybridQueryBuilderArithmeticMean = new HybridQueryBuilder();
hybridQueryBuilderArithmeticMean.add(
@@ -255,6 +246,7 @@ public void testMinMaxNorm_whenOneShardAndQueryMatches_thenSuccessful() {
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapGeometricMean, 5, new float[] { 0.6f, 1.0f });
deleteIndex(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
}

private void initializeIndexIfNotExist(String indexName) throws IOException {
Expand Down
SparseEncodingProcessIT.java
@@ -11,31 +11,24 @@
import org.apache.hc.core5.http.HttpHeaders;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.message.BasicHeader;
import org.junit.After;
import org.junit.Before;
import org.opensearch.client.Response;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.neuralsearch.BaseNeuralSearchIT;

import com.google.common.collect.ImmutableList;

import lombok.SneakyThrows;

public class SparseEncodingProcessIT extends BaseNeuralSearchIT {

private static final String INDEX_NAME = "sparse_encoding_index";

private static final String PIPELINE_NAME = "pipeline-sparse-encoding";

@After
@SneakyThrows
public void tearDown() {
super.tearDown();
/* this is required to minimize chance of model not being deployed due to open memory CB,
* this happens in case we leave model from previous test case. We use new model for every test, and old model
* can be undeployed and deleted to free resources after each test case execution.
*/
findDeployedModels().forEach(this::deleteModel);
@Before
public void setUp() throws Exception {
super.setUp();
updateClusterSettings();
}

public void testSparseEncodingProcessor() throws Exception {
@@ -44,6 +37,7 @@ public void testSparseEncodingProcessor() throws Exception {
createSparseEncodingIndex();
ingestDocument();
assertEquals(1, getDocCount(INDEX_NAME));
wipeOfTestResources(INDEX_NAME, PIPELINE_NAME, modelId, null);
}

private void createSparseEncodingIndex() throws Exception {
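
The teardown removed from SparseEncodingProcessIT above spelled out why per-test model cleanup matters: a model left deployed by a previous test can keep the memory circuit breaker open and block the next deployment. That cleanup is now funneled through wipeOfTestResources, whose implementation lives in BaseNeuralSearchIT and is not part of this diff. The snippet below is only a plausible shape for it, assuming it delegates to the same delete helpers the removed tearDown methods called (deleteSearchPipeline, deleteModel, deleteIndex) plus an ingest-pipeline delete; treat every detail as an assumption, not the actual implementation.

```java
// Hypothetical sketch of the cleanup helper in BaseNeuralSearchIT (not shown in this diff).
// Null arguments mean "nothing of that kind to clean up", matching calls such as
// wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE) above.
@SneakyThrows
protected void wipeOfTestResources(String indexName, String ingestPipeline, String modelId, String searchPipeline) {
    if (ingestPipeline != null) {
        deletePipeline(ingestPipeline);       // assumed helper name for ingest-pipeline cleanup
    }
    if (searchPipeline != null) {
        deleteSearchPipeline(searchPipeline); // same helper the removed tearDown methods used
    }
    if (modelId != null) {
        deleteModel(modelId);                 // undeploy and delete, freeing memory for the next test
    }
    if (indexName != null) {
        deleteIndex(indexName);               // drop the per-class test index
    }
}
```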