Skip to content

Commit

Permalink
Addressing Heemin's comment:
Browse files Browse the repository at this point in the history
Signed-off-by: Varun Jain <[email protected]>
  • Loading branch information
vibrantvarun committed Jan 29, 2024
1 parent 8b29c4b commit 3e9fff3
Show file tree
Hide file tree
Showing 10 changed files with 86 additions and 176 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import java.util.Collections;
import java.util.Map;

import org.junit.After;
import org.junit.Before;
import org.opensearch.common.settings.Settings;
import org.opensearch.neuralsearch.BaseNeuralSearchIT;
Expand All @@ -29,27 +28,17 @@ public class NeuralQueryEnricherProcessorIT extends BaseNeuralSearchIT {
private static final String ingest_pipeline = "nlp-pipeline";
private static final String TEST_KNN_VECTOR_FIELD_NAME_1 = "test-knn-vector-1";
private final float[] testVector = createRandomVector(TEST_DIMENSION);
private String modelId;

// JUnit 4 lifecycle hook: prepares shared fixtures before every test method in this class.
@Before
public void setUp() throws Exception {
super.setUp();
// Push the cluster-wide settings this integration suite depends on.
updateClusterSettings();
// Deploy a fresh ML model for this test run; the id is retained in a field
// so tearDown() can delete the model afterwards.
modelId = prepareModel();
}

// JUnit 4 lifecycle hook: releases per-test resources so later tests start clean.
// @SneakyThrows lets the checked exceptions from the cleanup helpers propagate
// without declaring them on the overridden signature.
@After
@SneakyThrows
public void tearDown() {
super.tearDown();
// Remove the search pipeline, the model deployed in setUp(), and the test index.
deleteSearchPipeline(search_pipeline);
deleteModel(modelId);
deleteIndex(index);
}

@SneakyThrows
public void testNeuralQueryEnricherProcessor_whenNoModelIdPassed_thenSuccess() {
initializeIndexIfNotExist();
initializeIndexIfNotExist(index);
String modelId = prepareModel();
createSearchRequestProcessor(modelId, search_pipeline);
createPipelineProcessor(modelId, ingest_pipeline, ProcessorType.TEXT_EMBEDDING);
updateIndexSettings(index, Settings.builder().put("index.search.default_pipeline", search_pipeline));
Expand All @@ -60,12 +49,13 @@ public void testNeuralQueryEnricherProcessor_whenNoModelIdPassed_thenSuccess() {
Map<String, Object> response = search(index, neuralQueryBuilder, 2);

assertFalse(response.isEmpty());

wipeOfTestResources(index, ingest_pipeline, modelId, search_pipeline);
}

@SneakyThrows
public void testNeuralQueryEnricherProcessor_whenHybridQueryBuilderAndNoModelIdPassed_thenSuccess() {
initializeIndexIfNotExist();
initializeIndexIfNotExist(index);
String modelId = prepareModel();
createSearchRequestProcessor(modelId, search_pipeline);
createPipelineProcessor(modelId, ingest_pipeline, ProcessorType.TEXT_EMBEDDING);
updateIndexSettings(index, Settings.builder().put("index.search.default_pipeline", search_pipeline));
Expand All @@ -78,23 +68,24 @@ public void testNeuralQueryEnricherProcessor_whenHybridQueryBuilderAndNoModelIdP
Map<String, Object> response = search(index, hybridQueryBuilder, 2);

assertFalse(response.isEmpty());
wipeOfTestResources(index, ingest_pipeline, modelId, search_pipeline);

}

@SneakyThrows
private void initializeIndexIfNotExist() {
if (index.equals(NeuralQueryEnricherProcessorIT.index) && !indexExists(index)) {
private void initializeIndexIfNotExist(String indexName) {
if (indexName.equals(NeuralQueryEnricherProcessorIT.index) && !indexExists(indexName)) {
prepareKnnIndex(
index,
indexName,
Collections.singletonList(new KNNFieldConfig(TEST_KNN_VECTOR_FIELD_NAME_1, TEST_DIMENSION, TEST_SPACE_TYPE))
);
addKnnDoc(
index,
indexName,
"1",
Collections.singletonList(TEST_KNN_VECTOR_FIELD_NAME_1),
Collections.singletonList(Floats.asList(testVector).toArray())
);
assertEquals(1, getDocCount(index));
assertEquals(1, getDocCount(indexName));
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
import java.util.stream.IntStream;

import org.apache.commons.lang3.Range;
import org.junit.After;
import org.junit.Before;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.TermQueryBuilder;
Expand Down Expand Up @@ -53,20 +52,11 @@ public class NormalizationProcessorIT extends BaseNeuralSearchIT {
private final float[] testVector2 = createRandomVector(TEST_DIMENSION);
private final float[] testVector3 = createRandomVector(TEST_DIMENSION);
private final float[] testVector4 = createRandomVector(TEST_DIMENSION);
private String modelId;

// JUnit 4 lifecycle hook: deploys a fresh ML model before each test; the id is
// stored in a field for cleanup in tearDown().
@Before
public void setUp() throws Exception {
super.setUp();
modelId = prepareModel();
}

// JUnit 4 lifecycle hook: removes the search pipeline and model created for the
// test, then restores cluster settings. @SneakyThrows avoids redeclaring the
// checked exceptions thrown by the cleanup helpers.
@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(SEARCH_PIPELINE);
deleteModel(modelId);
// Re-apply cluster settings so the next test class starts from a known state.
updateClusterSettings();
}

/**
Expand All @@ -90,6 +80,7 @@ public void tearDown() {
@SneakyThrows
public void testResultProcessor_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipelineWithResultsPostProcessor(SEARCH_PIPELINE);

NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(
Expand All @@ -115,7 +106,7 @@ public void testResultProcessor_whenOneShardAndQueryMatches_thenSuccessful() {
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 5, false);
deleteIndex(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

/**
Expand All @@ -133,6 +124,7 @@ public void testResultProcessor_whenOneShardAndQueryMatches_thenSuccessful() {
@SneakyThrows
public void testResultProcessor_whenDefaultProcessorConfigAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipelineWithDefaultResultsPostProcessor(SEARCH_PIPELINE);

NeuralQueryBuilder neuralQueryBuilder = new NeuralQueryBuilder(
Expand All @@ -158,12 +150,13 @@ public void testResultProcessor_whenDefaultProcessorConfigAndQueryMatches_thenSu
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 5, false);
deleteIndex(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

@SneakyThrows
public void testResultProcessor_whenMultipleShardsAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME);
String modelId = prepareModel();
createSearchPipelineWithResultsPostProcessor(SEARCH_PIPELINE);
int totalExpectedDocQty = 6;

Expand Down Expand Up @@ -218,7 +211,7 @@ public void testResultProcessor_whenMultipleShardsAndQueryMatches_thenSuccessful

// verify that all ids are unique
assertEquals(Set.copyOf(ids).size(), ids.size());
deleteIndex(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, modelId, SEARCH_PIPELINE);
}

@SneakyThrows
Expand All @@ -238,7 +231,7 @@ public void testResultProcessor_whenMultipleShardsAndNoMatches_thenSuccessful()
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 0, true);
deleteIndex(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, null, SEARCH_PIPELINE);
}

@SneakyThrows
Expand All @@ -259,7 +252,7 @@ public void testResultProcessor_whenMultipleShardsAndPartialMatches_thenSuccessf
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertQueryResults(searchResponseAsMap, 4, true, Range.between(0.33f, 1.0f));
deleteIndex(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, null, SEARCH_PIPELINE);
}

private void initializeIndexIfNotExist(String indexName) throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
import java.util.Collections;
import java.util.Map;

import org.junit.After;
import org.junit.Before;
import org.opensearch.client.ResponseException;
import org.opensearch.index.query.QueryBuilders;
Expand Down Expand Up @@ -54,20 +53,11 @@ public class ScoreCombinationIT extends BaseNeuralSearchIT {
private static final String L2_NORMALIZATION_METHOD = "l2";
private static final String HARMONIC_MEAN_COMBINATION_METHOD = "harmonic_mean";
private static final String GEOMETRIC_MEAN_COMBINATION_METHOD = "geometric_mean";
private String modelId;

// JUnit 4 lifecycle hook: deploys a fresh ML model per test; id is kept in a
// field so tearDown() can delete it.
@Before
public void setUp() throws Exception {
super.setUp();
modelId = prepareModel();
}

// JUnit 4 lifecycle hook: deletes the per-test search pipeline and model, then
// restores cluster settings. @SneakyThrows propagates checked exceptions from
// the cleanup helpers without changing the overridden signature.
@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(SEARCH_PIPELINE);
deleteModel(modelId);
updateClusterSettings();
}

/**
Expand Down Expand Up @@ -183,7 +173,7 @@ public void testArithmeticWeightedMean_whenWeightsPassed_thenSuccessful() {
containsString("in hybrid query")
)
);
deleteIndex(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_THREE_SHARDS_NAME, null, null, SEARCH_PIPELINE);
}

/**
Expand All @@ -207,6 +197,7 @@ public void testArithmeticWeightedMean_whenWeightsPassed_thenSuccessful() {
@SneakyThrows
public void testHarmonicMeanCombination_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
DEFAULT_NORMALIZATION_METHOD,
Expand Down Expand Up @@ -249,7 +240,7 @@ public void testHarmonicMeanCombination_whenOneShardAndQueryMatches_thenSuccessf
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapL2Norm, 5, new float[] { 0.5f, 1.0f });
deleteIndex(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

/**
Expand All @@ -273,6 +264,7 @@ public void testHarmonicMeanCombination_whenOneShardAndQueryMatches_thenSuccessf
@SneakyThrows
public void testGeometricMeanCombination_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
DEFAULT_NORMALIZATION_METHOD,
Expand Down Expand Up @@ -315,7 +307,7 @@ public void testGeometricMeanCombination_whenOneShardAndQueryMatches_thenSuccess
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapL2Norm, 5, new float[] { 0.5f, 1.0f });
deleteIndex(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

private void initializeIndexIfNotExist(String indexName) throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import java.util.Collections;
import java.util.Map;

import org.junit.After;
import org.junit.Before;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.neuralsearch.BaseNeuralSearchIT;
Expand Down Expand Up @@ -46,21 +45,11 @@ public class ScoreNormalizationIT extends BaseNeuralSearchIT {
private static final String L2_NORMALIZATION_METHOD = "l2";
private static final String HARMONIC_MEAN_COMBINATION_METHOD = "harmonic_mean";
private static final String GEOMETRIC_MEAN_COMBINATION_METHOD = "geometric_mean";
private String modelId;

// JUnit 4 lifecycle hook: applies the suite's cluster settings and deploys a
// fresh ML model; the model id is retained for deletion in tearDown().
@Before
public void setUp() throws Exception {
super.setUp();
updateClusterSettings();
modelId = prepareModel();
}

// JUnit 4 lifecycle hook: removes the search pipeline and the model deployed in
// setUp(). @SneakyThrows avoids redeclaring the helpers' checked exceptions.
@After
@SneakyThrows
public void tearDown() {
super.tearDown();
deleteSearchPipeline(SEARCH_PIPELINE);
deleteModel(modelId);
}

@Override
Expand Down Expand Up @@ -89,6 +78,7 @@ public boolean isUpdateClusterSettings() {
@SneakyThrows
public void testL2Norm_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
L2_NORMALIZATION_METHOD,
Expand Down Expand Up @@ -160,7 +150,7 @@ public void testL2Norm_whenOneShardAndQueryMatches_thenSuccessful() {
Map.of("search_pipeline", SEARCH_PIPELINE)
);
assertHybridSearchResults(searchResponseAsMapGeometricMean, 5, new float[] { 0.5f, 1.0f });
deleteIndex(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
wipeOfTestResources(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME, null, modelId, SEARCH_PIPELINE);
}

/**
Expand All @@ -184,6 +174,7 @@ public void testL2Norm_whenOneShardAndQueryMatches_thenSuccessful() {
@SneakyThrows
public void testMinMaxNorm_whenOneShardAndQueryMatches_thenSuccessful() {
initializeIndexIfNotExist(TEST_MULTI_DOC_INDEX_ONE_SHARD_NAME);
String modelId = prepareModel();
createSearchPipeline(
SEARCH_PIPELINE,
DEFAULT_NORMALIZATION_METHOD,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import org.apache.hc.core5.http.HttpHeaders;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.message.BasicHeader;
import org.junit.After;
import org.junit.Before;
import org.opensearch.client.Response;
import org.opensearch.common.xcontent.XContentHelper;
Expand All @@ -20,38 +19,25 @@

import com.google.common.collect.ImmutableList;

import lombok.SneakyThrows;

public class SparseEncodingProcessIT extends BaseNeuralSearchIT {

private static final String INDEX_NAME = "sparse_encoding_index";

private static final String PIPELINE_NAME = "pipeline-sparse-encoding";
private String modelId;

// JUnit 4 lifecycle hook: deploys a fresh sparse-encoding model per test; the
// id is stored in a field so tearDown() can delete it.
@Before
public void setUp() throws Exception {
super.setUp();
modelId = prepareSparseEncodingModel();
}

// JUnit 4 lifecycle hook: deletes the per-test model and index, then restores
// cluster settings. @SneakyThrows propagates the helpers' checked exceptions
// without changing the overridden signature.
@After
@SneakyThrows
public void tearDown() {
super.tearDown();
/* this is required to minimize chance of model not being deployed due to open memory CB,
* this happens in case we leave model from previous test case. We use new model for every test, and old model
* can be undeployed and deleted to free resources after each test case execution.
*/
deleteModel(modelId);
deleteIndex(INDEX_NAME);
updateClusterSettings();
}

// End-to-end check of the sparse-encoding ingest processor: a document ingested
// through the pipeline should land in the index exactly once.
public void testSparseEncodingProcessor() throws Exception {
// Deploy a model locally for this test and wire it into an ingest pipeline.
String modelId = prepareSparseEncodingModel();
createPipelineProcessor(modelId, PIPELINE_NAME, ProcessorType.SPARSE_ENCODING);
createSparseEncodingIndex();
ingestDocument();
// One ingested document should be searchable/countable in the target index.
assertEquals(1, getDocCount(INDEX_NAME));
// Clean up index, pipeline, and model in one call (no search pipeline used here).
wipeOfTestResources(INDEX_NAME, PIPELINE_NAME, modelId, null);
}

private void createSparseEncodingIndex() throws Exception {
Expand Down
Loading

0 comments on commit 3e9fff3

Please sign in to comment.