diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 1090226b0..1d8bf6884 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -4,7 +4,8 @@ on:
branches:
- "*"
- "feature/**"
- pull_request:
+ pull_request_target:
+ types: [ opened, synchronize, reopened ]
branches:
- "*"
- "feature/**"
@@ -55,7 +56,7 @@ jobs:
- name: Run build
run: |
- ./gradlew precommit
+ ./gradlew precommit --parallel
- name: Upload Coverage Report
uses: codecov/codecov-action@v1
diff --git a/.idea/runConfigurations/DebugNeuralSearch.xml b/.idea/runConfigurations/DebugNeuralSearch.xml
new file mode 100644
index 000000000..6246c430e
--- /dev/null
+++ b/.idea/runConfigurations/DebugNeuralSearch.xml
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations/Run_Neural_Search.xml b/.idea/runConfigurations/Run_Neural_Search.xml
new file mode 100644
index 000000000..d881bd512
--- /dev/null
+++ b/.idea/runConfigurations/Run_Neural_Search.xml
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
+
+
+ true
+ true
+ false
+
+
+
\ No newline at end of file
diff --git a/.idea/runConfigurations/Run_With_Debug_Port.xml b/.idea/runConfigurations/Run_With_Debug_Port.xml
new file mode 100644
index 000000000..3a2b6fe4f
--- /dev/null
+++ b/.idea/runConfigurations/Run_With_Debug_Port.xml
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ true
+ false
+
+
+
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index 7bde310fa..614131f33 100644
--- a/build.gradle
+++ b/build.gradle
@@ -6,6 +6,7 @@
* This project uses @Incubating APIs which are subject to change.
*/
import org.opensearch.gradle.test.RestIntegTestTask
+import java.util.concurrent.Callable
apply plugin: 'java'
apply plugin: 'idea'
@@ -13,6 +14,7 @@ apply plugin: 'opensearch.opensearchplugin'
apply plugin: 'opensearch.pluginzip'
apply plugin: 'jacoco'
apply plugin: "com.diffplug.spotless"
+apply plugin: 'io.freefair.lombok'
def pluginName = 'neural-search'
def pluginDescription = 'A plugin that adds dense neural retrieval into the OpenSearch ecosytem'
@@ -59,7 +61,6 @@ opensearchplugin {
noticeFile rootProject.file('NOTICE')
}
-licenseHeaders.enabled = true
dependencyLicenses.enabled = false
thirdPartyAudit.enabled = false
loggerUsageCheck.enabled = false
@@ -68,7 +69,9 @@ validateNebulaPom.enabled = false
buildscript {
ext {
- opensearch_version = System.getProperty("opensearch.version", "3.0.0-SNAPSHOT")
+    // As the 3.0.0 and 2.4.0 versions of K-NN are not available on darwin, we need to keep the OpenSearch version at 2.3 for now.
+ // Github issue: https://github.com/opensearch-project/opensearch-build/issues/2662
+ opensearch_version = System.getProperty("opensearch.version", "2.3.0-SNAPSHOT")
buildVersionQualifier = System.getProperty("build.version_qualifier", "")
isSnapshot = "true" == System.getProperty("build.snapshot", "true")
version_tokens = opensearch_version.tokenize('-')
@@ -82,6 +85,13 @@ buildscript {
opensearch_build += "-SNAPSHOT"
}
opensearch_group = "org.opensearch"
+ opensearch_no_snapshot = opensearch_build.replace("-SNAPSHOT","")
+ k_NN_resource_folder = "build/resources/k-NN"
+ ml_common_resource_folder = "build/resources/ml-commons"
+    // TODO: we need a better way to construct this URL, as it is only valid for released versions of K-NN and the ML plugin.
+ // Github issue: https://github.com/opensearch-project/opensearch-build/issues/2662
+ k_NN_build_download_url = "https://aws.oss.sonatype.org/content/repositories/releases/org/opensearch/plugin/opensearch-knn/" + opensearch_no_snapshot + "/opensearch-knn-" + opensearch_no_snapshot +".zip"
+ ml_common_build_download_url = "https://aws.oss.sonatype.org/content/repositories/releases/org/opensearch/plugin/opensearch-ml-plugin/" + opensearch_no_snapshot + "/opensearch-ml-plugin-" + opensearch_no_snapshot +".zip"
}
repositories {
@@ -93,6 +103,7 @@ buildscript {
dependencies {
classpath "${opensearch_group}.gradle:build-tools:${opensearch_version}"
classpath "com.diffplug.spotless:spotless-plugin-gradle:5.6.1"
+ classpath "io.freefair.gradle:lombok-plugin:6.4.3"
}
}
@@ -113,12 +124,25 @@ repositories {
maven { url "https://plugins.gradle.org/m2/" }
}
+dependencies {
+ api "org.opensearch:opensearch:${opensearch_version}"
+ api group: 'org.opensearch', name:'opensearch-ml-client', version: "${opensearch_build}"
+}
+
+compileJava {
+ options.compilerArgs.addAll(["-processor", 'lombok.launch.AnnotationProcessorHider$AnnotationProcessor'])
+}
+compileTestJava {
+ options.compilerArgs.addAll(["-processor", 'lombok.launch.AnnotationProcessorHider$AnnotationProcessor'])
+}
+
def opensearch_tmp_dir = rootProject.file('build/private/opensearch_tmp').absoluteFile
opensearch_tmp_dir.mkdirs()
def _numNodes = findProperty('numNodes') as Integer ?: 1
test {
include '**/*Tests.class'
+ systemProperty 'tests.security.manager', 'false'
}
// Setting up Integration Tests
@@ -161,8 +185,47 @@ integTest {
testClusters.integTest {
testDistribution = "ARCHIVE"
+ // Install ML-Plugin on the integTest cluster nodes
+ plugin(provider(new Callable(){
+ @Override
+ RegularFile call() throws Exception {
+ return new RegularFile() {
+ @Override
+ File getAsFile() {
+ if (new File("$project.rootDir/$ml_common_resource_folder").exists()) {
+ project.delete(files("$project.rootDir/$ml_common_resource_folder"))
+ }
+ project.mkdir ml_common_resource_folder
+ ant.get(src: ml_common_build_download_url,
+ dest: ml_common_resource_folder,
+ httpusecaches: false)
+ return fileTree(ml_common_resource_folder).getSingleFile()
+ }
+ }
+ }
+ }))
+
+ // Install K-NN plugin on the integTest cluster nodes
+ plugin(provider(new Callable(){
+ @Override
+ RegularFile call() throws Exception {
+ return new RegularFile() {
+ @Override
+ File getAsFile() {
+ if (new File("$project.rootDir/$k_NN_resource_folder").exists()) {
+ project.delete(files("$project.rootDir/$k_NN_resource_folder"))
+ }
+ project.mkdir k_NN_resource_folder
+ ant.get(src: k_NN_build_download_url,
+ dest: k_NN_resource_folder,
+ httpusecaches: false)
+ return fileTree(k_NN_resource_folder).getSingleFile()
+ }
+ }
+ }
+ }))
- // This installs our plugin into the testClusters
+ // This installs our neural-search plugin into the testClusters
plugin(project.tasks.bundlePlugin.archiveFile)
// Cluster shrink exception thrown if we try to set numberOfNodes to 1, so only apply if > 1
if (_numNodes > 1) numberOfNodes = _numNodes
@@ -178,6 +241,28 @@ testClusters.integTest {
}
}
+// Remote Integration Tests
+task integTestRemote(type: RestIntegTestTask) {
+ testClassesDirs = sourceSets.test.output.classesDirs
+ classpath = sourceSets.test.runtimeClasspath
+
+ systemProperty "https", System.getProperty("https")
+ systemProperty "user", System.getProperty("user")
+ systemProperty "password", System.getProperty("password")
+
+ systemProperty 'cluster.number_of_nodes', "${_numNodes}"
+
+ systemProperty 'tests.security.manager', 'false'
+
+ // Run tests with remote cluster only if rest case is defined
+ if (System.getProperty("tests.rest.cluster") != null) {
+ filter {
+ includeTestsMatching "org.opensearch.neuralsearch.*IT"
+ }
+ }
+}
+
+
run {
useCluster testClusters.integTest
}
diff --git a/src/test/java/org/opensearch/neuralsearch/OpenSearchSecureRestTestCase.java b/src/test/java/org/opensearch/neuralsearch/OpenSearchSecureRestTestCase.java
new file mode 100644
index 000000000..19fd74659
--- /dev/null
+++ b/src/test/java/org/opensearch/neuralsearch/OpenSearchSecureRestTestCase.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.neuralsearch;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import org.apache.http.Header;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.junit.After;
+import org.opensearch.client.Request;
+import org.opensearch.client.Response;
+import org.opensearch.client.RestClient;
+import org.opensearch.client.RestClientBuilder;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.common.util.concurrent.ThreadContext;
+import org.opensearch.common.xcontent.DeprecationHandler;
+import org.opensearch.common.xcontent.NamedXContentRegistry;
+import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.test.rest.OpenSearchRestTestCase;
+
+/**
+ * Base class for running the integration tests on a secure cluster. The plugin IT test should either extend this
+ * class or create another base class by extending this class to make sure that their IT can be run on secure clusters.
+ */
+public abstract class OpenSearchSecureRestTestCase extends OpenSearchRestTestCase {
+
+ private static final String PROTOCOL_HTTP = "http";
+ private static final String PROTOCOL_HTTPS = "https";
+ private static final String SYS_PROPERTY_KEY_HTTPS = "https";
+ private static final String SYS_PROPERTY_KEY_CLUSTER_ENDPOINT = "tests.rest.cluster";
+ private static final String SYS_PROPERTY_KEY_USER = "user";
+ private static final String SYS_PROPERTY_KEY_PASSWORD = "password";
+ private static final String DEFAULT_SOCKET_TIMEOUT = "60s";
+ private static final String INTERNAL_INDICES_PREFIX = ".";
+ private static String protocol;
+
+ @Override
+ protected String getProtocol() {
+ if (protocol == null) {
+ protocol = readProtocolFromSystemProperty();
+ }
+ return protocol;
+ }
+
+ private String readProtocolFromSystemProperty() {
+ final boolean isHttps = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_HTTPS)).map("true"::equalsIgnoreCase).orElse(false);
+ if (!isHttps) {
+ return PROTOCOL_HTTP;
+ }
+
+ // currently only external cluster is supported for security enabled testing
+ if (Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_CLUSTER_ENDPOINT)).isEmpty()) {
+ throw new RuntimeException("cluster url should be provided for security enabled testing");
+ }
+ return PROTOCOL_HTTPS;
+ }
+
+ @Override
+ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException {
+ final RestClientBuilder builder = RestClient.builder(hosts);
+ if (PROTOCOL_HTTPS.equals(getProtocol())) {
+ configureHttpsClient(builder, settings);
+ } else {
+ configureClient(builder, settings);
+ }
+
+ return builder.build();
+ }
+
+ private void configureHttpsClient(final RestClientBuilder builder, final Settings settings) {
+ final Map headers = ThreadContext.buildDefaultHeaders(settings);
+ final Header[] defaultHeaders = new Header[headers.size()];
+ int i = 0;
+ for (Map.Entry entry : headers.entrySet()) {
+ defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue());
+ }
+ builder.setDefaultHeaders(defaultHeaders);
+ builder.setHttpClientConfigCallback(httpClientBuilder -> {
+ final String userName = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_USER))
+ .orElseThrow(() -> new RuntimeException("user name is missing"));
+ final String password = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_PASSWORD))
+ .orElseThrow(() -> new RuntimeException("password is missing"));
+ final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+ credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password));
+ try {
+ return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)
+ // disable the certificate since our testing cluster just uses the default security configuration
+ .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE)
+ .setSSLContext(SSLContextBuilder.create().loadTrustMaterial(null, (chains, authType) -> true).build());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+
+ final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT);
+ final TimeValue socketTimeout = TimeValue.parseTimeValue(
+ socketTimeoutString == null ? DEFAULT_SOCKET_TIMEOUT : socketTimeoutString,
+ CLIENT_SOCKET_TIMEOUT
+ );
+ builder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis())));
+ if (settings.hasValue(CLIENT_PATH_PREFIX)) {
+ builder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX));
+ }
+ }
+
+ /**
+     * wipeAllIndices won't work because it cannot delete the security index. Use deleteExternalIndices instead.
+ */
+ @Override
+ protected boolean preserveIndicesUponCompletion() {
+ return true;
+ }
+
+ @After
+ public void deleteExternalIndices() throws IOException {
+ final Response response = client().performRequest(new Request("GET", "/_cat/indices?format=json" + "&expand_wildcards=all"));
+ final XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue());
+ try (
+ final XContentParser parser = xContentType.xContent()
+ .createParser(
+ NamedXContentRegistry.EMPTY,
+ DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+ response.getEntity().getContent()
+ )
+ ) {
+ final XContentParser.Token token = parser.nextToken();
+ final List