diff --git a/.github/workflows/delete_backport_branch.yml b/.github/workflows/delete_backport_branch.yml index 1c73d3d250ce3..22ce83c69a5d8 100644 --- a/.github/workflows/delete_backport_branch.yml +++ b/.github/workflows/delete_backport_branch.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest permissions: contents: write - if: startsWith(github.event.pull_request.head.ref,'backport/') + if: github.repository == 'opensearch-project/OpenSearch' && startsWith(github.event.pull_request.head.ref,'backport/') steps: - name: Delete merged branch uses: actions/github-script@v7 diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 7c65df1f677a5..572f6c981a052 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -7,7 +7,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - java: [ 11, 17, 21 ] + java: [ 11, 17, 21, 23 ] os: [ubuntu-latest, windows-latest, macos-latest, macos-13] steps: - uses: actions/checkout@v4 diff --git a/CHANGELOG.md b/CHANGELOG.md index 0cad8c764a8e5..f5622a177ee6c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Added - Add support for async deletion in S3BlobContainer ([#15621](https://github.com/opensearch-project/OpenSearch/pull/15621)) - MultiTermQueries in keyword fields now default to `indexed` approach and gated behind cluster setting ([#15637](https://github.com/opensearch-project/OpenSearch/pull/15637)) +- [Workload Management] Add Integration Tests for Workload Management CRUD APIs ([#15955](https://github.com/opensearch-project/OpenSearch/pull/15955)) - [Workload Management] QueryGroup resource cancellation framework changes ([#15651](https://github.com/opensearch-project/OpenSearch/pull/15651)) - [Workload Management] Add orchestrator for wlm resiliency (QueryGroupService) ([#15925](https://github.com/opensearch-project/OpenSearch/pull/15925)) +- [Workload Management] Add QueryGroup Stats API Logic ([#15777](https://github.com/opensearch-project/OpenSearch/pull/15777)) - Fallback to Remote cluster-state on Term-Version check mismatch - ([#15424](https://github.com/opensearch-project/OpenSearch/pull/15424)) - Implement WithFieldName interface in ValuesSourceAggregationBuilder & FieldSortBuilder ([#15916](https://github.com/opensearch-project/OpenSearch/pull/15916)) - Add successfulSearchShardIndices in searchRequestContext ([#15967](https://github.com/opensearch-project/OpenSearch/pull/15967), [#16110](https://github.com/opensearch-project/OpenSearch/pull/16110)) @@ -42,7 +44,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.azure:azure-core-http-netty` from 1.15.3 to 1.15.4 ([#16133](https://github.com/opensearch-project/OpenSearch/pull/16133)) - Bump `org.jline:jline` from 3.26.3 to 3.27.0 ([#16135](https://github.com/opensearch-project/OpenSearch/pull/16135)) - Bump `netty` from 4.1.112.Final to 4.1.114.Final ([#16182](https://github.com/opensearch-project/OpenSearch/pull/16182)) +- Bump `com.google.api-client:google-api-client` from 2.2.0 to 2.7.0 ([#16216](https://github.com/opensearch-project/OpenSearch/pull/16216)) - Bump `com.azure:azure-json` from 1.1.0 to 1.3.0 ([#16217](https://github.com/opensearch-project/OpenSearch/pull/16217)) +- Bump `io.grpc:grpc-api` from 1.57.2 to 1.68.0 ([#16213](https://github.com/opensearch-project/OpenSearch/pull/16213)) +- Bump `com.squareup.okio:okio` from 3.9.0 to 3.9.1 
([#16212](https://github.com/opensearch-project/OpenSearch/pull/16212)) ### Changed - Add support for docker compose v2 in TestFixturesPlugin ([#16049](https://github.com/opensearch-project/OpenSearch/pull/16049)) @@ -65,6 +70,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix race condition in node-join and node-left ([#15521](https://github.com/opensearch-project/OpenSearch/pull/15521)) - Streaming bulk request hangs ([#16158](https://github.com/opensearch-project/OpenSearch/pull/16158)) - Fix warnings from SLF4J on startup when repository-s3 is installed ([#16194](https://github.com/opensearch-project/OpenSearch/pull/16194)) +- Fix protobuf-java leak through client library dependencies ([#16254](https://github.com/opensearch-project/OpenSearch/pull/16254)) ### Security diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 43a62ca42c39b..3ec10c3646cd1 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -104,7 +104,7 @@ dependencies { api 'com.github.johnrengelman:shadow:8.1.1' api 'org.jdom:jdom2:2.0.6.1' api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}" - api 'de.thetaphi:forbiddenapis:3.6' + api 'de.thetaphi:forbiddenapis:3.8' api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6' api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 'org.apache.maven:maven-model:3.9.6' diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java index d83f1b01ee043..988b8b749ee64 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -51,7 +51,7 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { public TaskProvider<? extends Task> createTask(Project project) { project.getPlugins().apply(CompileOnlyResolvePlugin.class); project.getConfigurations().create("forbiddenApisCliJar"); - project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.5.1"); + project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.8"); Configuration jdkJarHellConfig = project.getConfigurations().create(JDK_JAR_HELL_CONFIG_NAME); if (BuildParams.isInternal() && project.getPath().equals(":libs:opensearch-core") == false) { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index 8d5ce9143cbac..9365f1c732229 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -77,9 +77,9 @@ import java.util.stream.Stream; public class DistroTestPlugin implements Plugin<Project> { - private static final String SYSTEM_JDK_VERSION = "21.0.4+7"; + private static final String SYSTEM_JDK_VERSION = "23+37"; private static final String SYSTEM_JDK_VENDOR = "adoptium"; - private static final String GRADLE_JDK_VERSION = "21.0.4+7"; + private static final String GRADLE_JDK_VERSION = "23+37"; private static final String GRADLE_JDK_VENDOR = "adoptium"; // all distributions used by distro tests. 
this is temporary until tests are per distribution diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index fdc93d8037ce6..b08df5bf09bac 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -50,7 +50,9 @@ restResources { } dependencies { - api project(':server') + api(project(':server')) { + exclude group: 'com.google.protobuf' + } api project(':client:rest') api project(':modules:mapper-extras') api project(':modules:parent-join') diff --git a/gradle.properties b/gradle.properties index 4e8c5b98116c1..d4634f1a7aeea 100644 --- a/gradle.properties +++ b/gradle.properties @@ -31,9 +31,7 @@ systemProp.org.gradle.dependency.duplicate.project.detection=false # Enforce the build to fail on deprecated gradle api usage systemProp.org.gradle.warning.mode=fail -# forcing to use TLS1.2 to avoid failure in vault -# see https://github.com/hashicorp/vault/issues/8750#issuecomment-631236121 -systemProp.jdk.tls.client.protocols=TLSv1.2 +systemProp.jdk.tls.client.protocols=TLSv1.2,TLSv1.3 # jvm args for faster test execution by default systemProp.tests.jvm.argline=-XX:TieredStopAtLevel=1 -XX:ReservedCodeCacheSize=64m diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle index 3ca6b1fe84ea7..eb27dd1a76634 100644 --- a/gradle/code-coverage.gradle +++ b/gradle/code-coverage.gradle @@ -19,7 +19,7 @@ repositories { allprojects { plugins.withId('jacoco') { - jacoco.toolVersion = '0.8.10' + jacoco.toolVersion = '0.8.12' } } diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index 5d38f85c40c89..6b1f6898eec9b 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -111,7 +111,7 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_2_17_0 = new Version(2170099, org.apache.lucene.util.Version.LUCENE_9_11_1); public static final Version V_2_17_1 = new Version(2170199, org.apache.lucene.util.Version.LUCENE_9_11_1); public static final Version V_2_17_2 = new Version(2170299, org.apache.lucene.util.Version.LUCENE_9_11_1); - public static final Version V_2_18_0 = new Version(2180099, org.apache.lucene.util.Version.LUCENE_9_11_1); + public static final Version V_2_18_0 = new Version(2180099, org.apache.lucene.util.Version.LUCENE_9_12_0); public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_12_0); public static final Version CURRENT = V_3_0_0; diff --git a/libs/grok/src/main/java/org/opensearch/grok/GrokCaptureConfig.java b/libs/grok/src/main/java/org/opensearch/grok/GrokCaptureConfig.java index 6d1985f2165cd..6fabad3a7d5bd 100644 --- a/libs/grok/src/main/java/org/opensearch/grok/GrokCaptureConfig.java +++ b/libs/grok/src/main/java/org/opensearch/grok/GrokCaptureConfig.java @@ -114,8 +114,8 @@ public GrokCaptureExtracter forBoolean(Function<Consumer<Boolean>, GrokCaptureEx /** * Build an extract that has access to the "native" type of the extracter * match. This means that patterns like {@code %{NUMBER:bytes:float}} has - * access to an actual {@link float}. Extracters returned from this method - * should be stateless stateless and can be reused. Pathological implementations + * access to an actual float. Extracters returned from this method + * should be stateless and can be reused. Pathological implementations * of the {@code map} parameter could violate this, but the caller should * take care to stay sane. *

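The NativeExtracterMap hunk below defines one builder callback per native type, and the javadoc above requires that the extracters those callbacks build stay stateless so a single instance can be reused across matches. A minimal standalone sketch of that callback pattern, using simplified stand-in types (Extracter, TypeMap) rather than the real GrokCaptureExtracter signatures:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.IntConsumer;

public class StatelessExtracterSketch {
    // Stand-in for GrokCaptureExtracter: consumes one piece of matched text.
    interface Extracter {
        void extract(String matchedText);
    }

    // Mirrors the NativeExtracterMap shape: one builder callback per native type.
    interface TypeMap<T> {
        T forString(Function<Consumer<String>, Extracter> build);
        T forInt(Function<IntConsumer, Extracter> build);
    }

    public static void main(String[] args) {
        List<Object> sink = new ArrayList<>();
        // All mutable state lives in the sink the consumers write to, so the
        // extracters themselves stay stateless and safe to reuse.
        TypeMap<Extracter> map = new TypeMap<>() {
            @Override
            public Extracter forString(Function<Consumer<String>, Extracter> build) {
                return build.apply(sink::add);
            }

            @Override
            public Extracter forInt(Function<IntConsumer, Extracter> build) {
                return build.apply(sink::add);
            }
        };
        Extracter ints = map.forInt(consumer -> text -> consumer.accept(Integer.parseInt(text)));
        ints.extract("42");
        ints.extract("7"); // reuse is safe: no per-call state inside the extracter
        System.out.println(sink); // prints [42, 7]
    }
}
```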
@@ -144,27 +144,27 @@ public interface NativeExtracterMap { T forString(Function<Consumer<String>, GrokCaptureExtracter> buildExtracter); /** - * Called when the native type is an {@link int}. + * Called when the native type is an int. */ T forInt(Function<IntConsumer, GrokCaptureExtracter> buildExtracter); /** - * Called when the native type is an {@link long}. + * Called when the native type is a long. */ T forLong(Function<LongConsumer, GrokCaptureExtracter> buildExtracter); /** - * Called when the native type is an {@link float}. + * Called when the native type is a float. */ T forFloat(Function<FloatConsumer, GrokCaptureExtracter> buildExtracter); /** - * Called when the native type is an {@link double}. + * Called when the native type is a double. */ T forDouble(Function<DoubleConsumer, GrokCaptureExtracter> buildExtracter); /** - * Called when the native type is an {@link boolean}. + * Called when the native type is a boolean. */ T forBoolean(Function<Consumer<Boolean>, GrokCaptureExtracter> buildExtracter); } diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java index 02e1ff40f7ed6..7a56cfdb6278e 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java @@ -175,6 +175,7 @@ protected List buildDynamicHosts(Settings nodeSettings, int no exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8"); exchange.sendResponseHeaders(HttpStatus.SC_OK, responseBody.length); exchange.getResponseBody().write(responseBody); + exchange.getResponseBody().flush(); return; } } diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java index ce097667f9c4b..194a262c6ed7f 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java @@ -125,6 +125,7 @@ public void testEC2DiscoveryRetriesOnRateLimiting() throws IOException { exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8"); exchange.sendResponseHeaders(HttpStatus.SC_OK, responseBody.length); exchange.getResponseBody().write(responseBody); + exchange.getResponseBody().flush(); return; } } diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 80aae03bc0332..a08fa1d968e30 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -30,7 +30,7 @@ dependencies { api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" - api 'io.grpc:grpc-api:1.57.2' + api "io.grpc:grpc-api:${versions.grpc}" api 'io.opencensus:opencensus-api:0.31.1' api 'io.opencensus:opencensus-contrib-http-util:0.31.1' runtimeOnly "com.google.guava:guava:${versions.guava}" diff --git a/plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 deleted file mode 100644 index 8b320fdd2f9cc..0000000000000 --- a/plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c71a006b81ddae7bc4b7cb1d2da78c1b173761f4 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 new file mode 100644 index 0000000000000..bf45716c5b8ce --- 
/dev/null +++ b/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 @@ -0,0 +1 @@ +9a9f25c58d8d5b0fcf37ae889a50fec87e34ac08 \ No newline at end of file diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 94d25af94e67f..ab129ab7f116a 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -58,7 +58,7 @@ dependencies { api 'com.google.apis:google-api-services-storage:v1-rev20230617-2.0.0' - api 'com.google.api-client:google-api-client:2.2.0' + api 'com.google.api-client:google-api-client:2.7.0' api 'com.google.api.grpc:proto-google-common-protos:2.37.1' api 'com.google.api.grpc:proto-google-iam-v1:1.33.0' @@ -86,7 +86,7 @@ dependencies { api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" api 'org.threeten:threetenbp:1.4.4' - api 'io.grpc:grpc-api:1.57.2' + api "io.grpc:grpc-api:${versions.grpc}" api 'io.opencensus:opencensus-api:0.31.1' api 'io.opencensus:opencensus-contrib-http-util:0.31.1' diff --git a/plugins/repository-gcs/licenses/google-api-client-2.2.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-client-2.2.0.jar.sha1 deleted file mode 100644 index f9604d6837ca9..0000000000000 --- a/plugins/repository-gcs/licenses/google-api-client-2.2.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -10e53fd4d987e37190432e896bdaa62e8ea2c628 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-api-client-2.7.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-client-2.7.0.jar.sha1 new file mode 100644 index 0000000000000..dcbd27a0009bf --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-2.7.0.jar.sha1 @@ -0,0 +1 @@ +59c8e5e3c03f146561a83051af3ca945d40e02c6 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-api-1.57.2.jar.sha1 b/plugins/repository-gcs/licenses/grpc-api-1.57.2.jar.sha1 deleted file mode 100644 index 8b320fdd2f9cc..0000000000000 --- a/plugins/repository-gcs/licenses/grpc-api-1.57.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c71a006b81ddae7bc4b7cb1d2da78c1b173761f4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 new file mode 100644 index 0000000000000..bf45716c5b8ce --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 @@ -0,0 +1 @@ +9a9f25c58d8d5b0fcf37ae889a50fec87e34ac08 \ No newline at end of file diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 15c4a1647fd38..faa9b2bfff84d 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -146,6 +146,10 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', } final List miniHDFSArgs = [] + if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_23) { + miniHDFSArgs.add('-Djava.security.manager=allow') + } + // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) { miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}"); diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RepositoryPlugin.java index 110d91bfbd822..1048ec784ec4e 100644 --- 
a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RepositoryPlugin.java @@ -188,7 +188,7 @@ private static int allocatedProcessors(Settings settings) { } private static int urgentPoolCount(Settings settings) { - return boundedBy((allocatedProcessors(settings) + 7) / 8, 1, 2); + return boundedBy((allocatedProcessors(settings) + 1) / 2, 1, 2); } private static int priorityPoolCount(Settings settings) { diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryPluginTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryPluginTests.java new file mode 100644 index 0000000000000..9ac1564c807c3 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryPluginTests.java @@ -0,0 +1,81 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.repositories.s3; + +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.SizeUnit; +import org.opensearch.common.unit.SizeValue; +import org.opensearch.common.util.concurrent.OpenSearchThreadPoolExecutor; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ExecutorBuilder; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.threadpool.ThreadPool.ThreadPoolType; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; +import java.util.concurrent.Executor; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class S3RepositoryPluginTests extends OpenSearchTestCase { + + private static final String URGENT_FUTURE_COMPLETION = "urgent_future_completion"; + + public void testGetExecutorBuilders() throws IOException { + final int processors = randomIntBetween(1, 64); + Settings settings = Settings.builder().put("node.name", "test").put("node.processors", processors).build(); + Path configPath = createTempDir(); + ThreadPool threadPool = null; + try (S3RepositoryPlugin plugin = new S3RepositoryPlugin(settings, configPath)) { + List<ExecutorBuilder<?>> executorBuilders = plugin.getExecutorBuilders(settings); + assertNotNull(executorBuilders); + assertFalse(executorBuilders.isEmpty()); + threadPool = new ThreadPool(settings, executorBuilders.toArray(new ExecutorBuilder[0])); + final Executor executor = threadPool.executor(URGENT_FUTURE_COMPLETION); + assertNotNull(executor); + assertThat(executor, instanceOf(OpenSearchThreadPoolExecutor.class)); + final OpenSearchThreadPoolExecutor openSearchThreadPoolExecutor = (OpenSearchThreadPoolExecutor) executor; + final ThreadPool.Info info = threadPool.info(URGENT_FUTURE_COMPLETION); + int size = boundedBy((processors + 1) / 2, 1, 2); + assertThat(info.getName(), equalTo(URGENT_FUTURE_COMPLETION)); + assertThat(info.getThreadPoolType(), equalTo(ThreadPoolType.FIXED)); + assertThat(info.getQueueSize(), notNullValue()); + assertThat(info.getQueueSize(), equalTo(new SizeValue(10, SizeUnit.KILO))); + assertThat(openSearchThreadPoolExecutor.getQueue().remainingCapacity(), equalTo(10_000)); + + assertThat(info.getMin(), equalTo(size)); + assertThat(openSearchThreadPoolExecutor.getCorePoolSize(), equalTo(size)); + assertThat(info.getMax(), 
equalTo(size)); + assertThat(openSearchThreadPoolExecutor.getMaximumPoolSize(), equalTo(size)); + + final int availableProcessors = Runtime.getRuntime().availableProcessors(); + if (processors > availableProcessors) { + assertWarnings( + "setting [node.processors] to value [" + + processors + + "] which is more than available processors [" + + availableProcessors + + "] is deprecated" + ); + } + } finally { + if (threadPool != null) { + terminate(threadPool); + } + } + } + + private static int boundedBy(int value, int min, int max) { + return Math.min(max, Math.max(min, value)); + } + +} diff --git a/plugins/workload-management/build.gradle b/plugins/workload-management/build.gradle index cb14d22ef149f..ad6737bbd24b0 100644 --- a/plugins/workload-management/build.gradle +++ b/plugins/workload-management/build.gradle @@ -10,6 +10,7 @@ */ apply plugin: 'opensearch.yaml-rest-test' +apply plugin: 'opensearch.java-rest-test' apply plugin: 'opensearch.internal-cluster-test' opensearchplugin { diff --git a/plugins/workload-management/src/javaRestTest/java/org/opensearch/rest/WorkloadManagementRestIT.java b/plugins/workload-management/src/javaRestTest/java/org/opensearch/rest/WorkloadManagementRestIT.java new file mode 100644 index 0000000000000..7e1d61e57b6f7 --- /dev/null +++ b/plugins/workload-management/src/javaRestTest/java/org/opensearch/rest/WorkloadManagementRestIT.java @@ -0,0 +1,174 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.rest; + +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.test.rest.OpenSearchRestTestCase; + +import java.io.IOException; + +public class WorkloadManagementRestIT extends OpenSearchRestTestCase { + + public void testCreate() throws Exception { + Response response = performOperation("PUT", "_wlm/query_group", getCreateJson("analytics", "enforced", 0.4, 0.2)); + assertEquals(response.getStatusLine().getStatusCode(), 200); + performOperation("DELETE", "_wlm/query_group/analytics", null); + } + + public void testMultipleCreate() throws Exception { + Response response = performOperation("PUT", "_wlm/query_group", getCreateJson("analytics2", "enforced", 0.4, 0.2)); + assertEquals(response.getStatusLine().getStatusCode(), 200); + + Response response2 = performOperation("PUT", "_wlm/query_group", getCreateJson("users", "soft", 0.2, 0.1)); + assertEquals(response2.getStatusLine().getStatusCode(), 200); + + assertThrows( + ResponseException.class, + () -> performOperation("PUT", "_wlm/query_group", getCreateJson("users2", "soft", 0.41, 0.71)) + ); + performOperation("DELETE", "_wlm/query_group/analytics2", null); + performOperation("DELETE", "_wlm/query_group/users", null); + } + + public void testGet() throws Exception { + Response response = performOperation("PUT", "_wlm/query_group", getCreateJson("analytics3", "enforced", 0.4, 0.2)); + assertEquals(response.getStatusLine().getStatusCode(), 200); + + Response response2 = performOperation("GET", "_wlm/query_group/analytics3", null); + assertEquals(response2.getStatusLine().getStatusCode(), 200); + String responseBody2 = EntityUtils.toString(response2.getEntity()); + assertTrue(responseBody2.contains("\"name\":\"analytics3\"")); + 
assertTrue(responseBody2.contains("\"resiliency_mode\":\"enforced\"")); + assertTrue(responseBody2.contains("\"cpu\":0.4")); + assertTrue(responseBody2.contains("\"memory\":0.2")); + + assertThrows(ResponseException.class, () -> performOperation("GET", "_wlm/query_group/analytics97", null)); + performOperation("DELETE", "_wlm/query_group/analytics3", null); + } + + public void testDelete() throws Exception { + Response response = performOperation("PUT", "_wlm/query_group", getCreateJson("analytics4", "enforced", 0.4, 0.2)); + assertEquals(response.getStatusLine().getStatusCode(), 200); + + Response response2 = performOperation("DELETE", "_wlm/query_group/analytics4", null); + assertEquals(response2.getStatusLine().getStatusCode(), 200); + assertTrue(EntityUtils.toString(response2.getEntity()).contains("\"acknowledged\":true")); + + assertThrows(ResponseException.class, () -> performOperation("DELETE", "_wlm/query_group/analytics99", null)); + } + + public void testUpdate() throws Exception { + Response response = performOperation("PUT", "_wlm/query_group", getCreateJson("analytics5", "enforced", 0.4, 0.2)); + assertEquals(response.getStatusLine().getStatusCode(), 200); + + Response response2 = performOperation("PUT", "_wlm/query_group/analytics5", getUpdateJson("monitor", 0.41, 0.21)); + assertEquals(response2.getStatusLine().getStatusCode(), 200); + String responseBody2 = EntityUtils.toString(response2.getEntity()); + assertTrue(responseBody2.contains("\"name\":\"analytics5\"")); + assertTrue(responseBody2.contains("\"resiliency_mode\":\"monitor\"")); + assertTrue(responseBody2.contains("\"cpu\":0.41")); + assertTrue(responseBody2.contains("\"memory\":0.21")); + + String json = "{\n" + + " \"resource_limits\": {\n" + + " \"cpu\" : 1.1,\n" + + " \"memory\" : -0.1\n" + + " }\n" + + "}"; + assertThrows(ResponseException.class, () -> performOperation("PUT", "_wlm/query_group/analytics5", json)); + assertThrows( + ResponseException.class, + () -> performOperation("PUT", "_wlm/query_group/analytics98", getUpdateJson("monitor", 0.43, 0.23)) + ); + performOperation("DELETE", "_wlm/query_group/analytics5", null); + } + + public void testCRUD() throws Exception { + Response response = performOperation("PUT", "_wlm/query_group", getCreateJson("analytics6", "enforced", 0.4, 0.2)); + assertEquals(response.getStatusLine().getStatusCode(), 200); + + Response response1 = performOperation("PUT", "_wlm/query_group/analytics6", getUpdateJson("monitor", 0.41, 0.21)); + assertEquals(response1.getStatusLine().getStatusCode(), 200); + + Response response2 = performOperation("GET", "_wlm/query_group/analytics6", null); + assertEquals(response2.getStatusLine().getStatusCode(), 200); + String responseBody2 = EntityUtils.toString(response2.getEntity()); + assertTrue(responseBody2.contains("\"name\":\"analytics6\"")); + assertTrue(responseBody2.contains("\"resiliency_mode\":\"monitor\"")); + assertTrue(responseBody2.contains("\"cpu\":0.41")); + assertTrue(responseBody2.contains("\"memory\":0.21")); + + assertThrows( + ResponseException.class, + () -> performOperation("PUT", "_wlm/query_group", getCreateJson("users3", "monitor", 0.6, 0.8)) + ); + + Response response4 = performOperation("PUT", "_wlm/query_group", getCreateJson("users3", "monitor", 0.59, 0.79)); + assertEquals(response4.getStatusLine().getStatusCode(), 200); + + Response response5 = performOperation("DELETE", "_wlm/query_group/analytics6", null); + assertEquals(response5.getStatusLine().getStatusCode(), 200); + String responseBody5 = 
EntityUtils.toString(response5.getEntity()); + assertTrue(responseBody5.contains("\"acknowledged\":true")); + + Response response6 = performOperation("GET", "_wlm/query_group", null); + assertEquals(response6.getStatusLine().getStatusCode(), 200); + String responseBody6 = EntityUtils.toString(response6.getEntity()); + assertTrue(responseBody6.contains("\"query_groups\"")); + assertTrue(responseBody6.contains("\"users3\"")); + assertFalse(responseBody6.contains("\"analytics6\"")); + performOperation("DELETE", "_wlm/query_group/users3", null); + } + + static String getCreateJson(String name, String resiliencyMode, double cpu, double memory) { + return "{\n" + + " \"name\": \"" + + name + + "\",\n" + + " \"resiliency_mode\": \"" + + resiliencyMode + + "\",\n" + + " \"resource_limits\": {\n" + + " \"cpu\" : " + + cpu + + ",\n" + + " \"memory\" : " + + memory + + "\n" + + " }\n" + + "}"; + } + + static String getUpdateJson(String resiliencyMode, double cpu, double memory) { + return "{\n" + + " \"resiliency_mode\": \"" + + resiliencyMode + + "\",\n" + + " \"resource_limits\": {\n" + + " \"cpu\" : " + + cpu + + ",\n" + + " \"memory\" : " + + memory + + "\n" + + " }\n" + + "}"; + } + + Response performOperation(String method, String uriPath, String json) throws IOException { + Request request = new Request(method, uriPath); + if (json != null) { + request.setJsonEntity(json); + } + return client().performRequest(request); + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml index a75b1d0eac793..996c2aae8cfe4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml @@ -678,11 +678,6 @@ setup: - '{"index": {}}' - '{"date": "2016-03-01"}' - - do: - indices.forcemerge: - index: test_2 - max_num_segments: 1 - - do: search: index: test_2 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index ade9eb3eee0dc..408e3c9fe0f9f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -1102,11 +1102,6 @@ setup: - '{"index": {}}' - '{"date": "2016-03-01"}' - - do: - indices.forcemerge: - index: test_2 - max_num_segments: 1 - - do: search: index: test_2 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml index 0897e0bdd894b..fc82517788c91 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml @@ -133,11 +133,6 @@ setup: - '{"index": {}}' - '{"date": "2020-03-09", "v": 4}' - - do: - indices.forcemerge: - index: test_profile - max_num_segments: 1 - - do: search: index: test_profile diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/360_date_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/360_date_histogram.yml index 0cabbf6f25133..fe143653c0674 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/360_date_histogram.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/360_date_histogram.yml @@ -140,11 +140,6 @@ setup: - '{"index": {}}' - '{"date": "2025-02-14"}' - - do: - indices.forcemerge: - index: dhisto-agg-w-query - max_num_segments: 1 - - do: search: index: dhisto-agg-w-query diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml index 93ca668f93a6f..2cc49061d53e8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml @@ -568,11 +568,6 @@ setup: - '{"index": {}}' - '{"double" : 50}' - - do: - indices.forcemerge: - index: test_profile - max_num_segments: 1 - - do: search: index: test_profile diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index bcf23a37c0010..8d0a3b5788a70 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -1366,7 +1366,8 @@ public void testSingleShardStoreFetchUsingBatchAction() throws ExecutionExceptio DiscoveryNode[] nodes = getDiscoveryNodes(); TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch response = prepareAndSendRequest( new String[] { indexName }, - nodes + nodes, + false ); Index index = resolveIndex(indexName); ShardId shardId = new ShardId(index, 0); @@ -1379,12 +1380,14 @@ public void testSingleShardStoreFetchUsingBatchAction() throws ExecutionExceptio public void testShardStoreFetchMultiNodeMultiIndexesUsingBatchAction() throws Exception { internalCluster().startNodes(2); + ensureStableCluster(2); String indexName1 = "test1"; String indexName2 = "test2"; DiscoveryNode[] nodes = getDiscoveryNodes(); TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch response = prepareAndSendRequest( new String[] { indexName1, indexName2 }, - nodes + nodes, + true ); ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName1, indexName2).get(); for (ClusterSearchShardsGroup clusterSearchShardsGroup : searchShardsResponse.getGroups()) { @@ -1406,7 +1409,8 @@ public void testShardStoreFetchNodeNotConnectedUsingBatchAction() { String indexName = "test"; TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch response = prepareAndSendRequest( new String[] { indexName }, - new DiscoveryNode[] { nonExistingNode } + new DiscoveryNode[] { nonExistingNode }, + false ); assertTrue(response.hasFailures()); assertEquals(1, response.failures().size()); @@ -1513,10 +1517,14 @@ private void prepareIndices(String[] indices, int numberOfPrimaryShards, int num private TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch prepareAndSendRequest( String[] indices, - DiscoveryNode[] nodes + DiscoveryNode[] nodes, + boolean ensureGreen ) { Map shardAttributesMap = null; prepareIndices(indices, 1, 1); + if (ensureGreen) { + ensureGreen(indices); + } shardAttributesMap = prepareRequestMap(indices, 1); TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch response; return ActionTestUtils.executeBlocking( diff --git 
a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java index 3e1127e0ce240..ee51eff4e1bd5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java @@ -11,21 +11,29 @@ import org.opensearch.action.LatchedActionListener; import org.opensearch.action.admin.cluster.node.stats.NodeStats; import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; +import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.action.ActionListener; import org.opensearch.indices.RemoteStoreSettings; import org.opensearch.node.remotestore.RemoteStorePinnedTimestampService; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.OpenSearchIntegTestCase; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.REMOTE_STORE; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT; +import static org.opensearch.repositories.fs.ReloadableFsRepository.REPOSITORIES_SLOWDOWN_SETTING; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class RemoteStorePinnedTimestampsIT extends RemoteStoreBaseIntegTestCase { @@ -33,18 +41,29 @@ public class RemoteStorePinnedTimestampsIT extends RemoteStoreBaseIntegTestCase @Override protected Settings nodeSettings(int nodeOrdinal) { + String segmentRepoTypeAttributeKey = String.format( + Locale.getDefault(), + "node.attr." 
+ REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT, + REPOSITORY_NAME + ); + return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) + .put(segmentRepoTypeAttributeKey, ReloadableFsRepository.TYPE) .put(RemoteStoreSettings.CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_ENABLED.getKey(), true) .build(); } ActionListener noOpActionListener = new ActionListener<>() { @Override - public void onResponse(Void unused) {} + public void onResponse(Void unused) { + // do nothing + } @Override - public void onFailure(Exception e) {} + public void onFailure(Exception e) { + fail(); + } }; public void testTimestampPinUnpin() throws Exception { @@ -57,15 +76,8 @@ public void testTimestampPinUnpin() throws Exception { ); Tuple> pinnedTimestampWithFetchTimestamp = RemoteStorePinnedTimestampService.getPinnedTimestamps(); - long lastFetchTimestamp = pinnedTimestampWithFetchTimestamp.v1(); - assertEquals(-1L, lastFetchTimestamp); assertEquals(Set.of(), pinnedTimestampWithFetchTimestamp.v2()); - assertThrows( - IllegalArgumentException.class, - () -> remoteStorePinnedTimestampService.pinTimestamp(1234L, "ss1", noOpActionListener) - ); - long timestamp1 = System.currentTimeMillis() + 30000L; long timestamp2 = System.currentTimeMillis() + 60000L; long timestamp3 = System.currentTimeMillis() + 900000L; @@ -197,6 +209,104 @@ public void onFailure(Exception e) { remoteStorePinnedTimestampService.rescheduleAsyncUpdatePinnedTimestampTask(TimeValue.timeValueMinutes(3)); } + public void testPinExceptionsOlderTimestamp() throws InterruptedException { + prepareCluster(1, 1, INDEX_NAME, 0, 2); + ensureGreen(INDEX_NAME); + + RemoteStorePinnedTimestampService remoteStorePinnedTimestampService = internalCluster().getInstance( + RemoteStorePinnedTimestampService.class, + primaryNodeName(INDEX_NAME) + ); + + CountDownLatch latch = new CountDownLatch(1); + remoteStorePinnedTimestampService.pinTimestamp(1234L, "ss1", new LatchedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(Void unused) { + // We expect onFailure to be called + fail(); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e instanceof IllegalArgumentException); + } + }, latch)); + + latch.await(); + } + + public void testPinExceptionsRemoteStoreCallTakeTime() throws InterruptedException, ExecutionException { + prepareCluster(1, 1, INDEX_NAME, 0, 2); + ensureGreen(INDEX_NAME); + + RemoteStorePinnedTimestampService remoteStorePinnedTimestampService = internalCluster().getInstance( + RemoteStorePinnedTimestampService.class, + primaryNodeName(INDEX_NAME) + ); + + CountDownLatch latch = new CountDownLatch(1); + slowDownRepo(REPOSITORY_NAME, 10); + RemoteStoreSettings.setPinnedTimestampsLookbackInterval(TimeValue.timeValueSeconds(1)); + long timestampToBePinned = System.currentTimeMillis() + 600000; + remoteStorePinnedTimestampService.pinTimestamp(timestampToBePinned, "ss1", new LatchedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(Void unused) { + // We expect onFailure to be called + fail(); + } + + @Override + public void onFailure(Exception e) { + logger.error(e.getMessage()); + assertTrue(e instanceof RuntimeException); + assertTrue(e.getMessage().contains("Timestamp pinning took")); + + // Check if the timestamp was unpinned + remoteStorePinnedTimestampService.forceSyncPinnedTimestamps(); + assertFalse(RemoteStorePinnedTimestampService.getPinnedTimestamps().v2().contains(timestampToBePinned)); + } + }, latch)); + + latch.await(); + } + + protected void slowDownRepo(String repoName, 
int value) throws ExecutionException, InterruptedException { + GetRepositoriesRequest gr = new GetRepositoriesRequest(new String[] { repoName }); + GetRepositoriesResponse res = client().admin().cluster().getRepositories(gr).get(); + RepositoryMetadata rmd = res.repositories().get(0); + Settings.Builder settings = Settings.builder() + .put("location", rmd.settings().get("location")) + .put(REPOSITORIES_SLOWDOWN_SETTING.getKey(), value); + createRepository(repoName, rmd.type(), settings); + } + + public void testUnpinException() throws InterruptedException { + prepareCluster(1, 1, INDEX_NAME, 0, 2); + ensureGreen(INDEX_NAME); + + RemoteStorePinnedTimestampService remoteStorePinnedTimestampService = internalCluster().getInstance( + RemoteStorePinnedTimestampService.class, + primaryNodeName(INDEX_NAME) + ); + + CountDownLatch latch = new CountDownLatch(1); + remoteStorePinnedTimestampService.unpinTimestamp(1234L, "dummy-entity", new LatchedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(Void unused) { + // We expect onFailure to be called + fail(); + } + + @Override + public void onFailure(Exception e) { + logger.error(e.getMessage()); + assertTrue(e instanceof IllegalArgumentException); + } + }, latch)); + + latch.await(); + } + public void testLastSuccessfulFetchOfPinnedTimestampsPresentInNodeStats() throws Exception { logger.info("Starting up cluster manager"); logger.info("cluster.remote_store.pinned_timestamps.enabled set to true"); diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java index 12c7626e385a4..1e6eae87af53a 100644 --- a/server/src/main/java/org/opensearch/action/ActionModule.java +++ b/server/src/main/java/org/opensearch/action/ActionModule.java @@ -123,6 +123,8 @@ import org.opensearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksAction; import org.opensearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; +import org.opensearch.action.admin.cluster.wlm.TransportWlmStatsAction; +import org.opensearch.action.admin.cluster.wlm.WlmStatsAction; import org.opensearch.action.admin.indices.alias.IndicesAliasesAction; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.action.admin.indices.alias.TransportIndicesAliasesAction; @@ -375,6 +377,7 @@ import org.opensearch.rest.action.admin.cluster.RestRestoreSnapshotAction; import org.opensearch.rest.action.admin.cluster.RestSnapshotsStatusAction; import org.opensearch.rest.action.admin.cluster.RestVerifyRepositoryAction; +import org.opensearch.rest.action.admin.cluster.RestWlmStatsAction; import org.opensearch.rest.action.admin.cluster.dangling.RestDeleteDanglingIndexAction; import org.opensearch.rest.action.admin.cluster.dangling.RestImportDanglingIndexAction; import org.opensearch.rest.action.admin.cluster.dangling.RestListDanglingIndicesAction; @@ -622,6 +625,7 @@ public void reg actions.register(NodesInfoAction.INSTANCE, TransportNodesInfoAction.class); actions.register(RemoteInfoAction.INSTANCE, TransportRemoteInfoAction.class); actions.register(NodesStatsAction.INSTANCE, TransportNodesStatsAction.class); + actions.register(WlmStatsAction.INSTANCE, TransportWlmStatsAction.class); actions.register(RemoteStoreStatsAction.INSTANCE, TransportRemoteStoreStatsAction.class); actions.register(NodesUsageAction.INSTANCE, TransportNodesUsageAction.class); 
actions.register(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class); @@ -828,6 +832,7 @@ public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster) { registerHandler.accept(new RestClearVotingConfigExclusionsAction()); registerHandler.accept(new RestMainAction()); registerHandler.accept(new RestNodesInfoAction(settingsFilter)); + registerHandler.accept(new RestWlmStatsAction()); registerHandler.accept(new RestRemoteClusterInfoAction()); registerHandler.accept(new RestNodesStatsAction()); registerHandler.accept(new RestNodesUsageAction()); diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/wlm/TransportWlmStatsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/TransportWlmStatsAction.java new file mode 100644 index 0000000000000..9c2fb3f1689ec --- /dev/null +++ b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/TransportWlmStatsAction.java @@ -0,0 +1,79 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.admin.cluster.wlm; + +import org.opensearch.action.FailedNodeException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.nodes.TransportNodesAction; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; +import org.opensearch.wlm.QueryGroupService; +import org.opensearch.wlm.stats.WlmStats; + +import java.io.IOException; +import java.util.List; + +/** + * Transport action for obtaining WlmStats + * + * @opensearch.experimental + */ +public class TransportWlmStatsAction extends TransportNodesAction<WlmStatsRequest, WlmStatsResponse, WlmStatsRequest, WlmStats> { + + final QueryGroupService queryGroupService; + + @Inject + public TransportWlmStatsAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + QueryGroupService queryGroupService, + ActionFilters actionFilters + ) { + super( + WlmStatsAction.NAME, + threadPool, + clusterService, + transportService, + actionFilters, + WlmStatsRequest::new, + WlmStatsRequest::new, + ThreadPool.Names.MANAGEMENT, + WlmStats.class + ); + this.queryGroupService = queryGroupService; + } + + @Override + protected WlmStatsResponse newResponse(WlmStatsRequest request, List<WlmStats> wlmStats, List<FailedNodeException> failures) { + return new WlmStatsResponse(clusterService.getClusterName(), wlmStats, failures); + } + + @Override + protected WlmStatsRequest newNodeRequest(WlmStatsRequest request) { + return request; + } + + @Override + protected WlmStats newNodeResponse(StreamInput in) throws IOException { + return new WlmStats(in); + } + + @Override + protected WlmStats nodeOperation(WlmStatsRequest wlmStatsRequest) { + assert transportService.getLocalNode() != null; + return new WlmStats( + transportService.getLocalNode(), + queryGroupService.nodeStats(wlmStatsRequest.getQueryGroupIds(), wlmStatsRequest.isBreach()) + ); + } +} diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsAction.java new file mode 100644 index 0000000000000..2dfb10fc5dc90 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsAction.java @@ -0,0 +1,25 @@ +/* + * SPDX-License-Identifier: 
Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.admin.cluster.wlm; + +import org.opensearch.action.ActionType; + +/** + * Transport action for obtaining Workload Management Stats. + * + * @opensearch.experimental + */ +public class WlmStatsAction extends ActionType<WlmStatsResponse> { + public static final WlmStatsAction INSTANCE = new WlmStatsAction(); + public static final String NAME = "cluster:monitor/wlm/stats"; + + private WlmStatsAction() { + super(NAME, WlmStatsResponse::new); + } +} diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsRequest.java new file mode 100644 index 0000000000000..bf4f79faff478 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsRequest.java @@ -0,0 +1,65 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.admin.cluster.wlm; + +import org.opensearch.action.support.nodes.BaseNodesRequest; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +/** + * A request to get Workload Management Stats + */ +@ExperimentalApi +public class WlmStatsRequest extends BaseNodesRequest<WlmStatsRequest> { + + private final Set<String> queryGroupIds; + private final Boolean breach; + + public WlmStatsRequest(StreamInput in) throws IOException { + super(in); + this.queryGroupIds = new HashSet<>(Set.of(in.readStringArray())); + this.breach = in.readOptionalBoolean(); + } + + /** + * Get QueryGroup stats from nodes based on the nodes ids specified. If none are passed, stats + * for all nodes will be returned. + */ + public WlmStatsRequest(String[] nodesIds, Set<String> queryGroupIds, Boolean breach) { + super(false, nodesIds); + this.queryGroupIds = queryGroupIds; + this.breach = breach; + } + + public WlmStatsRequest() { + super(false, (String[]) null); + queryGroupIds = new HashSet<>(); + this.breach = false; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(queryGroupIds.toArray(new String[0])); + out.writeOptionalBoolean(breach); + } + + public Set<String> getQueryGroupIds() { + return queryGroupIds; + } + + public Boolean isBreach() { + return breach; + } +} diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsResponse.java new file mode 100644 index 0000000000000..2ce1b09a61fc6 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/WlmStatsResponse.java @@ -0,0 +1,73 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.admin.cluster.wlm; + +import org.opensearch.action.FailedNodeException; +import org.opensearch.action.support.nodes.BaseNodesResponse; +import org.opensearch.cluster.ClusterName; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.wlm.stats.QueryGroupStats; +import org.opensearch.wlm.stats.WlmStats; + +import java.io.IOException; +import java.util.List; + +/** + * A response for obtaining Workload Management Stats + */ +@ExperimentalApi +public class WlmStatsResponse extends BaseNodesResponse<WlmStats> implements ToXContentFragment { + + WlmStatsResponse(StreamInput in) throws IOException { + super(in); + } + + WlmStatsResponse(ClusterName clusterName, List<WlmStats> nodes, List<FailedNodeException> failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List<WlmStats> readNodesFrom(StreamInput in) throws IOException { + return in.readList(WlmStats::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List<WlmStats> nodes) throws IOException { + out.writeList(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + for (WlmStats wlmStats : getNodes()) { + builder.startObject(wlmStats.getNode().getId()); + QueryGroupStats queryGroupStats = wlmStats.getQueryGroupStats(); + queryGroupStats.toXContent(builder, params); + builder.endObject(); + } + return builder; + } + + @Override + public String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return builder.toString(); + } catch (IOException e) { + return "{ \"error\" : \"" + e.getMessage() + "\"}"; + } + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/package-info.java similarity index 54% rename from server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java rename to server/src/main/java/org/opensearch/action/admin/cluster/wlm/package-info.java index 3d6f130b9a7c8..13724b335e7c6 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/wlm/package-info.java @@ -6,7 +6,5 @@ * compatible open source license. */ -/** - * Responsible for handling all composite index codecs and operations associated with Composite99 codec - */ -package org.opensearch.index.codec.composite.composite99; +/** WlmStats transport handlers. 
*/ +package org.opensearch.action.admin.cluster.wlm; diff --git a/server/src/main/java/org/opensearch/client/ClusterAdminClient.java b/server/src/main/java/org/opensearch/client/ClusterAdminClient.java index 05f09c1a6e661..5ce4d442fbe0b 100644 --- a/server/src/main/java/org/opensearch/client/ClusterAdminClient.java +++ b/server/src/main/java/org/opensearch/client/ClusterAdminClient.java @@ -137,6 +137,8 @@ import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksResponse; +import org.opensearch.action.admin.cluster.wlm.WlmStatsRequest; +import org.opensearch.action.admin.cluster.wlm.WlmStatsResponse; import org.opensearch.action.admin.indices.dangling.delete.DeleteDanglingIndexRequest; import org.opensearch.action.admin.indices.dangling.import_index.ImportDanglingIndexRequest; import org.opensearch.action.admin.indices.dangling.list.ListDanglingIndicesRequest; @@ -320,6 +322,13 @@ public interface ClusterAdminClient extends OpenSearchClient { */ NodesStatsRequestBuilder prepareNodesStats(String... nodesIds); + /** + * QueryGroup stats of the cluster. + * @param request The wlmStatsRequest + * @param listener A listener to be notified with a result + */ + void wlmStats(WlmStatsRequest request, ActionListener listener); + void remoteStoreStats(RemoteStoreStatsRequest request, ActionListener listener); RemoteStoreStatsRequestBuilder prepareRemoteStoreStats(String index, String shardId); diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 509cd732357d6..f4683ab516cef 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -179,6 +179,9 @@ import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; import org.opensearch.action.admin.cluster.tasks.PendingClusterTasksResponse; +import org.opensearch.action.admin.cluster.wlm.WlmStatsAction; +import org.opensearch.action.admin.cluster.wlm.WlmStatsRequest; +import org.opensearch.action.admin.cluster.wlm.WlmStatsResponse; import org.opensearch.action.admin.indices.alias.IndicesAliasesAction; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; @@ -918,6 +921,11 @@ public NodesStatsRequestBuilder prepareNodesStats(String... 
nodesIds) { return new NodesStatsRequestBuilder(this, NodesStatsAction.INSTANCE).setNodesIds(nodesIds); } + @Override + public void wlmStats(final WlmStatsRequest request, final ActionListener<WlmStatsResponse> listener) { + execute(WlmStatsAction.INSTANCE, request, listener); + } + @Override public void remoteStoreStats(final RemoteStoreStatsRequest request, final ActionListener<RemoteStoreStatsResponse> listener) { execute(RemoteStoreStatsAction.INSTANCE, request, listener); diff --git a/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java index e5b07f867d609..9cffc7051d756 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java @@ -108,7 +108,7 @@ public CoordinationState( // ToDo: revisit this check while making the setting dynamic this.isRemotePublicationEnabled = isRemoteStateEnabled && REMOTE_PUBLICATION_SETTING.get(settings) - && localNode.isRemoteStatePublicationConfigured(); + && localNode.isRemoteStatePublicationEnabled(); } public boolean isRemotePublicationEnabled() { diff --git a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java index aca8653be4dd8..13033b670d44b 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java @@ -513,10 +513,10 @@ private static void ensureRemoteClusterStateNodesCompatibility(DiscoveryNode joi assert existingNodes.isEmpty() == false; Optional<DiscoveryNode> remotePublicationNode = existingNodes.stream() - .filter(DiscoveryNode::isRemoteStatePublicationConfigured) + .filter(DiscoveryNode::isRemoteStatePublicationEnabled) .findFirst(); - if (remotePublicationNode.isPresent() && joiningNode.isRemoteStatePublicationConfigured()) { + if (remotePublicationNode.isPresent() && joiningNode.isRemoteStatePublicationEnabled()) { ensureRepositoryCompatibility(joiningNode, remotePublicationNode.get(), REMOTE_CLUSTER_PUBLICATION_REPO_NAME_ATTRIBUTES); } } diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java index 6277b70f9a7c8..42aa55433dd5f 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java @@ -370,7 +370,7 @@ private boolean validateRemotePublicationConfiguredOnAllNodes(DiscoveryNodes dis assert ClusterMetadataManifest.getCodecForVersion(discoveryNodes.getMinNodeVersion()) >= ClusterMetadataManifest.CODEC_V0; for (DiscoveryNode node : discoveryNodes.getNodes().values()) { // if a node is non-remote then create a local publication context - if (node.isRemoteStatePublicationConfigured() == false) { + if (node.isRemoteStatePublicationEnabled() == false) { return false; } } diff --git a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java index b06c2ef0bdbe4..8c9a37a767ede 100644 --- a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java +++ b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java @@ -518,7 +518,7 @@ public boolean isRemoteStoreNode() { * Returns whether
settings required for remote cluster state publication are configured * @return true if the node contains remote cluster state node attribute and remote routing table node attribute */ - public boolean isRemoteStatePublicationConfigured() { + public boolean isRemoteStatePublicationEnabled() { return this.getAttributes() .keySet() .stream() diff --git a/server/src/main/java/org/opensearch/common/cache/Cache.java b/server/src/main/java/org/opensearch/common/cache/Cache.java index da683bfff088d..e01a1223955ed 100644 --- a/server/src/main/java/org/opensearch/common/cache/Cache.java +++ b/server/src/main/java/org/opensearch/common/cache/Cache.java @@ -116,13 +116,13 @@ public class Cache<K, V> { private RemovalListener<K, V> removalListener = notification -> {}; private final int numberOfSegments; - private static final int NUMBER_OF_SEGMENTS_DEFAULT = 256; + public static final int NUMBER_OF_SEGMENTS = 256; Cache(final int numberOfSegments) { if (numberOfSegments != -1) { this.numberOfSegments = numberOfSegments; } else { - this.numberOfSegments = NUMBER_OF_SEGMENTS_DEFAULT; + this.numberOfSegments = NUMBER_OF_SEGMENTS; } this.segments = new CacheSegment[this.numberOfSegments]; for (int i = 0; i < this.numberOfSegments; i++) { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java index cc0468efb243e..5563511aa8d45 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -90,8 +90,6 @@ public void setMaxExpansions(int maxExpansions) { /** * Gets the phrase slop for this query. - * - * @see org.apache.lucene.search.PhraseQuery.Builder#getSlop() */ public int getSlop() { return slop; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java index f5003417c1298..674773bb86354 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java @@ -13,7 +13,6 @@ import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.index.codec.composite.composite912.Composite912Codec; -import org.opensearch.index.codec.composite.composite99.Composite99Codec; import org.opensearch.index.mapper.MapperService; import java.util.HashMap; @@ -33,7 +32,7 @@ public class CompositeCodecFactory { // we can use this to track the latest composite codec - public static final String COMPOSITE_CODEC = Composite99Codec.COMPOSITE_INDEX_CODEC_NAME; + public static final String COMPOSITE_CODEC = Composite912Codec.COMPOSITE_INDEX_CODEC_NAME; public CompositeCodecFactory() {} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java index d85205d239648..80555597fd48a 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesProducerWrapper; import
org.apache.lucene.index.SegmentReadState; -import org.opensearch.index.codec.composite.composite99.Composite99Codec; +import org.opensearch.index.codec.composite.composite912.Composite912Codec; import java.io.IOException; @@ -39,7 +39,7 @@ public static DocValuesProducer getDocValuesProducerForCompositeCodec( ) throws IOException { switch (compositeCodec) { - case Composite99Codec.COMPOSITE_INDEX_CODEC_NAME: + case Composite912Codec.COMPOSITE_INDEX_CODEC_NAME: Lucene90DocValuesProducerWrapper lucene90DocValuesProducerWrapper = new Lucene90DocValuesProducerWrapper( state, dataCodec, diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java index 9bbff7d344c2f..6f7a3c6852cf6 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java @@ -15,7 +15,6 @@ import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.index.codec.PerFieldMappingPostingFormatCodec; -import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat; import org.opensearch.index.mapper.MapperService; /** @@ -53,6 +52,6 @@ protected Composite912Codec(String name, Codec delegate, MapperService mapperSer @Override public DocValuesFormat docValuesFormat() { - return new Composite99DocValuesFormat(mapperService); + return new Composite912DocValuesFormat(mapperService); } } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesFormat.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesFormat.java similarity index 79% rename from server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesFormat.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesFormat.java index 63fe5a6af76f7..3a8e748e5a9e6 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesFormat.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesFormat.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.index.codec.composite.composite99; +package org.opensearch.index.codec.composite.composite912; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesFormat; @@ -25,7 +25,7 @@ * @opensearch.experimental */ @ExperimentalApi -public class Composite99DocValuesFormat extends DocValuesFormat { +public class Composite912DocValuesFormat extends DocValuesFormat { /** * Creates a new docvalues format. 
* @@ -38,10 +38,10 @@ public class Composite99DocValuesFormat extends DocValuesFormat { private final MapperService mapperService; /** Data codec name for Composite Doc Values Format */ - public static final String DATA_CODEC_NAME = "Composite99FormatData"; + public static final String DATA_CODEC_NAME = "Composite912FormatData"; /** Meta codec name for Composite Doc Values Format */ - public static final String META_CODEC_NAME = "Composite99FormatMeta"; + public static final String META_CODEC_NAME = "Composite912FormatMeta"; /** Filename extension for the composite index data */ public static final String DATA_EXTENSION = "cid"; @@ -50,10 +50,10 @@ public class Composite99DocValuesFormat extends DocValuesFormat { public static final String META_EXTENSION = "cim"; /** Data doc values codec name for Composite Doc Values Format */ - public static final String DATA_DOC_VALUES_CODEC = "Composite99DocValuesData"; + public static final String DATA_DOC_VALUES_CODEC = "Composite912DocValuesData"; /** Meta doc values codec name for Composite Doc Values Format */ - public static final String META_DOC_VALUES_CODEC = "Composite99DocValuesMetadata"; + public static final String META_DOC_VALUES_CODEC = "Composite912DocValuesMetadata"; /** Filename extension for the composite index data doc values */ public static final String DATA_DOC_VALUES_EXTENSION = "cidvd"; @@ -68,15 +68,15 @@ public class Composite99DocValuesFormat extends DocValuesFormat { public static final int VERSION_CURRENT = VERSION_START; // needed for SPI - public Composite99DocValuesFormat() { + public Composite912DocValuesFormat() { this(new Lucene90DocValuesFormat(), null); } - public Composite99DocValuesFormat(MapperService mapperService) { + public Composite912DocValuesFormat(MapperService mapperService) { this(new Lucene90DocValuesFormat(), mapperService); } - public Composite99DocValuesFormat(DocValuesFormat delegate, MapperService mapperService) { + public Composite912DocValuesFormat(DocValuesFormat delegate, MapperService mapperService) { super(delegate.getName()); this.delegate = delegate; this.mapperService = mapperService; @@ -84,11 +84,11 @@ public Composite99DocValuesFormat(DocValuesFormat delegate, MapperService mapper @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - return new Composite99DocValuesWriter(delegate.fieldsConsumer(state), state, mapperService); + return new Composite912DocValuesWriter(delegate.fieldsConsumer(state), state, mapperService); } @Override public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { - return new Composite99DocValuesReader(delegate.fieldsProducer(state), state); + return new Composite912DocValuesReader(delegate.fieldsProducer(state), state); } } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesReader.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java similarity index 91% rename from server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesReader.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java index 7901336151c8e..bb8a07d856d87 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesReader.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java @@ -6,7 +6,7 @@ * compatible open source license. 
*/ -package org.opensearch.index.codec.composite.composite99; +package org.opensearch.index.codec.composite.composite912; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -56,8 +56,8 @@ * @opensearch.experimental */ @ExperimentalApi -public class Composite99DocValuesReader extends DocValuesProducer implements CompositeIndexReader { - private static final Logger logger = LogManager.getLogger(Composite99DocValuesReader.class); +public class Composite912DocValuesReader extends DocValuesProducer implements CompositeIndexReader { + private static final Logger logger = LogManager.getLogger(Composite912DocValuesReader.class); private final DocValuesProducer delegate; private IndexInput dataIn; @@ -69,20 +69,20 @@ public class Composite99DocValuesReader extends DocValuesProducer implements Com private final List compositeFieldInfos = new ArrayList<>(); private SegmentReadState readState; - public Composite99DocValuesReader(DocValuesProducer producer, SegmentReadState readState) throws IOException { + public Composite912DocValuesReader(DocValuesProducer producer, SegmentReadState readState) throws IOException { this.delegate = producer; this.fields = new ArrayList<>(); String metaFileName = IndexFileNames.segmentFileName( readState.segmentInfo.name, readState.segmentSuffix, - Composite99DocValuesFormat.META_EXTENSION + Composite912DocValuesFormat.META_EXTENSION ); String dataFileName = IndexFileNames.segmentFileName( readState.segmentInfo.name, readState.segmentSuffix, - Composite99DocValuesFormat.DATA_EXTENSION + Composite912DocValuesFormat.DATA_EXTENSION ); boolean success = false; @@ -92,9 +92,9 @@ public Composite99DocValuesReader(DocValuesProducer producer, SegmentReadState r dataIn = readState.directory.openInput(dataFileName, readState.context); CodecUtil.checkIndexHeader( dataIn, - Composite99DocValuesFormat.DATA_CODEC_NAME, - Composite99DocValuesFormat.VERSION_START, - Composite99DocValuesFormat.VERSION_CURRENT, + Composite912DocValuesFormat.DATA_CODEC_NAME, + Composite912DocValuesFormat.VERSION_START, + Composite912DocValuesFormat.VERSION_CURRENT, readState.segmentInfo.getId(), readState.segmentSuffix ); @@ -105,9 +105,9 @@ public Composite99DocValuesReader(DocValuesProducer producer, SegmentReadState r try { CodecUtil.checkIndexHeader( metaIn, - Composite99DocValuesFormat.META_CODEC_NAME, - Composite99DocValuesFormat.VERSION_START, - Composite99DocValuesFormat.VERSION_CURRENT, + Composite912DocValuesFormat.META_CODEC_NAME, + Composite912DocValuesFormat.VERSION_START, + Composite912DocValuesFormat.VERSION_CURRENT, readState.segmentInfo.getId(), readState.segmentSuffix ); @@ -190,12 +190,12 @@ public Composite99DocValuesReader(DocValuesProducer producer, SegmentReadState r // initialize star-tree doc values producer compositeDocValuesProducer = LuceneDocValuesProducerFactory.getDocValuesProducerForCompositeCodec( - Composite99Codec.COMPOSITE_INDEX_CODEC_NAME, + Composite912Codec.COMPOSITE_INDEX_CODEC_NAME, this.readState, - Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC, - Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION, - Composite99DocValuesFormat.META_DOC_VALUES_CODEC, - Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION + Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC, + Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION, + Composite912DocValuesFormat.META_DOC_VALUES_CODEC, + Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION ); } catch (Throwable t) { diff --git 
a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesWriter.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java similarity index 91% rename from server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesWriter.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java index 0d4e35f7c3ab8..2225870afae8e 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesWriter.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.index.codec.composite.composite99; +package org.opensearch.index.codec.composite.composite912; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.DocValuesConsumer; @@ -55,14 +55,14 @@ * @opensearch.experimental */ @ExperimentalApi -public class Composite99DocValuesWriter extends DocValuesConsumer { +public class Composite912DocValuesWriter extends DocValuesConsumer { private final DocValuesConsumer delegate; private final SegmentWriteState state; private final MapperService mapperService; AtomicReference mergeState = new AtomicReference<>(); private final Set compositeMappedFieldTypes; private final Set compositeFieldSet; - private DocValuesConsumer composite99DocValuesConsumer; + private DocValuesConsumer compositeDocValuesConsumer; public IndexOutput dataOut; public IndexOutput metaOut; @@ -72,7 +72,7 @@ public class Composite99DocValuesWriter extends DocValuesConsumer { private final Map fieldProducerMap = new HashMap<>(); - public Composite99DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState segmentWriteState, MapperService mapperService) + public Composite912DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState segmentWriteState, MapperService mapperService) throws IOException { this.delegate = delegate; @@ -101,24 +101,24 @@ public Composite99DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState // so that all the fields are sparse numeric doc values and not dense numeric doc values SegmentWriteState consumerWriteState = getSegmentWriteState(segmentWriteState); - this.composite99DocValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( + this.compositeDocValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( consumerWriteState, - Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC, - Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION, - Composite99DocValuesFormat.META_DOC_VALUES_CODEC, - Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION + Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC, + Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION, + Composite912DocValuesFormat.META_DOC_VALUES_CODEC, + Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION ); String dataFileName = IndexFileNames.segmentFileName( this.state.segmentInfo.name, this.state.segmentSuffix, - Composite99DocValuesFormat.DATA_EXTENSION + Composite912DocValuesFormat.DATA_EXTENSION ); dataOut = this.state.directory.createOutput(dataFileName, this.state.context); CodecUtil.writeIndexHeader( dataOut, - Composite99DocValuesFormat.DATA_CODEC_NAME, - Composite99DocValuesFormat.VERSION_CURRENT, + Composite912DocValuesFormat.DATA_CODEC_NAME, + Composite912DocValuesFormat.VERSION_CURRENT, this.state.segmentInfo.getId(), 
this.state.segmentSuffix ); @@ -126,13 +126,13 @@ public Composite99DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState String metaFileName = IndexFileNames.segmentFileName( this.state.segmentInfo.name, this.state.segmentSuffix, - Composite99DocValuesFormat.META_EXTENSION + Composite912DocValuesFormat.META_EXTENSION ); metaOut = this.state.directory.createOutput(metaFileName, this.state.context); CodecUtil.writeIndexHeader( metaOut, - Composite99DocValuesFormat.META_CODEC_NAME, - Composite99DocValuesFormat.VERSION_CURRENT, + Composite912DocValuesFormat.META_CODEC_NAME, + Composite912DocValuesFormat.VERSION_CURRENT, this.state.segmentInfo.getId(), this.state.segmentSuffix ); @@ -197,12 +197,12 @@ public void close() throws IOException { success = true; } finally { if (success) { - IOUtils.close(dataOut, metaOut, composite99DocValuesConsumer); + IOUtils.close(dataOut, metaOut, compositeDocValuesConsumer); } else { - IOUtils.closeWhileHandlingException(dataOut, metaOut, composite99DocValuesConsumer); + IOUtils.closeWhileHandlingException(dataOut, metaOut, compositeDocValuesConsumer); } metaOut = dataOut = null; - composite99DocValuesConsumer = null; + compositeDocValuesConsumer = null; } } @@ -224,7 +224,7 @@ private void createCompositeIndicesIfPossible(DocValuesProducer valuesProducer, for (CompositeMappedFieldType mappedType : compositeMappedFieldTypes) { if (mappedType instanceof StarTreeMapper.StarTreeFieldType) { try (StarTreesBuilder starTreesBuilder = new StarTreesBuilder(state, mapperService, fieldNumberAcrossCompositeFields)) { - starTreesBuilder.build(metaOut, dataOut, fieldProducerMap, composite99DocValuesConsumer); + starTreesBuilder.build(metaOut, dataOut, fieldProducerMap, compositeDocValuesConsumer); } } } @@ -313,7 +313,7 @@ private void mergeStarTreeFields(MergeState mergeState) throws IOException { } } try (StarTreesBuilder starTreesBuilder = new StarTreesBuilder(state, mapperService, fieldNumberAcrossCompositeFields)) { - starTreesBuilder.buildDuringMerge(metaOut, dataOut, starTreeSubsPerField, composite99DocValuesConsumer); + starTreesBuilder.buildDuringMerge(metaOut, dataOut, starTreeSubsPerField, compositeDocValuesConsumer); } } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99Codec.java deleted file mode 100644 index 20f1c7e9b7614..0000000000000 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99Codec.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.index.codec.composite.composite99; - -import org.apache.lucene.backward_codecs.lucene99.Lucene99Codec; -import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DocValuesFormat; -import org.apache.lucene.codecs.FilterCodec; -import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.index.mapper.MapperService; - -/** - * Extends the Codec to support new file formats for composite indices eg: star tree index - * based on the mappings. 
- * - * @opensearch.experimental - */ -@ExperimentalApi -public class Composite99Codec extends FilterCodec { - public static final String COMPOSITE_INDEX_CODEC_NAME = "Composite99Codec"; - private final MapperService mapperService; - - // needed for SPI - this is used in reader path - public Composite99Codec() { - this(COMPOSITE_INDEX_CODEC_NAME, new Lucene99Codec(), null); - } - - /** - * Sole constructor. When subclassing this codec, create a no-arg ctor and pass the delegate codec and a unique name to - * this ctor. - * - * @param name name of the codec - * @param delegate codec delegate - * @param mapperService mapper service instance - */ - protected Composite99Codec(String name, Codec delegate, MapperService mapperService) { - super(name, delegate); - this.mapperService = mapperService; - } - - @Override - public DocValuesFormat docValuesFormat() { - return new Composite99DocValuesFormat(mapperService); - } -} diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java index 255ad343cde32..003ebeafeae45 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java @@ -35,7 +35,7 @@ import java.util.Set; import java.util.function.Supplier; -import static org.opensearch.index.codec.composite.composite99.Composite99DocValuesReader.getSortedNumericDocValues; +import static org.opensearch.index.codec.composite.composite912.Composite912DocValuesReader.getSortedNumericDocValues; import static org.opensearch.index.compositeindex.CompositeIndexConstants.SEGMENT_DOCS_COUNT; import static org.opensearch.index.compositeindex.CompositeIndexConstants.STAR_TREE_DOCS_COUNT; import static org.opensearch.index.compositeindex.datacube.startree.utils.StarTreeUtils.fullyQualifiedFieldNameForStarTreeDimensionsDocValues; diff --git a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java index 8a5a4a5a94ce6..653907862b18c 100644 --- a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java +++ b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java @@ -41,7 +41,7 @@ import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.ReplicationStats; -import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat; +import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat; import org.opensearch.index.remote.RemoteSegmentStats; @@ -98,10 +98,10 @@ public class SegmentsStats implements Writeable, ToXContentFragment { Map.entry("tvd", "Term Vector Documents"), Map.entry("tvf", "Term Vector Fields"), Map.entry("liv", "Live Documents"), - Map.entry(Composite99DocValuesFormat.DATA_EXTENSION, "Composite Index"), - Map.entry(Composite99DocValuesFormat.META_EXTENSION, "Composite Index"), - Map.entry(Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION, "Composite Index DocValues"), - Map.entry(Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION, "Composite Index DocValues"), + Map.entry(Composite912DocValuesFormat.DATA_EXTENSION, "Composite Index"), + Map.entry(Composite912DocValuesFormat.META_EXTENSION, "Composite 
Index"), + Map.entry(Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION, "Composite Index DocValues"), + Map.entry(Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION, "Composite Index DocValues"), Map.entry(FuzzyFilterPostingsFormat.FUZZY_FILTER_FILE_EXTENSION, "Fuzzy Filter") ); diff --git a/server/src/main/java/org/opensearch/index/recovery/RemoteStoreRestoreService.java b/server/src/main/java/org/opensearch/index/recovery/RemoteStoreRestoreService.java index 03d841d13b7f7..73a79a54ca588 100644 --- a/server/src/main/java/org/opensearch/index/recovery/RemoteStoreRestoreService.java +++ b/server/src/main/java/org/opensearch/index/recovery/RemoteStoreRestoreService.java @@ -27,6 +27,7 @@ import org.opensearch.cluster.routing.allocation.AllocationService; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; +import org.opensearch.common.Priority; import org.opensearch.common.UUIDs; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; @@ -95,7 +96,7 @@ public RemoteStoreRestoreService( * @param listener restore listener */ public void restore(RestoreRemoteStoreRequest request, final ActionListener listener) { - clusterService.submitStateUpdateTask("restore[remote_store]", new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("restore[remote_store]", new ClusterStateUpdateTask(Priority.URGENT) { String restoreUUID; RestoreInfo restoreInfo = null; diff --git a/server/src/main/java/org/opensearch/index/search/MatchQuery.java b/server/src/main/java/org/opensearch/index/search/MatchQuery.java index ec6755ea25703..86ea799ab311d 100644 --- a/server/src/main/java/org/opensearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MatchQuery.java @@ -227,9 +227,6 @@ public void setOccur(BooleanClause.Occur occur) { this.occur = occur; } - /** - * @deprecated See {@link MatchQueryBuilder#setCommonTermsCutoff(Float)} for more details - */ @Deprecated public void setCommonTermsCutoff(Float cutoff) { this.commonTermsCutoff = cutoff; diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSource.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSource.java index 24f0cb15ddb25..5341f9507bef4 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSource.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSource.java @@ -32,7 +32,7 @@ public interface SegmentReplicationSource { /** * Get Metadata for a ReplicationCheckpoint. * - * @param replicationId {@link long} - ID of the replication event. + * @param replicationId long - ID of the replication event. * @param checkpoint {@link ReplicationCheckpoint} Checkpoint to fetch metadata for. * @param listener {@link ActionListener} listener that completes with a {@link CheckpointInfoResponse}. */ @@ -41,7 +41,7 @@ public interface SegmentReplicationSource { /** * Fetch the requested segment files. Passes a listener that completes when files are stored locally. * - * @param replicationId {@link long} - ID of the replication event. + * @param replicationId long - ID of the replication event. * @param checkpoint {@link ReplicationCheckpoint} Checkpoint to fetch metadata for. * @param filesToFetch {@link List} List of files to fetch. * @param indexShard {@link IndexShard} Reference to the IndexShard. 
diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 5c965b06a4b69..584d95b9ff6b5 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -354,12 +354,12 @@ public class Node implements Closeable { ); /** - * controls whether the node is allowed to persist things like metadata to disk - * Note that this does not control whether the node stores actual indices (see - * {@link #NODE_DATA_SETTING}). However, if this is false, {@link #NODE_DATA_SETTING} - * and {@link #NODE_MASTER_SETTING} must also be false. - * - */ + * controls whether the node is allowed to persist things like metadata to disk + * Note that this does not control whether the node stores actual indices (see + * {@link #NODE_DATA_SETTING}). However, if this is false, {@link #NODE_DATA_SETTING} + * and {@link #NODE_MASTER_SETTING} must also be false. + * + */ public static final Setting NODE_LOCAL_STORAGE_SETTING = Setting.boolSetting( "node.local_storage", true, diff --git a/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java b/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java index a3382d8568ec5..98fcad0e6c496 100644 --- a/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java +++ b/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java @@ -130,16 +130,16 @@ public static Map> fetchPinnedTimestamps(Settings settings, Re * @throws IllegalArgumentException If the timestamp is less than the current time minus one second */ public void pinTimestamp(long timestamp, String pinningEntity, ActionListener listener) { - // If a caller uses current system time to pin the timestamp, following check will almost always fail. - // So, we allow pinning timestamp in the past upto some buffer - long lookbackIntervalInMills = RemoteStoreSettings.getPinnedTimestampsLookbackInterval().millis(); - if (timestamp < (System.currentTimeMillis() - lookbackIntervalInMills)) { - throw new IllegalArgumentException( - "Timestamp to be pinned is less than current timestamp - value of cluster.remote_store.pinned_timestamps.lookback_interval" - ); - } - long startTime = System.nanoTime(); try { + // If a caller uses current system time to pin the timestamp, following check will almost always fail. 
+ // So, we allow pinning timestamp in the past upto some buffer + long lookbackIntervalInMills = RemoteStoreSettings.getPinnedTimestampsLookbackInterval().millis(); + if (timestamp < (System.currentTimeMillis() - lookbackIntervalInMills)) { + throw new IllegalArgumentException( + "Timestamp to be pinned is less than current timestamp - value of cluster.remote_store.pinned_timestamps.lookback_interval" + ); + } + long startTime = System.nanoTime(); logger.debug("Pinning timestamp = {} against entity = {}", timestamp, pinningEntity); blobContainer.writeBlob(getBlobName(timestamp, pinningEntity), new ByteArrayInputStream(new byte[0]), 0, true); long elapsedTime = System.nanoTime() - startTime; @@ -155,7 +155,7 @@ public void pinTimestamp(long timestamp, String pinningEntity, ActionListener findMatchingShardPaths(String indexId, Map snapshotShardPaths) { - return snapshotShardPaths.keySet().stream().filter(s -> s.startsWith(indexId)).collect(Collectors.toList()); + return snapshotShardPaths.keySet() + .stream() + .filter(s -> (s.startsWith(indexId) || s.startsWith(SnapshotShardPaths.FILE_PREFIX + indexId))) + .collect(Collectors.toList()); } /** @@ -2546,11 +2550,11 @@ public void finalizeSnapshot( */ private void cleanupRedundantSnapshotShardPaths(Set updatedShardPathsIndexIds) { Set updatedIndexIds = updatedShardPathsIndexIds.stream() - .map(s -> s.split("\\" + SnapshotShardPaths.DELIMITER)[0]) + .map(s -> getIndexId(s.split("\\" + SnapshotShardPaths.DELIMITER)[0])) .collect(Collectors.toSet()); Set indexIdShardPaths = getSnapshotShardPaths().keySet(); List staleShardPaths = indexIdShardPaths.stream().filter(s -> updatedShardPathsIndexIds.contains(s) == false).filter(s -> { - String indexId = s.split("\\" + SnapshotShardPaths.DELIMITER)[0]; + String indexId = getIndexId(s.split("\\" + SnapshotShardPaths.DELIMITER)[0]); return updatedIndexIds.contains(indexId); }).collect(Collectors.toList()); try { @@ -2595,7 +2599,7 @@ String writeIndexShardPaths(IndexId indexId, SnapshotId snapshotId, int shardCou List paths = getShardPaths(indexId, shardCount); int pathType = indexId.getShardPathType(); int pathHashAlgorithm = FNV_1A_COMPOSITE_1.getCode(); - String blobName = String.join( + String name = String.join( SnapshotShardPaths.DELIMITER, indexId.getId(), indexId.getName(), @@ -2611,9 +2615,9 @@ String writeIndexShardPaths(IndexId indexId, SnapshotId snapshotId, int shardCou PathType.fromCode(pathType), PathHashAlgorithm.fromCode(pathHashAlgorithm) ); - SNAPSHOT_SHARD_PATHS_FORMAT.write(shardPaths, snapshotShardPathBlobContainer(), blobName); + SNAPSHOT_SHARD_PATHS_FORMAT.write(shardPaths, snapshotShardPathBlobContainer(), name); logShardPathsOperationSuccess(indexId, snapshotId); - return blobName; + return SnapshotShardPaths.FILE_PREFIX + name; } catch (IOException e) { logShardPathsOperationWarning(indexId, snapshotId, e); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestWlmStatsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestWlmStatsAction.java new file mode 100644 index 0000000000000..51bd313e74df0 --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestWlmStatsAction.java @@ -0,0 +1,58 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.rest.action.admin.cluster; + +import org.opensearch.action.admin.cluster.wlm.WlmStatsRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.common.Strings; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestActions; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static java.util.Arrays.asList; +import static java.util.Collections.unmodifiableList; +import static org.opensearch.rest.RestRequest.Method.GET; + +/** + * Rest action to get Workload Management stats + * + * @opensearch.experimental + */ +public class RestWlmStatsAction extends BaseRestHandler { + + @Override + public List<Route> routes() { + return unmodifiableList( + asList( + new Route(GET, "_wlm/stats"), + new Route(GET, "_wlm/{nodeId}/stats"), + new Route(GET, "_wlm/stats/{queryGroupId}"), + new Route(GET, "_wlm/{nodeId}/stats/{queryGroupId}") + ) + ); + } + + @Override + public String getName() { + return "wlm_stats_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); + Set<String> queryGroupIds = Strings.tokenizeByCommaToSet(request.param("queryGroupId", "_all")); + Boolean breach = request.hasParam("breach") ? Boolean.parseBoolean(request.param("breach")) : null; + WlmStatsRequest wlmStatsRequest = new WlmStatsRequest(nodesIds, queryGroupIds, breach); + return channel -> client.admin().cluster().wlmStats(wlmStatsRequest, new RestActions.NodesResponseRestListener<>(channel)); + } +} diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestBulkStreamingAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestBulkStreamingAction.java index 2e0d1b8ead814..42b4ed3ef3b84 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestBulkStreamingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestBulkStreamingAction.java @@ -8,7 +8,6 @@ package org.opensearch.rest.action.document; -import com.google.protobuf.ExperimentalApi; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.bulk.BulkItemResponse; @@ -18,6 +17,7 @@ import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.Requests; import org.opensearch.client.node.NodeClient; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -59,7 +59,7 @@ * { "type1" : { "field1" : "value1" } } * * - * @opensearch.api + * @opensearch.experimental */ @ExperimentalApi public class RestBulkStreamingAction extends BaseRestHandler { diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotShardPaths.java b/server/src/main/java/org/opensearch/snapshots/SnapshotShardPaths.java index 88af14e2232f9..878c2baba4ce9 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotShardPaths.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotShardPaths.java @@ -29,7 +29,8 @@ public class SnapshotShardPaths implements ToXContent { public static final String DELIMITER = "."; - public static final String FILE_NAME_FORMAT = "%s"; + public static final String FILE_PREFIX = "snapshot_path_"; + public static final String
FILE_NAME_FORMAT = FILE_PREFIX + "%s"; private static final String PATHS_FIELD = "paths"; private static final String INDEX_ID_FIELD = "indexId"; @@ -101,7 +102,7 @@ public static ShardInfo parseShardPath(String shardPath) { throw new IllegalArgumentException("Invalid shard path format: " + shardPath); } try { - IndexId indexId = new IndexId(parts[1], parts[0], Integer.parseInt(parts[3])); + IndexId indexId = new IndexId(parts[1], getIndexId(parts[0]), Integer.parseInt(parts[3])); int shardCount = Integer.parseInt(parts[2]); return new ShardInfo(indexId, shardCount); } catch (NumberFormatException e) { @@ -109,6 +110,13 @@ } } + + public static String getIndexId(String indexIdField) { + if (indexIdField.startsWith(FILE_PREFIX)) { + return indexIdField.substring(FILE_PREFIX.length()); + } + return indexIdField; + } + /** * Represents parsed information from a shard path. * This class encapsulates the index ID and shard count extracted from a shard path string. diff --git a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index d795fd252b7fc..e0b15e54f6e2e 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -293,12 +293,7 @@ public ThreadPool( ); builders.put( Names.REMOTE_STATE_READ, - new ScalingExecutorBuilder( - Names.REMOTE_STATE_READ, - 1, - twiceAllocatedProcessors(allocatedProcessors), - TimeValue.timeValueMinutes(5) - ) + new ScalingExecutorBuilder(Names.REMOTE_STATE_READ, 1, boundedBy(4 * allocatedProcessors, 4, 32), TimeValue.timeValueMinutes(5)) ); builders.put( Names.INDEX_SEARCHER, diff --git a/server/src/main/java/org/opensearch/wlm/QueryGroupService.java b/server/src/main/java/org/opensearch/wlm/QueryGroupService.java index cda5916db26f3..cb0be5597766a 100644 --- a/server/src/main/java/org/opensearch/wlm/QueryGroupService.java +++ b/server/src/main/java/org/opensearch/wlm/QueryGroupService.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.search.SearchShardTask; +import org.opensearch.ResourceNotFoundException; import org.opensearch.cluster.ClusterChangedEvent; import org.opensearch.cluster.ClusterStateListener; import org.opensearch.cluster.metadata.Metadata; @@ -42,6 +42,7 @@ /** * As of now this is a stub and main implementation PR will be raised soon. Coming PR will collate these changes with core QueryGroupService changes + * @opensearch.experimental */ public class QueryGroupService extends AbstractLifecycleComponent implements @@ -49,7 +50,6 @@ public class QueryGroupService extends AbstractLifecycleComponent TaskResourceTrackingService.TaskCompletionListener { private static final Logger logger = LogManager.getLogger(QueryGroupService.class); - private final QueryGroupTaskCancellationService taskCancellationService; private volatile Scheduler.Cancellable scheduledFuture; private final ThreadPool threadPool; @@ -205,16 +205,48 @@ public void incrementFailuresFor(final String queryGroupId) { /** * @return node level query group stats */ - public QueryGroupStats nodeStats() { + public QueryGroupStats nodeStats(Set<String> queryGroupIds, Boolean requestedBreached) { final Map<String, QueryGroupStatsHolder> statsHolderMap = new HashMap<>(); - for (Map.Entry<String, QueryGroupState> queryGroupsState : queryGroupsStateAccessor.getQueryGroupStateMap().entrySet()) { - final String queryGroupId = queryGroupsState.getKey(); - 
final QueryGroupState currentState = queryGroupsState.getValue(); + Map<String, QueryGroupState> existingStateMap = queryGroupsStateAccessor.getQueryGroupStateMap(); + if (!queryGroupIds.contains("_all")) { + for (String id : queryGroupIds) { + if (!existingStateMap.containsKey(id)) { + throw new ResourceNotFoundException("QueryGroup with id " + id + " does not exist"); + } + } + } + if (existingStateMap != null) { + existingStateMap.forEach((queryGroupId, currentState) -> { + boolean shouldInclude = queryGroupIds.contains("_all") || queryGroupIds.contains(queryGroupId); + if (shouldInclude) { + if (requestedBreached == null || requestedBreached == resourceLimitBreached(queryGroupId, currentState)) { + statsHolderMap.put(queryGroupId, QueryGroupStatsHolder.from(currentState)); + } + } + }); + } + return new QueryGroupStats(statsHolderMap); + } - statsHolderMap.put(queryGroupId, QueryGroupStatsHolder.from(currentState)); + /** + * @return true if the QueryGroup breaches any resource limit, based on the last recorded usage + */ + public boolean resourceLimitBreached(String id, QueryGroupState currentState) { + QueryGroup queryGroup = clusterService.state().metadata().queryGroups().get(id); + if (queryGroup == null) { + throw new ResourceNotFoundException("QueryGroup with id " + id + " does not exist"); } - return new QueryGroupStats(statsHolderMap); + for (ResourceType resourceType : TRACKED_RESOURCES) { + if (queryGroup.getResourceLimits().containsKey(resourceType)) { + final double threshold = getNormalisedRejectionThreshold(queryGroup.getResourceLimits().get(resourceType), resourceType); + final double lastRecordedUsage = currentState.getResourceState().get(resourceType).getLastRecordedUsage(); + if (threshold < lastRecordedUsage) { + return true; + } + } + } + return false; } /** @@ -321,10 +353,6 @@ public void onTaskCompleted(Task task) { queryGroupId = QueryGroupTask.DEFAULT_QUERY_GROUP_ID_SUPPLIER.get(); } - if (task instanceof SearchShardTask) { - queryGroupsStateAccessor.getQueryGroupState(queryGroupId).shardCompletions.inc(); - } else { - queryGroupsStateAccessor.getQueryGroupState(queryGroupId).completions.inc(); - } + queryGroupsStateAccessor.getQueryGroupState(queryGroupId).totalCompletions.inc(); } } diff --git a/server/src/main/java/org/opensearch/wlm/stats/QueryGroupState.java b/server/src/main/java/org/opensearch/wlm/stats/QueryGroupState.java index cbc7046a79464..a082ed159e829 100644 --- a/server/src/main/java/org/opensearch/wlm/stats/QueryGroupState.java +++ b/server/src/main/java/org/opensearch/wlm/stats/QueryGroupState.java @@ -21,12 +21,7 @@ public class QueryGroupState { /** * total completions (co-ordinator and shard level) at the query group level, this is a cumulative counter since the OpenSearch start time */ - public final CounterMetric completions = new CounterMetric(); - - /** - * shard level completions at the query group level, this is a cumulative counter since the Opensearch start time - */ - public final CounterMetric shardCompletions = new CounterMetric(); + public final CounterMetric totalCompletions = new CounterMetric(); /** * rejections at the query group level, this is a cumulative counter since the OpenSearch start time @@ -61,16 +56,8 @@ public QueryGroupState() { * * @return total completions in the query group */ - public long getCompletions() { - return completions.count(); - } - - /** - * - * @return shard completions in the query group - */ - public long getShardCompletions() { - return shardCompletions.count(); + public long getTotalCompletions() { + return totalCompletions.count(); }
/** diff --git a/server/src/main/java/org/opensearch/wlm/stats/QueryGroupStats.java b/server/src/main/java/org/opensearch/wlm/stats/QueryGroupStats.java index 9d74201de252b..42ce3ac0019db 100644 --- a/server/src/main/java/org/opensearch/wlm/stats/QueryGroupStats.java +++ b/server/src/main/java/org/opensearch/wlm/stats/QueryGroupStats.java @@ -82,21 +82,23 @@ public int hashCode() { return Objects.hash(stats); } + public Map<String, QueryGroupStatsHolder> getStats() { + return stats; + } + /** * This is a stats holder object which will hold the data for a query group at a point in time * the instance will only be created on demand through stats api */ public static class QueryGroupStatsHolder implements ToXContentObject, Writeable { - public static final String COMPLETIONS = "completions"; - public static final String REJECTIONS = "rejections"; + public static final String COMPLETIONS = "total_completions"; + public static final String REJECTIONS = "total_rejections"; public static final String TOTAL_CANCELLATIONS = "total_cancellations"; public static final String FAILURES = "failures"; - public static final String SHARD_COMPLETIONS = "shard_completions"; private long completions; - private long shardCompletions; private long rejections; private long failures; - private long totalCancellations; + private long cancellations; private Map<ResourceType, ResourceStats> resourceStats; // this is needed to support the factory method @@ -106,15 +108,13 @@ public QueryGroupStatsHolder( long completions, long rejections, long failures, - long totalCancellations, - long shardCompletions, + long cancellations, Map<ResourceType, ResourceStats> resourceStats ) { this.completions = completions; this.rejections = rejections; this.failures = failures; - this.shardCompletions = shardCompletions; - this.totalCancellations = totalCancellations; + this.cancellations = cancellations; this.resourceStats = resourceStats; } @@ -122,8 +122,7 @@ public QueryGroupStatsHolder(StreamInput in) throws IOException { this.completions = in.readVLong(); this.rejections = in.readVLong(); this.failures = in.readVLong(); - this.totalCancellations = in.readVLong(); - this.shardCompletions = in.readVLong(); + this.cancellations = in.readVLong(); this.resourceStats = in.readMap((i) -> ResourceType.fromName(i.readString()), ResourceStats::new); } @@ -141,11 +140,10 @@ public static QueryGroupStatsHolder from(QueryGroupState queryGroupState) { resourceStatsMap.put(resourceTypeStateEntry.getKey(), ResourceStats.from(resourceTypeStateEntry.getValue())); } - statsHolder.completions = queryGroupState.getCompletions(); + statsHolder.completions = queryGroupState.getTotalCompletions(); statsHolder.rejections = queryGroupState.getTotalRejections(); statsHolder.failures = queryGroupState.getFailures(); - statsHolder.totalCancellations = queryGroupState.getTotalCancellations(); - statsHolder.shardCompletions = queryGroupState.getShardCompletions(); + statsHolder.cancellations = queryGroupState.getTotalCancellations(); statsHolder.resourceStats = resourceStatsMap; return statsHolder; } @@ -160,8 +158,7 @@ public static void writeTo(StreamOutput out, QueryGroupStatsHolder statsHolder) out.writeVLong(statsHolder.completions); out.writeVLong(statsHolder.rejections); out.writeVLong(statsHolder.failures); - out.writeVLong(statsHolder.totalCancellations); - out.writeVLong(statsHolder.shardCompletions); + out.writeVLong(statsHolder.cancellations); out.writeMap(statsHolder.resourceStats, (o, val) -> o.writeString(val.getName()), ResourceStats::writeTo); } @@ -173,10 +170,8 @@ public void writeTo(StreamOutput out) throws IOException {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(COMPLETIONS, completions); - builder.field(SHARD_COMPLETIONS, shardCompletions); builder.field(REJECTIONS, rejections); - builder.field(FAILURES, failures); - builder.field(TOTAL_CANCELLATIONS, totalCancellations); + builder.field(TOTAL_CANCELLATIONS, cancellations); for (ResourceType resourceType : ResourceType.getSortedValues()) { ResourceStats resourceStats1 = resourceStats.get(resourceType); @@ -195,15 +192,14 @@ public boolean equals(Object o) { QueryGroupStatsHolder that = (QueryGroupStatsHolder) o; return completions == that.completions && rejections == that.rejections - && shardCompletions == that.shardCompletions && Objects.equals(resourceStats, that.resourceStats) && failures == that.failures - && totalCancellations == that.totalCancellations; + && cancellations == that.cancellations; } @Override public int hashCode() { - return Objects.hash(completions, shardCompletions, rejections, totalCancellations, failures, resourceStats); + return Objects.hash(completions, rejections, cancellations, failures, resourceStats); } } @@ -213,6 +209,7 @@ public int hashCode() { public static class ResourceStats implements ToXContentObject, Writeable { public static final String CURRENT_USAGE = "current_usage"; public static final String CANCELLATIONS = "cancellations"; + public static final String REJECTIONS = "rejections"; public static final double PRECISION = 1e-9; private final double currentUsage; private final long cancellations; @@ -264,7 +261,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(CURRENT_USAGE, currentUsage); builder.field(CANCELLATIONS, cancellations); - builder.field(QueryGroupStatsHolder.REJECTIONS, rejections); + builder.field(REJECTIONS, rejections); return builder; } diff --git a/server/src/main/java/org/opensearch/wlm/stats/WlmStats.java b/server/src/main/java/org/opensearch/wlm/stats/WlmStats.java new file mode 100644 index 0000000000000..3313021bfae52 --- /dev/null +++ b/server/src/main/java/org/opensearch/wlm/stats/WlmStats.java @@ -0,0 +1,65 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ + +package org.opensearch.wlm.stats; + +import org.opensearch.action.support.nodes.BaseNodeResponse; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * This class contains the stats for Workload Management + */ +public class WlmStats extends BaseNodeResponse implements ToXContentObject, Writeable { + private final QueryGroupStats queryGroupStats; + + public WlmStats(DiscoveryNode node, QueryGroupStats queryGroupStats) { + super(node); + this.queryGroupStats = queryGroupStats; + } + + public WlmStats(StreamInput in) throws IOException { + super(in); + queryGroupStats = new QueryGroupStats(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + queryGroupStats.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return queryGroupStats.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WlmStats that = (WlmStats) o; + return Objects.equals(getQueryGroupStats(), that.getQueryGroupStats()); + } + + @Override + public int hashCode() { + return Objects.hash(queryGroupStats); + } + + public QueryGroupStats getQueryGroupStats() { + return queryGroupStats; + } +} diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 432d7e4906c08..16742fd33ed43 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,2 +1 @@ -org.opensearch.index.codec.composite.composite99.Composite99Codec org.opensearch.index.codec.composite.composite912.Composite912Codec diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/wlm/WlmStatsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/wlm/WlmStatsResponseTests.java new file mode 100644 index 0000000000000..01dc033568a95 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/admin/cluster/wlm/WlmStatsResponseTests.java @@ -0,0 +1,100 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.admin.cluster.wlm; + +import org.opensearch.Version; +import org.opensearch.action.FailedNodeException; +import org.opensearch.cluster.ClusterName; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.node.DiscoveryNodeRole; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.wlm.ResourceType; +import org.opensearch.wlm.stats.QueryGroupStats; +import org.opensearch.wlm.stats.WlmStats; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class WlmStatsResponseTests extends OpenSearchTestCase { + ClusterName clusterName = new ClusterName("test-cluster"); + String testQueryGroupId = "safjgagnaeekg-3r3fads"; + DiscoveryNode node = new DiscoveryNode( + "node-1", + buildNewFakeTransportAddress(), + new HashMap<>(), + Set.of(DiscoveryNodeRole.DATA_ROLE), + Version.CURRENT + ); + Map<String, QueryGroupStats.QueryGroupStatsHolder> statsHolderMap = new HashMap<>(); + QueryGroupStats queryGroupStats = new QueryGroupStats( + Map.of( + testQueryGroupId, + new QueryGroupStats.QueryGroupStatsHolder( + 0, + 0, + 1, + 0, + Map.of( + ResourceType.CPU, + new QueryGroupStats.ResourceStats(0, 0, 0), + ResourceType.MEMORY, + new QueryGroupStats.ResourceStats(0, 0, 0) + ) + ) + ) + ); + WlmStats wlmStats = new WlmStats(node, queryGroupStats); + List<WlmStats> wlmStatsList = List.of(wlmStats); + List<FailedNodeException> failedNodeExceptionList = new ArrayList<>(); + + public void testSerializationAndDeserialization() throws IOException { + WlmStatsResponse queryGroupStatsResponse = new WlmStatsResponse(clusterName, wlmStatsList, failedNodeExceptionList); + BytesStreamOutput out = new BytesStreamOutput(); + queryGroupStatsResponse.writeTo(out); + StreamInput in = out.bytes().streamInput(); + WlmStatsResponse deserializedResponse = new WlmStatsResponse(in); + assertEquals(queryGroupStatsResponse.getClusterName(), deserializedResponse.getClusterName()); + assertEquals(queryGroupStatsResponse.getNodes().size(), deserializedResponse.getNodes().size()); + } + + public void testToString() { + WlmStatsResponse queryGroupStatsResponse = new WlmStatsResponse(clusterName, wlmStatsList, failedNodeExceptionList); + String responseString = queryGroupStatsResponse.toString(); + assertEquals( + "{\n" + + " \"node-1\" : {\n" + + " \"query_groups\" : {\n" + + " \"safjgagnaeekg-3r3fads\" : {\n" + + " \"total_completions\" : 0,\n" + + " \"total_rejections\" : 0,\n" + + " \"total_cancellations\" : 0,\n" + + " \"cpu\" : {\n" + + " \"current_usage\" : 0.0,\n" + + " \"cancellations\" : 0,\n" + + " \"rejections\" : 0\n" + + " },\n" + + " \"memory\" : {\n" + + " \"current_usage\" : 0.0,\n" + + " \"cancellations\" : 0,\n" + + " \"rejections\" : 0\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}", + responseString + ); + } +} diff --git a/server/src/test/java/org/opensearch/action/support/nodes/TransportWlmStatsActionTests.java b/server/src/test/java/org/opensearch/action/support/nodes/TransportWlmStatsActionTests.java new file mode 100644 index 0000000000000..49d2cc4d23e62 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/support/nodes/TransportWlmStatsActionTests.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source
+    public void testSerializationAndDeserialization() throws IOException {
+        WlmStatsResponse queryGroupStatsResponse = new WlmStatsResponse(clusterName, wlmStatsList, failedNodeExceptionList);
+        BytesStreamOutput out = new BytesStreamOutput();
+        queryGroupStatsResponse.writeTo(out);
+        StreamInput in = out.bytes().streamInput();
+        WlmStatsResponse deserializedResponse = new WlmStatsResponse(in);
+        assertEquals(queryGroupStatsResponse.getClusterName(), deserializedResponse.getClusterName());
+        assertEquals(queryGroupStatsResponse.getNodes().size(), deserializedResponse.getNodes().size());
+    }
+
+    public void testToString() {
+        WlmStatsResponse queryGroupStatsResponse = new WlmStatsResponse(clusterName, wlmStatsList, failedNodeExceptionList);
+        String responseString = queryGroupStatsResponse.toString();
+        assertEquals(
+            "{\n"
+                + "  \"node-1\" : {\n"
+                + "    \"query_groups\" : {\n"
+                + "      \"safjgagnaeekg-3r3fads\" : {\n"
+                + "        \"total_completions\" : 0,\n"
+                + "        \"total_rejections\" : 0,\n"
+                + "        \"total_cancellations\" : 0,\n"
+                + "        \"cpu\" : {\n"
+                + "          \"current_usage\" : 0.0,\n"
+                + "          \"cancellations\" : 0,\n"
+                + "          \"rejections\" : 0\n"
+                + "        },\n"
+                + "        \"memory\" : {\n"
+                + "          \"current_usage\" : 0.0,\n"
+                + "          \"cancellations\" : 0,\n"
+                + "          \"rejections\" : 0\n"
+                + "        }\n"
+                + "      }\n"
+                + "    }\n"
+                + "  }\n"
+                + "}",
+            responseString
+        );
+    }
+}
diff --git a/server/src/test/java/org/opensearch/action/support/nodes/TransportWlmStatsActionTests.java b/server/src/test/java/org/opensearch/action/support/nodes/TransportWlmStatsActionTests.java
new file mode 100644
index 0000000000000..49d2cc4d23e62
--- /dev/null
+++ b/server/src/test/java/org/opensearch/action/support/nodes/TransportWlmStatsActionTests.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.action.support.nodes;
+
+import org.opensearch.action.admin.cluster.wlm.TransportWlmStatsAction;
+import org.opensearch.action.admin.cluster.wlm.WlmStatsRequest;
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.action.support.PlainActionFuture;
+import org.opensearch.common.io.stream.BytesStreamOutput;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.test.transport.CapturingTransport;
+import org.opensearch.wlm.QueryGroupService;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.mockito.Mockito.mock;
+
+public class TransportWlmStatsActionTests extends TransportNodesActionTests {
+
+    /**
+     * The coordinator node should not send its discovery nodes list with each
+     * request it fans out to the individual nodes. This test asserts that behavior.
+     */
+    public void testWlmStatsActionWithRetentionOfDiscoveryNodesList() {
+        WlmStatsRequest request = new WlmStatsRequest();
+        Map<String, List<WlmStatsRequest>> combinedSentRequest = performWlmStatsAction(request);
+
+        assertNotNull(combinedSentRequest);
+        combinedSentRequest.forEach((node, capturedRequestList) -> {
+            assertNotNull(capturedRequestList);
+            capturedRequestList.forEach(sentRequest -> { assertNull(sentRequest.concreteNodes()); });
+        });
+    }
+
+    private Map<String, List<WlmStatsRequest>> performWlmStatsAction(WlmStatsRequest request) {
+        TransportNodesAction action = new TransportWlmStatsAction(
+            THREAD_POOL,
+            clusterService,
+            transportService,
+            mock(QueryGroupService.class),
+            new ActionFilters(Collections.emptySet())
+        );
+        PlainActionFuture listener = new PlainActionFuture<>();
+        action.new AsyncAction(null, request, listener).start();
+        Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
+        Map<String, List<WlmStatsRequest>> combinedSentRequest = new HashMap<>();
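+        // Round-trip each captured request through the stream layer so the
+        // assertions run against exactly what would have gone over the wire.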
+        capturedRequests.forEach((node, capturedRequestList) -> {
+            List<WlmStatsRequest> sentRequestList = new ArrayList<>();
+
+            capturedRequestList.forEach(preSentRequest -> {
+                BytesStreamOutput out = new BytesStreamOutput();
+                try {
+                    WlmStatsRequest wlmStatsRequestFromCoordinator = (WlmStatsRequest) preSentRequest.request;
+                    wlmStatsRequestFromCoordinator.writeTo(out);
+                    StreamInput in = out.bytes().streamInput();
+                    WlmStatsRequest wlmStatsRequest = new WlmStatsRequest(in);
+                    sentRequestList.add(wlmStatsRequest);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            });
+
+            combinedSentRequest.put(node, sentRequestList);
+        });
+
+        return combinedSentRequest;
+    }
+}
diff --git a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java
index 5abb01dd02e1b..b39dd77ea225e 100644
--- a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java
+++ b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java
@@ -17,7 +17,7 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.Version;
-import org.opensearch.index.codec.composite.composite99.Composite99Codec;
+import org.opensearch.index.codec.composite.composite912.Composite912Codec;
 import org.opensearch.test.OpenSearchTestCase;
 import org.junit.After;
 import org.junit.Before;
@@ -73,7 +73,7 @@ public void testGetDocValuesConsumerForCompositeCodec() throws IOException {
         );
 
         assertEquals("org.apache.lucene.codecs.lucene90.Lucene90DocValuesConsumer", consumer.getClass().getName());
-        assertEquals(CompositeCodecFactory.COMPOSITE_CODEC, Composite99Codec.COMPOSITE_INDEX_CODEC_NAME);
+        assertEquals(CompositeCodecFactory.COMPOSITE_CODEC, Composite912Codec.COMPOSITE_INDEX_CODEC_NAME);
 
         consumer.close();
     }
diff --git a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java
index 67a953d923b22..7206828ff4723 100644
--- a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java
+++ b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java
@@ -19,7 +19,7 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.Version;
-import org.opensearch.index.codec.composite.composite99.Composite99Codec;
+import org.opensearch.index.codec.composite.composite912.Composite912Codec;
 import org.opensearch.test.OpenSearchTestCase;
 import org.junit.After;
 import org.junit.Before;
@@ -85,7 +85,7 @@ public void testGetDocValuesProducerForCompositeCodec99() throws IOException {
             newIOContext(random())
         );
 
         DocValuesProducer producer = LuceneDocValuesProducerFactory.getDocValuesProducerForCompositeCodec(
-            Composite99Codec.COMPOSITE_INDEX_CODEC_NAME,
+            Composite912Codec.COMPOSITE_INDEX_CODEC_NAME,
             segmentReadState,
             dataCodec,
             dataExtension,
diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java
index 705700aada2ff..012d04c575f55 100644
--- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java
+++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java
@@ -25,7 +25,7 @@
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.Version;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat;
+import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat;
 import org.opensearch.index.compositeindex.datacube.Dimension;
 import org.opensearch.index.compositeindex.datacube.Metric;
 import org.opensearch.index.compositeindex.datacube.MetricStat;
@@ -150,14 +150,14 @@ public static void setup() throws IOException {
         String dataFileName = IndexFileNames.segmentFileName(
             writeState.segmentInfo.name,
             writeState.segmentSuffix,
-            Composite99DocValuesFormat.DATA_EXTENSION
+            Composite912DocValuesFormat.DATA_EXTENSION
         );
         dataOut = writeState.directory.createOutput(dataFileName, writeState.context);
 
         String metaFileName = IndexFileNames.segmentFileName(
             writeState.segmentInfo.name,
             writeState.segmentSuffix,
-            Composite99DocValuesFormat.META_EXTENSION
+            Composite912DocValuesFormat.META_EXTENSION
         );
         metaOut = writeState.directory.createOutput(metaFileName, writeState.context);
diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java
index bb31bd6a7cc27..cc6c1758697dd 100644
--- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java
+++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java
@@ -26,8 +26,8 @@
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.Version;
 import org.opensearch.index.codec.composite.LuceneDocValuesProducerFactory;
-import org.opensearch.index.codec.composite.composite99.Composite99Codec;
-import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat;
+import org.opensearch.index.codec.composite.composite912.Composite912Codec;
+import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat;
 import org.opensearch.index.compositeindex.datacube.Metric;
 import org.opensearch.index.compositeindex.datacube.MetricStat;
 import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument;
@@ -351,12 +351,12 @@ public static void validateStarTreeFileFormats(
         );
 
         DocValuesProducer compositeDocValuesProducer = LuceneDocValuesProducerFactory.getDocValuesProducerForCompositeCodec(
-            Composite99Codec.COMPOSITE_INDEX_CODEC_NAME,
+            Composite912Codec.COMPOSITE_INDEX_CODEC_NAME,
             readState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
 
         IndexInput dataIn = readState.directory.openInput(dataFileName, IOContext.DEFAULT);
diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java
index 095eda2986b3a..c4d6fe6f19812 100644
--- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java
+++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java
@@ -28,7 +28,7 @@
 import org.apache.lucene.util.Version;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory;
-import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat;
+import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat;
 import org.opensearch.index.compositeindex.datacube.Metric;
 import org.opensearch.index.compositeindex.datacube.MetricStat;
 import org.opensearch.index.compositeindex.datacube.NumericDimension;
@@ -179,10 +179,10 @@ public void test_build_halfFloatMetrics() throws IOException {
         List<SequentialDocValuesIterator> metricsIterators = getMetricIterators(segmentStarTreeDocuments);
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
@@ -298,10 +298,10 @@ public void test_build_floatMetrics() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
 
         builder.build(segmentStarTreeDocumentIterator, new AtomicInteger(), docValuesConsumer);
@@ -391,10 +391,10 @@ public void test_build_longMetrics() throws IOException {
         List<SequentialDocValuesIterator> metricsIterators = getMetricIterators(segmentStarTreeDocuments);
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> segmentStarTreeDocumentIterator = builder.sortAndAggregateSegmentDocuments(
@@ -701,10 +701,10 @@ public void test_build() throws IOException {
         );
         docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
 
         builder.build(segmentStarTreeDocumentIterator, new AtomicInteger(), docValuesConsumer);
@@ -796,10 +796,10 @@ public void test_build_starTreeDataset() throws IOException {
         writeState = new SegmentWriteState(InfoStream.getDefault(), segmentInfo.dir, segmentInfo, fieldInfos, null, newIOContext(random()));
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         mapperService = mock(MapperService.class);
         DocumentMapper documentMapper = mock(DocumentMapper.class);
diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java
index 1aa830e3587df..a92ac39cb7020 100644
--- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java
+++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java
@@ -16,7 +16,7 @@
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.IndexInput;
 import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory;
-import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat;
+import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat;
 import org.opensearch.index.compositeindex.datacube.Dimension;
 import org.opensearch.index.compositeindex.datacube.Metric;
 import org.opensearch.index.compositeindex.datacube.MetricStat;
@@ -95,10 +95,10 @@ public void testFlushFlow() throws IOException {
         */
        this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder.build(starTreeDocumentIterator, new AtomicInteger(), docValuesConsumer);
         List<StarTreeDocument> starTreeDocuments = builder.getStarTreeDocuments();
@@ -182,10 +182,10 @@ public void testFlushFlowDimsReverse() throws IOException {
         writeState = getWriteState(6, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         SequentialDocValuesIterator[] dimDvs = { new SequentialDocValuesIterator(d1sndv), new SequentialDocValuesIterator(d2sndv) };
@@ -284,10 +284,10 @@ public void testFlushFlowBuild() throws IOException {
         SegmentWriteState consumerWriteState = getWriteState(DocIdSetIterator.NO_MORE_DOCS, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             consumerWriteState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
 
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
@@ -361,10 +361,10 @@ public void testFlushFlowWithTimestamps() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, getWriteState(6, writeState.segmentInfo.getId()), mapperService);
         SequentialDocValuesIterator[] dimDvs = { new SequentialDocValuesIterator(d1sndv), new SequentialDocValuesIterator(d2sndv) };
diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java
index f983365dfec30..00e53534a7606 100644
--- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java
+++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java
@@ -14,7 +14,8 @@
 import org.apache.lucene.search.DocIdSetIterator;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory;
-import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat;
+import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat;
+import org.opensearch.index.compositeindex.CompositeIndexConstants;
 import org.opensearch.index.compositeindex.datacube.Dimension;
 import org.opensearch.index.compositeindex.datacube.Metric;
 import org.opensearch.index.compositeindex.datacube.MetricStat;
@@ -194,10 +195,10 @@ public void testMergeFlow() throws IOException {
 
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -310,10 +311,10 @@ public void testMergeFlowWithSum() throws IOException {
         writeState = getWriteState(6, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -386,10 +387,10 @@ public void testMergeFlowWithCount() throws IOException {
         writeState = getWriteState(6, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -528,10 +529,10 @@ public void testMergeFlowWithMissingDocs() throws IOException {
         writeState = getWriteState(4, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -615,10 +616,10 @@ public void testMergeFlowWithMissingDocsWithZero() throws IOException {
         SegmentWriteState consumerWriteState = getWriteState(DocIdSetIterator.NO_MORE_DOCS, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             consumerWriteState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -702,10 +703,10 @@ public void testMergeFlowWithMissingDocsWithZeroComplexCase() throws IOException {
         writeState = getWriteState(4, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -793,10 +794,10 @@ public void testMergeFlowWithMissingDocsInSecondDim() throws IOException {
         writeState = getWriteState(4, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -879,10 +880,10 @@ public void testMergeFlowWithDocsMissingAtTheEnd() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -957,10 +958,10 @@ public void testMergeFlowWithEmptyFieldsInOneSegment() throws IOException {
         writeState = getWriteState(0, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -1086,10 +1087,10 @@ public void testMergeFlowWithDuplicateDimensionValues() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         builder.build(builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2)), new AtomicInteger(), docValuesConsumer);
@@ -1234,10 +1235,10 @@ public void testMergeFlowWithMaxLeafDocs() throws IOException {
 
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         builder.build(builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2)), new AtomicInteger(), docValuesConsumer);
@@ -1352,10 +1353,10 @@ public void testMergeFlowWithDifferentDocsFromSegments() throws IOException {
         writeState = getWriteState(4, writeState.segmentInfo.getId());
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -1489,10 +1490,10 @@ public void testMergeFlowWithDuplicateDimensionValueWithMaxLeafDocs() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         builder.build(builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2)), new AtomicInteger(), docValuesConsumer);
@@ -1618,10 +1619,10 @@ public void testMergeFlowWithMaxLeafDocsAndStarTreeNodesAssertion() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService);
         builder.build(builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2)), new AtomicInteger(), docValuesConsumer);
@@ -1727,10 +1728,10 @@ public void testMergeFlowWithTimestamps() throws IOException {
         );
         this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(
             writeState,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
-            Composite99DocValuesFormat.META_DOC_VALUES_CODEC,
-            Composite99DocValuesFormat.META_DOC_VALUES_EXTENSION
+            Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION,
+            Composite912DocValuesFormat.META_DOC_VALUES_CODEC,
+            Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION
         );
         builder = getStarTreeBuilder(metaOut, dataOut, compositeField, getWriteState(4, writeState.segmentInfo.getId()), mapperService);
         Iterator<StarTreeDocument> starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2));
@@ -1850,7 +1851,7 @@ private StarTreeValues getStarTreeValues(
             null,
             dimDocIdSetIterators,
             metricDocIdSetIterators,
-            Map.of(SEGMENT_DOCS_COUNT, number),
+            Map.of(CompositeIndexConstants.SEGMENT_DOCS_COUNT, number),
             null
         );
         return starTreeValues;
diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java
index 4c854f7546197..6733cac12f657 100644
--- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java
+++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java
@@ -26,7 +26,7 @@
 import org.apache.lucene.util.NumericUtils;
 import org.opensearch.common.Rounding;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.index.codec.composite.composite99.Composite99DocValuesFormat;
+import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat;
 import org.opensearch.index.compositeindex.CompositeIndexConstants;
 import org.opensearch.index.compositeindex.datacube.DataCubeDateTimeUnit;
 import org.opensearch.index.compositeindex.datacube.DateDimension;
@@ -158,14 +158,14 @@ public void setup() throws IOException {
         dataFileName = IndexFileNames.segmentFileName(
             writeState.segmentInfo.name,
             writeState.segmentSuffix,
-            Composite99DocValuesFormat.DATA_EXTENSION
+            Composite912DocValuesFormat.DATA_EXTENSION
         );
         dataOut = writeState.directory.createOutput(dataFileName, writeState.context);
 
         metaFileName = IndexFileNames.segmentFileName(
             writeState.segmentInfo.name,
             writeState.segmentSuffix,
-            Composite99DocValuesFormat.META_EXTENSION
+            Composite912DocValuesFormat.META_EXTENSION
         );
         metaOut = writeState.directory.createOutput(metaFileName, writeState.context);
diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java
index d9622aae4c378..aa10b7dc18381 100644
--- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java
+++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java
@@ -382,6 +382,7 @@ public void testParseShardPath() {
         IndexId indexId = repoData.getIndices().values().iterator().next();
         int shardCount = repoData.shardGenerations().getGens(indexId).size();
 
+        // Version 2.17 has file names starting with the indexId
         String shardPath = String.join(
             SnapshotShardPaths.DELIMITER,
             indexId.getId(),
@@ -391,7 +392,19 @@
             "1"
         );
         ShardInfo shardInfo = SnapshotShardPaths.parseShardPath(shardPath);
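+        // the indexId-prefixed form should parse back to the original index and shard count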
+        assertEquals(shardInfo.getIndexId(), indexId);
+        assertEquals(shardInfo.getShardCount(), shardCount);
+        // Version 2.17 has file names starting with snapshot_path_
+        shardPath = String.join(
+            SnapshotShardPaths.DELIMITER,
+            SnapshotShardPaths.FILE_PREFIX + indexId.getId(),
+            indexId.getName(),
+            String.valueOf(shardCount),
+            String.valueOf(indexId.getShardPathType()),
+            "1"
+        );
+        shardInfo = SnapshotShardPaths.parseShardPath(shardPath);
         assertEquals(shardInfo.getIndexId(), indexId);
         assertEquals(shardInfo.getShardCount(), shardCount);
     }
diff --git a/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java b/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java
index 4f59f9688fb7e..b4726bab50198 100644
--- a/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java
+++ b/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java
@@ -156,7 +156,7 @@ private int expectedSize(final String threadPoolName, final int numberOfProcessors) {
         sizes.put(ThreadPool.Names.REMOTE_PURGE, ThreadPool::halfAllocatedProcessors);
         sizes.put(ThreadPool.Names.REMOTE_REFRESH_RETRY, ThreadPool::halfAllocatedProcessors);
         sizes.put(ThreadPool.Names.REMOTE_RECOVERY, ThreadPool::twiceAllocatedProcessors);
-        sizes.put(ThreadPool.Names.REMOTE_STATE_READ, ThreadPool::twiceAllocatedProcessors);
+        sizes.put(ThreadPool.Names.REMOTE_STATE_READ, n -> ThreadPool.boundedBy(4 * n, 4, 32));
         return sizes.get(threadPoolName).apply(numberOfProcessors);
     }
 
diff --git a/server/src/test/java/org/opensearch/wlm/QueryGroupServiceTests.java b/server/src/test/java/org/opensearch/wlm/QueryGroupServiceTests.java
index c5cf0dac4f807..45428865259c3 100644
--- a/server/src/test/java/org/opensearch/wlm/QueryGroupServiceTests.java
+++ b/server/src/test/java/org/opensearch/wlm/QueryGroupServiceTests.java
@@ -401,14 +401,14 @@ public void testOnTaskCompleted() {
         ((QueryGroupTask) task).setQueryGroupId(mockThreadPool.getThreadContext());
         queryGroupService.onTaskCompleted(task);
 
-        assertEquals(1, queryGroupState.completions.count());
+        assertEquals(1, queryGroupState.totalCompletions.count());
 
         // test non QueryGroupTask
         task = new Task(1, "simple", "test", "mock task", null, null);
         queryGroupService.onTaskCompleted(task);
 
         // It should still be 1
-        assertEquals(1, queryGroupState.completions.count());
+        assertEquals(1, queryGroupState.totalCompletions.count());
 
         mockThreadPool.shutdown();
     }
diff --git a/server/src/test/java/org/opensearch/wlm/listeners/QueryGroupRequestOperationListenerTests.java b/server/src/test/java/org/opensearch/wlm/listeners/QueryGroupRequestOperationListenerTests.java
index 1127b50399d24..016588acf1e24 100644
--- a/server/src/test/java/org/opensearch/wlm/listeners/QueryGroupRequestOperationListenerTests.java
+++ b/server/src/test/java/org/opensearch/wlm/listeners/QueryGroupRequestOperationListenerTests.java
@@ -29,6 +29,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 
@@ -56,6 +57,10 @@ public void setUp() throws Exception {
         queryGroupStateMap = new HashMap<>();
         testQueryGroupId = "safjgagnakg-3r3fads";
         testThreadPool = new TestThreadPool("RejectionTestThreadPool");
+        ClusterState mockClusterState = mock(ClusterState.class);
+        when(mockClusterService.state()).thenReturn(mockClusterState);
+        Metadata mockMetaData = mock(Metadata.class);
+        when(mockClusterState.metadata()).thenReturn(mockMetaData);
         queryGroupService = mock(QueryGroupService.class);
         sut = new QueryGroupRequestOperationListener(queryGroupService, testThreadPool);
     }
@@ -90,7 +95,6 @@ public void testValidQueryGroupRequestFailure() throws IOException {
             0,
             1,
             0,
-            0,
             Map.of(
                 ResourceType.CPU,
                 new QueryGroupStats.ResourceStats(0, 0, 0),
@@ -104,7 +108,6 @@
             0,
             0,
             0,
-            0,
             Map.of(
                 ResourceType.CPU,
                 new QueryGroupStats.ResourceStats(0, 0, 0),
@@ -155,7 +158,9 @@ public void testMultiThreadedValidQueryGroupRequestFailures() {
             }
         });
 
-        QueryGroupStats actualStats = queryGroupService.nodeStats();
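+        // "_all" is expected to select the stats of every query group tracked on this node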
+        HashSet<String> set = new HashSet<>();
+        set.add("_all");
+        QueryGroupStats actualStats = queryGroupService.nodeStats(set, null);
 
         QueryGroupStats expectedStats = new QueryGroupStats(
             Map.of(
@@ -165,7 +170,6 @@
                 0,
                 ITERATIONS,
                 0,
-                0,
                 Map.of(
                     ResourceType.CPU,
                     new QueryGroupStats.ResourceStats(0, 0, 0),
@@ -179,7 +183,6 @@
                 0,
                 0,
                 0,
-                0,
                 Map.of(
                     ResourceType.CPU,
                     new QueryGroupStats.ResourceStats(0, 0, 0),
@@ -202,7 +205,6 @@ public void testInvalidQueryGroupFailure() throws IOException {
             0,
             0,
             0,
-            0,
             Map.of(
                 ResourceType.CPU,
                 new QueryGroupStats.ResourceStats(0, 0, 0),
@@ -216,7 +218,6 @@
             0,
             1,
             0,
-            0,
             Map.of(
                 ResourceType.CPU,
                 new QueryGroupStats.ResourceStats(0, 0, 0),
@@ -254,11 +255,12 @@ private void assertSuccess(
             Collections.emptySet(),
             Collections.emptySet()
         );
-        sut = new QueryGroupRequestOperationListener(queryGroupService, testThreadPool);
 
         sut.onRequestFailure(null, null);
 
-        QueryGroupStats actualStats = queryGroupService.nodeStats();
+        HashSet<String> set = new HashSet<>();
+        set.add("_all");
+        QueryGroupStats actualStats = queryGroupService.nodeStats(set, null);
 
         assertEquals(expectedStats, actualStats);
     }
diff --git a/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStateTests.java b/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStateTests.java
index 566c4261d6878..c0dfa06a0fba1 100644
--- a/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStateTests.java
+++ b/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStateTests.java
@@ -23,13 +23,7 @@ public void testRandomQueryGroupsStateUpdates() {
 
         for (int i = 0; i < 25; i++) {
             if (i % 5 == 0) {
-                updaterThreads.add(new Thread(() -> {
-                    if (randomBoolean()) {
-                        queryGroupState.completions.inc();
-                    } else {
-                        queryGroupState.shardCompletions.inc();
-                    }
-                }));
+                updaterThreads.add(new Thread(() -> { queryGroupState.totalCompletions.inc(); }));
             } else if (i % 5 == 1) {
                 updaterThreads.add(new Thread(() -> {
                     queryGroupState.totalRejections.inc();
@@ -63,7 +57,7 @@ public void testRandomQueryGroupsStateUpdates() {
             }
         });
 
-        assertEquals(5, queryGroupState.getCompletions() + queryGroupState.getShardCompletions());
+        assertEquals(5, queryGroupState.getTotalCompletions());
         assertEquals(5, queryGroupState.getTotalRejections());
 
         final long sumOfRejectionsDueToResourceTypes = queryGroupState.getResourceState().get(ResourceType.CPU).rejections.count()
diff --git a/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStatsTests.java b/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStatsTests.java
index ac6d19580dacb..6fc4d178e54bc 100644
--- a/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStatsTests.java
+++ b/server/src/test/java/org/opensearch/wlm/stats/QueryGroupStatsTests.java
@@ -8,17 +8,24 @@
 package org.opensearch.wlm.stats;
 
+import org.opensearch.Version;
+import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.cluster.node.DiscoveryNodeRole;
 import org.opensearch.common.xcontent.json.JsonXContent;
 import org.opensearch.core.common.io.stream.Writeable;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.test.AbstractWireSerializingTestCase;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.test.VersionUtils;
 import org.opensearch.wlm.ResourceType;
 
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
+import static java.util.Collections.emptyMap;
+
 public class QueryGroupStatsTests extends AbstractWireSerializingTestCase<QueryGroupStats> {
 
     public void testToXContent() throws IOException {
@@ -31,7 +38,6 @@ public void testToXContent() throws IOException {
                 13,
                 2,
                 0,
-                1213718,
                 Map.of(ResourceType.CPU, new QueryGroupStats.ResourceStats(0.3, 13, 2))
             )
         );
@@ -41,7 +47,7 @@ public void testToXContent() throws IOException {
         queryGroupStats.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder.endObject();
         assertEquals(
-            "{\"query_groups\":{\"afakjklaj304041-afaka\":{\"completions\":123456789,\"shard_completions\":1213718,\"rejections\":13,\"failures\":2,\"total_cancellations\":0,\"cpu\":{\"current_usage\":0.3,\"cancellations\":13,\"rejections\":2}}}}",
+            "{\"query_groups\":{\"afakjklaj304041-afaka\":{\"total_completions\":123456789,\"total_rejections\":13,\"total_cancellations\":0,\"cpu\":{\"current_usage\":0.3,\"cancellations\":13,\"rejections\":2}}}}",
             builder.toString()
         );
     }
@@ -61,7 +67,6 @@ protected QueryGroupStats createTestInstance() {
             randomNonNegativeLong(),
             randomNonNegativeLong(),
             randomNonNegativeLong(),
-            randomNonNegativeLong(),
             Map.of(
                 ResourceType.CPU,
                 new QueryGroupStats.ResourceStats(
@@ -72,6 +77,13 @@ protected QueryGroupStats createTestInstance() {
                 )
             )
         );
+        DiscoveryNode discoveryNode = new DiscoveryNode(
+            "node",
+            OpenSearchTestCase.buildNewFakeTransportAddress(),
+            emptyMap(),
+            DiscoveryNodeRole.BUILT_IN_ROLES,
+            VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)
+        );
         return new QueryGroupStats(stats);
     }
 }
diff --git a/server/src/test/java/org/opensearch/wlm/stats/WlmStatsTests.java b/server/src/test/java/org/opensearch/wlm/stats/WlmStatsTests.java
new file mode 100644
index 0000000000000..6910ca7f9937c
--- /dev/null
+++ b/server/src/test/java/org/opensearch/wlm/stats/WlmStatsTests.java
@@ -0,0 +1,74 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.wlm.stats;
+
+import org.opensearch.Version;
+import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.cluster.node.DiscoveryNodeRole;
+import org.opensearch.common.xcontent.json.JsonXContent;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.test.AbstractWireSerializingTestCase;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.test.VersionUtils;
+import org.opensearch.wlm.ResourceType;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.emptyMap;
+import static org.mockito.Mockito.mock;
+
+public class WlmStatsTests extends AbstractWireSerializingTestCase<WlmStats> {
+
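+    // WlmStats adds no fields of its own, so its XContent output is exactly that of
+    // the wrapped QueryGroupStats.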
+    public void testToXContent() throws IOException {
+        final Map<String, QueryGroupStats.QueryGroupStatsHolder> stats = new HashMap<>();
+        final String queryGroupId = "afakjklaj304041-afaka";
+        stats.put(
+            queryGroupId,
+            new QueryGroupStats.QueryGroupStatsHolder(
+                123456789,
+                13,
+                2,
+                0,
+                Map.of(ResourceType.CPU, new QueryGroupStats.ResourceStats(0.3, 13, 2))
+            )
+        );
+        XContentBuilder builder = JsonXContent.contentBuilder();
+        QueryGroupStats queryGroupStats = new QueryGroupStats(stats);
+        WlmStats wlmStats = new WlmStats(mock(DiscoveryNode.class), queryGroupStats);
+        builder.startObject();
+        wlmStats.toXContent(builder, ToXContent.EMPTY_PARAMS);
+        builder.endObject();
+        assertEquals(
+            "{\"query_groups\":{\"afakjklaj304041-afaka\":{\"total_completions\":123456789,\"total_rejections\":13,\"total_cancellations\":0,\"cpu\":{\"current_usage\":0.3,\"cancellations\":13,\"rejections\":2}}}}",
+            builder.toString()
+        );
+    }
+
+    @Override
+    protected Writeable.Reader<WlmStats> instanceReader() {
+        return WlmStats::new;
+    }
+
+    @Override
+    protected WlmStats createTestInstance() {
+        DiscoveryNode discoveryNode = new DiscoveryNode(
+            "node",
+            OpenSearchTestCase.buildNewFakeTransportAddress(),
+            emptyMap(),
+            DiscoveryNodeRole.BUILT_IN_ROLES,
+            VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)
+        );
+        QueryGroupStatsTests queryGroupStatsTests = new QueryGroupStatsTests();
+        return new WlmStats(discoveryNode, queryGroupStatsTests.createTestInstance());
+    }
+}
diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle
index bc63362980bea..b3a90b5de2589 100644
--- a/test/fixtures/hdfs-fixture/build.gradle
+++ b/test/fixtures/hdfs-fixture/build.gradle
@@ -88,6 +88,6 @@ dependencies {
   runtimeOnly("com.squareup.okhttp3:okhttp:4.12.0") {
     exclude group: "com.squareup.okio"
   }
-  runtimeOnly "com.squareup.okio:okio:3.9.0"
+  runtimeOnly "com.squareup.okio:okio:3.9.1"
   runtimeOnly "org.xerial.snappy:snappy-java:1.1.10.6"
 }
diff --git a/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java
index 74c75ea05b1f3..e3d75cbb69638 100644
--- a/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java
@@ -404,8 +404,6 @@ protected void sendIncompleteContent(HttpExchange exchange, byte[] bytes) throws IOException {
         if (bytesToSend > 0) {
             exchange.getResponseBody().write(bytes, rangeStart, bytesToSend);
         }
-        if (randomBoolean()) {
-            exchange.getResponseBody().flush();
-        }
+        exchange.getResponseBody().flush();
     }
 }