diff --git a/.github/ISSUE_TEMPLATE/bug_template.yml b/.github/ISSUE_TEMPLATE/bug_template.yml index 2cd1ee8a7e688..5f0798abe0f68 100644 --- a/.github/ISSUE_TEMPLATE/bug_template.yml +++ b/.github/ISSUE_TEMPLATE/bug_template.yml @@ -15,7 +15,7 @@ body: description: Choose a specific OpenSearch component your bug belongs to. If you are unsure which to select or if the component is not present, select "Other". multiple: false options: - - Other + - # Empty first option to force selection - Build - Clients - Cluster Manager @@ -24,6 +24,7 @@ body: - Indexing:Replication - Indexing - Libraries + - Other - Plugins - Search:Aggregations - Search:Performance diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index d93ac8b590706..0159e771f7f80 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -22,7 +22,7 @@ body: description: Choose a specific OpenSearch component your feature request belongs to. If you are unsure of which component to select or if the component is not present, select "Other". multiple: false options: - - Other + - # Empty first option to force selection - Build - Clients - Cluster Manager @@ -31,6 +31,7 @@ body: - Indexing:Replication - Indexing - Libraries + - Other - Plugins - Search:Aggregations - Search:Performance diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index ca026f530b4af..61962c91b4903 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v4 - name: lychee Link Checker id: lychee - uses: lycheeverse/lychee-action@v1.8.0 + uses: lycheeverse/lychee-action@v1.9.1 with: args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes fail: true diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml index 2f87afd372d90..34e8f57cc1878 100644 --- a/.github/workflows/maintainer-approval.yml +++ b/.github/workflows/maintainer-approval.yml @@ -2,7 +2,6 @@ name: Maintainers approval on: pull_request_review: - types: [submitted] jobs: maintainer-approved-check: @@ -26,7 +25,7 @@ jobs: return maintainersResponse.data.map(item => item.login).join(', '); - - uses: peternied/required-approval@v1.2 + - uses: peternied/required-approval@v1.3 with: token: ${{ secrets.GITHUB_TOKEN }} min-required: 1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 79491875c53dc..f8ae343894d5d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -82,6 +82,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944)) - Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993)) - Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439)) +- Fix typo in API annotation check message ([11836](https://github.com/opensearch-project/OpenSearch/pull/11836)) +- Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872)) ### Security @@ -100,6 +102,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) - Per 
request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) - Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) +- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275)) +- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753)) - [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) - Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247)) - Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248)) @@ -107,6 +111,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679)) - Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618)) - [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672)) +- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210)) - Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024)) - Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650)) - Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040)) @@ -121,11 +126,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) - Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) - Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) -- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560)) +- 
Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796)) - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) - Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630)) - Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506)) @@ -139,7 +145,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171)) - Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271)) - Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273)) -- Bump `netty` from 4.1.100.Final to 4.1.101.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294)) +- Bump `netty` from 4.1.100.Final to 4.1.104.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775)) - Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692)) - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861)) - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344)) @@ -159,6 +165,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) - Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) - Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) +- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) +- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) +- Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) +- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886)) ### Changed - Mute the query profile IT with concurrent execution 
([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) @@ -185,6 +195,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) - Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) +- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582)) +- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732)) +- Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526)) +- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890)) ### Deprecated @@ -207,10 +221,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) - Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425)) - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) +- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485)) - Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) - Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711)) +- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490)) +- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815)) ### Security diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index f9936aad0cf8c..21adbb0305ab1 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -183,6 +183,12 @@ Run OpenSearch using `gradlew run`. ./gradlew run ``` +[Plugins](plugins/) may be installed by passing a `-PinstalledPlugins` property: + +```bash +./gradlew run -PinstalledPlugins="['plugin1', 'plugin2']" +``` + That will build OpenSearch and start it, writing its log above Gradle's status message. We log a lot of stuff on startup, specifically these lines tell you that OpenSearch is ready. ``` @@ -578,7 +584,7 @@ explicitly marked by an annotation should not be extended by external implementa any time. 
The `@DeprecatedApi` annotation could also be added to any classes annotated with `@PublicApi` (or documented as `@opensearch.api`) or their methods that are either changed (with replacement) or planned to be removed across major versions. -The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`) +The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`) annotation. The presence of this annotation signals that API may change at any time (major, minor or even patch releases). In general, the classes annotated with `@PublicApi` may expose other classes or methods annotated with `@ExperimentalApi`, in such cases the backward compatibility guarantees would not apply to latter (see please [Experimental Development](#experimental-development) for more details). diff --git a/build.gradle b/build.gradle index b1cd1d532bfeb..296c30391af09 100644 --- a/build.gradle +++ b/build.gradle @@ -54,7 +54,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.20.0" apply false + id "com.diffplug.spotless" version "6.23.2" apply false id "org.gradle.test-retry" version "1.5.4" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index a42976fef572c..1cb21acd14af7 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -110,7 +110,7 @@ dependencies { api 'com.netflix.nebula:gradle-info-plugin:12.1.6' api 'org.apache.rat:apache-rat:0.15' api 'commons-io:commons-io:2.15.1' - api "net.java.dev.jna:jna:5.13.0" + api "net.java.dev.jna:jna:5.14.0" api 'com.github.johnrengelman:shadow:8.1.1' api 'org.jdom:jdom2:2.0.6.1' api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}" @@ -118,7 +118,7 @@ dependencies { api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6' api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 'org.apache.maven:maven-model:3.9.6' - api 'com.networknt:json-schema-validator:1.0.86' + api 'com.networknt:json-schema-validator:1.1.0' api 'org.jruby.jcodings:jcodings:1.0.58' api 'org.jruby.joni:joni:2.2.1' api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}" diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 74d655cfb1045..dd7f2e1eaabf0 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ opensearch = 3.0.0 -lucene = 9.8.0 +lucene = 9.9.1 bundled_jdk_vendor = adoptium bundled_jdk = 21.0.1+12 @@ -7,8 +7,8 @@ bundled_jdk = 21.0.1+12 # optional dependencies spatial4j = 0.7 jts = 1.15.0 -jackson = 2.16.0 -jackson_databind = 2.16.0 +jackson = 2.16.1 +jackson_databind = 2.16.1 snakeyaml = 2.1 icu4j = 70.1 supercsv = 2.4.0 @@ -26,7 +26,7 @@ jakarta_annotation = 1.3.5 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.13.0 -netty = 4.1.101.Final +netty = 4.1.104.Final joda = 2.12.2 # project reactor @@ -70,5 +70,5 @@ jzlib = 1.1.3 resteasy = 6.2.4.Final # opentelemetry dependencies -opentelemetry = 1.32.0 +opentelemetry = 1.34.1 opentelemetrysemconv = 1.23.1-alpha diff --git a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 deleted file mode 100644 index c2b70fb4ae202..0000000000000 --- 
a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -899e5cf01be55fbf094ad72b2edb0c5df99111ee \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..908d071b34a2a --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 @@ -0,0 +1 @@ +9456bb3cdd0f79f91a5f730a1b1bb041a380c91f \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/gradle/run.gradle b/gradle/run.gradle index 639479e97d28f..34651f1d94964 100644 --- a/gradle/run.gradle +++ b/gradle/run.gradle @@ -39,6 +39,12 @@ testClusters { testDistribution = 'archive' if (numZones > 1) numberOfZones = numZones if (numNodes > 1) numberOfNodes = numNodes + if (findProperty("installedPlugins")) { + installedPlugins = Eval.me(installedPlugins) + for (String p : installedPlugins) { + plugin('plugins:'.concat(p)) + } + } } } diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java index 1864aec4aa951..569f48a8465f3 100644 --- a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java +++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java @@ -113,7 +113,7 @@ private void process(ExecutableElement executable, Element enclosing) { // The executable element should not be internal (unless constructor for injectable core component) checkNotInternal(enclosing, executable); - // Check this elements annotations + // Check this element's annotations for (final AnnotationMirror annotation : executable.getAnnotationMirrors()) { final Element element = annotation.getAnnotationType().asElement(); if (inspectable(element)) { @@ -210,7 +210,7 @@ private void process(ExecutableElement 
executable, ReferenceType ref) { } } - // Check this elements annotations + // Check this element's annotations for (final AnnotationMirror annotation : ref.getAnnotationMirrors()) { final Element element = annotation.getAnnotationType().asElement(); if (inspectable(element)) { @@ -316,7 +316,7 @@ private void checkPublic(@Nullable Element referencedBy, final Element element) reportFailureAs, "The element " + element - + " is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi" + + " is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi" + ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "") ); } diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java index df04709458b29..8d8a4c7895339 100644 --- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java +++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java @@ -35,7 +35,7 @@ public void testPublicApiMethodArgumentNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotated)" ) ) @@ -56,7 +56,7 @@ public void testPublicApiMethodArgumentNotAnnotatedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedGenerics)" ) ) @@ -77,7 +77,7 @@ public void testPublicApiMethodThrowsNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodThrowsNotAnnotated)" ) ) @@ -111,7 +111,7 @@ public void testPublicApiMethodArgumentNotAnnotatedPackagePrivate() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)" ) ) @@ -209,7 +209,7 @@ public void 
testPublicApiMethodReturnNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotated)" ) ) @@ -230,7 +230,7 @@ public void testPublicApiMethodReturnNotAnnotatedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedGenerics)" ) ) @@ -251,7 +251,7 @@ public void testPublicApiMethodReturnNotAnnotatedArray() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedArray)" ) ) @@ -272,7 +272,7 @@ public void testPublicApiMethodReturnNotAnnotatedBoundedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedBoundedGenerics)" ) ) @@ -297,7 +297,7 @@ public void testPublicApiMethodReturnNotAnnotatedAnnotation() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedAnnotation)" ) ) @@ -388,7 +388,7 @@ public void testPublicApiMethodGenericsArgumentNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodGenericsArgumentNotAnnotated)" ) ) @@ -453,7 +453,7 @@ public void testPublicApiMethodReturnAnnotatedGenerics() { matching( Diagnostic.Kind.ERROR, 
containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnAnnotatedGenerics)" ) ) diff --git a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 b/libs/core/licenses/jackson-core-2.16.0.jar.sha1 deleted file mode 100644 index c2b70fb4ae202..0000000000000 --- a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -899e5cf01be55fbf094ad72b2edb0c5df99111ee \ No newline at end of file diff --git a/libs/core/licenses/jackson-core-2.16.1.jar.sha1 b/libs/core/licenses/jackson-core-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..908d071b34a2a --- /dev/null +++ b/libs/core/licenses/jackson-core-2.16.1.jar.sha1 @@ -0,0 +1 @@ +9456bb3cdd0f79f91a5f730a1b1bb041a380c91f \ No newline at end of file diff --git a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0.jar.sha1 deleted file mode 100644 index f9a3e2f3cbee6..0000000000000 --- a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e8421c5f8573bcf22e9265fc7e19469545a775a \ No newline at end of file diff --git a/libs/core/licenses/lucene-core-9.9.1.jar.sha1 b/libs/core/licenses/lucene-core-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..ae596196d9e6a --- /dev/null +++ b/libs/core/licenses/lucene-core-9.9.1.jar.sha1 @@ -0,0 +1 @@ +55249fa9a0ed321adcf8283c6f3b649a6812b0a9 \ No newline at end of file diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index d94be3f25b53d..6a92993f5dd42 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -98,8 +98,8 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0); - public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0); - public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0); + public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_1); + public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_1); public static final Version CURRENT = V_3_0_0; public static Version fromId(int id) { diff --git a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java index c0abad7ed727f..1e48cf1f476da 100644 --- a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java +++ b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java @@ -32,6 +32,7 @@ package org.opensearch.core.index.shard; +import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.common.annotation.PublicApi; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; @@ 
-55,6 +56,8 @@ public class ShardId implements Comparable<ShardId>, ToXContentFragment, Writeab private final int shardId; private final int hashCode; + private final static long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ShardId.class); + /** * Constructs a new shard id. * @param index the index name @@ -88,6 +91,10 @@ public ShardId(StreamInput in) throws IOException { hashCode = computeHashCode(); } + public long getBaseRamBytesUsed() { + return BASE_RAM_BYTES_USED; + } + /** * Writes this shard id to a stream. * @param out the stream to write to diff --git a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 deleted file mode 100644 index c2b70fb4ae202..0000000000000 --- a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -899e5cf01be55fbf094ad72b2edb0c5df99111ee \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..908d071b34a2a --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 @@ -0,0 +1 @@ +9456bb3cdd0f79f91a5f730a1b1bb041a380c91f \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 deleted file mode 100644 index 8da478fc6013d..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35e8b7bf4fc1d078766bb155103d433ed5bb1627 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..b4b781f604910 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 @@ -0,0 +1 @@ +1be7098dccc079171464dca7e386bd8df623b031 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 deleted file mode 100644 index 3e952ffe92418..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3c422d7f3901c9a1becf9df3cf41efc68a5ab95c \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..ad91e748ebe94 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 @@ -0,0 +1 @@ +c4ddbc5277670f2e56b1f5e44e83afa748bcb125 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 deleted file mode 100644 index d62b5874ab023..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2033e2c5f531785d17f3a2bc31842e3bbb7983b2 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..9b30e7bf921b2 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 @@ -0,0 +1 @@ +8e4f1923d73cd55f2b4c0d56ee4ed80419297354 \ No newline at end of file diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java new file mode 100644 index 0000000000000..4c05f0058f2ed --- /dev/null +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.xcontent; + +import com.fasterxml.jackson.core.StreamReadConstraints; + +import org.opensearch.common.annotation.InternalApi; + +/** + * Consolidates the XContent constraints (primarily reflecting Jackson's {@link StreamReadConstraints} constraints) + * + * @opensearch.internal + */ +@InternalApi +public interface XContentContraints { + final String DEFAULT_MAX_STRING_LEN_PROPERTY = "opensearch.xcontent.string.length.max"; + final String DEFAULT_MAX_NAME_LEN_PROPERTY = "opensearch.xcontent.name.length.max"; + final String DEFAULT_MAX_DEPTH_PROPERTY = "opensearch.xcontent.depth.max"; + + final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(System.getProperty(DEFAULT_MAX_STRING_LEN_PROPERTY, "50000000" /* ~50 Mb */)); + + final int DEFAULT_MAX_NAME_LEN = Integer.parseInt( + System.getProperty(DEFAULT_MAX_NAME_LEN_PROPERTY, "50000" /* StreamReadConstraints.DEFAULT_MAX_NAME_LEN */) + ); + + final int DEFAULT_MAX_DEPTH = Integer.parseInt( + System.getProperty(DEFAULT_MAX_DEPTH_PROPERTY, "1000" /* StreamReadConstraints.DEFAULT_MAX_DEPTH */) + ); +} diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java index 81f8fe9b6366f..7e92f236213d4 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java @@ -37,8 +37,10 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -58,11 +60,7 @@ /** * A CBOR based content implementation using Jackson. 
*/ -public class CborXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class CborXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(cborXContent); } @@ -76,7 +74,14 @@ public static XContentBuilder contentBuilder() throws IOException { // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - cborFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + cborFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + cborFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); cborFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); cborXContent = new CborXContent(); } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java index 4bd7c4c99bb46..91f6bbeb4f786 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java @@ -38,7 +38,9 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -57,11 +59,7 @@ /** * A JSON based content implementation using Jackson. 
*/ -public class JsonXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class JsonXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(jsonXContent); } @@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException { // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - jsonFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + jsonFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + jsonFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); jsonFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); jsonXContent = new JsonXContent(); } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java index e824d4e1ae991..c73e126102a80 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java @@ -37,9 +37,11 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -58,11 +60,7 @@ /** * A Smile based content implementation using Jackson. 
*/ -public class SmileXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class SmileXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(smileXContent); } @@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException { // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - smileFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + smileFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + smileFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); smileFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); smileXContent = new SmileXContent(); } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java index 0ad3c44e0168a..3f6a4b3aeead7 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java @@ -36,8 +36,10 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -56,11 +58,7 @@ /** * A YAML based content implementation using Jackson. 
*/ -public class YamlXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class YamlXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(yamlXContent); } @@ -71,7 +69,14 @@ public static XContentBuilder contentBuilder() throws IOException { static { yamlFactory = new YAMLFactory(); yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - yamlFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + yamlFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + yamlFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); yamlFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); yamlXContent = new YamlXContent(); } diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java index d3d9ea174cf1b..0e431d8ea4277 100644 --- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java @@ -40,6 +40,7 @@ import org.opensearch.common.xcontent.cbor.CborXContent; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.smile.SmileXContent; +import org.opensearch.common.xcontent.yaml.YamlXContent; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParseException; @@ -48,16 +49,20 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; +import java.io.InputStream; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.function.Supplier; +import java.util.zip.GZIPInputStream; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -67,6 +72,7 @@ import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assume.assumeThat; import static org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage; public class XContentParserTests extends OpenSearchTestCase { @@ -94,6 +100,50 @@ public class XContentParserTests extends OpenSearchTestCase { () -> randomRealisticUnicodeOfCodepointLength(3145730) ); + private static final Map<XContentType, Supplier<String>> FIELD_NAME_GENERATORS = Map.of( + XContentType.JSON, + () -> randomAlphaOfLengthBetween(1, JsonXContent.DEFAULT_MAX_NAME_LEN), + XContentType.CBOR, + () -> randomAlphaOfLengthBetween(1, CborXContent.DEFAULT_MAX_NAME_LEN), + XContentType.SMILE, + () -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_NAME_LEN), + XContentType.YAML, + () -> 
randomAlphaOfLengthBetween(1, YamlXContent.DEFAULT_MAX_NAME_LEN) + ); + + private static final Map<XContentType, Supplier<String>> FIELD_NAME_OFF_LIMIT_GENERATORS = Map.of( + XContentType.JSON, + () -> randomAlphaOfLength(JsonXContent.DEFAULT_MAX_NAME_LEN + 1), + XContentType.CBOR, + () -> randomAlphaOfLength(CborXContent.DEFAULT_MAX_NAME_LEN + 1), + XContentType.SMILE, + () -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_NAME_LEN + 1), + XContentType.YAML, + () -> randomAlphaOfLength(YamlXContent.DEFAULT_MAX_NAME_LEN + 1) + ); + + private static final Map<XContentType, Supplier<Integer>> DEPTH_GENERATORS = Map.of( + XContentType.JSON, + () -> randomIntBetween(1, JsonXContent.DEFAULT_MAX_DEPTH), + XContentType.CBOR, + () -> randomIntBetween(1, CborXContent.DEFAULT_MAX_DEPTH), + XContentType.SMILE, + () -> randomIntBetween(1, SmileXContent.DEFAULT_MAX_DEPTH), + XContentType.YAML, + () -> randomIntBetween(1, YamlXContent.DEFAULT_MAX_DEPTH) + ); + + private static final Map<XContentType, Supplier<Integer>> OFF_LIMIT_DEPTH_GENERATORS = Map.of( + XContentType.JSON, + () -> JsonXContent.DEFAULT_MAX_DEPTH + 1, + XContentType.CBOR, + () -> CborXContent.DEFAULT_MAX_DEPTH + 1, + XContentType.SMILE, + () -> SmileXContent.DEFAULT_MAX_DEPTH + 1, + XContentType.YAML, + () -> YamlXContent.DEFAULT_MAX_DEPTH + 1 + ); + public void testStringOffLimit() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); @@ -155,6 +205,188 @@ public void testString() throws IOException { } } + public void testFieldNameOffLimit() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final String field = FIELD_NAME_OFF_LIMIT_GENERATORS.get(xContentType).get(); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + builder.endObject(); + + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + // See please https://github.com/FasterXML/jackson-dataformats-binary/issues/392, support + // for CBOR, Smile is coming + if (xContentType != XContentType.JSON) { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field, parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } else { + assertThrows(StreamConstraintsException.class, () -> parser.nextToken()); + } + } + } + } + + public void testFieldName() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final String field = FIELD_NAME_GENERATORS.get(xContentType).get(); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + builder.endObject(); + + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field, parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + 
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + } + } + + public void testWriteDepthOffLimit() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + // Branching off YAML logic into separate test case testWriteDepthOffLimitYaml since it behaves differently + assumeThat(xContentType, not(XContentType.YAML)); + + final String field = randomAlphaOfLengthBetween(1, 5); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1; + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.startObject(); + builder.field(field + depth); + } + + // The behavior here is very interesting: the generator does write the new object tag (changing the internal state) + // BUT throws the exception after the fact, this is why we have to close the object at the end. + assertThrows(StreamConstraintsException.class, () -> builder.startObject()); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + + builder.endObject(); + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.endObject(); + } + } + } + + public void testWriteDepthOffLimitYaml() throws IOException { + final String field = randomAlphaOfLengthBetween(1, 5); + try (XContentBuilder builder = XContentBuilder.builder(XContentType.YAML.xContent())) { + final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(XContentType.YAML).get() - 1; + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.startObject(); + builder.field(field + depth); + } + + // The behavior here is very interesting: the generator does write the new object tag (changing the internal state) + // BUT throws the exception after the fact, this is why we have to close the object at the end. + assertThrows(StreamConstraintsException.class, () -> builder.startObject()); + } catch (final IllegalStateException ex) { + // YAML parser is having really hard time recovering from StreamConstraintsException, the internal + // state seems to be completely messed up and the closing cleanly seems to be not feasible. + } + } + + public void testReadDepthOffLimit() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1; + + // Since parser and generator use the same max depth constraints, we could not generate the content with off limits, + // using precreated test files instead. + try ( + InputStream in = new GZIPInputStream( + getDataInputStream("depth-off-limit." 
+ xContentType.name().toLowerCase(Locale.US) + ".gz") + ) + ) { + try (XContentParser parser = createParser(xContentType.xContent(), in)) { + for (int depth = 0; depth < maxDepth; ++depth) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + } + + if (xContentType != XContentType.YAML) { + assertThrows(StreamConstraintsException.class, () -> parser.nextToken()); + } + } + } + } + + public void testDepth() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final String field = randomAlphaOfLengthBetween(1, 5); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + final int maxDepth = DEPTH_GENERATORS.get(xContentType).get() - 1; + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.startObject(); + builder.field(field + depth); + } + + builder.startObject(); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + builder.endObject(); + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.endObject(); + } + + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { + for (int depth = 0; depth < maxDepth; ++depth) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field + depth, parser.currentName()); + } + + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field, parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + + for (int depth = 0; depth < maxDepth; ++depth) { + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + } + + assertNull(parser.nextToken()); + } + } + } + public void testFloat() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz new file mode 100644 index 0000000000000..88de7e590e7f0 Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz differ diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz new file mode 100644 index 0000000000000..76274910542ac Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz differ diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz new file mode 100644 index 0000000000000..e248778b37253 Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz differ diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz new file mode 100644 index 
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
index cc8889af27621..d1b4a0961b7bd 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
@@ -102,8 +102,11 @@ public IngestDocument execute(IngestDocument document) {
         } else {
             ingestScript = precompiledIngestScript;
         }
-        ingestScript.execute(document.getSourceAndMetadata());
-        CollectionUtils.ensureNoSelfReferences(document.getSourceAndMetadata(), "ingest script");
+        IngestDocument mutableDocument = new IngestDocument(document);
+        ingestScript.execute(mutableDocument.getSourceAndMetadata());
+        CollectionUtils.ensureNoSelfReferences(mutableDocument.getSourceAndMetadata(), "ingest script");
+        document.getSourceAndMetadata().clear();
+        document.getSourceAndMetadata().putAll(mutableDocument.getSourceAndMetadata());
         return document;
     }
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java
index 96d9be75c4ab7..e900458e361ce 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java
@@ -105,4 +105,16 @@ private void assertIngestDocument(IngestDocument ingestDocument) {
         int bytesTotal = ingestDocument.getFieldValue("bytes_in", Integer.class) + ingestDocument.getFieldValue("bytes_out", Integer.class);
         assertThat(ingestDocument.getSourceAndMetadata().get("bytes_total"), is(bytesTotal));
     }
+
+    public void testScriptingWithSelfReferencingSourceMetadata() {
+        ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), null, script, null, scriptService);
+        IngestDocument originalIngestDocument = randomDocument();
+        String index = originalIngestDocument.getSourceAndMetadata().get(IngestDocument.Metadata.INDEX.getFieldName()).toString();
+        String id = originalIngestDocument.getSourceAndMetadata().get(IngestDocument.Metadata.ID.getFieldName()).toString();
+        Map<String, Object> sourceMetadata = originalIngestDocument.getSourceAndMetadata();
+        originalIngestDocument.getSourceAndMetadata().put("_source", sourceMetadata);
+        IngestDocument ingestDocument = new IngestDocument(index, id, null, null, null, originalIngestDocument.getSourceAndMetadata());
+        expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
+    }
+
 }
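The ScriptProcessor change above follows a copy-validate-commit pattern: the script runs against a copy of the document, the copy is checked for self-references, and only then is the result written back, so a failing script (including one whose error is swallowed by ignore_failure) cannot leave the original document self-referencing. A condensed sketch of the pattern, with hypothetical names; the production code copies via new IngestDocument(document) and validates with CollectionUtils.ensureNoSelfReferences:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Consumer;

    final class CopyValidateCommit {
        // Run a mutation against a copy and publish it only after validation succeeds,
        // leaving the original untouched if the mutation misbehaves.
        static void apply(Map<String, Object> original, Consumer<Map<String, Object>> mutation) {
            Map<String, Object> copy = new HashMap<>(original); // shallow here for brevity; the PR deep-copies
            mutation.accept(copy);
            if (copy.containsValue(copy)) { // simplified stand-in for ensureNoSelfReferences
                throw new IllegalArgumentException("Iterable object is self-referencing itself (ingest script)");
            }
            original.clear();
            original.putAll(copy); // commit: the original changes only on success
        }
    }

The commit step reuses clear()/putAll() on the original map rather than swapping in the copy, so existing references to the document's backing map remain valid.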
"description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.foo['foo']=ctx.foo;ctx['test-field']='test-value'", + "ignore_failure": true + } + }, + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "fooBar", foo: {foo: "bar"}} + + - do: + get: + index: test + id: 1 + - match: { _source.source_field: "fooBar" } + - match: { _source.target_field: "FOOBAR"} + - match: { _source.test-field: null} + +--- +"Test self referencing source without ignoring failure": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.foo['foo']=ctx.foo;ctx['test-field']='test-value'" + } + }, + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: bad_request + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "fooBar", foo: {foo: "bar"}} + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "Iterable object is self-referencing itself (ingest script)" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml index 7c073739f6a1f..edd649a310d42 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml @@ -1113,3 +1113,48 @@ teardown: - match: { status: 400 } - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.root_cause.0.reason: "Failed to parse parameter [_if_primary_term], only int or long is accepted" } + +--- +"Test simulate with pipeline with ignore failure and cyclic field assignments in script": + - do: + ingest.simulate: + verbose: true + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "script" : { + "ignore_failure" : true, + "lang": "painless", + "source": "ctx.foo['foo']=ctx.foo;ctx.tag='recursive'" + } + }, + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.foo.foo)" + } + } + ] + }, + "docs": [ + { + "_source": { + "foo": { + "foo": "bar" + } + } + } + ] + } + - length: { docs: 1 } + - length: { docs.0.processor_results: 2 } + - match: { docs.0.processor_results.0.status: "error_ignored" } + - match: { docs.0.processor_results.0.ignored_error.error.type: "illegal_argument_exception" } + - match: { docs.0.processor_results.0.doc._source.tag: null } + - match: { docs.0.processor_results.1.doc._source.target_field: "BAR" } + - match: { docs.0.processor_results.1.doc._source.foo.foo: "bar" } + - match: { docs.0.processor_results.1.status: "success" } + - match: { docs.0.processor_results.1.processor_type: "script" } diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1
deleted file mode 100644
index 892865a017f48..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7725476acfcb9bdfeff1b813ce15c39c6b857dc2
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..402cc36ba3d68
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
@@ -0,0 +1 @@
+1782a69d0e83af9cc3c65db0dcd2e7e7c1e5f90e
\ No newline at end of file
diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
index 21b03b85d8edd..69b789dd2aa25 100644
--- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4
+++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
@@ -25,7 +25,7 @@ protected abstract boolean isSlashRegex();
 }

 WS: [ \t\n\r]+ -> skip;
-COMMENT: ( '//' .*? [\n\r] | '/*' .*? '*/' ) -> skip;
+COMMENT: ( '//' ~[\n\r]* | '/*' .*?
'*/' ) -> skip; LBRACK: '{'; RBRACK: '}'; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java index fb33cf6e2a6f5..260a2fc0c062c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java @@ -435,7 +435,7 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { return true; } - public static final String _serializedATN = "\u0004\u0000U\u0278\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007" + public static final String _serializedATN = "\u0004\u0000U\u0277\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007" + "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007" + "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007" + "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n" @@ -459,381 +459,381 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { + "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0001\u0000\u0004\u0000\u00ae\b\u0000" + "\u000b\u0000\f\u0000\u00af\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001" + "\u0001\u0001\u0001\u0001\u0005\u0001\u00b8\b\u0001\n\u0001\f\u0001\u00bb" - + "\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005" - + "\u0001\u00c2\b\u0001\n\u0001\f\u0001\u00c5\t\u0001\u0001\u0001\u0001\u0001" - + "\u0003\u0001\u00c9\b\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002" - + "\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005" - + "\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001" - + "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001" - + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001" - + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001" - + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001" - + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001" - + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001" - + "\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001" - + "\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001" - + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001" - + "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001" - + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001" - + "\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001" - + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001" - + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001" - + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001" - + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001" - + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001" - + "\"\u0001\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001" - + "%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001" - + ")\u0001)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001" - + ",\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u0001" - + "0\u00010\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00014\u0001" - + 
"4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001" - + "7\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001" - + ":\u0001;\u0001;\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001" - + ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001" - + "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001" - + "D\u0001E\u0001E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001F\u0001" - + "G\u0001G\u0004G\u01b8\bG\u000bG\fG\u01b9\u0001G\u0003G\u01bd\bG\u0001" - + "H\u0001H\u0001H\u0004H\u01c2\bH\u000bH\fH\u01c3\u0001H\u0003H\u01c7\b" - + "H\u0001I\u0001I\u0001I\u0005I\u01cc\bI\nI\fI\u01cf\tI\u0003I\u01d1\bI" - + "\u0001I\u0003I\u01d4\bI\u0001J\u0001J\u0001J\u0005J\u01d9\bJ\nJ\fJ\u01dc" - + "\tJ\u0003J\u01de\bJ\u0001J\u0001J\u0004J\u01e2\bJ\u000bJ\fJ\u01e3\u0003" - + "J\u01e6\bJ\u0001J\u0001J\u0003J\u01ea\bJ\u0001J\u0004J\u01ed\bJ\u000b" - + "J\fJ\u01ee\u0003J\u01f1\bJ\u0001J\u0003J\u01f4\bJ\u0001K\u0001K\u0001" - + "K\u0001K\u0001K\u0001K\u0005K\u01fc\bK\nK\fK\u01ff\tK\u0001K\u0001K\u0001" - + "K\u0001K\u0001K\u0001K\u0001K\u0005K\u0208\bK\nK\fK\u020b\tK\u0001K\u0003" - + "K\u020e\bK\u0001L\u0001L\u0001L\u0001L\u0004L\u0214\bL\u000bL\fL\u0215" - + "\u0001L\u0001L\u0005L\u021a\bL\nL\fL\u021d\tL\u0001L\u0001L\u0001M\u0001" - + "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001N\u0001" - + "O\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" + + "\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u00c1" + + "\b\u0001\n\u0001\f\u0001\u00c4\t\u0001\u0001\u0001\u0001\u0001\u0003\u0001" + + "\u00c8\b\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0003" + + "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006" + + "\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001" + + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\u000b\u0001\u000b" + + "\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e" + + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f" + + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010" + + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012" + + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012" + + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013" + + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014" + + "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015" + + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017" + + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018" + + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019" + + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a" + + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a" + + "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c" + + "\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f" + + "\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001" + + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001&\u0001" + + "&\u0001&\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001" + + "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001" + + ",\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00010\u0001" + + 
"1\u00011\u00011\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u0001" + + "5\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u00018\u0001" + + "8\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001" + + ";\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001" + + "?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001" + + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001D\u0001E\u0001" + + "E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001F\u0001G\u0001G\u0004" + + "G\u01b7\bG\u000bG\fG\u01b8\u0001G\u0003G\u01bc\bG\u0001H\u0001H\u0001" + + "H\u0004H\u01c1\bH\u000bH\fH\u01c2\u0001H\u0003H\u01c6\bH\u0001I\u0001" + + "I\u0001I\u0005I\u01cb\bI\nI\fI\u01ce\tI\u0003I\u01d0\bI\u0001I\u0003I" + + "\u01d3\bI\u0001J\u0001J\u0001J\u0005J\u01d8\bJ\nJ\fJ\u01db\tJ\u0003J\u01dd" + + "\bJ\u0001J\u0001J\u0004J\u01e1\bJ\u000bJ\fJ\u01e2\u0003J\u01e5\bJ\u0001" + + "J\u0001J\u0003J\u01e9\bJ\u0001J\u0004J\u01ec\bJ\u000bJ\fJ\u01ed\u0003" + + "J\u01f0\bJ\u0001J\u0003J\u01f3\bJ\u0001K\u0001K\u0001K\u0001K\u0001K\u0001" + + "K\u0005K\u01fb\bK\nK\fK\u01fe\tK\u0001K\u0001K\u0001K\u0001K\u0001K\u0001" + + "K\u0001K\u0005K\u0207\bK\nK\fK\u020a\tK\u0001K\u0003K\u020d\bK\u0001L" + + "\u0001L\u0001L\u0001L\u0004L\u0213\bL\u000bL\fL\u0214\u0001L\u0001L\u0005" + + "L\u0219\bL\nL\fL\u021c\tL\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001" + + "M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001" + + "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" - + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" - + "P\u0001P\u0001P\u0003P\u0257\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001" - + "R\u0005R\u025f\bR\nR\fR\u0262\tR\u0001S\u0001S\u0001S\u0005S\u0267\bS" - + "\nS\fS\u026a\tS\u0003S\u026c\bS\u0001S\u0001S\u0001T\u0001T\u0005T\u0272" - + "\bT\nT\fT\u0275\tT\u0001T\u0001T\u0005\u00b9\u00c3\u01fd\u0209\u0215\u0000" - + "U\u0002\u0001\u0004\u0002\u0006\u0003\b\u0004\n\u0005\f\u0006\u000e\u0007" - + "\u0010\b\u0012\t\u0014\n\u0016\u000b\u0018\f\u001a\r\u001c\u000e\u001e" - + "\u000f \u0010\"\u0011$\u0012&\u0013(\u0014*\u0015,\u0016.\u00170\u0018" - + "2\u00194\u001a6\u001b8\u001c:\u001d<\u001e>\u001f@ B!D\"F#H$J%L&N\'P(" - + "R)T*V+X,Z-\\.^/`0b1d2f3h4j5l6n7p8r9t:v;x~?\u0080@\u0082A\u0084B\u0086" - + "C\u0088D\u008aE\u008cF\u008eG\u0090H\u0092I\u0094J\u0096K\u0098L\u009a" - + "M\u009cN\u009eO\u00a0P\u00a2Q\u00a4R\u00a6S\u00a8T\u00aaU\u0002\u0000" - + "\u0001\u0013\u0003\u0000\t\n\r\r \u0002\u0000\n\n\r\r\u0001\u000007\u0002" - + "\u0000LLll\u0002\u0000XXxx\u0003\u000009AFaf\u0001\u000019\u0001\u0000" - + "09\u0006\u0000DDFFLLddffll\u0002\u0000EEee\u0002\u0000++--\u0004\u0000" - + "DDFFddff\u0002\u0000\"\"\\\\\u0002\u0000\'\'\\\\\u0001\u0000\n\n\u0002" - + "\u0000\n\n//\u0007\u0000UUcciilmssuuxx\u0003\u0000AZ__az\u0004\u00000" - + "9AZ__az\u029e\u0000\u0002\u0001\u0000\u0000\u0000\u0000\u0004\u0001\u0000" - + "\u0000\u0000\u0000\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000" - + "\u0000\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000" - + "\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000" - + "\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000" - + "\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000" - + 
"\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000" - + "\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000" - + "\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001" - + "\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000" - + "\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u0000" - + "0\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001" - + "\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000" - + "\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000" - + ">\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001" - + "\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000" - + "\u0000\u0000H\u0001\u0000\u0000\u0000\u0000J\u0001\u0000\u0000\u0000\u0000" - + "L\u0001\u0000\u0000\u0000\u0000N\u0001\u0000\u0000\u0000\u0000P\u0001" - + "\u0000\u0000\u0000\u0000R\u0001\u0000\u0000\u0000\u0000T\u0001\u0000\u0000" - + "\u0000\u0000V\u0001\u0000\u0000\u0000\u0000X\u0001\u0000\u0000\u0000\u0000" - + "Z\u0001\u0000\u0000\u0000\u0000\\\u0001\u0000\u0000\u0000\u0000^\u0001" - + "\u0000\u0000\u0000\u0000`\u0001\u0000\u0000\u0000\u0000b\u0001\u0000\u0000" - + "\u0000\u0000d\u0001\u0000\u0000\u0000\u0000f\u0001\u0000\u0000\u0000\u0000" - + "h\u0001\u0000\u0000\u0000\u0000j\u0001\u0000\u0000\u0000\u0000l\u0001" - + "\u0000\u0000\u0000\u0000n\u0001\u0000\u0000\u0000\u0000p\u0001\u0000\u0000" - + "\u0000\u0000r\u0001\u0000\u0000\u0000\u0000t\u0001\u0000\u0000\u0000\u0000" - + "v\u0001\u0000\u0000\u0000\u0000x\u0001\u0000\u0000\u0000\u0000z\u0001" - + "\u0000\u0000\u0000\u0000|\u0001\u0000\u0000\u0000\u0000~\u0001\u0000\u0000" - + "\u0000\u0000\u0080\u0001\u0000\u0000\u0000\u0000\u0082\u0001\u0000\u0000" - + "\u0000\u0000\u0084\u0001\u0000\u0000\u0000\u0000\u0086\u0001\u0000\u0000" - + "\u0000\u0000\u0088\u0001\u0000\u0000\u0000\u0000\u008a\u0001\u0000\u0000" - + "\u0000\u0000\u008c\u0001\u0000\u0000\u0000\u0000\u008e\u0001\u0000\u0000" - + "\u0000\u0000\u0090\u0001\u0000\u0000\u0000\u0000\u0092\u0001\u0000\u0000" - + "\u0000\u0000\u0094\u0001\u0000\u0000\u0000\u0000\u0096\u0001\u0000\u0000" - + "\u0000\u0000\u0098\u0001\u0000\u0000\u0000\u0000\u009a\u0001\u0000\u0000" - + "\u0000\u0000\u009c\u0001\u0000\u0000\u0000\u0000\u009e\u0001\u0000\u0000" - + "\u0000\u0000\u00a0\u0001\u0000\u0000\u0000\u0000\u00a2\u0001\u0000\u0000" - + "\u0000\u0000\u00a4\u0001\u0000\u0000\u0000\u0000\u00a6\u0001\u0000\u0000" - + "\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000" - + "\u0000\u0002\u00ad\u0001\u0000\u0000\u0000\u0004\u00c8\u0001\u0000\u0000" - + "\u0000\u0006\u00cc\u0001\u0000\u0000\u0000\b\u00ce\u0001\u0000\u0000\u0000" - + "\n\u00d0\u0001\u0000\u0000\u0000\f\u00d2\u0001\u0000\u0000\u0000\u000e" - + "\u00d4\u0001\u0000\u0000\u0000\u0010\u00d6\u0001\u0000\u0000\u0000\u0012" - + "\u00d8\u0001\u0000\u0000\u0000\u0014\u00dc\u0001\u0000\u0000\u0000\u0016" - + "\u00e1\u0001\u0000\u0000\u0000\u0018\u00e3\u0001\u0000\u0000\u0000\u001a" - + "\u00e5\u0001\u0000\u0000\u0000\u001c\u00e8\u0001\u0000\u0000\u0000\u001e" - + "\u00eb\u0001\u0000\u0000\u0000 \u00f0\u0001\u0000\u0000\u0000\"\u00f6" - + "\u0001\u0000\u0000\u0000$\u00f9\u0001\u0000\u0000\u0000&\u00fd\u0001\u0000" - + "\u0000\u0000(\u0106\u0001\u0000\u0000\u0000*\u010c\u0001\u0000\u0000\u0000" - + ",\u0113\u0001\u0000\u0000\u0000.\u0117\u0001\u0000\u0000\u00000\u011b" - + 
"\u0001\u0000\u0000\u00002\u0121\u0001\u0000\u0000\u00004\u0127\u0001\u0000" - + "\u0000\u00006\u012c\u0001\u0000\u0000\u00008\u0137\u0001\u0000\u0000\u0000" - + ":\u0139\u0001\u0000\u0000\u0000<\u013b\u0001\u0000\u0000\u0000>\u013d" - + "\u0001\u0000\u0000\u0000@\u0140\u0001\u0000\u0000\u0000B\u0142\u0001\u0000" - + "\u0000\u0000D\u0144\u0001\u0000\u0000\u0000F\u0146\u0001\u0000\u0000\u0000" - + "H\u0149\u0001\u0000\u0000\u0000J\u014c\u0001\u0000\u0000\u0000L\u0150" - + "\u0001\u0000\u0000\u0000N\u0152\u0001\u0000\u0000\u0000P\u0155\u0001\u0000" - + "\u0000\u0000R\u0157\u0001\u0000\u0000\u0000T\u015a\u0001\u0000\u0000\u0000" - + "V\u015d\u0001\u0000\u0000\u0000X\u0161\u0001\u0000\u0000\u0000Z\u0164" - + "\u0001\u0000\u0000\u0000\\\u0168\u0001\u0000\u0000\u0000^\u016a\u0001" - + "\u0000\u0000\u0000`\u016c\u0001\u0000\u0000\u0000b\u016e\u0001\u0000\u0000" - + "\u0000d\u0171\u0001\u0000\u0000\u0000f\u0174\u0001\u0000\u0000\u0000h" - + "\u0176\u0001\u0000\u0000\u0000j\u0178\u0001\u0000\u0000\u0000l\u017b\u0001" - + "\u0000\u0000\u0000n\u017e\u0001\u0000\u0000\u0000p\u0181\u0001\u0000\u0000" - + "\u0000r\u0184\u0001\u0000\u0000\u0000t\u0188\u0001\u0000\u0000\u0000v" - + "\u018b\u0001\u0000\u0000\u0000x\u018e\u0001\u0000\u0000\u0000z\u0190\u0001" - + "\u0000\u0000\u0000|\u0193\u0001\u0000\u0000\u0000~\u0196\u0001\u0000\u0000" - + "\u0000\u0080\u0199\u0001\u0000\u0000\u0000\u0082\u019c\u0001\u0000\u0000" - + "\u0000\u0084\u019f\u0001\u0000\u0000\u0000\u0086\u01a2\u0001\u0000\u0000" - + "\u0000\u0088\u01a5\u0001\u0000\u0000\u0000\u008a\u01a8\u0001\u0000\u0000" - + "\u0000\u008c\u01ac\u0001\u0000\u0000\u0000\u008e\u01b0\u0001\u0000\u0000" - + "\u0000\u0090\u01b5\u0001\u0000\u0000\u0000\u0092\u01be\u0001\u0000\u0000" - + "\u0000\u0094\u01d0\u0001\u0000\u0000\u0000\u0096\u01dd\u0001\u0000\u0000" - + "\u0000\u0098\u020d\u0001\u0000\u0000\u0000\u009a\u020f\u0001\u0000\u0000" - + "\u0000\u009c\u0220\u0001\u0000\u0000\u0000\u009e\u0225\u0001\u0000\u0000" - + "\u0000\u00a0\u022b\u0001\u0000\u0000\u0000\u00a2\u0256\u0001\u0000\u0000" - + "\u0000\u00a4\u0258\u0001\u0000\u0000\u0000\u00a6\u025c\u0001\u0000\u0000" - + "\u0000\u00a8\u026b\u0001\u0000\u0000\u0000\u00aa\u026f\u0001\u0000\u0000" - + "\u0000\u00ac\u00ae\u0007\u0000\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000" - + "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000" - + "\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000" - + "\u0000\u00b1\u00b2\u0006\u0000\u0000\u0000\u00b2\u0003\u0001\u0000\u0000" - + "\u0000\u00b3\u00b4\u0005/\u0000\u0000\u00b4\u00b5\u0005/\u0000\u0000\u00b5" - + "\u00b9\u0001\u0000\u0000\u0000\u00b6\u00b8\t\u0000\u0000\u0000\u00b7\u00b6" - + "\u0001\u0000\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00ba" - + "\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00ba\u00bc" - + "\u0001\u0000\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00c9" - + "\u0007\u0001\u0000\u0000\u00bd\u00be\u0005/\u0000\u0000\u00be\u00bf\u0005" - + "*\u0000\u0000\u00bf\u00c3\u0001\u0000\u0000\u0000\u00c0\u00c2\t\u0000" - + "\u0000\u0000\u00c1\u00c0\u0001\u0000\u0000\u0000\u00c2\u00c5\u0001\u0000" - + "\u0000\u0000\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000" - + "\u0000\u0000\u00c4\u00c6\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000" - + "\u0000\u0000\u00c6\u00c7\u0005*\u0000\u0000\u00c7\u00c9\u0005/\u0000\u0000" - + "\u00c8\u00b3\u0001\u0000\u0000\u0000\u00c8\u00bd\u0001\u0000\u0000\u0000" - + 
"\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0006\u0001\u0000\u0000" - + "\u00cb\u0005\u0001\u0000\u0000\u0000\u00cc\u00cd\u0005{\u0000\u0000\u00cd" - + "\u0007\u0001\u0000\u0000\u0000\u00ce\u00cf\u0005}\u0000\u0000\u00cf\t" - + "\u0001\u0000\u0000\u0000\u00d0\u00d1\u0005[\u0000\u0000\u00d1\u000b\u0001" - + "\u0000\u0000\u0000\u00d2\u00d3\u0005]\u0000\u0000\u00d3\r\u0001\u0000" - + "\u0000\u0000\u00d4\u00d5\u0005(\u0000\u0000\u00d5\u000f\u0001\u0000\u0000" - + "\u0000\u00d6\u00d7\u0005)\u0000\u0000\u00d7\u0011\u0001\u0000\u0000\u0000" - + "\u00d8\u00d9\u0005.\u0000\u0000\u00d9\u00da\u0001\u0000\u0000\u0000\u00da" - + "\u00db\u0006\b\u0001\u0000\u00db\u0013\u0001\u0000\u0000\u0000\u00dc\u00dd" - + "\u0005?\u0000\u0000\u00dd\u00de\u0005.\u0000\u0000\u00de\u00df\u0001\u0000" - + "\u0000\u0000\u00df\u00e0\u0006\t\u0001\u0000\u00e0\u0015\u0001\u0000\u0000" - + "\u0000\u00e1\u00e2\u0005,\u0000\u0000\u00e2\u0017\u0001\u0000\u0000\u0000" - + "\u00e3\u00e4\u0005;\u0000\u0000\u00e4\u0019\u0001\u0000\u0000\u0000\u00e5" - + "\u00e6\u0005i\u0000\u0000\u00e6\u00e7\u0005f\u0000\u0000\u00e7\u001b\u0001" - + "\u0000\u0000\u0000\u00e8\u00e9\u0005i\u0000\u0000\u00e9\u00ea\u0005n\u0000" - + "\u0000\u00ea\u001d\u0001\u0000\u0000\u0000\u00eb\u00ec\u0005e\u0000\u0000" - + "\u00ec\u00ed\u0005l\u0000\u0000\u00ed\u00ee\u0005s\u0000\u0000\u00ee\u00ef" - + "\u0005e\u0000\u0000\u00ef\u001f\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005" - + "w\u0000\u0000\u00f1\u00f2\u0005h\u0000\u0000\u00f2\u00f3\u0005i\u0000" - + "\u0000\u00f3\u00f4\u0005l\u0000\u0000\u00f4\u00f5\u0005e\u0000\u0000\u00f5" - + "!\u0001\u0000\u0000\u0000\u00f6\u00f7\u0005d\u0000\u0000\u00f7\u00f8\u0005" - + "o\u0000\u0000\u00f8#\u0001\u0000\u0000\u0000\u00f9\u00fa\u0005f\u0000" - + "\u0000\u00fa\u00fb\u0005o\u0000\u0000\u00fb\u00fc\u0005r\u0000\u0000\u00fc" - + "%\u0001\u0000\u0000\u0000\u00fd\u00fe\u0005c\u0000\u0000\u00fe\u00ff\u0005" - + "o\u0000\u0000\u00ff\u0100\u0005n\u0000\u0000\u0100\u0101\u0005t\u0000" - + "\u0000\u0101\u0102\u0005i\u0000\u0000\u0102\u0103\u0005n\u0000\u0000\u0103" - + "\u0104\u0005u\u0000\u0000\u0104\u0105\u0005e\u0000\u0000\u0105\'\u0001" - + "\u0000\u0000\u0000\u0106\u0107\u0005b\u0000\u0000\u0107\u0108\u0005r\u0000" - + "\u0000\u0108\u0109\u0005e\u0000\u0000\u0109\u010a\u0005a\u0000\u0000\u010a" - + "\u010b\u0005k\u0000\u0000\u010b)\u0001\u0000\u0000\u0000\u010c\u010d\u0005" - + "r\u0000\u0000\u010d\u010e\u0005e\u0000\u0000\u010e\u010f\u0005t\u0000" - + "\u0000\u010f\u0110\u0005u\u0000\u0000\u0110\u0111\u0005r\u0000\u0000\u0111" - + "\u0112\u0005n\u0000\u0000\u0112+\u0001\u0000\u0000\u0000\u0113\u0114\u0005" - + "n\u0000\u0000\u0114\u0115\u0005e\u0000\u0000\u0115\u0116\u0005w\u0000" - + "\u0000\u0116-\u0001\u0000\u0000\u0000\u0117\u0118\u0005t\u0000\u0000\u0118" - + "\u0119\u0005r\u0000\u0000\u0119\u011a\u0005y\u0000\u0000\u011a/\u0001" - + "\u0000\u0000\u0000\u011b\u011c\u0005c\u0000\u0000\u011c\u011d\u0005a\u0000" - + "\u0000\u011d\u011e\u0005t\u0000\u0000\u011e\u011f\u0005c\u0000\u0000\u011f" - + "\u0120\u0005h\u0000\u0000\u01201\u0001\u0000\u0000\u0000\u0121\u0122\u0005" - + "t\u0000\u0000\u0122\u0123\u0005h\u0000\u0000\u0123\u0124\u0005r\u0000" - + "\u0000\u0124\u0125\u0005o\u0000\u0000\u0125\u0126\u0005w\u0000\u0000\u0126" - + "3\u0001\u0000\u0000\u0000\u0127\u0128\u0005t\u0000\u0000\u0128\u0129\u0005" - + "h\u0000\u0000\u0129\u012a\u0005i\u0000\u0000\u012a\u012b\u0005s\u0000" - + "\u0000\u012b5\u0001\u0000\u0000\u0000\u012c\u012d\u0005i\u0000\u0000\u012d" - + 
"\u012e\u0005n\u0000\u0000\u012e\u012f\u0005s\u0000\u0000\u012f\u0130\u0005" - + "t\u0000\u0000\u0130\u0131\u0005a\u0000\u0000\u0131\u0132\u0005n\u0000" - + "\u0000\u0132\u0133\u0005c\u0000\u0000\u0133\u0134\u0005e\u0000\u0000\u0134" - + "\u0135\u0005o\u0000\u0000\u0135\u0136\u0005f\u0000\u0000\u01367\u0001" - + "\u0000\u0000\u0000\u0137\u0138\u0005!\u0000\u0000\u01389\u0001\u0000\u0000" - + "\u0000\u0139\u013a\u0005~\u0000\u0000\u013a;\u0001\u0000\u0000\u0000\u013b" - + "\u013c\u0005*\u0000\u0000\u013c=\u0001\u0000\u0000\u0000\u013d\u013e\u0005" - + "/\u0000\u0000\u013e\u013f\u0004\u001e\u0000\u0000\u013f?\u0001\u0000\u0000" - + "\u0000\u0140\u0141\u0005%\u0000\u0000\u0141A\u0001\u0000\u0000\u0000\u0142" - + "\u0143\u0005+\u0000\u0000\u0143C\u0001\u0000\u0000\u0000\u0144\u0145\u0005" - + "-\u0000\u0000\u0145E\u0001\u0000\u0000\u0000\u0146\u0147\u0005<\u0000" - + "\u0000\u0147\u0148\u0005<\u0000\u0000\u0148G\u0001\u0000\u0000\u0000\u0149" - + "\u014a\u0005>\u0000\u0000\u014a\u014b\u0005>\u0000\u0000\u014bI\u0001" - + "\u0000\u0000\u0000\u014c\u014d\u0005>\u0000\u0000\u014d\u014e\u0005>\u0000" - + "\u0000\u014e\u014f\u0005>\u0000\u0000\u014fK\u0001\u0000\u0000\u0000\u0150" - + "\u0151\u0005<\u0000\u0000\u0151M\u0001\u0000\u0000\u0000\u0152\u0153\u0005" - + "<\u0000\u0000\u0153\u0154\u0005=\u0000\u0000\u0154O\u0001\u0000\u0000" - + "\u0000\u0155\u0156\u0005>\u0000\u0000\u0156Q\u0001\u0000\u0000\u0000\u0157" - + "\u0158\u0005>\u0000\u0000\u0158\u0159\u0005=\u0000\u0000\u0159S\u0001" - + "\u0000\u0000\u0000\u015a\u015b\u0005=\u0000\u0000\u015b\u015c\u0005=\u0000" - + "\u0000\u015cU\u0001\u0000\u0000\u0000\u015d\u015e\u0005=\u0000\u0000\u015e" - + "\u015f\u0005=\u0000\u0000\u015f\u0160\u0005=\u0000\u0000\u0160W\u0001" - + "\u0000\u0000\u0000\u0161\u0162\u0005!\u0000\u0000\u0162\u0163\u0005=\u0000" - + "\u0000\u0163Y\u0001\u0000\u0000\u0000\u0164\u0165\u0005!\u0000\u0000\u0165" - + "\u0166\u0005=\u0000\u0000\u0166\u0167\u0005=\u0000\u0000\u0167[\u0001" - + "\u0000\u0000\u0000\u0168\u0169\u0005&\u0000\u0000\u0169]\u0001\u0000\u0000" - + "\u0000\u016a\u016b\u0005^\u0000\u0000\u016b_\u0001\u0000\u0000\u0000\u016c" - + "\u016d\u0005|\u0000\u0000\u016da\u0001\u0000\u0000\u0000\u016e\u016f\u0005" - + "&\u0000\u0000\u016f\u0170\u0005&\u0000\u0000\u0170c\u0001\u0000\u0000" - + "\u0000\u0171\u0172\u0005|\u0000\u0000\u0172\u0173\u0005|\u0000\u0000\u0173" - + "e\u0001\u0000\u0000\u0000\u0174\u0175\u0005?\u0000\u0000\u0175g\u0001" - + "\u0000\u0000\u0000\u0176\u0177\u0005:\u0000\u0000\u0177i\u0001\u0000\u0000" - + "\u0000\u0178\u0179\u0005?\u0000\u0000\u0179\u017a\u0005:\u0000\u0000\u017a" - + "k\u0001\u0000\u0000\u0000\u017b\u017c\u0005:\u0000\u0000\u017c\u017d\u0005" - + ":\u0000\u0000\u017dm\u0001\u0000\u0000\u0000\u017e\u017f\u0005-\u0000" - + "\u0000\u017f\u0180\u0005>\u0000\u0000\u0180o\u0001\u0000\u0000\u0000\u0181" - + "\u0182\u0005=\u0000\u0000\u0182\u0183\u0005~\u0000\u0000\u0183q\u0001" - + "\u0000\u0000\u0000\u0184\u0185\u0005=\u0000\u0000\u0185\u0186\u0005=\u0000" - + "\u0000\u0186\u0187\u0005~\u0000\u0000\u0187s\u0001\u0000\u0000\u0000\u0188" - + "\u0189\u0005+\u0000\u0000\u0189\u018a\u0005+\u0000\u0000\u018au\u0001" - + "\u0000\u0000\u0000\u018b\u018c\u0005-\u0000\u0000\u018c\u018d\u0005-\u0000" - + "\u0000\u018dw\u0001\u0000\u0000\u0000\u018e\u018f\u0005=\u0000\u0000\u018f" - + "y\u0001\u0000\u0000\u0000\u0190\u0191\u0005+\u0000\u0000\u0191\u0192\u0005" - + "=\u0000\u0000\u0192{\u0001\u0000\u0000\u0000\u0193\u0194\u0005-\u0000" - + 
"\u0000\u0194\u0195\u0005=\u0000\u0000\u0195}\u0001\u0000\u0000\u0000\u0196" - + "\u0197\u0005*\u0000\u0000\u0197\u0198\u0005=\u0000\u0000\u0198\u007f\u0001" - + "\u0000\u0000\u0000\u0199\u019a\u0005/\u0000\u0000\u019a\u019b\u0005=\u0000" - + "\u0000\u019b\u0081\u0001\u0000\u0000\u0000\u019c\u019d\u0005%\u0000\u0000" - + "\u019d\u019e\u0005=\u0000\u0000\u019e\u0083\u0001\u0000\u0000\u0000\u019f" - + "\u01a0\u0005&\u0000\u0000\u01a0\u01a1\u0005=\u0000\u0000\u01a1\u0085\u0001" - + "\u0000\u0000\u0000\u01a2\u01a3\u0005^\u0000\u0000\u01a3\u01a4\u0005=\u0000" - + "\u0000\u01a4\u0087\u0001\u0000\u0000\u0000\u01a5\u01a6\u0005|\u0000\u0000" - + "\u01a6\u01a7\u0005=\u0000\u0000\u01a7\u0089\u0001\u0000\u0000\u0000\u01a8" - + "\u01a9\u0005<\u0000\u0000\u01a9\u01aa\u0005<\u0000\u0000\u01aa\u01ab\u0005" - + "=\u0000\u0000\u01ab\u008b\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005>\u0000" - + "\u0000\u01ad\u01ae\u0005>\u0000\u0000\u01ae\u01af\u0005=\u0000\u0000\u01af" - + "\u008d\u0001\u0000\u0000\u0000\u01b0\u01b1\u0005>\u0000\u0000\u01b1\u01b2" - + "\u0005>\u0000\u0000\u01b2\u01b3\u0005>\u0000\u0000\u01b3\u01b4\u0005=" - + "\u0000\u0000\u01b4\u008f\u0001\u0000\u0000\u0000\u01b5\u01b7\u00050\u0000" - + "\u0000\u01b6\u01b8\u0007\u0002\u0000\u0000\u01b7\u01b6\u0001\u0000\u0000" - + "\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9\u01b7\u0001\u0000\u0000" - + "\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bc\u0001\u0000\u0000" - + "\u0000\u01bb\u01bd\u0007\u0003\u0000\u0000\u01bc\u01bb\u0001\u0000\u0000" - + "\u0000\u01bc\u01bd\u0001\u0000\u0000\u0000\u01bd\u0091\u0001\u0000\u0000" - + "\u0000\u01be\u01bf\u00050\u0000\u0000\u01bf\u01c1\u0007\u0004\u0000\u0000" - + "\u01c0\u01c2\u0007\u0005\u0000\u0000\u01c1\u01c0\u0001\u0000\u0000\u0000" - + "\u01c2\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000" - + "\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c6\u0001\u0000\u0000\u0000" - + "\u01c5\u01c7\u0007\u0003\u0000\u0000\u01c6\u01c5\u0001\u0000\u0000\u0000" - + "\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u0093\u0001\u0000\u0000\u0000" - + "\u01c8\u01d1\u00050\u0000\u0000\u01c9\u01cd\u0007\u0006\u0000\u0000\u01ca" - + "\u01cc\u0007\u0007\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cc" - + "\u01cf\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01cd" - + "\u01ce\u0001\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000\u0000\u0000\u01cf" - + "\u01cd\u0001\u0000\u0000\u0000\u01d0\u01c8\u0001\u0000\u0000\u0000\u01d0" - + "\u01c9\u0001\u0000\u0000\u0000\u01d1\u01d3\u0001\u0000\u0000\u0000\u01d2" - + "\u01d4\u0007\b\u0000\u0000\u01d3\u01d2\u0001\u0000\u0000\u0000\u01d3\u01d4" - + "\u0001\u0000\u0000\u0000\u01d4\u0095\u0001\u0000\u0000\u0000\u01d5\u01de" - + "\u00050\u0000\u0000\u01d6\u01da\u0007\u0006\u0000\u0000\u01d7\u01d9\u0007" - + "\u0007\u0000\u0000\u01d8\u01d7\u0001\u0000\u0000\u0000\u01d9\u01dc\u0001" - + "\u0000\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01da\u01db\u0001" - + "\u0000\u0000\u0000\u01db\u01de\u0001\u0000\u0000\u0000\u01dc\u01da\u0001" - + "\u0000\u0000\u0000\u01dd\u01d5\u0001\u0000\u0000\u0000\u01dd\u01d6\u0001" - + "\u0000\u0000\u0000\u01de\u01e5\u0001\u0000\u0000\u0000\u01df\u01e1\u0003" - + "\u0012\b\u0000\u01e0\u01e2\u0007\u0007\u0000\u0000\u01e1\u01e0\u0001\u0000" - + "\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000" - + "\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e6\u0001\u0000" - + "\u0000\u0000\u01e5\u01df\u0001\u0000\u0000\u0000\u01e5\u01e6\u0001\u0000" - + 
"\u0000\u0000\u01e6\u01f0\u0001\u0000\u0000\u0000\u01e7\u01e9\u0007\t\u0000" - + "\u0000\u01e8\u01ea\u0007\n\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000" - + "\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ec\u0001\u0000\u0000\u0000" - + "\u01eb\u01ed\u0007\u0007\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000\u0000" - + "\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ec\u0001\u0000\u0000\u0000" - + "\u01ee\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f1\u0001\u0000\u0000\u0000" - + "\u01f0\u01e7\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000" - + "\u01f1\u01f3\u0001\u0000\u0000\u0000\u01f2\u01f4\u0007\u000b\u0000\u0000" - + "\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000" - + "\u01f4\u0097\u0001\u0000\u0000\u0000\u01f5\u01fd\u0005\"\u0000\u0000\u01f6" - + "\u01f7\u0005\\\u0000\u0000\u01f7\u01fc\u0005\"\u0000\u0000\u01f8\u01f9" - + "\u0005\\\u0000\u0000\u01f9\u01fc\u0005\\\u0000\u0000\u01fa\u01fc\b\f\u0000" - + "\u0000\u01fb\u01f6\u0001\u0000\u0000\u0000\u01fb\u01f8\u0001\u0000\u0000" - + "\u0000\u01fb\u01fa\u0001\u0000\u0000\u0000\u01fc\u01ff\u0001\u0000\u0000" - + "\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000" - + "\u0000\u01fe\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000" - + "\u0000\u0200\u020e\u0005\"\u0000\u0000\u0201\u0209\u0005\'\u0000\u0000" - + "\u0202\u0203\u0005\\\u0000\u0000\u0203\u0208\u0005\'\u0000\u0000\u0204" - + "\u0205\u0005\\\u0000\u0000\u0205\u0208\u0005\\\u0000\u0000\u0206\u0208" - + "\b\r\u0000\u0000\u0207\u0202\u0001\u0000\u0000\u0000\u0207\u0204\u0001" - + "\u0000\u0000\u0000\u0207\u0206\u0001\u0000\u0000\u0000\u0208\u020b\u0001" - + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u0209\u0207\u0001" - + "\u0000\u0000\u0000\u020a\u020c\u0001\u0000\u0000\u0000\u020b\u0209\u0001" - + "\u0000\u0000\u0000\u020c\u020e\u0005\'\u0000\u0000\u020d\u01f5\u0001\u0000" - + "\u0000\u0000\u020d\u0201\u0001\u0000\u0000\u0000\u020e\u0099\u0001\u0000" - + "\u0000\u0000\u020f\u0213\u0005/\u0000\u0000\u0210\u0211\u0005\\\u0000" - + "\u0000\u0211\u0214\b\u000e\u0000\u0000\u0212\u0214\b\u000f\u0000\u0000" - + "\u0213\u0210\u0001\u0000\u0000\u0000\u0213\u0212\u0001\u0000\u0000\u0000" - + "\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000" - + "\u0215\u0213\u0001\u0000\u0000\u0000\u0216\u0217\u0001\u0000\u0000\u0000" - + "\u0217\u021b\u0005/\u0000\u0000\u0218\u021a\u0007\u0010\u0000\u0000\u0219" - + "\u0218\u0001\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b" - + "\u0219\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c" - + "\u021e\u0001\u0000\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e" - + "\u021f\u0004L\u0001\u0000\u021f\u009b\u0001\u0000\u0000\u0000\u0220\u0221" - + "\u0005t\u0000\u0000\u0221\u0222\u0005r\u0000\u0000\u0222\u0223\u0005u" - + "\u0000\u0000\u0223\u0224\u0005e\u0000\u0000\u0224\u009d\u0001\u0000\u0000" - + "\u0000\u0225\u0226\u0005f\u0000\u0000\u0226\u0227\u0005a\u0000\u0000\u0227" - + "\u0228\u0005l\u0000\u0000\u0228\u0229\u0005s\u0000\u0000\u0229\u022a\u0005" - + "e\u0000\u0000\u022a\u009f\u0001\u0000\u0000\u0000\u022b\u022c\u0005n\u0000" - + "\u0000\u022c\u022d\u0005u\u0000\u0000\u022d\u022e\u0005l\u0000\u0000\u022e" - + "\u022f\u0005l\u0000\u0000\u022f\u00a1\u0001\u0000\u0000\u0000\u0230\u0231" - + "\u0005b\u0000\u0000\u0231\u0232\u0005o\u0000\u0000\u0232\u0233\u0005o" - + "\u0000\u0000\u0233\u0234\u0005l\u0000\u0000\u0234\u0235\u0005e\u0000\u0000" - + 
"\u0235\u0236\u0005a\u0000\u0000\u0236\u0257\u0005n\u0000\u0000\u0237\u0238" - + "\u0005b\u0000\u0000\u0238\u0239\u0005y\u0000\u0000\u0239\u023a\u0005t" - + "\u0000\u0000\u023a\u0257\u0005e\u0000\u0000\u023b\u023c\u0005s\u0000\u0000" - + "\u023c\u023d\u0005h\u0000\u0000\u023d\u023e\u0005o\u0000\u0000\u023e\u023f" - + "\u0005r\u0000\u0000\u023f\u0257\u0005t\u0000\u0000\u0240\u0241\u0005c" - + "\u0000\u0000\u0241\u0242\u0005h\u0000\u0000\u0242\u0243\u0005a\u0000\u0000" - + "\u0243\u0257\u0005r\u0000\u0000\u0244\u0245\u0005i\u0000\u0000\u0245\u0246" - + "\u0005n\u0000\u0000\u0246\u0257\u0005t\u0000\u0000\u0247\u0248\u0005l" - + "\u0000\u0000\u0248\u0249\u0005o\u0000\u0000\u0249\u024a\u0005n\u0000\u0000" - + "\u024a\u0257\u0005g\u0000\u0000\u024b\u024c\u0005f\u0000\u0000\u024c\u024d" - + "\u0005l\u0000\u0000\u024d\u024e\u0005o\u0000\u0000\u024e\u024f\u0005a" - + "\u0000\u0000\u024f\u0257\u0005t\u0000\u0000\u0250\u0251\u0005d\u0000\u0000" - + "\u0251\u0252\u0005o\u0000\u0000\u0252\u0253\u0005u\u0000\u0000\u0253\u0254" - + "\u0005b\u0000\u0000\u0254\u0255\u0005l\u0000\u0000\u0255\u0257\u0005e" - + "\u0000\u0000\u0256\u0230\u0001\u0000\u0000\u0000\u0256\u0237\u0001\u0000" - + "\u0000\u0000\u0256\u023b\u0001\u0000\u0000\u0000\u0256\u0240\u0001\u0000" - + "\u0000\u0000\u0256\u0244\u0001\u0000\u0000\u0000\u0256\u0247\u0001\u0000" - + "\u0000\u0000\u0256\u024b\u0001\u0000\u0000\u0000\u0256\u0250\u0001\u0000" - + "\u0000\u0000\u0257\u00a3\u0001\u0000\u0000\u0000\u0258\u0259\u0005d\u0000" - + "\u0000\u0259\u025a\u0005e\u0000\u0000\u025a\u025b\u0005f\u0000\u0000\u025b" - + "\u00a5\u0001\u0000\u0000\u0000\u025c\u0260\u0007\u0011\u0000\u0000\u025d" - + "\u025f\u0007\u0012\u0000\u0000\u025e\u025d\u0001\u0000\u0000\u0000\u025f" - + "\u0262\u0001\u0000\u0000\u0000\u0260\u025e\u0001\u0000\u0000\u0000\u0260" - + "\u0261\u0001\u0000\u0000\u0000\u0261\u00a7\u0001\u0000\u0000\u0000\u0262" - + "\u0260\u0001\u0000\u0000\u0000\u0263\u026c\u00050\u0000\u0000\u0264\u0268" - + "\u0007\u0006\u0000\u0000\u0265\u0267\u0007\u0007\u0000\u0000\u0266\u0265" - + "\u0001\u0000\u0000\u0000\u0267\u026a\u0001\u0000\u0000\u0000\u0268\u0266" - + "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026c" - + "\u0001\u0000\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026b\u0263" - + "\u0001\u0000\u0000\u0000\u026b\u0264\u0001\u0000\u0000\u0000\u026c\u026d" - + "\u0001\u0000\u0000\u0000\u026d\u026e\u0006S\u0002\u0000\u026e\u00a9\u0001" - + "\u0000\u0000\u0000\u026f\u0273\u0007\u0011\u0000\u0000\u0270\u0272\u0007" - + "\u0012\u0000\u0000\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001" - + "\u0000\u0000\u0000\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001" - + "\u0000\u0000\u0000\u0274\u0276\u0001\u0000\u0000\u0000\u0275\u0273\u0001" - + "\u0000\u0000\u0000\u0276\u0277\u0006T\u0002\u0000\u0277\u00ab\u0001\u0000" - + "\u0000\u0000\"\u0000\u0001\u00af\u00b9\u00c3\u00c8\u01b9\u01bc\u01c3\u01c6" - + "\u01cd\u01d0\u01d3\u01da\u01dd\u01e3\u01e5\u01e9\u01ee\u01f0\u01f3\u01fb" - + "\u01fd\u0207\u0209\u020d\u0213\u0215\u021b\u0256\u0260\u0268\u026b\u0273" - + "\u0003\u0006\u0000\u0000\u0002\u0001\u0000\u0002\u0000\u0000"; + + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0003" + + "P\u0256\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0005R\u025e\bR\n" + + "R\fR\u0261\tR\u0001S\u0001S\u0001S\u0005S\u0266\bS\nS\fS\u0269\tS\u0003" + + "S\u026b\bS\u0001S\u0001S\u0001T\u0001T\u0005T\u0271\bT\nT\fT\u0274\tT" + + "\u0001T\u0001T\u0004\u00c2\u01fc\u0208\u0214\u0000U\u0002\u0001\u0004" + + 
"\u0002\u0006\u0003\b\u0004\n\u0005\f\u0006\u000e\u0007\u0010\b\u0012\t" + + "\u0014\n\u0016\u000b\u0018\f\u001a\r\u001c\u000e\u001e\u000f \u0010\"" + + "\u0011$\u0012&\u0013(\u0014*\u0015,\u0016.\u00170\u00182\u00194\u001a" + + "6\u001b8\u001c:\u001d<\u001e>\u001f@ B!D\"F#H$J%L&N\'P(R)T*V+X,Z-\\.^" + + "/`0b1d2f3h4j5l6n7p8r9t:v;x~?\u0080@\u0082A\u0084B\u0086C\u0088D\u008a" + + "E\u008cF\u008eG\u0090H\u0092I\u0094J\u0096K\u0098L\u009aM\u009cN\u009e" + + "O\u00a0P\u00a2Q\u00a4R\u00a6S\u00a8T\u00aaU\u0002\u0000\u0001\u0013\u0003" + + "\u0000\t\n\r\r \u0002\u0000\n\n\r\r\u0001\u000007\u0002\u0000LLll\u0002" + + "\u0000XXxx\u0003\u000009AFaf\u0001\u000019\u0001\u000009\u0006\u0000D" + + "DFFLLddffll\u0002\u0000EEee\u0002\u0000++--\u0004\u0000DDFFddff\u0002" + + "\u0000\"\"\\\\\u0002\u0000\'\'\\\\\u0001\u0000\n\n\u0002\u0000\n\n//\u0007" + + "\u0000UUcciilmssuuxx\u0003\u0000AZ__az\u0004\u000009AZ__az\u029d\u0000" + + "\u0002\u0001\u0000\u0000\u0000\u0000\u0004\u0001\u0000\u0000\u0000\u0000" + + "\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000\u0000\u0000\n" + + "\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001" + + "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001" + + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001" + + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001" + + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001" + + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000" + + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000" + + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000," + + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000" + + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000" + + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:" + + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000" + + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000" + + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H" + + "\u0001\u0000\u0000\u0000\u0000J\u0001\u0000\u0000\u0000\u0000L\u0001\u0000" + + "\u0000\u0000\u0000N\u0001\u0000\u0000\u0000\u0000P\u0001\u0000\u0000\u0000" + + "\u0000R\u0001\u0000\u0000\u0000\u0000T\u0001\u0000\u0000\u0000\u0000V" + + "\u0001\u0000\u0000\u0000\u0000X\u0001\u0000\u0000\u0000\u0000Z\u0001\u0000" + + "\u0000\u0000\u0000\\\u0001\u0000\u0000\u0000\u0000^\u0001\u0000\u0000" + + "\u0000\u0000`\u0001\u0000\u0000\u0000\u0000b\u0001\u0000\u0000\u0000\u0000" + + "d\u0001\u0000\u0000\u0000\u0000f\u0001\u0000\u0000\u0000\u0000h\u0001" + + "\u0000\u0000\u0000\u0000j\u0001\u0000\u0000\u0000\u0000l\u0001\u0000\u0000" + + "\u0000\u0000n\u0001\u0000\u0000\u0000\u0000p\u0001\u0000\u0000\u0000\u0000" + + "r\u0001\u0000\u0000\u0000\u0000t\u0001\u0000\u0000\u0000\u0000v\u0001" + + "\u0000\u0000\u0000\u0000x\u0001\u0000\u0000\u0000\u0000z\u0001\u0000\u0000" + + "\u0000\u0000|\u0001\u0000\u0000\u0000\u0000~\u0001\u0000\u0000\u0000\u0000" + + "\u0080\u0001\u0000\u0000\u0000\u0000\u0082\u0001\u0000\u0000\u0000\u0000" + + "\u0084\u0001\u0000\u0000\u0000\u0000\u0086\u0001\u0000\u0000\u0000\u0000" + + "\u0088\u0001\u0000\u0000\u0000\u0000\u008a\u0001\u0000\u0000\u0000\u0000" + + "\u008c\u0001\u0000\u0000\u0000\u0000\u008e\u0001\u0000\u0000\u0000\u0000" + + "\u0090\u0001\u0000\u0000\u0000\u0000\u0092\u0001\u0000\u0000\u0000\u0000" + + 
"\u0094\u0001\u0000\u0000\u0000\u0000\u0096\u0001\u0000\u0000\u0000\u0000" + + "\u0098\u0001\u0000\u0000\u0000\u0000\u009a\u0001\u0000\u0000\u0000\u0000" + + "\u009c\u0001\u0000\u0000\u0000\u0000\u009e\u0001\u0000\u0000\u0000\u0000" + + "\u00a0\u0001\u0000\u0000\u0000\u0000\u00a2\u0001\u0000\u0000\u0000\u0000" + + "\u00a4\u0001\u0000\u0000\u0000\u0000\u00a6\u0001\u0000\u0000\u0000\u0001" + + "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0002" + + "\u00ad\u0001\u0000\u0000\u0000\u0004\u00c7\u0001\u0000\u0000\u0000\u0006" + + "\u00cb\u0001\u0000\u0000\u0000\b\u00cd\u0001\u0000\u0000\u0000\n\u00cf" + + "\u0001\u0000\u0000\u0000\f\u00d1\u0001\u0000\u0000\u0000\u000e\u00d3\u0001" + + "\u0000\u0000\u0000\u0010\u00d5\u0001\u0000\u0000\u0000\u0012\u00d7\u0001" + + "\u0000\u0000\u0000\u0014\u00db\u0001\u0000\u0000\u0000\u0016\u00e0\u0001" + + "\u0000\u0000\u0000\u0018\u00e2\u0001\u0000\u0000\u0000\u001a\u00e4\u0001" + + "\u0000\u0000\u0000\u001c\u00e7\u0001\u0000\u0000\u0000\u001e\u00ea\u0001" + + "\u0000\u0000\u0000 \u00ef\u0001\u0000\u0000\u0000\"\u00f5\u0001\u0000" + + "\u0000\u0000$\u00f8\u0001\u0000\u0000\u0000&\u00fc\u0001\u0000\u0000\u0000" + + "(\u0105\u0001\u0000\u0000\u0000*\u010b\u0001\u0000\u0000\u0000,\u0112" + + "\u0001\u0000\u0000\u0000.\u0116\u0001\u0000\u0000\u00000\u011a\u0001\u0000" + + "\u0000\u00002\u0120\u0001\u0000\u0000\u00004\u0126\u0001\u0000\u0000\u0000" + + "6\u012b\u0001\u0000\u0000\u00008\u0136\u0001\u0000\u0000\u0000:\u0138" + + "\u0001\u0000\u0000\u0000<\u013a\u0001\u0000\u0000\u0000>\u013c\u0001\u0000" + + "\u0000\u0000@\u013f\u0001\u0000\u0000\u0000B\u0141\u0001\u0000\u0000\u0000" + + "D\u0143\u0001\u0000\u0000\u0000F\u0145\u0001\u0000\u0000\u0000H\u0148" + + "\u0001\u0000\u0000\u0000J\u014b\u0001\u0000\u0000\u0000L\u014f\u0001\u0000" + + "\u0000\u0000N\u0151\u0001\u0000\u0000\u0000P\u0154\u0001\u0000\u0000\u0000" + + "R\u0156\u0001\u0000\u0000\u0000T\u0159\u0001\u0000\u0000\u0000V\u015c" + + "\u0001\u0000\u0000\u0000X\u0160\u0001\u0000\u0000\u0000Z\u0163\u0001\u0000" + + "\u0000\u0000\\\u0167\u0001\u0000\u0000\u0000^\u0169\u0001\u0000\u0000" + + "\u0000`\u016b\u0001\u0000\u0000\u0000b\u016d\u0001\u0000\u0000\u0000d" + + "\u0170\u0001\u0000\u0000\u0000f\u0173\u0001\u0000\u0000\u0000h\u0175\u0001" + + "\u0000\u0000\u0000j\u0177\u0001\u0000\u0000\u0000l\u017a\u0001\u0000\u0000" + + "\u0000n\u017d\u0001\u0000\u0000\u0000p\u0180\u0001\u0000\u0000\u0000r" + + "\u0183\u0001\u0000\u0000\u0000t\u0187\u0001\u0000\u0000\u0000v\u018a\u0001" + + "\u0000\u0000\u0000x\u018d\u0001\u0000\u0000\u0000z\u018f\u0001\u0000\u0000" + + "\u0000|\u0192\u0001\u0000\u0000\u0000~\u0195\u0001\u0000\u0000\u0000\u0080" + + "\u0198\u0001\u0000\u0000\u0000\u0082\u019b\u0001\u0000\u0000\u0000\u0084" + + "\u019e\u0001\u0000\u0000\u0000\u0086\u01a1\u0001\u0000\u0000\u0000\u0088" + + "\u01a4\u0001\u0000\u0000\u0000\u008a\u01a7\u0001\u0000\u0000\u0000\u008c" + + "\u01ab\u0001\u0000\u0000\u0000\u008e\u01af\u0001\u0000\u0000\u0000\u0090" + + "\u01b4\u0001\u0000\u0000\u0000\u0092\u01bd\u0001\u0000\u0000\u0000\u0094" + + "\u01cf\u0001\u0000\u0000\u0000\u0096\u01dc\u0001\u0000\u0000\u0000\u0098" + + "\u020c\u0001\u0000\u0000\u0000\u009a\u020e\u0001\u0000\u0000\u0000\u009c" + + "\u021f\u0001\u0000\u0000\u0000\u009e\u0224\u0001\u0000\u0000\u0000\u00a0" + + "\u022a\u0001\u0000\u0000\u0000\u00a2\u0255\u0001\u0000\u0000\u0000\u00a4" + + "\u0257\u0001\u0000\u0000\u0000\u00a6\u025b\u0001\u0000\u0000\u0000\u00a8" + + "\u026a\u0001\u0000\u0000\u0000\u00aa\u026e\u0001\u0000\u0000\u0000\u00ac" + 
+ "\u00ae\u0007\u0000\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000\u0000\u00ae" + + "\u00af\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00af" + + "\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1" + + "\u00b2\u0006\u0000\u0000\u0000\u00b2\u0003\u0001\u0000\u0000\u0000\u00b3" + + "\u00b4\u0005/\u0000\u0000\u00b4\u00b5\u0005/\u0000\u0000\u00b5\u00b9\u0001" + + "\u0000\u0000\u0000\u00b6\u00b8\b\u0001\u0000\u0000\u00b7\u00b6\u0001\u0000" + + "\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000" + + "\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00c8\u0001\u0000" + + "\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00bd\u0005/\u0000" + + "\u0000\u00bd\u00be\u0005*\u0000\u0000\u00be\u00c2\u0001\u0000\u0000\u0000" + + "\u00bf\u00c1\t\u0000\u0000\u0000\u00c0\u00bf\u0001\u0000\u0000\u0000\u00c1" + + "\u00c4\u0001\u0000\u0000\u0000\u00c2\u00c3\u0001\u0000\u0000\u0000\u00c2" + + "\u00c0\u0001\u0000\u0000\u0000\u00c3\u00c5\u0001\u0000\u0000\u0000\u00c4" + + "\u00c2\u0001\u0000\u0000\u0000\u00c5\u00c6\u0005*\u0000\u0000\u00c6\u00c8" + + "\u0005/\u0000\u0000\u00c7\u00b3\u0001\u0000\u0000\u0000\u00c7\u00bc\u0001" + + "\u0000\u0000\u0000\u00c8\u00c9\u0001\u0000\u0000\u0000\u00c9\u00ca\u0006" + + "\u0001\u0000\u0000\u00ca\u0005\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005" + + "{\u0000\u0000\u00cc\u0007\u0001\u0000\u0000\u0000\u00cd\u00ce\u0005}\u0000" + + "\u0000\u00ce\t\u0001\u0000\u0000\u0000\u00cf\u00d0\u0005[\u0000\u0000" + + "\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005]\u0000\u0000\u00d2" + + "\r\u0001\u0000\u0000\u0000\u00d3\u00d4\u0005(\u0000\u0000\u00d4\u000f" + + "\u0001\u0000\u0000\u0000\u00d5\u00d6\u0005)\u0000\u0000\u00d6\u0011\u0001" + + "\u0000\u0000\u0000\u00d7\u00d8\u0005.\u0000\u0000\u00d8\u00d9\u0001\u0000" + + "\u0000\u0000\u00d9\u00da\u0006\b\u0001\u0000\u00da\u0013\u0001\u0000\u0000" + + "\u0000\u00db\u00dc\u0005?\u0000\u0000\u00dc\u00dd\u0005.\u0000\u0000\u00dd" + + "\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0006\t\u0001\u0000\u00df\u0015" + + "\u0001\u0000\u0000\u0000\u00e0\u00e1\u0005,\u0000\u0000\u00e1\u0017\u0001" + + "\u0000\u0000\u0000\u00e2\u00e3\u0005;\u0000\u0000\u00e3\u0019\u0001\u0000" + + "\u0000\u0000\u00e4\u00e5\u0005i\u0000\u0000\u00e5\u00e6\u0005f\u0000\u0000" + + "\u00e6\u001b\u0001\u0000\u0000\u0000\u00e7\u00e8\u0005i\u0000\u0000\u00e8" + + "\u00e9\u0005n\u0000\u0000\u00e9\u001d\u0001\u0000\u0000\u0000\u00ea\u00eb" + + "\u0005e\u0000\u0000\u00eb\u00ec\u0005l\u0000\u0000\u00ec\u00ed\u0005s" + + "\u0000\u0000\u00ed\u00ee\u0005e\u0000\u0000\u00ee\u001f\u0001\u0000\u0000" + + "\u0000\u00ef\u00f0\u0005w\u0000\u0000\u00f0\u00f1\u0005h\u0000\u0000\u00f1" + + "\u00f2\u0005i\u0000\u0000\u00f2\u00f3\u0005l\u0000\u0000\u00f3\u00f4\u0005" + + "e\u0000\u0000\u00f4!\u0001\u0000\u0000\u0000\u00f5\u00f6\u0005d\u0000" + + "\u0000\u00f6\u00f7\u0005o\u0000\u0000\u00f7#\u0001\u0000\u0000\u0000\u00f8" + + "\u00f9\u0005f\u0000\u0000\u00f9\u00fa\u0005o\u0000\u0000\u00fa\u00fb\u0005" + + "r\u0000\u0000\u00fb%\u0001\u0000\u0000\u0000\u00fc\u00fd\u0005c\u0000" + + "\u0000\u00fd\u00fe\u0005o\u0000\u0000\u00fe\u00ff\u0005n\u0000\u0000\u00ff" + + "\u0100\u0005t\u0000\u0000\u0100\u0101\u0005i\u0000\u0000\u0101\u0102\u0005" + + "n\u0000\u0000\u0102\u0103\u0005u\u0000\u0000\u0103\u0104\u0005e\u0000" + + "\u0000\u0104\'\u0001\u0000\u0000\u0000\u0105\u0106\u0005b\u0000\u0000" + + "\u0106\u0107\u0005r\u0000\u0000\u0107\u0108\u0005e\u0000\u0000\u0108\u0109" + + 
"\u0005a\u0000\u0000\u0109\u010a\u0005k\u0000\u0000\u010a)\u0001\u0000" + + "\u0000\u0000\u010b\u010c\u0005r\u0000\u0000\u010c\u010d\u0005e\u0000\u0000" + + "\u010d\u010e\u0005t\u0000\u0000\u010e\u010f\u0005u\u0000\u0000\u010f\u0110" + + "\u0005r\u0000\u0000\u0110\u0111\u0005n\u0000\u0000\u0111+\u0001\u0000" + + "\u0000\u0000\u0112\u0113\u0005n\u0000\u0000\u0113\u0114\u0005e\u0000\u0000" + + "\u0114\u0115\u0005w\u0000\u0000\u0115-\u0001\u0000\u0000\u0000\u0116\u0117" + + "\u0005t\u0000\u0000\u0117\u0118\u0005r\u0000\u0000\u0118\u0119\u0005y" + + "\u0000\u0000\u0119/\u0001\u0000\u0000\u0000\u011a\u011b\u0005c\u0000\u0000" + + "\u011b\u011c\u0005a\u0000\u0000\u011c\u011d\u0005t\u0000\u0000\u011d\u011e" + + "\u0005c\u0000\u0000\u011e\u011f\u0005h\u0000\u0000\u011f1\u0001\u0000" + + "\u0000\u0000\u0120\u0121\u0005t\u0000\u0000\u0121\u0122\u0005h\u0000\u0000" + + "\u0122\u0123\u0005r\u0000\u0000\u0123\u0124\u0005o\u0000\u0000\u0124\u0125" + + "\u0005w\u0000\u0000\u01253\u0001\u0000\u0000\u0000\u0126\u0127\u0005t" + + "\u0000\u0000\u0127\u0128\u0005h\u0000\u0000\u0128\u0129\u0005i\u0000\u0000" + + "\u0129\u012a\u0005s\u0000\u0000\u012a5\u0001\u0000\u0000\u0000\u012b\u012c" + + "\u0005i\u0000\u0000\u012c\u012d\u0005n\u0000\u0000\u012d\u012e\u0005s" + + "\u0000\u0000\u012e\u012f\u0005t\u0000\u0000\u012f\u0130\u0005a\u0000\u0000" + + "\u0130\u0131\u0005n\u0000\u0000\u0131\u0132\u0005c\u0000\u0000\u0132\u0133" + + "\u0005e\u0000\u0000\u0133\u0134\u0005o\u0000\u0000\u0134\u0135\u0005f" + + "\u0000\u0000\u01357\u0001\u0000\u0000\u0000\u0136\u0137\u0005!\u0000\u0000" + + "\u01379\u0001\u0000\u0000\u0000\u0138\u0139\u0005~\u0000\u0000\u0139;" + + "\u0001\u0000\u0000\u0000\u013a\u013b\u0005*\u0000\u0000\u013b=\u0001\u0000" + + "\u0000\u0000\u013c\u013d\u0005/\u0000\u0000\u013d\u013e\u0004\u001e\u0000" + + "\u0000\u013e?\u0001\u0000\u0000\u0000\u013f\u0140\u0005%\u0000\u0000\u0140" + + "A\u0001\u0000\u0000\u0000\u0141\u0142\u0005+\u0000\u0000\u0142C\u0001" + + "\u0000\u0000\u0000\u0143\u0144\u0005-\u0000\u0000\u0144E\u0001\u0000\u0000" + + "\u0000\u0145\u0146\u0005<\u0000\u0000\u0146\u0147\u0005<\u0000\u0000\u0147" + + "G\u0001\u0000\u0000\u0000\u0148\u0149\u0005>\u0000\u0000\u0149\u014a\u0005" + + ">\u0000\u0000\u014aI\u0001\u0000\u0000\u0000\u014b\u014c\u0005>\u0000" + + "\u0000\u014c\u014d\u0005>\u0000\u0000\u014d\u014e\u0005>\u0000\u0000\u014e" + + "K\u0001\u0000\u0000\u0000\u014f\u0150\u0005<\u0000\u0000\u0150M\u0001" + + "\u0000\u0000\u0000\u0151\u0152\u0005<\u0000\u0000\u0152\u0153\u0005=\u0000" + + "\u0000\u0153O\u0001\u0000\u0000\u0000\u0154\u0155\u0005>\u0000\u0000\u0155" + + "Q\u0001\u0000\u0000\u0000\u0156\u0157\u0005>\u0000\u0000\u0157\u0158\u0005" + + "=\u0000\u0000\u0158S\u0001\u0000\u0000\u0000\u0159\u015a\u0005=\u0000" + + "\u0000\u015a\u015b\u0005=\u0000\u0000\u015bU\u0001\u0000\u0000\u0000\u015c" + + "\u015d\u0005=\u0000\u0000\u015d\u015e\u0005=\u0000\u0000\u015e\u015f\u0005" + + "=\u0000\u0000\u015fW\u0001\u0000\u0000\u0000\u0160\u0161\u0005!\u0000" + + "\u0000\u0161\u0162\u0005=\u0000\u0000\u0162Y\u0001\u0000\u0000\u0000\u0163" + + "\u0164\u0005!\u0000\u0000\u0164\u0165\u0005=\u0000\u0000\u0165\u0166\u0005" + + "=\u0000\u0000\u0166[\u0001\u0000\u0000\u0000\u0167\u0168\u0005&\u0000" + + "\u0000\u0168]\u0001\u0000\u0000\u0000\u0169\u016a\u0005^\u0000\u0000\u016a" + + "_\u0001\u0000\u0000\u0000\u016b\u016c\u0005|\u0000\u0000\u016ca\u0001" + + "\u0000\u0000\u0000\u016d\u016e\u0005&\u0000\u0000\u016e\u016f\u0005&\u0000" + + 
"\u0000\u016fc\u0001\u0000\u0000\u0000\u0170\u0171\u0005|\u0000\u0000\u0171" + + "\u0172\u0005|\u0000\u0000\u0172e\u0001\u0000\u0000\u0000\u0173\u0174\u0005" + + "?\u0000\u0000\u0174g\u0001\u0000\u0000\u0000\u0175\u0176\u0005:\u0000" + + "\u0000\u0176i\u0001\u0000\u0000\u0000\u0177\u0178\u0005?\u0000\u0000\u0178" + + "\u0179\u0005:\u0000\u0000\u0179k\u0001\u0000\u0000\u0000\u017a\u017b\u0005" + + ":\u0000\u0000\u017b\u017c\u0005:\u0000\u0000\u017cm\u0001\u0000\u0000" + + "\u0000\u017d\u017e\u0005-\u0000\u0000\u017e\u017f\u0005>\u0000\u0000\u017f" + + "o\u0001\u0000\u0000\u0000\u0180\u0181\u0005=\u0000\u0000\u0181\u0182\u0005" + + "~\u0000\u0000\u0182q\u0001\u0000\u0000\u0000\u0183\u0184\u0005=\u0000" + + "\u0000\u0184\u0185\u0005=\u0000\u0000\u0185\u0186\u0005~\u0000\u0000\u0186" + + "s\u0001\u0000\u0000\u0000\u0187\u0188\u0005+\u0000\u0000\u0188\u0189\u0005" + + "+\u0000\u0000\u0189u\u0001\u0000\u0000\u0000\u018a\u018b\u0005-\u0000" + + "\u0000\u018b\u018c\u0005-\u0000\u0000\u018cw\u0001\u0000\u0000\u0000\u018d" + + "\u018e\u0005=\u0000\u0000\u018ey\u0001\u0000\u0000\u0000\u018f\u0190\u0005" + + "+\u0000\u0000\u0190\u0191\u0005=\u0000\u0000\u0191{\u0001\u0000\u0000" + + "\u0000\u0192\u0193\u0005-\u0000\u0000\u0193\u0194\u0005=\u0000\u0000\u0194" + + "}\u0001\u0000\u0000\u0000\u0195\u0196\u0005*\u0000\u0000\u0196\u0197\u0005" + + "=\u0000\u0000\u0197\u007f\u0001\u0000\u0000\u0000\u0198\u0199\u0005/\u0000" + + "\u0000\u0199\u019a\u0005=\u0000\u0000\u019a\u0081\u0001\u0000\u0000\u0000" + + "\u019b\u019c\u0005%\u0000\u0000\u019c\u019d\u0005=\u0000\u0000\u019d\u0083" + + "\u0001\u0000\u0000\u0000\u019e\u019f\u0005&\u0000\u0000\u019f\u01a0\u0005" + + "=\u0000\u0000\u01a0\u0085\u0001\u0000\u0000\u0000\u01a1\u01a2\u0005^\u0000" + + "\u0000\u01a2\u01a3\u0005=\u0000\u0000\u01a3\u0087\u0001\u0000\u0000\u0000" + + "\u01a4\u01a5\u0005|\u0000\u0000\u01a5\u01a6\u0005=\u0000\u0000\u01a6\u0089" + + "\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005<\u0000\u0000\u01a8\u01a9\u0005" + + "<\u0000\u0000\u01a9\u01aa\u0005=\u0000\u0000\u01aa\u008b\u0001\u0000\u0000" + + "\u0000\u01ab\u01ac\u0005>\u0000\u0000\u01ac\u01ad\u0005>\u0000\u0000\u01ad" + + "\u01ae\u0005=\u0000\u0000\u01ae\u008d\u0001\u0000\u0000\u0000\u01af\u01b0" + + "\u0005>\u0000\u0000\u01b0\u01b1\u0005>\u0000\u0000\u01b1\u01b2\u0005>" + + "\u0000\u0000\u01b2\u01b3\u0005=\u0000\u0000\u01b3\u008f\u0001\u0000\u0000" + + "\u0000\u01b4\u01b6\u00050\u0000\u0000\u01b5\u01b7\u0007\u0002\u0000\u0000" + + "\u01b6\u01b5\u0001\u0000\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000" + + "\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000" + + "\u01b9\u01bb\u0001\u0000\u0000\u0000\u01ba\u01bc\u0007\u0003\u0000\u0000" + + "\u01bb\u01ba\u0001\u0000\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000" + + "\u01bc\u0091\u0001\u0000\u0000\u0000\u01bd\u01be\u00050\u0000\u0000\u01be" + + "\u01c0\u0007\u0004\u0000\u0000\u01bf\u01c1\u0007\u0005\u0000\u0000\u01c0" + + "\u01bf\u0001\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2" + + "\u01c0\u0001\u0000\u0000\u0000\u01c2\u01c3\u0001\u0000\u0000\u0000\u01c3" + + "\u01c5\u0001\u0000\u0000\u0000\u01c4\u01c6\u0007\u0003\u0000\u0000\u01c5" + + "\u01c4\u0001\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6" + + "\u0093\u0001\u0000\u0000\u0000\u01c7\u01d0\u00050\u0000\u0000\u01c8\u01cc" + + "\u0007\u0006\u0000\u0000\u01c9\u01cb\u0007\u0007\u0000\u0000\u01ca\u01c9" + + "\u0001\u0000\u0000\u0000\u01cb\u01ce\u0001\u0000\u0000\u0000\u01cc\u01ca" + + 
"\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd\u01d0" + + "\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001\u0000\u0000\u0000\u01cf\u01c7" + + "\u0001\u0000\u0000\u0000\u01cf\u01c8\u0001\u0000\u0000\u0000\u01d0\u01d2" + + "\u0001\u0000\u0000\u0000\u01d1\u01d3\u0007\b\u0000\u0000\u01d2\u01d1\u0001" + + "\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u0095\u0001" + + "\u0000\u0000\u0000\u01d4\u01dd\u00050\u0000\u0000\u01d5\u01d9\u0007\u0006" + + "\u0000\u0000\u01d6\u01d8\u0007\u0007\u0000\u0000\u01d7\u01d6\u0001\u0000" + + "\u0000\u0000\u01d8\u01db\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000" + + "\u0000\u0000\u01d9\u01da\u0001\u0000\u0000\u0000\u01da\u01dd\u0001\u0000" + + "\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc\u01d4\u0001\u0000" + + "\u0000\u0000\u01dc\u01d5\u0001\u0000\u0000\u0000\u01dd\u01e4\u0001\u0000" + + "\u0000\u0000\u01de\u01e0\u0003\u0012\b\u0000\u01df\u01e1\u0007\u0007\u0000" + + "\u0000\u01e0\u01df\u0001\u0000\u0000\u0000\u01e1\u01e2\u0001\u0000\u0000" + + "\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000" + + "\u0000\u01e3\u01e5\u0001\u0000\u0000\u0000\u01e4\u01de\u0001\u0000\u0000" + + "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01ef\u0001\u0000\u0000" + + "\u0000\u01e6\u01e8\u0007\t\u0000\u0000\u01e7\u01e9\u0007\n\u0000\u0000" + + "\u01e8\u01e7\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000" + + "\u01e9\u01eb\u0001\u0000\u0000\u0000\u01ea\u01ec\u0007\u0007\u0000\u0000" + + "\u01eb\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000" + + "\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000" + + "\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01e6\u0001\u0000\u0000\u0000" + + "\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f2\u0001\u0000\u0000\u0000" + + "\u01f1\u01f3\u0007\u000b\u0000\u0000\u01f2\u01f1\u0001\u0000\u0000\u0000" + + "\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u0097\u0001\u0000\u0000\u0000" + + "\u01f4\u01fc\u0005\"\u0000\u0000\u01f5\u01f6\u0005\\\u0000\u0000\u01f6" + + "\u01fb\u0005\"\u0000\u0000\u01f7\u01f8\u0005\\\u0000\u0000\u01f8\u01fb" + + "\u0005\\\u0000\u0000\u01f9\u01fb\b\f\u0000\u0000\u01fa\u01f5\u0001\u0000" + + "\u0000\u0000\u01fa\u01f7\u0001\u0000\u0000\u0000\u01fa\u01f9\u0001\u0000" + + "\u0000\u0000\u01fb\u01fe\u0001\u0000\u0000\u0000\u01fc\u01fd\u0001\u0000" + + "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0001\u0000" + + "\u0000\u0000\u01fe\u01fc\u0001\u0000\u0000\u0000\u01ff\u020d\u0005\"\u0000" + + "\u0000\u0200\u0208\u0005\'\u0000\u0000\u0201\u0202\u0005\\\u0000\u0000" + + "\u0202\u0207\u0005\'\u0000\u0000\u0203\u0204\u0005\\\u0000\u0000\u0204" + + "\u0207\u0005\\\u0000\u0000\u0205\u0207\b\r\u0000\u0000\u0206\u0201\u0001" + + "\u0000\u0000\u0000\u0206\u0203\u0001\u0000\u0000\u0000\u0206\u0205\u0001" + + "\u0000\u0000\u0000\u0207\u020a\u0001\u0000\u0000\u0000\u0208\u0209\u0001" + + "\u0000\u0000\u0000\u0208\u0206\u0001\u0000\u0000\u0000\u0209\u020b\u0001" + + "\u0000\u0000\u0000\u020a\u0208\u0001\u0000\u0000\u0000\u020b\u020d\u0005" + + "\'\u0000\u0000\u020c\u01f4\u0001\u0000\u0000\u0000\u020c\u0200\u0001\u0000" + + "\u0000\u0000\u020d\u0099\u0001\u0000\u0000\u0000\u020e\u0212\u0005/\u0000" + + "\u0000\u020f\u0210\u0005\\\u0000\u0000\u0210\u0213\b\u000e\u0000\u0000" + + "\u0211\u0213\b\u000f\u0000\u0000\u0212\u020f\u0001\u0000\u0000\u0000\u0212" + + "\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000\u0214" + + "\u0215\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215" + + 
"\u0216\u0001\u0000\u0000\u0000\u0216\u021a\u0005/\u0000\u0000\u0217\u0219" + + "\u0007\u0010\u0000\u0000\u0218\u0217\u0001\u0000\u0000\u0000\u0219\u021c" + + "\u0001\u0000\u0000\u0000\u021a\u0218\u0001\u0000\u0000\u0000\u021a\u021b" + + "\u0001\u0000\u0000\u0000\u021b\u021d\u0001\u0000\u0000\u0000\u021c\u021a" + + "\u0001\u0000\u0000\u0000\u021d\u021e\u0004L\u0001\u0000\u021e\u009b\u0001" + + "\u0000\u0000\u0000\u021f\u0220\u0005t\u0000\u0000\u0220\u0221\u0005r\u0000" + + "\u0000\u0221\u0222\u0005u\u0000\u0000\u0222\u0223\u0005e\u0000\u0000\u0223" + + "\u009d\u0001\u0000\u0000\u0000\u0224\u0225\u0005f\u0000\u0000\u0225\u0226" + + "\u0005a\u0000\u0000\u0226\u0227\u0005l\u0000\u0000\u0227\u0228\u0005s" + + "\u0000\u0000\u0228\u0229\u0005e\u0000\u0000\u0229\u009f\u0001\u0000\u0000" + + "\u0000\u022a\u022b\u0005n\u0000\u0000\u022b\u022c\u0005u\u0000\u0000\u022c" + + "\u022d\u0005l\u0000\u0000\u022d\u022e\u0005l\u0000\u0000\u022e\u00a1\u0001" + + "\u0000\u0000\u0000\u022f\u0230\u0005b\u0000\u0000\u0230\u0231\u0005o\u0000" + + "\u0000\u0231\u0232\u0005o\u0000\u0000\u0232\u0233\u0005l\u0000\u0000\u0233" + + "\u0234\u0005e\u0000\u0000\u0234\u0235\u0005a\u0000\u0000\u0235\u0256\u0005" + + "n\u0000\u0000\u0236\u0237\u0005b\u0000\u0000\u0237\u0238\u0005y\u0000" + + "\u0000\u0238\u0239\u0005t\u0000\u0000\u0239\u0256\u0005e\u0000\u0000\u023a" + + "\u023b\u0005s\u0000\u0000\u023b\u023c\u0005h\u0000\u0000\u023c\u023d\u0005" + + "o\u0000\u0000\u023d\u023e\u0005r\u0000\u0000\u023e\u0256\u0005t\u0000" + + "\u0000\u023f\u0240\u0005c\u0000\u0000\u0240\u0241\u0005h\u0000\u0000\u0241" + + "\u0242\u0005a\u0000\u0000\u0242\u0256\u0005r\u0000\u0000\u0243\u0244\u0005" + + "i\u0000\u0000\u0244\u0245\u0005n\u0000\u0000\u0245\u0256\u0005t\u0000" + + "\u0000\u0246\u0247\u0005l\u0000\u0000\u0247\u0248\u0005o\u0000\u0000\u0248" + + "\u0249\u0005n\u0000\u0000\u0249\u0256\u0005g\u0000\u0000\u024a\u024b\u0005" + + "f\u0000\u0000\u024b\u024c\u0005l\u0000\u0000\u024c\u024d\u0005o\u0000" + + "\u0000\u024d\u024e\u0005a\u0000\u0000\u024e\u0256\u0005t\u0000\u0000\u024f" + + "\u0250\u0005d\u0000\u0000\u0250\u0251\u0005o\u0000\u0000\u0251\u0252\u0005" + + "u\u0000\u0000\u0252\u0253\u0005b\u0000\u0000\u0253\u0254\u0005l\u0000" + + "\u0000\u0254\u0256\u0005e\u0000\u0000\u0255\u022f\u0001\u0000\u0000\u0000" + + "\u0255\u0236\u0001\u0000\u0000\u0000\u0255\u023a\u0001\u0000\u0000\u0000" + + "\u0255\u023f\u0001\u0000\u0000\u0000\u0255\u0243\u0001\u0000\u0000\u0000" + + "\u0255\u0246\u0001\u0000\u0000\u0000\u0255\u024a\u0001\u0000\u0000\u0000" + + "\u0255\u024f\u0001\u0000\u0000\u0000\u0256\u00a3\u0001\u0000\u0000\u0000" + + "\u0257\u0258\u0005d\u0000\u0000\u0258\u0259\u0005e\u0000\u0000\u0259\u025a" + + "\u0005f\u0000\u0000\u025a\u00a5\u0001\u0000\u0000\u0000\u025b\u025f\u0007" + + "\u0011\u0000\u0000\u025c\u025e\u0007\u0012\u0000\u0000\u025d\u025c\u0001" + + "\u0000\u0000\u0000\u025e\u0261\u0001\u0000\u0000\u0000\u025f\u025d\u0001" + + "\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260\u00a7\u0001" + + "\u0000\u0000\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0262\u026b\u0005" + + "0\u0000\u0000\u0263\u0267\u0007\u0006\u0000\u0000\u0264\u0266\u0007\u0007" + + "\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000\u0266\u0269\u0001\u0000" + + "\u0000\u0000\u0267\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000" + + "\u0000\u0000\u0268\u026b\u0001\u0000\u0000\u0000\u0269\u0267\u0001\u0000" + + "\u0000\u0000\u026a\u0262\u0001\u0000\u0000\u0000\u026a\u0263\u0001\u0000" + + 
"\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026d\u0006S\u0002" + + "\u0000\u026d\u00a9\u0001\u0000\u0000\u0000\u026e\u0272\u0007\u0011\u0000" + + "\u0000\u026f\u0271\u0007\u0012\u0000\u0000\u0270\u026f\u0001\u0000\u0000" + + "\u0000\u0271\u0274\u0001\u0000\u0000\u0000\u0272\u0270\u0001\u0000\u0000" + + "\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0275\u0001\u0000\u0000" + + "\u0000\u0274\u0272\u0001\u0000\u0000\u0000\u0275\u0276\u0006T\u0002\u0000" + + "\u0276\u00ab\u0001\u0000\u0000\u0000\"\u0000\u0001\u00af\u00b9\u00c2\u00c7" + + "\u01b8\u01bb\u01c2\u01c5\u01cc\u01cf\u01d2\u01d9\u01dc\u01e2\u01e4\u01e8" + + "\u01ed\u01ef\u01f2\u01fa\u01fc\u0206\u0208\u020c\u0212\u0214\u021a\u0255" + + "\u025f\u0267\u026a\u0272\u0003\u0006\u0000\u0000\u0002\u0001\u0000\u0002" + + "\u0000\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java new file mode 100644 index 0000000000000..dbba3226ba300 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java @@ -0,0 +1,51 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.painless; + +public class CommentTests extends ScriptTestCase { + + public void testSingleLineComments() { + assertEquals(5, exec("// comment\n return 5")); + assertEquals(5, exec("// comment\r return 5")); + assertEquals(5, exec("return 5 // comment no newline or return char")); + } + + public void testOpenCloseComments() { + assertEquals(5, exec("/* single-line comment */ return 5")); + assertEquals(5, exec("/* multi-line \n */ return 5")); + assertEquals(5, exec("/* multi-line \r */ return 5")); + assertEquals(5, exec("/* multi-line \n\n\r\r */ return 5")); + assertEquals(5, exec("def five = 5; /* multi-line \r */ return five")); + assertEquals(5, exec("return 5 /* multi-line ignored code */")); + } +} diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java index d83811e6668eb..a653edbd05992 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -135,35 +135,40 @@ public void testRangeQuery() throws IOException { public void testRoundsUpperBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.095, true, false, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.095, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.105, true, false, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 7999]", getQueryString(scaledFloatQ)); } public void testRoundsLowerBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_QSC); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ 
= ft.rangeQuery(-0.1, null, true, true, MOCK_QSC); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.095, null, false, true, MOCK_QSC); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.095, null, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.105, null, false, true, MOCK_QSC); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.105, null, true, true, MOCK_QSC); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ)); + } + + private String getQueryString(Query query) { + assertTrue(query instanceof IndexOrDocValuesQuery); + return ((IndexOrDocValuesQuery) query).getIndexQuery().toString(); } public void testValueForSearch() { diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 
0000000000000..478e7cfba1470 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index 
fd9199092452b..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 743b51e979fb7..99d576bed01c7 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -79,7 +79,7 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase { @After public void tearDown() throws Exception { waitingRequests.keySet().forEach(this::finishRequest); - shutdownExecutorService(); + shutdownExecutorServices(); super.tearDown(); } @@ -88,7 +88,7 @@ private CountDownLatch finishRequest(String url) { return finishingRequests.get(url); } - private void shutdownExecutorService() throws InterruptedException { + private void shutdownExecutorServices() throws InterruptedException { if (!handlerService.isShutdown()) { handlerService.shutdown(); handlerService.awaitTermination(10, TimeUnit.SECONDS); diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 deleted file mode 100644 index ef410899981ca..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7133d34e92770f59eb28686f4d511b9f3f32e970 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..dde9b7c100dc7 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 @@ -0,0 +1 @@ +147cb42a90a29501d9ca6094ea0db1d213f3076a \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 deleted 
file mode 100644 index 46b83c9e40b3a..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -be44282e1f6b91a0650fcceb558053d6bdd4863d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..b70a22e9db096 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 @@ -0,0 +1 @@ +b034dd3a975763e083c7e11b5d0f7d516ab72590 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java index 2939711f6f7e1..76b109932e642 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java @@ -38,7 +38,7 @@ import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; import org.apache.lucene.analysis.ja.dict.UserDictionary; -import org.apache.lucene.analysis.ja.util.CSVUtil; +import org.apache.lucene.analysis.util.CSVUtil; import org.opensearch.OpenSearchException; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 deleted file mode 100644 index 36664695a7818..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd1f80d33346f7e588685484ef29a304db5190e4 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..323f165c62790 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 @@ -0,0 +1 @@ +c405f2f7d0fc127d88dfbadd753469b2028fdf52 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 deleted file mode 100644 index 003ccdf8b0727..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b9ffdc7a52d2087ecb03318ec06305b480cdfe82 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..dd659ddf4de95 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 @@ -0,0 +1 @@ +970e5775876c2d7e1b9af7421a4b17d96f63faf4 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 deleted file mode 100644 index e22eaa474016f..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f73e2007b133fb699e517ef13b4952844f0150d8 \ No newline at end of file diff --git 
a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..ed0e81d8f1f75 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 @@ -0,0 +1 @@ +2421e5238e9b8484929291744d709dd743c01da1 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 deleted file mode 100644 index 1ebe42a2a2f56..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c09cbc021a8f81a01600a1d2a999361e70f7aed \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..fd8e000088180 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 @@ -0,0 +1 @@ +a23e7de4cd9ae7af285c89dc1c55e0ac3f157fd3 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 deleted file mode 100644 index 3c4523d45c0f5..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b054f2c7b11fc7c5601b4c3cdf18aa7508612898 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..d0e7a3b0c751c --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 @@ -0,0 +1 @@ +8d9bce1ea51db279878c51091dd9aefc7b335da4 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 
b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java index 5e2cb8bfbc03a..3fcc995fb4199 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java @@ -9,10 +9,10 @@ package org.opensearch.plugin.correlation.core.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.codec.correlation950.CorrelationCodec; -import org.opensearch.plugin.correlation.core.index.codec.correlation950.PerFieldCorrelationVectorsFormat; +import org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec; +import org.opensearch.plugin.correlation.core.index.codec.correlation990.PerFieldCorrelationVectorsFormat; import java.util.Optional; import java.util.function.BiFunction; @@ -24,15 +24,15 @@ * @opensearch.internal */ public enum CorrelationCodecVersion { - V_9_5_0( + V_9_9_0( "CorrelationCodec", - new Lucene95Codec(), + new Lucene99Codec(), new PerFieldCorrelationVectorsFormat(Optional.empty()), (userCodec, mapperService) -> new CorrelationCodec(userCodec, new PerFieldCorrelationVectorsFormat(Optional.of(mapperService))), CorrelationCodec::new ); - private static final CorrelationCodecVersion CURRENT = V_9_5_0; + private static final CorrelationCodecVersion CURRENT = V_9_9_0; private final String codecName; private final Codec defaultCodecDelegate; private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; diff --git 
a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java similarity index 97% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java index f91ba429fbea9..022972e2e06c3 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; @@ -19,7 +19,7 @@ * @opensearch.internal */ public class CorrelationCodec extends FilterCodec { - private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_5_0; + private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_9_0; private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; /** diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java similarity index 77% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java index f6862ecc17736..89cc0b614a1a5 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java @@ -6,9 +6,9 @@ * compatible open source license. 
*/ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; -import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.opensearch.index.mapper.MapperService; import org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat; @@ -26,10 +26,10 @@ public class PerFieldCorrelationVectorsFormat extends BasePerFieldCorrelationVec public PerFieldCorrelationVectorsFormat(final Optional<MapperService> mapperService) { super( mapperService, - Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene95HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - Lucene95HnswVectorsFormat::new, - Lucene95HnswVectorsFormat::new + Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + Lucene99HnswVectorsFormat::new, + Lucene99HnswVectorsFormat::new ); } } diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java similarity index 96% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java index b4dad34d2718e..fc2a9de58a73a 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java @@ -9,4 +9,4 @@ /** - * custom Lucene9.5 codec package for events-correlation-engine + * custom Lucene9.9 codec package for events-correlation-engine */ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; diff --git a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 598a3b6af73c2..013c17e4a9736 100644 --- a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1 +1 @@ -org.opensearch.plugin.correlation.core.index.codec.correlation950.CorrelationCodec +org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java similarity index 98% rename from plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java rename to plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java index b93172537d419..7223b450a136c 100644 ---
a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java +++ b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; import org.apache.lucene.codecs.Codec; import org.apache.lucene.document.Document; @@ -32,7 +32,7 @@ import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_EF_CONSTRUCTION; import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_M; -import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_5_0; +import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_9_0; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -56,7 +56,7 @@ public void testCorrelationVectorIndex() throws Exception { Function<MapperService, PerFieldCorrelationVectorsFormat> perFieldCorrelationVectorsProvider = mapperService -> new PerFieldCorrelationVectorsFormat(Optional.of(mapperService)); Function<PerFieldCorrelationVectorsFormat, CorrelationCodec> correlationCodecProvider = (correlationVectorsFormat -> new CorrelationCodec( - V_9_5_0.getDefaultCodecDelegate(), + V_9_9_0.getDefaultCodecDelegate(), correlationVectorsFormat )); testCorrelationVectorIndex(correlationCodecProvider, perFieldCorrelationVectorsProvider); diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 deleted file mode 100644 index f0d165ff7cf82..0000000000000 --- a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@
-f3cdb002e0f2f30ad9c5fd053d78b1a485511ab1 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..ad4e055d4f19a --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 @@ -0,0 +1 @@ +d952ad30d3f2d1220f39db175618414b56d14638 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 deleted file mode 100644 index 40379694f5ea5..0000000000000 --- a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -77e3a27823f795d928b897d8444744ddb044a5c3 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..4309dad93b2b6 --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 @@ -0,0 +1 @@ +36a418325c618e440e5ccb80b75c705d894f50bd \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 820d14b3df8e4..0000000000000 --- a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -684daae9ea45087c670b4f6511edcfdb19c3a695 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..5f54d0ac554e0 --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +e9df364a2695e66eb8d2803d6725424842760125 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 0789dea62778a..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24d0f4f2b0b0b55acc498d62f9c0921d0ea6da7b \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..0232fc58f9357 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ 
b/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 deleted file mode 100644 index fb2d1aa25a5ec..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cbaac9e2c2d7b86e4c1bda220cde49949c9bcaee \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..3b6cd3524d978 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5b126ceba61275f38297cacd5ea0cd6d3addee04 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 deleted file mode 100644 index ca030677c32d0..0000000000000 --- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c198ee61abba7ae6b3cb6b1033f04fb0ae79d512 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9d01e814971f2 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +50a2d899a8f8a68daed1a9b6d7750184310cc45f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 450c34d975bb9..0000000000000 --- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3f503d7554cf25edc6b83b9b4724b240c6e91d5 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..987b524aedc98 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-hdfs/build.gradle 
b/plugins/repository-hdfs/build.gradle index f04d42a2155d6..36843e3bc8700 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -75,7 +75,7 @@ dependencies { api 'commons-collections:commons-collections:3.2.2' api "org.apache.commons:commons-compress:${versions.commonscompress}" api 'org.apache.commons:commons-configuration2:2.9.0' - api 'commons-io:commons-io:2.14.0' + api 'commons-io:commons-io:2.15.1' api 'org.apache.commons:commons-lang3:3.14.0' implementation 'com.google.re2j:re2j:1.7' api 'javax.servlet:servlet-api:2.5' diff --git a/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 deleted file mode 100644 index 33c5cfe53e01d..0000000000000 --- a/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4c6e1f6c196339473cd2e1b037f0eb97c62755b \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 new file mode 100644 index 0000000000000..47c5d13812a36 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 @@ -0,0 +1 @@ +f11560da189ab563a5c8e351941415430e9304ea \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 deleted file mode 100644 index 093ced04bee4b..0000000000000 --- a/plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2761d380bc97e6ce64745dd9ca04538cedd40a5b \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9110503f67304 --- /dev/null +++ b/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d75246285e5fac6f6dad47e387ed4f46f36e521d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git 
a/plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 
b/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 deleted file mode 100644 index a710d964c1803..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f86303830ab65680e45cabb531e37078ecc6791e \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..522d85a3bf12e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +700fdbabab44709b0eccffe8f91c4226a5787356 \ No newline at end of file diff --git 
a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 deleted file mode 100644 index 2c038aad4b934..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5c081d8f877225732efe13908f350029c811709 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..19f734ca17b79 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 @@ -0,0 +1 @@ +b4aea155f6d6b1032eba85378564431cfd86f562 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 deleted file mode 100644 index 3243f524432eb..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5f8bb68084ea5709a27e935907b1bb49d0bd049 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..4c06d28cba199 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 @@ -0,0 +1 @@ +3fcc87f3d810ce49d865ee54b40831559c5e129b \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 deleted file mode 100644 index 1d7da47286ae0..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3643061da474061ffa7f2036a58a7a0d40212276 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..91a5c0f715d2b --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +19c9a3f52851a1333b648ed83c82d16eb4c64afd \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 deleted file mode 100644 index 3fab0e47adcbe..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 +++ /dev/null 
@@ -1 +0,0 @@ -ab56c7223112fac13a66e3f667c5fc666f4a3707 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..6c05600ae3b08 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 @@ -0,0 +1 @@ +b3e74d5b8cf5e60d9965042fa284085bbe081ce3 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 deleted file mode 100644 index f93cf7a63bfad..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5752d171cd08ac84f9273258a315bc5f97e1187e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..f54e6f6893050 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 @@ -0,0 +1 @@ +af68f90f0410b7b3a1900d3e0a15ad51b10ffd5b \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 deleted file mode 100644 index 2fc33b62aee54..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6b41cd66a385d513b58b6617f20b701435b64abd \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..49d40b36ba85b --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +4acab18052267e280d1f9de22c591a5c88bed3a6 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 deleted file mode 100644 index 99f758b047aa2..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9346006cead763247a786b5cabf3e1ae3c88eadb \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..a01de2aa84c43 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 @@ -0,0 +1 @@ +9f07e1764389e076a36fb7d9e5769e29f3dab950 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 deleted file mode 100644 index 705a342a684c4..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fab56e187e3fb3c70c18223184d53a76500114ab \ No newline at end of file diff --git 
a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 new file mode 100644 index 0000000000000..a5fc8c2059104 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 @@ -0,0 +1 @@ +9201e6a43a0a89515626f7516c7d1b2c349f76df \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 deleted file mode 100644 index 31818695cc774..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -504de8cc7dc68e84c8c7c2757522d934e9c50d35 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..cd746f0756e46 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 @@ -0,0 +1 @@ +ab49eb621d6d01f0ad2f016989d0352ef18ea9a2 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 deleted file mode 100644 index 3cf3080a98bd9..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -454c7a6afab864de9f0c166246f28f16aaa824c1 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..740737dc13efc --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +01fcd8bad38d7b8987f6fc93bd7e933240eb727e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 deleted file mode 100644 index 41b0dca07556e..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b054760243906af0a327a8f5bd99adc2826ccd88 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..e6ff3dbafda22 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 @@ -0,0 +1 @@ +abad9abc80dfe6118a60413afa161696bbf8dd43 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 deleted file mode 100644 index 2f71fd5cc780a..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bff24f085193e105d4e23e3db27bf81ccb3d830e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..36ec960c4f7be --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 @@ -0,0 +1 @@ +d88407ae475e5f4e859a81e4f61e362e939f7bc2 \ No newline at end of file diff 
--git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 deleted file mode 100644 index f0060b8a0f78f..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d80ad3210fa890a856a1d04379d134ab44a09501 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..293b82f206c99 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 @@ -0,0 +1 @@ +121a75c2ba9ed8b80f5ff131c2411a5d460f38d0 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 
b/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java index 46cf6ae708d1c..d0c0406bd7774 100644 --- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java @@ -80,7 +80,7 @@ public class NioHttpPipeliningHandlerTests extends OpenSearchTestCase { @After public void cleanup() throws Exception { waitingRequests.keySet().forEach(this::finishRequest); - shutdownExecutorService(); + shutdownExecutorServices(); } private CountDownLatch finishRequest(String url) { @@ -88,7 +88,7 @@ private CountDownLatch finishRequest(String url) { return finishingRequests.get(url); } - private void shutdownExecutorService() throws InterruptedException { + private void 
shutdownExecutorServices() throws InterruptedException { if (!handlerService.isShutdown()) { handlerService.shutdown(); handlerService.awaitTermination(10, TimeUnit.SECONDS); diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 0789dea62778a..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24d0f4f2b0b0b55acc498d62f9c0921d0ea6da7b \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..0232fc58f9357 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git 
a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 
450c34d975bb9..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3f503d7554cf25edc6b83b9b4724b240c6e91d5 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..987b524aedc98 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml new file mode 100644 index 0000000000000..dd8c2a2deb721 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml @@ -0,0 +1,55 @@ +--- +setup: + + - do: + indices.create: + index: test1 + wait_for_active_shards: all + body: + settings: + index.number_of_shards: 1 + index.number_of_replicas: 1 + + - do: + index: + index: test1 + id: 1 + body: { "bar": "bar" } + + - do: + indices.refresh: {} + +# Related issue: https://github.com/opensearch-project/OpenSearch/issues/9857 +--- +"Test noop_update_total metric can be updated by both update API and bulk API": + - skip: + version: " - 2.99.99" #TODO: change to 2.11.99 after the PR is backported to 2.x branch + reason: "fixed in 3.0" + + - do: + update: + index: test1 + id: 1 + body: { "doc": { "bar": "bar" } } + + - do: + indices.stats: + index: test1 + metric: indexing + + - match: { 
indices.test1.primaries.indexing.noop_update_total: 1 } + - match: { indices.test1.total.indexing.noop_update_total: 1 } + + - do: + bulk: + body: | + {"update": {"_id": "1", "_index": "test1"}} + {"doc": {"bar": "bar"}} + + - do: + indices.stats: + index: test1 + metric: indexing + + - match: { indices.test1.primaries.indexing.noop_update_total: 2 } + - match: { indices.test1.total.indexing.noop_update_total: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml new file mode 100644 index 0000000000000..b9457f0290897 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml @@ -0,0 +1,141 @@ +--- +"Throw aliases missing exception when removing non-existing alias with setting must_exist to true": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + catch: /aliases \[test_alias\] missing/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + alias: test_alias + must_exist: true + + - do: + catch: /aliases \[testAlias\*\] missing/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [ testAlias* ] + must_exist: true + + - do: + catch: /\[aliases\] can't be empty/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [] + must_exist: true + +--- +"Throw aliases missing exception when all of the specified aliases are non-existing": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + catch: /aliases \[test\_alias\] missing/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + alias: test_alias + + - do: + catch: /aliases \[test\_alias\*\] missing/ + indices.update_aliases: + body: + actions: + - remove: + indices: [ test_index ] + aliases: [ test_alias* ] + +--- +"Remove successfully when some specified aliases are non-existing": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + indices.update_aliases: + body: + actions: + - add: + indices: [ test_index ] + aliases: [ test_alias ] + + - do: + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [test_alias, test_alias1, test_alias2] + must_exist: false + + - match: { acknowledged: true } + +--- +"Remove silently when all of the specified aliases are non-existing and must_exist is false": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [test_alias, test_alias1, test_alias2] + must_exist: false + + - match: { acknowledged: true } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml index f3281e35ac8e6..c7b00d5fbbef2 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml @@ -1,8 +1,8 @@ setup: - skip: features: [ "headers" ] - version: " - 2.99.99" - reason: "searching with only doc_values was added in 3.0.0" + version: " - 2.11.99" + reason: "searching with only doc_values was added in 2.12.0" --- "search on fields with both index and doc_values enabled": - do: diff --git a/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 deleted file mode 100644 index 6ad304fa52c12..0000000000000 --- a/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -36f0363325ca7bf62c180160d1ed5165c7c37795 \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..c9e6120da7497 --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 @@ -0,0 +1 @@ +24c8401b530308f9568eb7b408c2029c63f564c6 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 deleted file mode 100644 index f104c4207d390..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e98fb408028f40170e6d87c16422bfdc0bb2e392 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..69ecf6aa68200 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 @@ -0,0 +1 @@ +11c46007366bb037be7d271ab0a5849b1d544662 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.8.0.jar.sha1 b/server/licenses/lucene-core-9.8.0.jar.sha1 deleted file mode 100644 index f9a3e2f3cbee6..0000000000000 --- a/server/licenses/lucene-core-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e8421c5f8573bcf22e9265fc7e19469545a775a \ No newline at end of file diff --git a/server/licenses/lucene-core-9.9.1.jar.sha1 b/server/licenses/lucene-core-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..ae596196d9e6a --- /dev/null +++ b/server/licenses/lucene-core-9.9.1.jar.sha1 @@ -0,0 +1 @@ +55249fa9a0ed321adcf8283c6f3b649a6812b0a9 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.8.0.jar.sha1 b/server/licenses/lucene-grouping-9.8.0.jar.sha1 deleted file mode 100644 index ab132121b2edc..0000000000000 --- a/server/licenses/lucene-grouping-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d39184518351178c404ed9669fc6cb6111f2288d \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.9.1.jar.sha1 b/server/licenses/lucene-grouping-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..e7df056400661 --- /dev/null +++ b/server/licenses/lucene-grouping-9.9.1.jar.sha1 @@ -0,0 +1 @@ +2f2785e17c5c823cc8f41a7ddb4647aaca8ee773 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.8.0.jar.sha1 b/server/licenses/lucene-highlighter-9.8.0.jar.sha1 deleted file mode 100644 index c7cb678fb7b72..0000000000000 --- a/server/licenses/lucene-highlighter-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1ac38c8278dbd63dfab30744a41dd955a415a31c \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.9.1.jar.sha1 b/server/licenses/lucene-highlighter-9.9.1.jar.sha1 new file mode 100644 index 
0000000000000..828c7294aa586 --- /dev/null +++ b/server/licenses/lucene-highlighter-9.9.1.jar.sha1 @@ -0,0 +1 @@ +30928513461bf79a5cb057e84da7d34a1e53227d \ No newline at end of file diff --git a/server/licenses/lucene-join-9.8.0.jar.sha1 b/server/licenses/lucene-join-9.8.0.jar.sha1 deleted file mode 100644 index 2b6cb8af4faf6..0000000000000 --- a/server/licenses/lucene-join-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3d64fc57bb6e718d906413a9f73c713e6d4d8bb0 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.9.1.jar.sha1 b/server/licenses/lucene-join-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..34b44ca8c6ad5 --- /dev/null +++ b/server/licenses/lucene-join-9.9.1.jar.sha1 @@ -0,0 +1 @@ +b9c8cc99632280148f92b4c0a64111c482d5d0ac \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.8.0.jar.sha1 b/server/licenses/lucene-memory-9.8.0.jar.sha1 deleted file mode 100644 index 5fdfee401dd0a..0000000000000 --- a/server/licenses/lucene-memory-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5283ac71d6ccecb5e00c7b52df2faec012f2625a \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.9.1.jar.sha1 b/server/licenses/lucene-memory-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..b75fba4c331e9 --- /dev/null +++ b/server/licenses/lucene-memory-9.9.1.jar.sha1 @@ -0,0 +1 @@ +49f820b1b321860fa42a4f7583e8ed8f77b9c1c2 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.8.0.jar.sha1 b/server/licenses/lucene-misc-9.8.0.jar.sha1 deleted file mode 100644 index cf815cba15862..0000000000000 --- a/server/licenses/lucene-misc-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a57b049cf51a5e9c9c1909c420f645f1b6f9a54 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.9.1.jar.sha1 b/server/licenses/lucene-misc-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..f1e1e056004e9 --- /dev/null +++ b/server/licenses/lucene-misc-9.9.1.jar.sha1 @@ -0,0 +1 @@ +db7c30217602dfcda394a4d0f0a9e68140d385a6 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.8.0.jar.sha1 b/server/licenses/lucene-queries-9.8.0.jar.sha1 deleted file mode 100644 index 09f369ef18e12..0000000000000 --- a/server/licenses/lucene-queries-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -628db4ef46f1c6a05145bdac1d1bc4ace6341b13 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.9.1.jar.sha1 b/server/licenses/lucene-queries-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..888b9b4a05ec8 --- /dev/null +++ b/server/licenses/lucene-queries-9.9.1.jar.sha1 @@ -0,0 +1 @@ +d157547bd24edc8e9d9d59c273107dc3ac5fde5e \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.8.0.jar.sha1 b/server/licenses/lucene-queryparser-9.8.0.jar.sha1 deleted file mode 100644 index 2a42a8956b18b..0000000000000 --- a/server/licenses/lucene-queryparser-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -982faf2bfa55542bf57fbadef54c19ac00f57cae \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.9.1.jar.sha1 b/server/licenses/lucene-queryparser-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..1ce8a069a0f4e --- /dev/null +++ b/server/licenses/lucene-queryparser-9.9.1.jar.sha1 @@ -0,0 +1 @@ +12d844fe224f6f97c510ac20d68903ed7f626f6c \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.8.0.jar.sha1 b/server/licenses/lucene-sandbox-9.8.0.jar.sha1 deleted file mode 100644 index 64a0b07f72d29..0000000000000 --- a/server/licenses/lucene-sandbox-9.8.0.jar.sha1 +++ /dev/null @@ -1 
+0,0 @@ -06493dbd14d02537716822254866a94458f4d842 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.9.1.jar.sha1 b/server/licenses/lucene-sandbox-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..14fd86dadc404 --- /dev/null +++ b/server/licenses/lucene-sandbox-9.9.1.jar.sha1 @@ -0,0 +1 @@ +272e588fd3d8c0a401b28a1ac715f27044bf62ec \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 deleted file mode 100644 index d1bcb0581435c..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d9a731822ad6eefa1ba288a0c158d478522f165 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..0efd5a7595bfe --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 @@ -0,0 +1 @@ +e066432e7ab02b2a4914f989bcd8c44adbf340ad \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 deleted file mode 100644 index d17459cc569a9..0000000000000 --- a/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ce752a52b2d4eac90633c7df7982e29504f99e76 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..7f06466e4c721 --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 @@ -0,0 +1 @@ +fa54c9b962778e28ebc0efb9f75297781350361a \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.8.0.jar.sha1 b/server/licenses/lucene-suggest-9.8.0.jar.sha1 deleted file mode 100644 index ff47b87672d2c..0000000000000 --- a/server/licenses/lucene-suggest-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f977f96f2093b7fddea6b67caa2e1c5b10edebf6 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.9.1.jar.sha1 b/server/licenses/lucene-suggest-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..06732480d1b6c --- /dev/null +++ b/server/licenses/lucene-suggest-9.9.1.jar.sha1 @@ -0,0 +1 @@ +9554de5b22ae7483b344b94a9a956960b7a5d49c \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java index d7fb632c847d1..e27c0c4786da8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java @@ -35,6 +35,8 @@ import org.opensearch.action.DocWriteRequest.OpType; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.admin.indices.alias.Alias; +import org.opensearch.action.admin.indices.stats.IndicesStatsRequest; +import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; import org.opensearch.action.delete.DeleteRequest; import org.opensearch.action.get.GetResponse; import org.opensearch.action.index.IndexRequest; @@ -738,6 +740,12 @@ public void testNoopUpdate() { equalTo(2) ); + // test noop_update_total metric in stats changed + IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest().indices(indexName).indexing(true); + final IndicesStatsResponse indicesStatsResponse = client().admin().indices().stats(indicesStatsRequest).actionGet(); + 
assertThat(indicesStatsResponse.getIndex(indexName).getTotal().indexing.getTotal().getNoopUpdateCount(), equalTo(1L)); + assertThat(indicesStatsResponse.getIndex(indexName).getPrimaries().indexing.getTotal().getNoopUpdateCount(), equalTo(1L)); + final BulkItemResponse notFoundUpdate = bulkResponse.getItems()[1]; assertNotNull(notFoundUpdate.getFailure()); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java index c69718d982f8b..522d63b22a0da 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java @@ -364,6 +364,7 @@ public void testAwarenessZonesIncrementalNodes() { assertThat(counts.get(noZoneNode), equalTo(2)); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/5908") public void testThreeZoneOneReplicaWithForceZoneValueAndLoadAwareness() throws Exception { int nodeCountPerAZ = 5; int numOfShards = 30; diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java index 8291fef5d177b..f46f413f4d23f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java @@ -710,6 +710,7 @@ public void testPrimaryCorruptionDuringReplicationDoesNotFailReplicaShard() thro final NodeStats primaryNode = dataNodeStats.get(0); final NodeStats replicaNode = dataNodeStats.get(1); + assertAcked( prepareCreate("test").setSettings( Settings.builder() @@ -795,6 +796,17 @@ public void testPrimaryCorruptionDuringReplicationDoesNotFailReplicaShard() thro // Assert the cluster returns to green status because the replica will be promoted to primary ensureGreen(); + + // After Lucene 9.9 check index will flag corruption with old (not the latest) commit points. + // For this test our previous corrupt commit "segments_2" will remain on the primary. + // This asserts this is the case, and then resets check index status. + assertEquals("Check index has a single failure", 1, checkIndexFailures.size()); + assertTrue( + checkIndexFailures.get(0) + .getMessage() + .contains("could not read old (not latest) commit point segments file \"segments_2\" in directory") + ); + resetCheckIndexStatus(); } private int numShards(String... 
index) { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 848f6eddbb0df..51dba07a8f9f8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -636,6 +636,45 @@ public void testProfileDisableCache() throws Exception { } } + public void testCacheWithInvalidation() throws Exception { + Client client = client(); + assertAcked( + client.admin() + .indices() + .prepareCreate("index") + .setMapping("k", "type=keyword") + .setSettings( + Settings.builder() + .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + .get() + ); + indexRandom(true, client.prepareIndex("index").setSource("k", "hello")); + ensureSearchable("index"); + SearchResponse resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get(); + assertSearchResponse(resp); + OpenSearchAssertions.assertAllSuccessful(resp); + assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); + + assertCacheState(client, "index", 0, 1); + // Index but don't refresh + indexRandom(false, client.prepareIndex("index").setSource("k", "hello2")); + resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get(); + assertSearchResponse(resp); + // Should expect a hit here as refresh didn't happen + assertCacheState(client, "index", 1, 1); + + // Explicit refresh would invalidate cache + refresh(); + // Hit same query again + resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get(); + assertSearchResponse(resp); + // Should expect a miss as the key has changed due to a change in IndexReader.CacheKey (caused by the refresh) + assertCacheState(client, "index", 1, 2); + } + private static void assertCacheState(Client client, String index, long expectedHits, long expectedMisses) { RequestCacheStats requestCacheStats = client.admin() .indices() @@ -650,6 +689,7 @@ private static void assertCacheState(Client client, String index, long expectedH Arrays.asList(expectedHits, expectedMisses, 0L), Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount(), requestCacheStats.getEvictions()) ); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java index aa1fe695ecc12..d1e66c19c28e2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java @@ -57,7 +57,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.action.search.TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED_KEY; +import static org.opensearch.action.search.SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED_KEY; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java index 145830f02ee56..7c7cc12888307 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -86,6 +86,7 @@ public void testShardSizeEqualsSizeString() throws Exception { terms("keys").field("key") .size(3) .shardSize(3) + .showTermDocCountError(true) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(BucketOrder.count(false)) ) @@ -98,8 +99,11 @@ public void testShardSizeEqualsSizeString() throws Exception { expected.put("1", 8L); expected.put("3", 8L); expected.put("2", 4L); + Long expectedDocCount; for (Terms.Bucket bucket : buckets) { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString()))); + expectedDocCount = expected.get(bucket.getKeyAsString()); + // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680 + assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount); } } @@ -221,6 +225,7 @@ public void testShardSizeEqualsSizeLong() throws Exception { terms("keys").field("key") .size(3) .shardSize(3) + .showTermDocCountError(true) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(BucketOrder.count(false)) ) @@ -233,8 +238,11 @@ public void testShardSizeEqualsSizeLong() throws Exception { expected.put(1, 8L); expected.put(3, 8L); expected.put(2, 4L); + Long expectedDocCount; for (Terms.Bucket bucket : buckets) { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); + expectedDocCount = expected.get(bucket.getKeyAsNumber().intValue()); + // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680 + assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount); } } @@ -355,6 +363,7 @@ public void testShardSizeEqualsSizeDouble() throws Exception { terms("keys").field("key") .size(3) .shardSize(3) + .showTermDocCountError(true) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(BucketOrder.count(false)) ) @@ -367,8 +376,11 @@ public void testShardSizeEqualsSizeDouble() throws Exception { expected.put(1, 8L); expected.put(3, 8L); expected.put(2, 4L); + Long expectedDocCount; for (Terms.Bucket bucket : buckets) { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); + expectedDocCount = expected.get(bucket.getKeyAsNumber().intValue()); + // Doc count can vary when using concurrent segment search. 
See https://github.com/opensearch-project/OpenSearch/issues/11680
+            assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount);
         }
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
index b355ce6d7a8dd..343cea4b94c87 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
@@ -225,8 +225,16 @@ public void setupSuiteScopeCluster() throws Exception {
         }
         indexRandom(true, builders);
-        indexRandomForMultipleSlices("idx");
         ensureSearchable();
+
+        // Force merge each shard down to 1 segment to verify results are the same between concurrent and non-concurrent search paths;
+        // otherwise, concurrent segment search introduces additional error during the slice level reduce, and thus different buckets,
+        // doc_counts, and doc_count_errors may be returned. This test serves to verify that the doc_count_error is the same between
+        // concurrent and non-concurrent search in the 1 slice case. TermsFixedDocCountErrorIT verifies that the doc count error is
+        // correctly calculated for concurrent segment search at the slice level.
+        // See https://github.com/opensearch-project/OpenSearch/issues/11680
+        forceMerge(1);
+        Thread.sleep(5000); // Sleep 5s to ensure force merge completes
     }
 
     private void assertDocCountErrorWithinBounds(int size, SearchResponse accurateResponse, SearchResponse testResponse) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
new file mode 100644
index 0000000000000..5ad913e8c7086
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
@@ -0,0 +1,347 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
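The ShardSizeTermsIT hunks above repeat one predicate three times: a bucket's reported count is either exact or undercounts by at most its doc_count_error. A hypothetical helper (not in the PR) that captures the check the assertTrue lines perform:

    // Hypothetical helper: under concurrent segment search the reported doc count may
    // undercount, but reported + docCountError is an upper bound, so the exact count
    // must lie in [reported, reported + docCountError].
    static void assertDocCountWithinErrorBounds(long reported, long docCountError, long exact) {
        if (reported != exact && reported + docCountError < exact) {
            throw new AssertionError(
                "exact count " + exact + " outside [" + reported + ", " + (reported + docCountError) + "]"
            );
        }
    }

Note that the bound is only available when showTermDocCountError(true) is set on the terms aggregation, which is why each of those hunks adds that call.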
+ */ + +package org.opensearch.search.aggregations.bucket; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.search.aggregations.bucket.terms.Terms; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; + +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.opensearch.search.aggregations.AggregationBuilders.terms; +import static org.opensearch.test.OpenSearchIntegTestCase.Scope.TEST; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false) +public class TermsFixedDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase { + + private static final String STRING_FIELD_NAME = "s_value"; + + public TermsFixedDocCountErrorIT(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSimpleAggErrorMultiShard() throws Exception { + // size = 1, shard_size = 2 + // Shard_1 [A, A, A, A, B, B, C, C, D, D] -> Buckets {"A" : 4, "B" : 2} + // Shard_2 [A, B, B, B, C, C, C, D, D, D] -> Buckets {"B" : 3, "C" : 3} + // coordinator -> Buckets {"B" : 5, "A" : 4} + // Agg error is 4, from (shard_size)th bucket on each shard + // Bucket "A" error is 2, from (shard_size)th bucket on shard_2 + // Bucket "B" error is 0, it's present on both shards + + // size = 1 shard_size = 1 slice_size = 1 + // non-cs / cs + // Shard_1 [A, B, C] + // Shard_2 [B, C, D] + // cs + // Shard_1 slice_1 [A, B, C] -> {a : 1} -> {a : 1 -- error: 1} + // slice_2 [B, C, D] -> {b : 1} + // Coordinator should return the same doc count error in both cases + + assertAcked( + prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, 
"B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_1"); + + IndicesSegmentResponse segmentResponse = client().admin().indices().prepareSegments("idx_mshard_1").get(); + assertEquals(1, segmentResponse.getIndices().get("idx_mshard_1").getShards().get(0).getShards()[0].getSegments().size()); + + assertAcked( + prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_2"); + + segmentResponse = client().admin().indices().prepareSegments("idx_mshard_2").get(); + assertEquals(1, segmentResponse.getIndices().get("idx_mshard_2").getShards().get(0).getShards()[0].getSegments().size()); + + SearchResponse response = client().prepareSearch("idx_mshard_2", "idx_mshard_1") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(4, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); // Bucket "B" + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); // Bucket "A" + assertEquals("A", bucket.getKey().toString()); + assertEquals(4, bucket.getDocCount()); + assertEquals(2, bucket.getDocCountError()); + } + + public void testSimpleAggErrorSingleShard() throws Exception { + assertAcked( + prepareCreate("idx_shard_error").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + 
client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + refresh("idx_shard_error"); + + SearchResponse response = client().prepareSearch("idx_shard_error") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(1).shardSize(2)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(0, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("A", bucket.getKey().toString()); + assertEquals(6, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + } + + public void testSliceLevelDocCountErrorSingleShard() throws Exception { + assumeTrue( + "Slice level error is not relevant to non-concurrent search cases", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); + + // Slices are created by sorting segments by doc count in descending order then distributing in round robin fashion. + // Creates 2 segments (and therefore 2 slices since slice_count = 2) as follows: + // 1. [A, A, A, B, B, C] + // 2. [A, B, B, B, C, C] + // Thus we expect the doc count error for A to be 2 as the nth largest bucket on slice 2 has size 2 + + assertAcked( + prepareCreate("idx_slice_error").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_slice_error"); + + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_slice_error"); + + IndicesSegmentResponse segmentResponse = 
client().admin().indices().prepareSegments("idx_slice_error").get(); + assertEquals(2, segmentResponse.getIndices().get("idx_slice_error").getShards().get(0).getShards()[0].getSegments().size()); + + // Confirm that there is no error when shard_size == slice_size > cardinality + SearchResponse response = client().prepareSearch("idx_slice_error") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(1).shardSize(4)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(0, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); // Bucket "B" + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + response = client().prepareSearch("idx_slice_error") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(4, terms.getDocCountError()); + + bucket = terms.getBuckets().get(0); // Bucket "B" + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); // Bucket "A" + assertEquals("A", bucket.getKey().toString()); + assertEquals(3, bucket.getDocCount()); + assertEquals(2, bucket.getDocCountError()); + } + + public void testSliceLevelDocCountErrorMultiShard() throws Exception { + assumeTrue( + "Slice level error is not relevant to non-concurrent search cases", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); + + // Size = 2, shard_size = 2 + // Shard_1 [A, A, A, A, B, B, C, C] + // slice_1 [A, A, A, B, B, C] {"A" : 3, "B" : 2} + // slice_2 [A, C] {"A" : 1, "C" : 1} + // Shard_1 buckets: {"A" : 4 - error: 0, "B" : 2 - error: 1} + // Shard_2 [A, A, B, B, B, C, C, C] + // slice_1 [A, B, B, B, C, C] {"B" : 3, "C" : 2} + // slice_2 [A, C] {"A" : 1, "C" : 1} + // Shard_2 buckets: {"B" : 3 - error: 1, "C" : 3 - error: 0} + // Overall + // {"B" : 5 - error: 2, "A" : 4 - error: 3} Agg error: 6 + + assertAcked( + prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_1"); + + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + 
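The arithmetic these tests assert follows the same rule at both reduce levels: a shard (or slice) that did not return a term may be hiding up to its smallest returned bucket's count of that term. A hypothetical illustration of the per-bucket bound, where perShardBuckets holds each shard's (or slice's) returned top buckets:

    import java.util.List;
    import java.util.Map;

    // Hypothetical sketch: for every shard (or slice) that did not report the term,
    // add that shard's smallest returned doc count (its shard_size-th bucket), since
    // the term could sit just below the cutoff there.
    static long docCountErrorUpperBound(String term, List<Map<String, Long>> perShardBuckets) {
        long error = 0;
        for (Map<String, Long> shardBuckets : perShardBuckets) {
            if (!shardBuckets.containsKey(term)) {
                error += shardBuckets.values().stream().mapToLong(Long::longValue).min().orElse(0L);
            }
        }
        return error;
    }

For testSimpleAggErrorMultiShard earlier this reproduces the assertions: shard_1 returns {A:4, B:2} and shard_2 returns {B:3, C:2}, so "A" (missing from shard_2) gets error 2, "B" (present on both) gets 0, and the aggregation-level error is 2 + 2 = 4.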
client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_1"); + + IndicesSegmentResponse segmentResponse = client().admin().indices().prepareSegments("idx_mshard_1").get(); + assertEquals(2, segmentResponse.getIndices().get("idx_mshard_1").getShards().get(0).getShards()[0].getSegments().size()); + + SearchResponse response = client().prepareSearch("idx_mshard_1") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(3, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("A", bucket.getKey().toString()); + assertEquals(4, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); + assertEquals("B", bucket.getKey().toString()); + assertEquals(2, bucket.getDocCount()); + assertEquals(1, bucket.getDocCountError()); + + assertAcked( + prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_2"); + + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_2"); + + segmentResponse = client().admin().indices().prepareSegments("idx_mshard_2").get(); + assertEquals(2, segmentResponse.getIndices().get("idx_mshard_2").getShards().get(0).getShards()[0].getSegments().size()); + + response = client().prepareSearch("idx_mshard_2") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(3, terms.getDocCountError()); + + bucket = terms.getBuckets().get(0); + assertEquals("B", bucket.getKey().toString()); + assertEquals(3, bucket.getDocCount()); + assertEquals(1, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); + assertEquals("C", bucket.getKey().toString()); + assertEquals(3, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + response = client().prepareSearch("idx_mshard_2", "idx_mshard_1") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + terms = 
response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(6, terms.getDocCountError()); + + bucket = terms.getBuckets().get(0); + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(2, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); + assertEquals("A", bucket.getKey().toString()); + assertEquals(4, bucket.getDocCount()); + assertEquals(3, bucket.getDocCountError()); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index b0d8e7ea02e8f..3851b16551795 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -88,6 +88,10 @@ private static String randomExecutionHint() { // see https://github.com/elastic/elasticsearch/issues/5998 public void testShardMinDocCountSignificantTermsTest() throws Exception { + assumeFalse( + "For concurrent segment search shard_min_doc_count is not enforced at the slice level. See https://github.com/opensearch-project/OpenSearch/issues/11847", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); String textMappings; if (randomBoolean()) { textMappings = "type=long"; @@ -157,6 +161,10 @@ private void addTermsDocs(String term, int numInClass, int numNotInClass, List Using request breaker setting: {}", requestBreaker); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java new file mode 100644 index 0000000000000..71f82d7c0b412 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java @@ -0,0 +1,121 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.nested; + +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.test.OpenSearchIntegTestCase; + +import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.query.QueryBuilders.nestedQuery; +import static org.opensearch.index.query.QueryBuilders.termQuery; +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.equalTo; + +/** + * Creating a separate class with no parameterization to create and index documents in a single + * test run and compare search responses across concurrent and non-concurrent search. 
For more details,
+ * refer: https://github.com/opensearch-project/OpenSearch/issues/11413
+ */
+public class SimpleNestedExplainIT extends OpenSearchIntegTestCase {
+
+    @Override
+    protected Settings featureFlagSettings() {
+        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
+    }
+
+    /*
+     * Tests the explain output for multiple docs. Concurrent search with multiple slices is tested
+     * here, as a call to indexRandomForMultipleSlices is made and the output is compared with the
+     * explain output of the non-concurrent search use-case. A separate test class covers explain for
+     * the 1 slice case in concurrent search; refer to {@link SimpleNestedIT#testExplainWithSingleDoc}.
+     * For more details, refer: https://github.com/opensearch-project/OpenSearch/issues/11413
+     * */
+    public void testExplainMultipleDocs() throws Exception {
+        assertAcked(
+            prepareCreate("test").setMapping(
+                jsonBuilder().startObject()
+                    .startObject("properties")
+                    .startObject("nested1")
+                    .field("type", "nested")
+                    .endObject()
+                    .endObject()
+                    .endObject()
+            )
+        );
+
+        ensureGreen();
+
+        client().prepareIndex("test")
+            .setId("1")
+            .setSource(
+                jsonBuilder().startObject()
+                    .field("field1", "value1")
+                    .startArray("nested1")
+                    .startObject()
+                    .field("n_field1", "n_value1")
+                    .endObject()
+                    .startObject()
+                    .field("n_field1", "n_value1")
+                    .endObject()
+                    .endArray()
+                    .endObject()
+            )
+            .setRefreshPolicy(IMMEDIATE)
+            .get();
+
+        indexRandomForMultipleSlices("test");
+
+        // Turn off the concurrent search setting to test search with non-concurrent search
+        client().admin()
+            .cluster()
+            .prepareUpdateSettings()
+            .setPersistentSettings(Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build())
+            .get();
+
+        SearchResponse nonConSearchResp = client().prepareSearch("test")
+            .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total))
+            .setExplain(true)
+            .get();
+        assertNoFailures(nonConSearchResp);
+        assertThat(nonConSearchResp.getHits().getTotalHits().value, equalTo(1L));
+        Explanation nonConSearchExplain = nonConSearchResp.getHits().getHits()[0].getExplanation();
+        assertThat(nonConSearchExplain.getValue(), equalTo(nonConSearchResp.getHits().getHits()[0].getScore()));
+
+        // Turn on the concurrent search setting to test search with concurrent search
+        client().admin()
+            .cluster()
+            .prepareUpdateSettings()
+            .setPersistentSettings(Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build())
+            .get();
+
+        SearchResponse conSearchResp = client().prepareSearch("test")
+            .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total))
+            .setExplain(true)
+            .get();
+        assertNoFailures(conSearchResp);
+        assertThat(conSearchResp.getHits().getTotalHits().value, equalTo(1L));
+        Explanation conSearchExplain = conSearchResp.getHits().getHits()[0].getExplanation();
+        assertThat(conSearchExplain.getValue(), equalTo(conSearchResp.getHits().getHits()[0].getScore()));
+
+        // assert that the explanation for concurrent search is equal to the non-concurrent search's explanation
+        assertEquals(nonConSearchExplain, conSearchExplain);
+        client().admin()
+            .cluster()
+            .prepareUpdateSettings()
+            .setPersistentSettings(Settings.builder().putNull(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()).build())
+            .get();
+    }
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java
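A pattern note on the setting toggles in that test: the cluster-level concurrent-search setting is flipped twice and cleared with putNull at the end. A common hardening (sketch, not in the PR) wraps the reset in try/finally so a failing assertion cannot leak the setting into later tests:

    // Hypothetical helper: always restore the cluster setting, even if the test body throws.
    void withConcurrentSegmentSearch(boolean enabled, Runnable testBody) {
        client().admin()
            .cluster()
            .prepareUpdateSettings()
            .setPersistentSettings(Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), enabled).build())
            .get();
        try {
            testBody.run();
        } finally {
            client().admin()
                .cluster()
                .prepareUpdateSettings()
                .setPersistentSettings(Settings.builder().putNull(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()).build())
                .get();
        }
    }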
b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index 6d0b074c3a660..8eeffcbecb377 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -455,7 +455,13 @@ public void testDeleteNestedDocsWithAlias() throws Exception { assertDocumentCount("test", 6); } - public void testExplain() throws Exception { + /* + * Tests the explain output for single doc. Concurrent search with only slice 1 is tested + * here as call to indexRandomForMultipleSlices has implications on the range of child docs + * in the explain output. Separate test class is created to test explain for multiple slices + * case in concurrent search, refer {@link SimpleNestedExplainIT} + * */ + public void testExplainWithSingleDoc() throws Exception { assertAcked( prepareCreate("test").setMapping( jsonBuilder().startObject() @@ -487,7 +493,6 @@ public void testExplain() throws Exception { ) .setRefreshPolicy(IMMEDIATE) .get(); - indexRandomForConcurrentSearch("test"); SearchResponse searchResponse = client().prepareSearch("test") .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java index 253a8b2b14824..8fb3c57dd7680 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java @@ -64,7 +64,7 @@ import java.util.Set; import java.util.function.Function; -import static org.opensearch.action.search.TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED_KEY; +import static org.opensearch.action.search.SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED_KEY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index e619b93cdd93b..4ac2e1669ca67 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -270,7 +270,10 @@ public void testExplainDateRangeInQueryString() { long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000; long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1; - assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]")); + assertThat( + response.getQueryExplanation().get(0).getExplanation(), + containsString("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]") + ); assertThat(response.isValid(), equalTo(true)); } diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 345be330f048c..b47b974b96fed 100644 --- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java 
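The next two hunks are mechanical migrations to a newer Lucene API surface (the Pruning import below arrives with it): TermStates.build now takes the IndexSearcher rather than an IndexReaderContext, and SortField.getComparator takes a Pruning mode instead of a boolean. A minimal sketch of the new call shapes, exactly as this diff uses them:

    import java.io.IOException;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.TermStates;
    import org.apache.lucene.search.FieldComparator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Pruning;
    import org.apache.lucene.search.SortField;

    final class LuceneApiShapes {
        // Builds term states through the searcher (true = collect term statistics).
        static TermStates statsFor(IndexSearcher searcher, Term term) throws IOException {
            return TermStates.build(searcher, term, true);
        }

        // One comparator slot, no pruning; this matches the old getComparator(1, false).
        static FieldComparator<?> comparatorWithoutPruning(SortField sortField) {
            return sortField.getComparator(1, Pruning.NONE);
        }
    }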
@@ -100,11 +100,10 @@ public Query rewrite(IndexSearcher searcher) throws IOException { return rewritten; } IndexReader reader = searcher.getIndexReader(); - IndexReaderContext context = reader.getContext(); TermStates[] ctx = new TermStates[terms.length]; int[] docFreqs = new int[ctx.length]; for (int i = 0; i < terms.length; i++) { - ctx[i] = TermStates.build(context, terms[i], true); + ctx[i] = TermStates.build(searcher, terms[i], true); docFreqs[i] = ctx[i].docFreq(); } diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java index e93e5cdcc3f7b..961587113173d 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -144,7 +145,7 @@ private static class MergeSortQueue extends PriorityQueue { reverseMul = new int[sortFields.length]; for (int compIDX = 0; compIDX < sortFields.length; compIDX++) { final SortField sortField = sortFields[compIDX]; - comparators[compIDX] = sortField.getComparator(1, false); + comparators[compIDX] = sortField.getComparator(1, Pruning.NONE); reverseMul[compIDX] = sortField.getReverse() ? -1 : 1; } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java index bc27c70282368..6ce62dda32d0a 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -220,9 +220,11 @@ private static ObjectParser parser(String name, Supplier REMOVE_PARSER = parser(REMOVE.getPreferredName(), AliasActions::remove); + static { + REMOVE_PARSER.declareField(AliasActions::mustExist, XContentParser::booleanValue, MUST_EXIST, ValueType.BOOLEAN); + } private static final ObjectParser REMOVE_INDEX_PARSER = parser( REMOVE_INDEX.getPreferredName(), AliasActions::removeIndex @@ -554,6 +556,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (null != isHidden) { builder.field(IS_HIDDEN.getPreferredName(), isHidden); } + if (null != mustExist) { + builder.field(MUST_EXIST.getPreferredName(), mustExist); + } builder.endObject(); builder.endObject(); return builder; @@ -582,6 +587,8 @@ public String toString() { + searchRouting + ",writeIndex=" + writeIndex + + ",mustExist=" + + mustExist + "]"; } @@ -600,12 +607,13 @@ public boolean equals(Object obj) { && Objects.equals(indexRouting, other.indexRouting) && Objects.equals(searchRouting, other.searchRouting) && Objects.equals(writeIndex, other.writeIndex) - && Objects.equals(isHidden, other.isHidden); + && Objects.equals(isHidden, other.isHidden) + && Objects.equals(mustExist, other.mustExist); } @Override public int hashCode() { - return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex, isHidden); + return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex, isHidden, mustExist); } } diff --git 
a/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
index 769044f3dafb7..81cb3102cfcb9 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
@@ -50,6 +50,7 @@
 import org.opensearch.cluster.metadata.MetadataIndexAliasesService;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.inject.Inject;
+import org.opensearch.common.regex.Regex;
 import org.opensearch.core.action.ActionListener;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.index.Index;
@@ -59,6 +60,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -220,12 +222,33 @@ private static String[] concreteAliases(IndicesAliasesRequest.AliasActions actio
             // for DELETE we expand the aliases
             String[] indexAsArray = { concreteIndex };
             final Map<String, List<AliasMetadata>> aliasMetadata = metadata.findAliases(action, indexAsArray);
-            List<String> finalAliases = new ArrayList<>();
+            Set<String> finalAliases = new HashSet<>();
             for (final List<AliasMetadata> curAliases : aliasMetadata.values()) {
                 for (AliasMetadata aliasMeta : curAliases) {
                     finalAliases.add(aliasMeta.alias());
                 }
             }
+
+            // must_exist can only be set on the Remove action of the Update Aliases API;
+            // we check the value here to keep the behavior consistent with the Delete Aliases API
+            if (action.mustExist() != null) {
+                // if must_exist is false, we should make the remove action execute silently,
+                // so we return the originally specified aliases to avoid AliasesNotFoundException
+                if (!action.mustExist()) {
+                    return action.aliases();
+                }
+
+                // if any non-existing aliases are specified in the request and must_exist is true, throw the exception in advance
+                if (finalAliases.isEmpty()) {
+                    throw new AliasesNotFoundException(action.aliases());
+                }
+                String[] nonExistingAliases = Arrays.stream(action.aliases())
+                    .filter(originalAlias -> finalAliases.stream().noneMatch(finalAlias -> Regex.simpleMatch(originalAlias, finalAlias)))
+                    .toArray(String[]::new);
+                if (nonExistingAliases.length != 0) {
+                    throw new AliasesNotFoundException(nonExistingAliases);
+                }
+            }
             return finalAliases.toArray(new String[0]);
         } else {
             // for ADD and REMOVE_INDEX we just return the current aliases
diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
index b8517f53ff294..a7a13afd2597c 100644
--- a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
@@ -593,6 +593,7 @@ static boolean executeBulkItemRequest(
                 context.setRequestToExecute(updateResult.action());
                 break;
             case NOOP:
+                context.getPrimary().noopUpdate();
                 context.markOperationAsNoOp(updateResult.action());
                 context.markAsCompleted(context.getExecutionResult());
                 return true;
diff --git a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java
index f18bbb8a1cc13..5b41c2a13b596 100644
--- a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java
+++ 
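Behavior summary for the must_exist handling added above, with a usage sketch (index and alias names are illustrative): must_exist=true fails fast with AliasesNotFoundException when an alias, or an alias pattern, matches nothing; must_exist=false makes the remove action silent; and when the flag is absent the previous semantics are unchanged.

    import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
    import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
    import org.opensearch.client.Client;

    final class MustExistUsageSketch {
        // Remove an alias and insist that it exists; throws AliasesNotFoundException otherwise.
        static void removeAliasStrictly(Client client, String index, String alias) {
            IndicesAliasesRequest request = new IndicesAliasesRequest();
            request.addAliasAction(AliasActions.remove().index(index).alias(alias).mustExist(true));
            client.admin().indices().aliases(request).actionGet();
        }
    }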
b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java @@ -214,7 +214,7 @@ public final void start() { 0, 0, buildTookInMillis(), - timeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), ShardSearchFailure.EMPTY_ARRAY, clusters, null @@ -670,7 +670,7 @@ protected final SearchResponse buildSearchResponse( successfulOps.get(), skippedOps.get(), buildTookInMillis(), - timeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), failures, clusters, searchContextId diff --git a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java index c831c80b6455c..bce8d9fb2b1ca 100644 --- a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.opensearch.search.DocValueFormat; @@ -59,7 +60,7 @@ class BottomSortValuesCollector { this.reverseMuls = new int[sortFields.length]; this.sortFields = sortFields; for (int i = 0; i < sortFields.length; i++) { - comparators[i] = sortFields[i].getComparator(1, false); + comparators[i] = sortFields[i].getComparator(1, Pruning.NONE); reverseMuls[i] = sortFields[i].getReverse() ? -1 : 1; } } diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java b/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java new file mode 100644 index 0000000000000..607ccf182851b --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java @@ -0,0 +1,55 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.PipelineAggregationBuilder; +import org.opensearch.telemetry.metrics.tags.Tags; + +import java.util.Collection; + +/** + * Increments the counters related to Aggregation Search Queries. 
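The two buildSearchResponse replacements above route phase-took reporting through SearchRequestContext.getPhaseTook(), which (per the SearchRequestContext hunk further down) returns a value only when the request opted in. A usage sketch, assuming the setter that pairs with the isPhaseTook() getter read there:

    import org.opensearch.action.search.SearchRequest;

    final class PhaseTookSketch {
        static SearchRequest phaseTookRequest() {
            SearchRequest request = new SearchRequest("my-index"); // index name is illustrative
            request.setPhaseTook(true); // assumed setter matching the isPhaseTook() check
            return request;
        }
    }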
+ */ +public class SearchQueryAggregationCategorizer { + + private static final String TYPE_TAG = "type"; + private final SearchQueryCounters searchQueryCounters; + + public SearchQueryAggregationCategorizer(SearchQueryCounters searchQueryCounters) { + this.searchQueryCounters = searchQueryCounters; + } + + public void incrementSearchQueryAggregationCounters(Collection aggregatorFactories) { + for (AggregationBuilder aggregationBuilder : aggregatorFactories) { + incrementCountersRecursively(aggregationBuilder); + } + } + + private void incrementCountersRecursively(AggregationBuilder aggregationBuilder) { + // Increment counters for the current aggregation + String aggregationType = aggregationBuilder.getType(); + searchQueryCounters.aggCounter.add(1, Tags.create().addTag(TYPE_TAG, aggregationType)); + + // Recursively process sub-aggregations if any + Collection subAggregations = aggregationBuilder.getSubAggregations(); + if (subAggregations != null && !subAggregations.isEmpty()) { + for (AggregationBuilder subAggregation : subAggregations) { + incrementCountersRecursively(subAggregation); + } + } + + // Process pipeline aggregations + Collection pipelineAggregations = aggregationBuilder.getPipelineAggregations(); + for (PipelineAggregationBuilder pipelineAggregation : pipelineAggregations) { + String pipelineAggregationType = pipelineAggregation.getType(); + searchQueryCounters.aggCounter.add(1, Tags.create().addTag(TYPE_TAG, pipelineAggregationType)); + } + } +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java index 8fe1be610f9af..ffaae5b08772f 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java @@ -32,13 +32,15 @@ final class SearchQueryCategorizer { final SearchQueryCounters searchQueryCounters; + final SearchQueryAggregationCategorizer searchQueryAggregationCategorizer; + public SearchQueryCategorizer(MetricsRegistry metricsRegistry) { searchQueryCounters = new SearchQueryCounters(metricsRegistry); + searchQueryAggregationCategorizer = new SearchQueryAggregationCategorizer(searchQueryCounters); } public void categorize(SearchSourceBuilder source) { QueryBuilder topLevelQueryBuilder = source.query(); - logQueryShape(topLevelQueryBuilder); incrementQueryTypeCounters(topLevelQueryBuilder); incrementQueryAggregationCounters(source.aggregations()); @@ -56,9 +58,11 @@ private void incrementQuerySortCounters(List> sorts) { } private void incrementQueryAggregationCounters(AggregatorFactories.Builder aggregations) { - if (aggregations != null) { - searchQueryCounters.aggCounter.add(1); + if (aggregations == null) { + return; } + + searchQueryAggregationCategorizer.incrementSearchQueryAggregationCounters(aggregations.getAggregatorFactories()); } private void incrementQueryTypeCounters(QueryBuilder topLevelQueryBuilder) { diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java index 98f0169e69a5c..31f83dbef9dc9 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java @@ -9,26 +9,14 @@ package org.opensearch.action.search; import org.apache.lucene.search.BooleanClause; -import 
org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.MatchPhraseQueryBuilder; -import org.opensearch.index.query.MatchQueryBuilder; -import org.opensearch.index.query.MultiMatchQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilderVisitor; -import org.opensearch.index.query.QueryStringQueryBuilder; -import org.opensearch.index.query.RangeQueryBuilder; -import org.opensearch.index.query.RegexpQueryBuilder; -import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.index.query.WildcardQueryBuilder; -import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; -import org.opensearch.telemetry.metrics.tags.Tags; /** - * Class to visit the querybuilder tree and also track the level information. + * Class to visit the query builder tree and also track the level information. * Increments the counters related to Search Query type. */ final class SearchQueryCategorizingVisitor implements QueryBuilderVisitor { - private static final String LEVEL_TAG = "level"; private final int level; private final SearchQueryCounters searchQueryCounters; @@ -42,29 +30,7 @@ private SearchQueryCategorizingVisitor(SearchQueryCounters counters, int level) } public void accept(QueryBuilder qb) { - if (qb instanceof BoolQueryBuilder) { - searchQueryCounters.boolCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof FunctionScoreQueryBuilder) { - searchQueryCounters.functionScoreCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof MatchQueryBuilder) { - searchQueryCounters.matchCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof MatchPhraseQueryBuilder) { - searchQueryCounters.matchPhrasePrefixCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof MultiMatchQueryBuilder) { - searchQueryCounters.multiMatchCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof QueryStringQueryBuilder) { - searchQueryCounters.queryStringQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof RangeQueryBuilder) { - searchQueryCounters.rangeCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof RegexpQueryBuilder) { - searchQueryCounters.regexCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof TermQueryBuilder) { - searchQueryCounters.termCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof WildcardQueryBuilder) { - searchQueryCounters.wildcardCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else { - searchQueryCounters.otherQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } + searchQueryCounters.incrementCounter(qb, level); } public QueryBuilderVisitor getChildVisitor(BooleanClause.Occur occur) { diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java index 7e0259af07701..bbb883809b41b 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java @@ -8,33 +8,82 @@ package org.opensearch.action.search; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.BoostingQueryBuilder; +import org.opensearch.index.query.ConstantScoreQueryBuilder; +import org.opensearch.index.query.DisMaxQueryBuilder; +import 
org.opensearch.index.query.DistanceFeatureQueryBuilder; +import org.opensearch.index.query.ExistsQueryBuilder; +import org.opensearch.index.query.FieldMaskingSpanQueryBuilder; +import org.opensearch.index.query.FuzzyQueryBuilder; +import org.opensearch.index.query.GeoBoundingBoxQueryBuilder; +import org.opensearch.index.query.GeoDistanceQueryBuilder; +import org.opensearch.index.query.GeoPolygonQueryBuilder; +import org.opensearch.index.query.GeoShapeQueryBuilder; +import org.opensearch.index.query.IntervalQueryBuilder; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.MatchPhraseQueryBuilder; +import org.opensearch.index.query.MatchQueryBuilder; +import org.opensearch.index.query.MultiMatchQueryBuilder; +import org.opensearch.index.query.PrefixQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryStringQueryBuilder; +import org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.index.query.RegexpQueryBuilder; +import org.opensearch.index.query.ScriptQueryBuilder; +import org.opensearch.index.query.SimpleQueryStringBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.query.WildcardQueryBuilder; +import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.opensearch.telemetry.metrics.Counter; import org.opensearch.telemetry.metrics.MetricsRegistry; +import org.opensearch.telemetry.metrics.tags.Tags; + +import java.util.HashMap; +import java.util.Map; /** * Class contains all the Counters related to search query types. */ final class SearchQueryCounters { + private static final String LEVEL_TAG = "level"; private static final String UNIT = "1"; private final MetricsRegistry metricsRegistry; // Counters related to Query types public final Counter aggCounter; public final Counter boolCounter; + public final Counter boostingCounter; + public final Counter constantScoreCounter; + public final Counter disMaxCounter; + public final Counter distanceFeatureCounter; + public final Counter existsCounter; + public final Counter fieldMaskingSpanCounter; public final Counter functionScoreCounter; + public final Counter fuzzyCounter; + public final Counter geoBoundingBoxCounter; + public final Counter geoDistanceCounter; + public final Counter geoPolygonCounter; + public final Counter geoShapeCounter; + public final Counter intervalCounter; public final Counter matchCounter; + public final Counter matchallCounter; public final Counter matchPhrasePrefixCounter; public final Counter multiMatchCounter; public final Counter otherQueryCounter; - public final Counter queryStringQueryCounter; + public final Counter prefixCounter; + public final Counter queryStringCounter; public final Counter rangeCounter; - public final Counter regexCounter; - + public final Counter regexpCounter; + public final Counter scriptCounter; + public final Counter simpleQueryStringCounter; public final Counter sortCounter; public final Counter skippedCounter; public final Counter termCounter; public final Counter totalCounter; public final Counter wildcardCounter; + public final Counter numberOfInputFieldsCounter; + private final Map, Counter> queryHandlers; public SearchQueryCounters(MetricsRegistry metricsRegistry) { this.metricsRegistry = metricsRegistry; @@ -48,16 +97,81 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) { "Counter for the number of top level and nested bool search queries", UNIT ); + this.boostingCounter = 
metricsRegistry.createCounter(
+            "search.query.type.boost.count",
+            "Counter for the number of top level and nested boost search queries",
+            UNIT
+        );
+        this.constantScoreCounter = metricsRegistry.createCounter(
+            "search.query.type.constantscore.count",
+            "Counter for the number of top level and nested constant score search queries",
+            UNIT
+        );
+        this.disMaxCounter = metricsRegistry.createCounter(
+            "search.query.type.dismax.count",
+            "Counter for the number of top level and nested disjunction max search queries",
+            UNIT
+        );
+        this.distanceFeatureCounter = metricsRegistry.createCounter(
+            "search.query.type.distancefeature.count",
+            "Counter for the number of top level and nested distance feature search queries",
+            UNIT
+        );
+        this.existsCounter = metricsRegistry.createCounter(
+            "search.query.type.exists.count",
+            "Counter for the number of top level and nested exists search queries",
+            UNIT
+        );
+        this.fieldMaskingSpanCounter = metricsRegistry.createCounter(
+            "search.query.type.fieldmaskingspan.count",
+            "Counter for the number of top level and nested field masking span search queries",
+            UNIT
+        );
         this.functionScoreCounter = metricsRegistry.createCounter(
             "search.query.type.functionscore.count",
             "Counter for the number of top level and nested function score search queries",
             UNIT
         );
+        this.fuzzyCounter = metricsRegistry.createCounter(
+            "search.query.type.fuzzy.count",
+            "Counter for the number of top level and nested fuzzy search queries",
+            UNIT
+        );
+        this.geoBoundingBoxCounter = metricsRegistry.createCounter(
+            "search.query.type.geoboundingbox.count",
+            "Counter for the number of top level and nested geo bounding box queries",
+            UNIT
+        );
+        this.geoDistanceCounter = metricsRegistry.createCounter(
+            "search.query.type.geodistance.count",
+            "Counter for the number of top level and nested geo distance queries",
+            UNIT
+        );
+        this.geoPolygonCounter = metricsRegistry.createCounter(
+            "search.query.type.geopolygon.count",
+            "Counter for the number of top level and nested geo polygon queries",
+            UNIT
+        );
+        this.geoShapeCounter = metricsRegistry.createCounter(
+            "search.query.type.geoshape.count",
+            "Counter for the number of top level and nested geo shape queries",
+            UNIT
+        );
+        this.intervalCounter = metricsRegistry.createCounter(
+            "search.query.type.interval.count",
+            "Counter for the number of top level and nested interval queries",
+            UNIT
+        );
         this.matchCounter = metricsRegistry.createCounter(
             "search.query.type.match.count",
             "Counter for the number of top level and nested match search queries",
             UNIT
         );
+        this.matchallCounter = metricsRegistry.createCounter(
+            "search.query.type.matchall.count",
+            "Counter for the number of top level and nested match all search queries",
+            UNIT
+        );
         this.matchPhrasePrefixCounter = metricsRegistry.createCounter(
             "search.query.type.matchphrase.count",
             "Counter for the number of top level and nested match phrase prefix search queries",
@@ -73,7 +187,12 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
             "Counter for the number of top level and nested search queries that do not match any other categories",
             UNIT
         );
-        this.queryStringQueryCounter = metricsRegistry.createCounter(
+        this.prefixCounter = metricsRegistry.createCounter(
+            "search.query.type.prefix.count",
+            "Counter for the number of top level and nested search queries that match prefix queries",
+            UNIT
+        );
+        this.queryStringCounter = metricsRegistry.createCounter(
             "search.query.type.querystringquery.count",
             "Counter for the number of top level and nested queryStringQuery search queries",
             UNIT
@@ -83,11 +202,21 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
             "Counter for the number of top level and nested range search queries",
             UNIT
         );
-        this.regexCounter = metricsRegistry.createCounter(
+        this.regexpCounter = metricsRegistry.createCounter(
             "search.query.type.regex.count",
             "Counter for the number of top level and nested regex search queries",
             UNIT
         );
+        this.scriptCounter = metricsRegistry.createCounter(
+            "search.query.type.script.count",
+            "Counter for the number of top level and nested script search queries",
+            UNIT
+        );
+        this.simpleQueryStringCounter = metricsRegistry.createCounter(
+            "search.query.type.simplequerystring.count",
+            "Counter for the number of top level and nested simple query string search queries",
+            UNIT
+        );
         this.skippedCounter = metricsRegistry.createCounter(
             "search.query.type.skipped.count",
             "Counter for the number queries skipped due to error",
@@ -113,5 +242,51 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) {
             "Counter for the number of top level and nested wildcard search queries",
             UNIT
         );
+        this.numberOfInputFieldsCounter = metricsRegistry.createCounter(
+            "search.query.type.numberofinputfields.count",
+            "Counter for the number of input fields in the search queries",
+            UNIT
+        );
+        this.queryHandlers = new HashMap<>();
+        initializeQueryHandlers();
+    }
+
+    public void incrementCounter(QueryBuilder queryBuilder, int level) {
+        Counter counter = queryHandlers.get(queryBuilder.getClass());
+        if (counter != null) {
+            counter.add(1, Tags.create().addTag(LEVEL_TAG, level));
+        } else {
+            otherQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level));
+        }
+    }
+
+    private void initializeQueryHandlers() {
+
+        queryHandlers.put(BoolQueryBuilder.class, boolCounter);
+        queryHandlers.put(FunctionScoreQueryBuilder.class, functionScoreCounter);
+        queryHandlers.put(MatchQueryBuilder.class, matchCounter);
+        queryHandlers.put(MatchPhraseQueryBuilder.class, matchPhrasePrefixCounter);
+        queryHandlers.put(MultiMatchQueryBuilder.class, multiMatchCounter);
+        queryHandlers.put(QueryStringQueryBuilder.class, queryStringCounter);
+        queryHandlers.put(RangeQueryBuilder.class, rangeCounter);
+        queryHandlers.put(RegexpQueryBuilder.class, regexpCounter);
+        queryHandlers.put(TermQueryBuilder.class, termCounter);
+        queryHandlers.put(WildcardQueryBuilder.class, wildcardCounter);
+        queryHandlers.put(BoostingQueryBuilder.class, boostingCounter);
+        queryHandlers.put(ConstantScoreQueryBuilder.class, constantScoreCounter);
+        queryHandlers.put(DisMaxQueryBuilder.class, disMaxCounter);
+        queryHandlers.put(DistanceFeatureQueryBuilder.class, distanceFeatureCounter);
+        queryHandlers.put(ExistsQueryBuilder.class, existsCounter);
+        queryHandlers.put(FieldMaskingSpanQueryBuilder.class, fieldMaskingSpanCounter);
+        queryHandlers.put(FuzzyQueryBuilder.class, fuzzyCounter);
+        queryHandlers.put(GeoBoundingBoxQueryBuilder.class, geoBoundingBoxCounter);
+        queryHandlers.put(GeoDistanceQueryBuilder.class, geoDistanceCounter);
+        queryHandlers.put(GeoPolygonQueryBuilder.class, geoPolygonCounter);
+        queryHandlers.put(GeoShapeQueryBuilder.class, geoShapeCounter);
+        queryHandlers.put(IntervalQueryBuilder.class, intervalCounter);
+        queryHandlers.put(MatchAllQueryBuilder.class, matchallCounter);
+        queryHandlers.put(PrefixQueryBuilder.class, prefixCounter);
+        queryHandlers.put(ScriptQueryBuilder.class, scriptCounter);
+        queryHandlers.put(SimpleQueryStringBuilder.class, simpleQueryStringCounter);
     }
 }
diff --git 
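Design note on incrementCounter above: replacing the earlier instanceof chain with a Class-keyed map makes dispatch O(1), but HashMap.get(queryBuilder.getClass()) matches the exact runtime class only, so a subclass of a registered builder now lands in otherQueryCounter rather than its parent's counter. An equivalent form of the lookup, sketched as if it were an extra method on SearchQueryCounters:

    // Sketch: the same dispatch as incrementCounter, written with getOrDefault.
    // Note the exact-class match: subclasses of registered builders fall into "other".
    public void incrementCounterViaDefault(QueryBuilder queryBuilder, int level) {
        queryHandlers.getOrDefault(queryBuilder.getClass(), otherQueryCounter)
            .add(1, Tags.create().addTag(LEVEL_TAG, level));
    }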
a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java index 674363600b251..eceac7204b196 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java @@ -8,13 +8,11 @@ package org.opensearch.action.search; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.TotalHits; import org.opensearch.common.annotation.InternalApi; import java.util.EnumMap; import java.util.HashMap; -import java.util.List; import java.util.Locale; import java.util.Map; @@ -31,18 +29,14 @@ class SearchRequestContext { private TotalHits totalHits; private final EnumMap shardStats; - /** - * This constructor is for testing only - */ - SearchRequestContext() { - this(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger())); - } + private final SearchRequest searchRequest; - SearchRequestContext(SearchRequestOperationsListener searchRequestOperationsListener) { + SearchRequestContext(final SearchRequestOperationsListener searchRequestOperationsListener, final SearchRequest searchRequest) { this.searchRequestOperationsListener = searchRequestOperationsListener; this.absoluteStartNanos = System.nanoTime(); this.phaseTookMap = new HashMap<>(); this.shardStats = new EnumMap<>(ShardStatsFieldNames.class); + this.searchRequest = searchRequest; } SearchRequestOperationsListener getSearchRequestOperationsListener() { @@ -57,6 +51,14 @@ Map phaseTookMap() { return phaseTookMap; } + SearchResponse.PhaseTook getPhaseTook() { + if (searchRequest != null && searchRequest.isPhaseTook() != null && searchRequest.isPhaseTook()) { + return new SearchResponse.PhaseTook(phaseTookMap); + } else { + return null; + } + } + /** * Override absoluteStartNanos set in constructor. * For testing only diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java new file mode 100644 index 0000000000000..db487bf945889 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java @@ -0,0 +1,81 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.apache.logging.log4j.Logger; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * SearchRequestOperationsCompositeListenerFactory contains listeners registered to search requests, + * and is responsible for creating the {@link SearchRequestOperationsListener.CompositeListener} + * with all the listeners enabled at cluster-level and request-level. + * + * + * @opensearch.internal + */ +public final class SearchRequestOperationsCompositeListenerFactory { + private final List searchRequestListenersList; + + /** + * Create the SearchRequestOperationsCompositeListenerFactory and add multiple {@link SearchRequestOperationsListener} + * to the searchRequestListenersList. + * Those enabled listeners will be executed during each search request.
+ * + * @param listeners Multiple SearchRequestOperationsListener objects to add. + * @throws IllegalArgumentException if any input listener is null. + */ + public SearchRequestOperationsCompositeListenerFactory(final SearchRequestOperationsListener... listeners) { + searchRequestListenersList = new ArrayList<>(); + for (SearchRequestOperationsListener listener : listeners) { + if (listener == null) { + throw new IllegalArgumentException("listener must not be null"); + } + searchRequestListenersList.add(listener); + } + } + + /** + * Get searchRequestListenersList. + * + * @return List of SearchRequestOperationsListener + */ + public List getListeners() { + return searchRequestListenersList; + } + + /** + * Create the {@link SearchRequestOperationsListener.CompositeListener} + * with all the listeners enabled at cluster-level and request-level. + * + * @param searchRequest The SearchRequest object used to decide which request-level listeners to add based on states/flags + * @param logger Logger to be attached to the {@link SearchRequestOperationsListener.CompositeListener} + * @param perRequestListeners the per-request listeners that can be optionally added to the returned CompositeListener list. + * @return SearchRequestOperationsListener.CompositeListener + */ + public SearchRequestOperationsListener.CompositeListener buildCompositeListener( + final SearchRequest searchRequest, + final Logger logger, + final SearchRequestOperationsListener... perRequestListeners + ) { + final List searchListenersList = Stream.concat( + searchRequestListenersList.stream(), + Arrays.stream(perRequestListeners) + ) + .filter((searchRequestOperationsListener -> searchRequestOperationsListener.isEnabled(searchRequest))) + .collect(Collectors.toList()); + + return new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger); + } + +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java index 19ce0beb3c493..2a09cc084f79f 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java @@ -20,7 +20,16 @@ * @opensearch.internal */ @InternalApi -abstract class SearchRequestOperationsListener { +public abstract class SearchRequestOperationsListener { + private volatile boolean enabled; + + protected SearchRequestOperationsListener() { + this.enabled = true; + } + + protected SearchRequestOperationsListener(final boolean enabled) { + this.enabled = enabled; + } abstract void onPhaseStart(SearchPhaseContext context); @@ -32,6 +41,18 @@ void onRequestStart(SearchRequestContext searchRequestContext) {} void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + boolean isEnabled(SearchRequest searchRequest) { + return isEnabled(); + } + + boolean isEnabled() { + return enabled; + } + + protected void setEnabled(final boolean enabled) { + this.enabled = enabled; + } + /** * Holder of Composite Listeners * @@ -101,5 +122,9 @@ public void onRequestEnd(SearchPhaseContext context, SearchRequestContext search } } } + + public List getListeners() { + return listeners; + } } }
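buildCompositeListener concatenates the cluster-level listeners with any per-request ones and keeps only those whose isEnabled(searchRequest) returns true, so a disabled listener costs nothing on the hot path. A stripped-down sketch of that filter-then-compose step (plain Java; the listener type here is a stand-in, not the OpenSearch class):

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

interface RequestListener {
    boolean isEnabled(Object request);
    void onRequestEnd(Object request);
}

final class ListenerComposer {
    private final List<RequestListener> registered;

    ListenerComposer(RequestListener... listeners) {
        this.registered = Arrays.asList(listeners);
    }

    // Merge registered + per-request listeners, dropping disabled ones up front.
    List<RequestListener> buildActive(Object request, RequestListener... perRequest) {
        return Stream.concat(registered.stream(), Arrays.stream(perRequest))
            .filter(l -> l.isEnabled(request))
            .collect(Collectors.toList());
    }
}
```

diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java index a55cfd463a78f..7f25f9026f215 100644 ---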
a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java @@ -116,11 +116,11 @@ public SearchRequestSlowLog(ClusterService clusterService) { this.logger = logger; Loggers.setLevel(this.logger, SlowLogLevel.TRACE.name()); - this.warnThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_WARN_SETTING).nanos(); - this.infoThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_INFO_SETTING).nanos(); - this.debugThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_DEBUG_SETTING).nanos(); - this.traceThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_TRACE_SETTING).nanos(); - this.level = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_LEVEL); + this.setWarnThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_WARN_SETTING)); + this.setInfoThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_INFO_SETTING)); + this.setDebugThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_DEBUG_SETTING)); + this.setTraceThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_TRACE_SETTING)); + this.setLevel(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_LEVEL)); clusterService.getClusterSettings() .addSettingsUpdateConsumer(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_WARN_SETTING, this::setWarnThreshold); @@ -233,18 +233,22 @@ private static String escapeJson(String text) { void setWarnThreshold(TimeValue warnThreshold) { this.warnThreshold = warnThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setInfoThreshold(TimeValue infoThreshold) { this.infoThreshold = infoThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setDebugThreshold(TimeValue debugThreshold) { this.debugThreshold = debugThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setTraceThreshold(TimeValue traceThreshold) { this.traceThreshold = traceThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setLevel(SlowLogLevel level) { @@ -270,4 +274,8 @@ protected long getTraceThreshold() { SlowLogLevel getLevel() { return level; } + + private void setEnabledIfThresholdExceed() { + super.setEnabled(this.warnThreshold >= 0 || this.debugThreshold >= 0 || this.infoThreshold >= 0 || this.traceThreshold >= 0); + } }
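The slow log now derives its enabled flag from its thresholds: every threshold setter re-runs the check, and the listener stays enabled only while at least one threshold is non-negative, which lets the composite listener skip it entirely when slow logging is fully off. A tiny runnable sketch of that self-gating idea (illustrative stand-in, not the OpenSearch classes):

```java
import java.util.concurrent.TimeUnit;

// Listener that participates only while at least one threshold is active (>= 0).
class ThresholdGatedListener {
    private volatile long warnNanos = -1;
    private volatile long infoNanos = -1;
    private volatile boolean enabled;

    void setWarnThreshold(long millis) {
        warnNanos = millis < 0 ? -1 : TimeUnit.MILLISECONDS.toNanos(millis);
        refreshEnabled();
    }

    void setInfoThreshold(long millis) {
        infoNanos = millis < 0 ? -1 : TimeUnit.MILLISECONDS.toNanos(millis);
        refreshEnabled();
    }

    // Enabled iff any threshold is set; mirrors setEnabledIfThresholdExceed above.
    private void refreshEnabled() {
        enabled = warnNanos >= 0 || infoNanos >= 0;
    }

    boolean isEnabled() {
        return enabled;
    }
}
```

diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java index 262750849eaa9..88d599a0dcdaa 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java @@ -12,6 +12,8 @@ import org.opensearch.common.inject.Inject; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; import java.util.EnumMap; import java.util.Map; @@ -26,8 +28,18 @@ public final class SearchRequestStats extends SearchRequestOperationsListener { Map phaseStatsMap = new EnumMap<>(SearchPhaseName.class); + public static final String SEARCH_REQUEST_STATS_ENABLED_KEY = "search.request_stats_enabled"; + public static final Setting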
SEARCH_REQUEST_STATS_ENABLED = Setting.boolSetting( + SEARCH_REQUEST_STATS_ENABLED_KEY, + false, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + @Inject - public SearchRequestStats() { + public SearchRequestStats(ClusterSettings clusterSettings) { + this.setEnabled(clusterSettings.get(SEARCH_REQUEST_STATS_ENABLED)); + clusterSettings.addSettingsUpdateConsumer(SEARCH_REQUEST_STATS_ENABLED, this::setEnabled); for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) { phaseStatsMap.put(searchPhaseName, new StatsHolder()); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java index 054bd578cc56c..538e7fd54e2c3 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java @@ -110,7 +110,7 @@ final class SearchResponseMerger { /** * Add a search response to the list of responses to be merged together into one. * Merges currently happen at once when all responses are available and - * {@link #getMergedResponse(SearchResponse.Clusters)} )} is called. + * {@link #getMergedResponse(SearchResponse.Clusters, SearchRequestContext)} is called. * That may change in the future as it's possible to introduce incremental merges as responses come in if necessary. */ void add(SearchResponse searchResponse) { @@ -126,7 +126,7 @@ int numResponses() { * Returns the merged response. To be called once all responses have been added through {@link #add(SearchResponse)} * so that all responses are merged into a single one. */ - SearchResponse getMergedResponse(SearchResponse.Clusters clusters) { + SearchResponse getMergedResponse(SearchResponse.Clusters clusters, SearchRequestContext searchRequestContext) { // if the search is only across remote clusters, none of them are available, and all of them have skip_unavailable set to true, // we end up calling merge without anything to merge, we just return an empty search response if (searchResponses.size() == 0) { @@ -236,7 +236,7 @@ SearchResponse getMergedResponse(SearchResponse.Clusters clusters) { successfulShards, skippedShards, tookInMillis, - searchTimeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), shardFailures, clusters, null
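With getPhaseTook moved onto SearchRequestContext, per-phase took values are reported only when the request asked for them: an explicit request-level phase_took flag wins, otherwise the cluster-level default applies. A hedged sketch of that precedence rule, using a plain Boolean tri-state for the request flag (illustrative only, not the OpenSearch API):

```java
import java.util.Map;

// phase_took precedence: explicit request flag wins, else the cluster default applies.
final class PhaseTookResolver {
    static boolean shouldReportPhaseTook(Boolean requestFlag, boolean clusterDefault) {
        return requestFlag != null ? requestFlag : clusterDefault;
    }

    // Returns the per-phase timings only when reporting is on; null keeps them out of the response.
    static Map<String, Long> phaseTook(Map<String, Long> phaseTookMap, Boolean requestFlag, boolean clusterDefault) {
        return shouldReportPhaseTook(requestFlag, clusterDefault) ? phaseTookMap : null;
    }
}
```

diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java index 05f4308df74fa..842c10b700d24 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java @@ -98,7 +98,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -154,20 +153,12 @@ public class TransportSearchAction extends HandledTransportAction SEARCH_REQUEST_STATS_ENABLED = Setting.boolSetting( - SEARCH_REQUEST_STATS_ENABLED_KEY, - false, - Property.Dynamic, - Property.NodeScope - ); - public static final String SEARCH_PHASE_TOOK_ENABLED_KEY = "search.phase_took_enabled"; public static final Setting SEARCH_PHASE_TOOK_ENABLED = Setting.boolSetting( SEARCH_PHASE_TOOK_ENABLED_KEY, false, - Property.Dynamic, - Property.NodeScope + Setting.Property.Dynamic, + Setting.Property.NodeScope ); private final NodeClient client; @@ -181,14 +172,10 @@ public class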
TransportSearchAction extends HandledTransportAction) SearchRequest::new); this.client = client; @@ -224,12 +210,9 @@ public TransportSearchAction( this.indexNameExpressionResolver = indexNameExpressionResolver; this.namedWriteableRegistry = namedWriteableRegistry; this.searchPipelineService = searchPipelineService; - this.isRequestStatsEnabled = clusterService.getClusterSettings().get(SEARCH_REQUEST_STATS_ENABLED); - clusterService.getClusterSettings().addSettingsUpdateConsumer(SEARCH_REQUEST_STATS_ENABLED, this::setIsRequestStatsEnabled); - this.searchRequestStats = searchRequestStats; - this.searchRequestSlowLog = searchRequestSlowLog; this.metricsRegistry = metricsRegistry; this.searchQueryMetricsEnabled = clusterService.getClusterSettings().get(SEARCH_QUERY_METRICS_ENABLED_SETTING); + this.searchRequestOperationsCompositeListenerFactory = searchRequestOperationsCompositeListenerFactory; clusterService.getClusterSettings() .addSettingsUpdateConsumer(SEARCH_QUERY_METRICS_ENABLED_SETTING, this::setSearchQueryMetricsEnabled); } @@ -241,10 +224,6 @@ private void setSearchQueryMetricsEnabled(boolean searchQueryMetricsEnabled) { } } - private void setIsRequestStatsEnabled(boolean isRequestStatsEnabled) { - this.isRequestStatsEnabled = isRequestStatsEnabled; - } - private Map buildPerIndexAliasFilter( SearchRequest request, ClusterState clusterState, @@ -289,8 +268,6 @@ private Map resolveIndexBoosts(SearchRequest searchRequest, Clust } /** - * Listener to track request-level tookTime and phase tookTimes from the coordinator. - * * Search operations need two clocks. One clock is to fulfill real clock needs (e.g., resolving * "now" to an index name). Another clock is needed for measuring how long a search operation * took. These two uses are at odds with each other. There are many issues with using a real @@ -300,12 +277,10 @@ private Map resolveIndexBoosts(SearchRequest searchRequest, Clust * * @opensearch.internal */ - static final class SearchTimeProvider extends SearchRequestOperationsListener { - + static final class SearchTimeProvider { private final long absoluteStartMillis; private final long relativeStartNanos; private final LongSupplier relativeCurrentNanosProvider; - private boolean phaseTook = false; /** * Instantiates a new search time provider. 
The absolute start time is the real clock time @@ -331,43 +306,6 @@ long getAbsoluteStartMillis() { long buildTookInMillis() { return TimeUnit.NANOSECONDS.toMillis(relativeCurrentNanosProvider.getAsLong() - relativeStartNanos); } - - public void setPhaseTook(boolean phaseTook) { - this.phaseTook = phaseTook; - } - - SearchResponse.PhaseTook getPhaseTook() { - if (phaseTook) { - Map phaseTookMap = new HashMap<>(); - // Convert Map to Map for SearchResponse() - for (SearchPhaseName searchPhaseName : phaseStatsMap.keySet()) { - phaseTookMap.put(searchPhaseName.getName(), phaseStatsMap.get(searchPhaseName)); - } - return new SearchResponse.PhaseTook(phaseTookMap); - } else { - return null; - } - } - - Map phaseStatsMap = new EnumMap<>(SearchPhaseName.class); - - @Override - void onPhaseStart(SearchPhaseContext context) {} - - @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { - phaseStatsMap.put( - context.getCurrentPhase().getSearchPhaseName(), - TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - context.getCurrentPhase().getStartTimeInNanos()) - ); - } - - @Override - void onPhaseFailure(SearchPhaseContext context) {} - - public Long getPhaseTookTime(SearchPhaseName searchPhaseName) { - return phaseStatsMap.get(searchPhaseName); - } } @Override @@ -490,11 +428,12 @@ private void executeRequest( relativeStartNanos, System::nanoTime ); - - final List searchListenersList = createSearchListenerList(originalSearchRequest, timeProvider); - SearchRequestContext searchRequestContext = new SearchRequestContext( - new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger) - ); + if (originalSearchRequest.isPhaseTook() == null) { + originalSearchRequest.setPhaseTook(clusterService.getClusterSettings().get(SEARCH_PHASE_TOOK_ENABLED)); + } + SearchRequestOperationsListener.CompositeListener requestOperationsListeners = searchRequestOperationsCompositeListenerFactory + .buildCompositeListener(originalSearchRequest, logger); + SearchRequestContext searchRequestContext = new SearchRequestContext(requestOperationsListeners, originalSearchRequest); searchRequestContext.getSearchRequestOperationsListener().onRequestStart(searchRequestContext); PipelinedRequest searchRequest; @@ -599,7 +538,8 @@ private ActionListener buildRewriteListener( searchContext, searchAsyncActionProvider, searchRequestContext - ) + ), + searchRequestContext ); } else { AtomicInteger skippedClusters = new AtomicInteger(0); @@ -687,7 +627,8 @@ static void ccsRemoteReduce( RemoteClusterService remoteClusterService, ThreadPool threadPool, ActionListener listener, - BiConsumer> localSearchConsumer + BiConsumer> localSearchConsumer, + SearchRequestContext searchRequestContext ) { if (localIndices == null && remoteIndices.size() == 1) { @@ -729,7 +670,7 @@ public void onResponse(SearchResponse searchResponse) { searchResponse.getSuccessfulShards(), searchResponse.getSkippedShards(), timeProvider.buildTookInMillis(), - timeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), searchResponse.getShardFailures(), new SearchResponse.Clusters(1, 1, 0), searchResponse.pointInTimeId() @@ -775,7 +716,8 @@ public void onFailure(Exception e) { exceptions, searchResponseMerger, totalClusters, - listener + listener, + searchRequestContext ); Client remoteClusterClient = remoteClusterService.getRemoteClusterClient(threadPool, clusterAlias); remoteClusterClient.search(ccsSearchRequest, ccsListener); @@ -789,7 +731,8 @@ public void onFailure(Exception e) { exceptions, 
searchResponseMerger, totalClusters, - listener + listener, + searchRequestContext ); SearchRequest ccsLocalSearchRequest = SearchRequest.subSearchRequest( searchRequest, @@ -884,7 +827,8 @@ private static ActionListener createCCSListener( AtomicReference exceptions, SearchResponseMerger searchResponseMerger, int totalClusters, - ActionListener originalListener + ActionListener originalListener, + SearchRequestContext searchRequestContext ) { return new CCSActionListener( clusterAlias, @@ -906,7 +850,7 @@ SearchResponse createFinalResponse() { searchResponseMerger.numResponses(), skippedClusters.get() ); - return searchResponseMerger.getMergedResponse(clusters); + return searchResponseMerger.getMergedResponse(clusters, searchRequestContext); } }; } @@ -1244,35 +1188,6 @@ AbstractSearchAsyncAction asyncSearchAction( ); } - private List createSearchListenerList(SearchRequest searchRequest, SearchTimeProvider timeProvider) { - final List searchListenersList = new ArrayList<>(); - - if (isRequestStatsEnabled) { - searchListenersList.add(searchRequestStats); - } - - // phase_took is enabled with request param and/or cluster setting - Boolean phaseTookRequestParam = searchRequest.isPhaseTook(); - if (phaseTookRequestParam == null) { // check cluster setting only when request param is undefined - if (clusterService.getClusterSettings().get(TransportSearchAction.SEARCH_PHASE_TOOK_ENABLED)) { - timeProvider.setPhaseTook(true); - searchListenersList.add(timeProvider); - } - } else if (phaseTookRequestParam == true) { - timeProvider.setPhaseTook(true); - searchListenersList.add(timeProvider); - } - - if (searchRequestSlowLog.getWarnThreshold() >= 0 - || searchRequestSlowLog.getInfoThreshold() >= 0 - || searchRequestSlowLog.getDebugThreshold() >= 0 - || searchRequestSlowLog.getTraceThreshold() >= 0) { - searchListenersList.add(searchRequestSlowLog); - } - - return searchListenersList; - } - private AbstractSearchAsyncAction searchAsyncAction( SearchTask task, SearchRequest searchRequest, diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java index 47c6cad04343e..dac6ecaa13781 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java @@ -35,6 +35,7 @@ import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.common.Nullable; import org.opensearch.core.common.Strings; +import org.opensearch.rest.action.admin.indices.AliasesNotFoundException; /** * Individual operation to perform on the cluster state as part of an {@link IndicesAliasesRequest}. 
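The hunk below changes what happens when a remove-alias action names a missing alias with must_exist set: instead of a generic IllegalArgumentException, the action now raises AliasesNotFoundException, which callers can surface as a not-found error. A runnable miniature of the new decision shape (hypothetical types; only the branch structure mirrors the change):

```java
import java.util.Set;

class MustExistDemo {
    // Stand-in for the dedicated not-found exception this change switches to.
    static class AliasesNotFoundException extends RuntimeException {
        AliasesNotFoundException(String alias) {
            super("aliases [" + alias + "] missing");
        }
    }

    // Returns true if the alias was removed; false if absent and must_exist was not set.
    static boolean removeAlias(Set<String> aliases, String alias, Boolean mustExist) {
        if (!aliases.contains(alias)) {
            if (mustExist != null && mustExist) {
                throw new AliasesNotFoundException(alias);
            }
            return false; // silently a no-op unless the caller demanded existence
        }
        return aliases.remove(alias);
    }
}
```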
@@ -225,8 +226,8 @@ boolean removeIndex() { @Override boolean apply(NewAliasValidator aliasValidator, Metadata.Builder metadata, IndexMetadata index) { if (false == index.getAliases().containsKey(alias)) { - if (mustExist != null && mustExist.booleanValue()) { - throw new IllegalArgumentException("required alias [" + alias + "] does not exist"); + if (mustExist != null && mustExist) { + throw new AliasesNotFoundException(alias); } return false; } diff --git a/server/src/main/java/org/opensearch/common/cache/Cache.java b/server/src/main/java/org/opensearch/common/cache/Cache.java index 30e7c014a2ec0..d8aa4e93735e6 100644 --- a/server/src/main/java/org/opensearch/common/cache/Cache.java +++ b/server/src/main/java/org/opensearch/common/cache/Cache.java @@ -424,68 +424,74 @@ public V computeIfAbsent(K key, CacheLoader loader) throws ExecutionExcept } }); if (value == null) { - // we need to synchronize loading of a value for a given key; however, holding the segment lock while - // invoking load can lead to deadlock against another thread due to dependent key loading; therefore, we - // need a mechanism to ensure that load is invoked at most once, but we are not invoking load while holding - // the segment lock; to do this, we atomically put a future in the map that can load the value, and then - // get the value from this future on the thread that won the race to place the future into the segment map - CacheSegment segment = getCacheSegment(key); - CompletableFuture<Entry<K, V>> future; - CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>(); + value = compute(key, loader); + } + return value; + } - try (ReleasableLock ignored = segment.writeLock.acquire()) { - future = segment.map.putIfAbsent(key, completableFuture); - } + private V compute(K key, CacheLoader loader) throws ExecutionException { + long now = now(); + // we need to synchronize loading of a value for a given key; however, holding the segment lock while + // invoking load can lead to deadlock against another thread due to dependent key loading; therefore, we + // need a mechanism to ensure that load is invoked at most once, but we are not invoking load while holding + // the segment lock; to do this, we atomically put a future in the map that can load the value, and then + // get the value from this future on the thread that won the race to place the future into the segment map + CacheSegment segment = getCacheSegment(key); + CompletableFuture<Entry<K, V>> future; + CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>(); - BiFunction<? super Entry<K, V>, Throwable, ?
extends V> handler = (ok, ex) -> { - if (ok != null) { - try (ReleasableLock ignored = lruLock.acquire()) { - promote(ok, now); - } - return ok.value; - } else { - try (ReleasableLock ignored = segment.writeLock.acquire()) { - CompletableFuture<Entry<K, V>> sanity = segment.map.get(key); - if (sanity != null && sanity.isCompletedExceptionally()) { - segment.map.remove(key); - } - } - return null; - } - }; + try (ReleasableLock ignored = segment.writeLock.acquire()) { + future = segment.map.putIfAbsent(key, completableFuture); + } - CompletableFuture<V> completableValue; - if (future == null) { - future = completableFuture; - completableValue = future.handle(handler); - V loaded; - try { - loaded = loader.load(key); - } catch (Exception e) { - future.completeExceptionally(e); - throw new ExecutionException(e); - } - if (loaded == null) { - NullPointerException npe = new NullPointerException("loader returned a null value"); - future.completeExceptionally(npe); - throw new ExecutionException(npe); - } else { - future.complete(new Entry<>(key, loaded, now)); + BiFunction<? super Entry<K, V>, Throwable, ? extends V> handler = (ok, ex) -> { + if (ok != null) { + try (ReleasableLock ignored = lruLock.acquire()) { + promote(ok, now); } + return ok.value; } else { - completableValue = future.handle(handler); + try (ReleasableLock ignored = segment.writeLock.acquire()) { + CompletableFuture<Entry<K, V>> sanity = segment.map.get(key); + if (sanity != null && sanity.isCompletedExceptionally()) { + segment.map.remove(key); + } + } + return null; } + }; + CompletableFuture<V> completableValue; + if (future == null) { + future = completableFuture; + completableValue = future.handle(handler); + V loaded; try { - value = completableValue.get(); - // check to ensure the future hasn't been completed with an exception - if (future.isCompletedExceptionally()) { - future.get(); // call get to force the exception to be thrown for other concurrent callers - throw new IllegalStateException("the future was completed exceptionally but no exception was thrown"); - } - } catch (InterruptedException e) { - throw new IllegalStateException(e); + loaded = loader.load(key); + } catch (Exception e) { + future.completeExceptionally(e); + throw new ExecutionException(e); } + if (loaded == null) { + NullPointerException npe = new NullPointerException("loader returned a null value"); + future.completeExceptionally(npe); + throw new ExecutionException(npe); + } else { + future.complete(new Entry<>(key, loaded, now)); + } + } else { + completableValue = future.handle(handler); + } + V value; + try { + value = completableValue.get(); + // check to ensure the future hasn't been completed with an exception + if (future.isCompletedExceptionally()) { + future.get(); // call get to force the exception to be thrown for other concurrent callers + throw new IllegalStateException("the future was completed exceptionally but no exception was thrown"); + } + } catch (InterruptedException e) { + throw new IllegalStateException(e); } return value; }
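The extracted compute method keeps the core trick of the old inline block: the segment lock is held only long enough to publish a CompletableFuture, and the loader runs outside the lock on whichever thread won the putIfAbsent race; losers simply wait on the winner's future. A compact, runnable sketch of that at-most-once loading pattern over a plain ConcurrentHashMap (not the OpenSearch segment machinery):

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

final class OnceLoader<K, V> {
    private final ConcurrentHashMap<K, CompletableFuture<V>> inFlight = new ConcurrentHashMap<>();

    V computeIfAbsent(K key, Function<K, V> loader) {
        CompletableFuture<V> mine = new CompletableFuture<>();
        CompletableFuture<V> existing = inFlight.putIfAbsent(key, mine);
        if (existing != null) {
            return existing.join(); // lost the race: wait for the winner's load
        }
        try {
            V value = loader.apply(key); // load outside any map-level lock
            mine.complete(value);
            return value;
        } catch (RuntimeException e) {
            mine.completeExceptionally(e);
            inFlight.remove(key, mine); // evict the failed future so a later caller can retry
            throw e;
        }
    }
}
```

diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java new file mode 100644 index 0000000000000..c6ea5fca1a8fe --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.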
+ */ + +package org.opensearch.common.cache; + +/** + * Represents a cache interface. + * @param Type of key. + * @param Type of value. + * + * @opensearch.experimental + */ +public interface ICache { + V get(K key); + + void put(K key, V value); + + V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception; + + void invalidate(K key); + + void invalidateAll(); + + Iterable keys(); + + long count(); + + void refresh(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java b/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java new file mode 100644 index 0000000000000..57aa4aa39c782 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java @@ -0,0 +1,20 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache; + +/** + * Extends a cache loader with awareness of whether the data is loaded or not. + * @param Type of key. + * @param Type of value. + * + * @opensearch.internal + */ +public interface LoadAwareCacheLoader extends CacheLoader { + boolean isLoaded(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java new file mode 100644 index 0000000000000..c497c8dbb7ea9 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -0,0 +1,128 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.Cache; +import org.opensearch.common.cache.CacheBuilder; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; + +/** + * This variant of on-heap cache uses the OpenSearch custom cache implementation. + * @param Type of key. + * @param Type of value.
+ * + * @opensearch.experimental + */ +public class OpenSearchOnHeapCache implements StoreAwareCache, RemovalListener { + + private final Cache cache; + + private final StoreAwareCacheEventListener eventListener; + + public OpenSearchOnHeapCache(Builder builder) { + CacheBuilder cacheBuilder = CacheBuilder.builder() + .setMaximumWeight(builder.getMaxWeightInBytes()) + .weigher(builder.getWeigher()) + .removalListener(this); + if (builder.getExpireAfterAcess() != null) { + cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); + } + cache = cacheBuilder.build(); + this.eventListener = builder.getEventListener(); + } + + @Override + public V get(K key) { + V value = cache.get(key); + if (value != null) { + eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + } else { + eventListener.onMiss(key, CacheStoreType.ON_HEAP); + } + return value; + } + + @Override + public void put(K key, V value) { + cache.put(key, value); + eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + V value = cache.computeIfAbsent(key, key1 -> loader.load(key)); + if (!loader.isLoaded()) { + eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + } else { + eventListener.onMiss(key, CacheStoreType.ON_HEAP); + eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + } + return value; + } + + @Override + public void invalidate(K key) { + cache.invalidate(key); + } + + @Override + public void invalidateAll() { + cache.invalidateAll(); + } + + @Override + public Iterable keys() { + return cache.keys(); + } + + @Override + public long count() { + return cache.count(); + } + + @Override + public void refresh() { + cache.refresh(); + } + + @Override + public CacheStoreType getTierType() { + return CacheStoreType.ON_HEAP; + } + + @Override + public void onRemoval(RemovalNotification notification) { + eventListener.onRemoval( + new StoreAwareCacheRemovalNotification<>( + notification.getKey(), + notification.getValue(), + notification.getRemovalReason(), + CacheStoreType.ON_HEAP + ) + ); + } + + /** + * Builder object + * @param Type of key + * @param Type of value + */ + public static class Builder extends StoreAwareCacheBuilder { + + @Override + public StoreAwareCache build() { + return new OpenSearchOnHeapCache(this); + } + } +}
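computeIfAbsent above leans on LoadAwareCacheLoader.isLoaded() to tell a hit from a miss after the fact: if the loader never ran, the value was already cached. A small runnable sketch of that flag-the-load idea (plain Java stand-ins, not the OpenSearch types):

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

final class LoadAwareDemo {
    // Wraps a loader and records whether it actually executed.
    static final class TrackingLoader<K, V> implements Function<K, V> {
        private final Function<K, V> delegate;
        private boolean loaded;

        TrackingLoader(Function<K, V> delegate) {
            this.delegate = delegate;
        }

        @Override
        public V apply(K key) {
            loaded = true;
            return delegate.apply(key);
        }

        boolean isLoaded() {
            return loaded;
        }
    }

    public static void main(String[] args) {
        ConcurrentHashMap<String, String> cache = new ConcurrentHashMap<>();
        TrackingLoader<String, String> loader = new TrackingLoader<>(k -> k.toUpperCase());
        String v = cache.computeIfAbsent("key", loader);
        // loaded == true -> treat as a miss-then-cache; false -> a plain hit.
        System.out.println(v + " loaded=" + loader.isLoaded());
    }
}
```

diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java new file mode 100644 index 0000000000000..45ca48d94c140 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java @@ -0,0 +1,23 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * Represents a cache with a specific type of store like onHeap, disk, etc. + * @param Type of key. + * @param Type of value.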
+ * + * @opensearch.experimental + */ +public interface StoreAwareCache extends ICache { + CacheStoreType getTierType(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java new file mode 100644 index 0000000000000..492dbff3532a1 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java @@ -0,0 +1,33 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * Removal notification for store aware cache. + * @param Type of key. + * @param Type of value. + * + * @opensearch.internal + */ +public class StoreAwareCacheRemovalNotification extends RemovalNotification { + private final CacheStoreType cacheStoreType; + + public StoreAwareCacheRemovalNotification(K key, V value, RemovalReason removalReason, CacheStoreType cacheStoreType) { + super(key, value, removalReason); + this.cacheStoreType = cacheStoreType; + } + + public CacheStoreType getCacheStoreType() { + return cacheStoreType; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java new file mode 100644 index 0000000000000..4fbbbbfebfaa7 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * Represents a store aware cache value. + * @param Type of value. + * + * @opensearch.internal + */ +public class StoreAwareCacheValue { + private final V value; + private final CacheStoreType source; + + public StoreAwareCacheValue(V value, CacheStoreType source) { + this.value = value; + this.source = source; + } + + public V getValue() { + return value; + } + + public CacheStoreType getCacheStoreType() { + return source; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java new file mode 100644 index 0000000000000..fc5aa48aae90f --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java @@ -0,0 +1,73 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.builders; + +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.unit.TimeValue; + +import java.util.function.ToLongBiFunction; + +/** + * Builder for store aware cache. 
+ * @param Type of key. + * @param Type of value. + * + * @opensearch.internal + */ +public abstract class StoreAwareCacheBuilder { + + private long maxWeightInBytes; + + private ToLongBiFunction weigher; + + private TimeValue expireAfterAcess; + + private StoreAwareCacheEventListener eventListener; + + public StoreAwareCacheBuilder() {} + + public StoreAwareCacheBuilder setMaximumWeightInBytes(long sizeInBytes) { + this.maxWeightInBytes = sizeInBytes; + return this; + } + + public StoreAwareCacheBuilder setWeigher(ToLongBiFunction weigher) { + this.weigher = weigher; + return this; + } + + public StoreAwareCacheBuilder setExpireAfterAccess(TimeValue expireAfterAcess) { + this.expireAfterAcess = expireAfterAcess; + return this; + } + + public StoreAwareCacheBuilder setEventListener(StoreAwareCacheEventListener eventListener) { + this.eventListener = eventListener; + return this; + } + + public long getMaxWeightInBytes() { + return maxWeightInBytes; + } + + public TimeValue getExpireAfterAcess() { + return expireAfterAcess; + } + + public ToLongBiFunction getWeigher() { + return weigher; + } + + public StoreAwareCacheEventListener getEventListener() { + return eventListener; + } + + public abstract StoreAwareCache build(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java new file mode 100644 index 0000000000000..ac4590ae3bff7 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Base package for builders. + */ +package org.opensearch.common.cache.store.builders; diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java new file mode 100644 index 0000000000000..04c0825787b66 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java @@ -0,0 +1,20 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.enums; + +/** + * Cache store types in tiered cache. + * + * @opensearch.internal + */ +public enum CacheStoreType { + + ON_HEAP, + DISK; +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java new file mode 100644 index 0000000000000..7a4e0fa7201fd --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** Package related to tiered cache enums */ +package org.opensearch.common.cache.store.enums; diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java new file mode 100644 index 0000000000000..6d7e4b39aaf9f --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java @@ -0,0 +1,30 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.listeners; + +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * This can be used to listen to tiered caching events + * @param Type of key + * @param Type of value + * + * @opensearch.internal + */ +public interface StoreAwareCacheEventListener { + + void onMiss(K key, CacheStoreType cacheStoreType); + + void onRemoval(StoreAwareCacheRemovalNotification notification); + + void onHit(K key, V value, CacheStoreType cacheStoreType); + + void onCached(K key, V value, CacheStoreType cacheStoreType); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java new file mode 100644 index 0000000000000..c3222ca3ffb62 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Package related to tiered cache listeners */ +package org.opensearch.common.cache.store.listeners; diff --git a/server/src/main/java/org/opensearch/common/cache/store/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/package-info.java new file mode 100644 index 0000000000000..edc1ecd7d5e7a --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Base package for store aware caches. */ +package org.opensearch.common.cache.store; diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java new file mode 100644 index 0000000000000..8b432c9484aed --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -0,0 +1,268 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; +import org.opensearch.common.cache.store.StoreAwareCacheValue; +import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.util.concurrent.ReleasableLock; +import org.opensearch.common.util.iterable.Iterables; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Function; + +/** + * This cache spills over evicted items from the heap tier to the disk tier. All new items are first cached on heap, + * and items evicted from the on-heap cache are moved to the disk-based cache. If the disk-based cache also fills up, + * items are eventually evicted from it as well, which will result in a cache miss. + * + * @param Type of key + * @param Type of value + * + * @opensearch.experimental + */ +public class TieredSpilloverCache implements ICache, StoreAwareCacheEventListener { + + // TODO: Remove optional when diskCache implementation is integrated. + private final Optional<StoreAwareCache<K, V>> onDiskCache; + private final StoreAwareCache onHeapCache; + private final StoreAwareCacheEventListener listener; + ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); + ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); + ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); + + /** + * Maintains caching tiers in ascending order of cache latency. + */ + private final List<StoreAwareCache<K, V>> cacheList; + + TieredSpilloverCache(Builder builder) { + Objects.requireNonNull(builder.onHeapCacheBuilder, "onHeap cache builder can't be null"); + this.onHeapCache = builder.onHeapCacheBuilder.setEventListener(this).build(); + if (builder.onDiskCacheBuilder != null) { + this.onDiskCache = Optional.of(builder.onDiskCacheBuilder.setEventListener(this).build()); + } else { + this.onDiskCache = Optional.empty(); + } + this.listener = builder.listener; + this.cacheList = this.onDiskCache.map(diskTier -> Arrays.asList(this.onHeapCache, diskTier)).orElse(List.of(this.onHeapCache)); + } + + // Package private for testing + StoreAwareCache getOnHeapCache() { + return onHeapCache; + } + + // Package private for testing + Optional<StoreAwareCache<K, V>> getOnDiskCache() { + return onDiskCache; + } + + @Override + public V get(K key) { + StoreAwareCacheValue cacheValue = getValueFromTieredCache(true).apply(key); + if (cacheValue == null) { + return null; + } + return cacheValue.getValue(); + } + + @Override + public void put(K key, V value) { + try (ReleasableLock ignore = writeLock.acquire()) { + onHeapCache.put(key, value); + listener.onCached(key, value, CacheStoreType.ON_HEAP); + } + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + // We skip calling event listeners at this step because we do another get inside the computeIfAbsent below, + // where we might otherwise end up calling onMiss twice for a key not present in the onHeap cache.
+ // Similarly, we might end up calling both onMiss and onHit for a key when we receive concurrent + // requests for the same key, which requires loading only once. + StoreAwareCacheValue cacheValue = getValueFromTieredCache(false).apply(key); + if (cacheValue == null) { + // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside. + // This is needed as there can be many requests for the same key at the same time and we only want to load + // the value once. + V value = null; + try (ReleasableLock ignore = writeLock.acquire()) { + value = onHeapCache.computeIfAbsent(key, loader); + } + if (loader.isLoaded()) { + listener.onMiss(key, CacheStoreType.ON_HEAP); + onDiskCache.ifPresent(diskTier -> listener.onMiss(key, CacheStoreType.DISK)); + listener.onCached(key, value, CacheStoreType.ON_HEAP); + } else { + listener.onHit(key, value, CacheStoreType.ON_HEAP); + } + return value; + } + listener.onHit(key, cacheValue.getValue(), cacheValue.getCacheStoreType()); + if (cacheValue.getCacheStoreType().equals(CacheStoreType.DISK)) { + listener.onMiss(key, CacheStoreType.ON_HEAP); + } + return cacheValue.getValue(); + } + + @Override + public void invalidate(K key) { + // We invalidate the key from all caches even though it will be present in only one of them. + // We do this because we don't know where it is located. We could do a get from both and check, but that would + // also trigger a hit/miss listener event, so we ignore it for now. + try (ReleasableLock ignore = writeLock.acquire()) { + for (StoreAwareCache storeAwareCache : cacheList) { + storeAwareCache.invalidate(key); + } + } + } + + @Override + public void invalidateAll() { + try (ReleasableLock ignore = writeLock.acquire()) { + for (StoreAwareCache storeAwareCache : cacheList) { + storeAwareCache.invalidateAll(); + } + } + } + + /** + * Provides an iteration over both onHeap and disk keys. This is not protected from any mutations to the cache. + * @return An iterable over (onHeap + disk) keys + */ + @Override + public Iterable keys() { + Iterable onDiskKeysIterable; + if (onDiskCache.isPresent()) { + onDiskKeysIterable = onDiskCache.get().keys(); + } else { + onDiskKeysIterable = Collections::emptyIterator; + } + return Iterables.concat(onHeapCache.keys(), onDiskKeysIterable); + } + + @Override + public long count() { + long totalCount = 0; + for (StoreAwareCache storeAwareCache : cacheList) { + totalCount += storeAwareCache.count(); + } + return totalCount; + } + + @Override + public void refresh() { + try (ReleasableLock ignore = writeLock.acquire()) { + for (StoreAwareCache storeAwareCache : cacheList) { + storeAwareCache.refresh(); + } + } + } + + @Override + public void onMiss(K key, CacheStoreType cacheStoreType) { + // Misses for the tiered cache are tracked here directly.
+ } + + @Override + public void onRemoval(StoreAwareCacheRemovalNotification notification) { + if (RemovalReason.EVICTED.equals(notification.getRemovalReason()) + || RemovalReason.CAPACITY.equals(notification.getRemovalReason())) { + switch (notification.getCacheStoreType()) { + case ON_HEAP: + try (ReleasableLock ignore = writeLock.acquire()) { + onDiskCache.ifPresent(diskTier -> { diskTier.put(notification.getKey(), notification.getValue()); }); + } + onDiskCache.ifPresent( + diskTier -> listener.onCached(notification.getKey(), notification.getValue(), CacheStoreType.DISK) + ); + break; + default: + break; + } + } + listener.onRemoval(notification); + } + + @Override + public void onHit(K key, V value, CacheStoreType cacheStoreType) { + // Hits for the tiered cache are tracked here directly. + } + + @Override + public void onCached(K key, V value, CacheStoreType cacheStoreType) { + // onCached events for the tiered cache are tracked here directly. + } + + private Function<K, StoreAwareCacheValue<V>> getValueFromTieredCache(boolean triggerEventListener) { + return key -> { + try (ReleasableLock ignore = readLock.acquire()) { + for (StoreAwareCache storeAwareCache : cacheList) { + V value = storeAwareCache.get(key); + if (value != null) { + if (triggerEventListener) { + listener.onHit(key, value, storeAwareCache.getTierType()); + } + return new StoreAwareCacheValue<>(value, storeAwareCache.getTierType()); + } else { + if (triggerEventListener) { + listener.onMiss(key, storeAwareCache.getTierType()); + } + } + } + } + return null; + }; + } + + /** + * Builder object for tiered spillover cache. + * @param Type of key + * @param Type of value + */ + public static class Builder { + private StoreAwareCacheBuilder onHeapCacheBuilder; + private StoreAwareCacheBuilder onDiskCacheBuilder; + private StoreAwareCacheEventListener listener; + + public Builder() {} + + public Builder setOnHeapCacheBuilder(StoreAwareCacheBuilder onHeapCacheBuilder) { + this.onHeapCacheBuilder = onHeapCacheBuilder; + return this; + } + + public Builder setOnDiskCacheBuilder(StoreAwareCacheBuilder onDiskCacheBuilder) { + this.onDiskCacheBuilder = onDiskCacheBuilder; + return this; + } + + public Builder setListener(StoreAwareCacheEventListener listener) { + this.listener = listener; + return this; + } + + public TieredSpilloverCache build() { + return new TieredSpilloverCache<>(this); + } + } +}
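The spillover mechanics reduce to one rule: an eviction from the heap tier becomes a put into the disk tier, and reads check tiers in ascending order of latency. A compact runnable sketch of that rule, with a bounded LinkedHashMap standing in for the heap tier and a plain map for the disk tier (illustrative only, none of the OpenSearch plumbing):

```java
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

final class MiniTieredCache<K, V> {
    private final Map<K, V> diskTier = new HashMap<>();
    private final Map<K, V> heapTier;

    MiniTieredCache(int heapCapacity) {
        // LRU heap tier; evictions spill into the disk tier instead of disappearing.
        this.heapTier = new LinkedHashMap<K, V>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
                if (size() > heapCapacity) {
                    diskTier.put(eldest.getKey(), eldest.getValue());
                    return true;
                }
                return false;
            }
        };
    }

    void put(K key, V value) {
        heapTier.put(key, value);
    }

    // Check tiers in ascending order of latency: heap first, then disk.
    V get(K key) {
        V v = heapTier.get(key);
        return v != null ? v : diskTier.get(key);
    }
}
```

diff --git a/server/src/main/java/org/opensearch/common/cache/tier/package-info.java b/server/src/main/java/org/opensearch/common/cache/tier/package-info.java new file mode 100644 index 0000000000000..7ad81dbe3073c --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Base package for cache tier support.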
*/ +package org.opensearch.common.cache.tier; diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java index 5b521081ac63b..2c7b6b552b43f 100644 --- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java @@ -110,7 +110,7 @@ * @opensearch.internal */ public class Lucene { - public static final String LATEST_CODEC = "Lucene95"; + public static final String LATEST_CODEC = "Lucene99"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; diff --git a/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java b/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java index d6b2bb239b2a1..f9a87b9e74214 100644 --- a/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java +++ b/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java @@ -40,6 +40,8 @@ import org.opensearch.core.index.shard.ShardId; import java.io.IOException; +import java.util.Optional; +import java.util.UUID; /** * A {@link org.apache.lucene.index.FilterDirectoryReader} that exposes @@ -53,11 +55,14 @@ public final class OpenSearchDirectoryReader extends FilterDirectoryReader { private final ShardId shardId; private final FilterDirectoryReader.SubReaderWrapper wrapper; + private final DelegatingCacheHelper delegatingCacheHelper; + private OpenSearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) throws IOException { super(in, wrapper); this.wrapper = wrapper; this.shardId = shardId; + this.delegatingCacheHelper = new DelegatingCacheHelper(in.getReaderCacheHelper()); } /** @@ -70,7 +75,61 @@ public ShardId shardId() { @Override public CacheHelper getReaderCacheHelper() { // safe to delegate since this reader does not alter the index - return in.getReaderCacheHelper(); + return this.delegatingCacheHelper; + } + + public DelegatingCacheHelper getDelegatingCacheHelper() { + return this.delegatingCacheHelper; + } + + /** + * Wraps existing IndexReader cache helper which internally provides a way to wrap CacheKey. + * @opensearch.internal + */ + public class DelegatingCacheHelper implements CacheHelper { + private final CacheHelper cacheHelper; + private final DelegatingCacheKey serializableCacheKey; + + DelegatingCacheHelper(CacheHelper cacheHelper) { + this.cacheHelper = cacheHelper; + this.serializableCacheKey = new DelegatingCacheKey(Optional.ofNullable(cacheHelper).map(key -> getKey()).orElse(null)); + } + + @Override + public CacheKey getKey() { + return this.cacheHelper.getKey(); + } + + public DelegatingCacheKey getDelegatingCacheKey() { + return this.serializableCacheKey; + } + + @Override + public void addClosedListener(ClosedListener listener) { + this.cacheHelper.addClosedListener(listener); + } + } + + /** + * Wraps internal IndexReader.CacheKey and attaches a uniqueId to it which can eventually be used instead of the + * object itself for serialization purposes.
+ */ + public class DelegatingCacheKey { + private final CacheKey cacheKey; + private final String uniqueId; + + DelegatingCacheKey(CacheKey cacheKey) { + this.cacheKey = cacheKey; + this.uniqueId = UUID.randomUUID().toString(); + } + + public CacheKey getCacheKey() { + return this.cacheKey; + } + + public String getId() { + return uniqueId; + } } @Override diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index fa4b0f475edc5..277286ae1ff1b 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -36,6 +36,7 @@ import org.opensearch.action.admin.indices.close.TransportCloseIndexAction; import org.opensearch.action.search.CreatePitController; import org.opensearch.action.search.SearchRequestSlowLog; +import org.opensearch.action.search.SearchRequestStats; import org.opensearch.action.search.TransportSearchAction; import org.opensearch.action.support.AutoCreateIndex; import org.opensearch.action.support.DestructiveOperations; @@ -380,9 +381,9 @@ public void apply(Settings value, Settings current, Settings previous) { SearchService.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS, TransportSearchAction.SHARD_COUNT_LIMIT_SETTING, TransportSearchAction.SEARCH_CANCEL_AFTER_TIME_INTERVAL_SETTING, - TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED, - TransportSearchAction.SEARCH_PHASE_TOOK_ENABLED, TransportSearchAction.SEARCH_QUERY_METRICS_ENABLED_SETTING, + TransportSearchAction.SEARCH_PHASE_TOOK_ENABLED, + SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED, RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE, SniffConnectionStrategy.REMOTE_CONNECTIONS_PER_CLUSTER, RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING, diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java index d3285c379bcc4..e19f8e8370d5b 100644 --- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java @@ -11,9 +11,6 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.util.FeatureFlags; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; /** @@ -32,17 +29,12 @@ protected FeatureFlagSettings( super(settings, settingsSet, settingUpgraders, scope); } - public static final Set> BUILT_IN_FEATURE_FLAGS = Collections.unmodifiableSet( - new HashSet<>( - Arrays.asList( - FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL_SETTING, - FeatureFlags.EXTENSIONS_SETTING, - FeatureFlags.IDENTITY_SETTING, - FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING, - FeatureFlags.TELEMETRY_SETTING, - FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, - FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING - ) - ) + public static final Set> BUILT_IN_FEATURE_FLAGS = Set.of( + FeatureFlags.EXTENSIONS_SETTING, + FeatureFlags.IDENTITY_SETTING, + FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING, + FeatureFlags.TELEMETRY_SETTING, + FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, + FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING ); } diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java index c54772caa574b..d4ab161527cc0 100644 --- 
a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java +++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java @@ -20,12 +20,6 @@ * @opensearch.internal */ public class FeatureFlags { - /** - * Gates the visibility of the segment replication experimental features that allows users to test unreleased beta features. - */ - public static final String SEGMENT_REPLICATION_EXPERIMENTAL = - "opensearch.experimental.feature.segment_replication_experimental.enabled"; - /** * Gates the ability for Searchable Snapshots to read snapshots that are older than the * guaranteed backward compatibility for OpenSearch (one prior major version) on a best effort basis. @@ -105,12 +99,6 @@ public static boolean isEnabled(Setting featureFlag) { } } - public static final Setting<Boolean> SEGMENT_REPLICATION_EXPERIMENTAL_SETTING = Setting.boolSetting( - SEGMENT_REPLICATION_EXPERIMENTAL, - false, - Property.NodeScope - ); - public static final Setting<Boolean> EXTENSIONS_SETTING = Setting.boolSetting(EXTENSIONS, false, Property.NodeScope); public static final Setting<Boolean> IDENTITY_SETTING = Setting.boolSetting(IDENTITY, false, Property.NodeScope); diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java index 3da21a6777456..6580b0e0085ef 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java @@ -161,7 +161,7 @@ public StoredContext stashContext() { ); } - final Map<String, Object> transientHeaders = propagateTransients(context.transientHeaders); + final Map<String, Object> transientHeaders = propagateTransients(context.transientHeaders, context.isSystemContext); if (!transientHeaders.isEmpty()) { threadContextStruct = threadContextStruct.putTransient(transientHeaders); } @@ -182,7 +182,7 @@ public StoredContext stashContext() { public Writeable captureAsWriteable() { final ThreadContextStruct context = threadLocal.get(); return out -> { - final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders); + final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders, context.isSystemContext); context.writeTo(out, defaultHeader, propagatedHeaders); }; } @@ -245,7 +245,7 @@ public StoredContext newStoredContext(boolean preserveResponseHeaders, Collectio final Map<String, Object> newTransientHeaders = new HashMap<>(originalContext.transientHeaders); boolean transientHeadersModified = false; - final Map<String, Object> transientHeaders = propagateTransients(originalContext.transientHeaders); + final Map<String, Object> transientHeaders = propagateTransients(originalContext.transientHeaders, originalContext.isSystemContext); if (!transientHeaders.isEmpty()) { newTransientHeaders.putAll(transientHeaders); transientHeadersModified = true; @@ -322,7 +322,7 @@ public Supplier wrapRestorable(StoredContext storedContext) { @Override public void writeTo(StreamOutput out) throws IOException { final ThreadContextStruct context = threadLocal.get(); - final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders); + final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders, context.isSystemContext); context.writeTo(out, defaultHeader, propagatedHeaders); } @@ -534,7 +534,7 @@ boolean isDefaultContext() { * by the system itself rather than by a user action. */ public void markAsSystemContext() { - threadLocal.set(threadLocal.get().setSystemContext()); + threadLocal.set(threadLocal.get().setSystemContext(propagators)); } /** @@ -573,15 +573,15 @@ public static Map buildDefaultHeaders(Settings settings) { } } - private Map<String, Object> propagateTransients(Map<String, Object> source) { + private Map<String, Object> propagateTransients(Map<String, Object> source, boolean isSystemContext) { final Map<String, Object> transients = new HashMap<>(); - propagators.forEach(p -> transients.putAll(p.transients(source))); + propagators.forEach(p -> transients.putAll(p.transients(source, isSystemContext))); return transients; } - private Map<String, String> propagateHeaders(Map<String, Object> source) { + private Map<String, String> propagateHeaders(Map<String, Object> source, boolean isSystemContext) { final Map<String, String> headers = new HashMap<>(); - propagators.forEach(p -> headers.putAll(p.headers(source))); + propagators.forEach(p -> headers.putAll(p.headers(source, isSystemContext))); return headers; } @@ -603,11 +603,13 @@ private static final class ThreadContextStruct { // saving current warning headers' size not to recalculate the size with every new warning header private final long warningHeadersSize; - private ThreadContextStruct setSystemContext() { + private ThreadContextStruct setSystemContext(final List<ThreadContextStatePropagator> propagators) { if (isSystemContext) { return this; } - return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, persistentHeaders, true); + final Map<String, Object> transients = new HashMap<>(); + propagators.forEach(p -> transients.putAll(p.transients(transientHeaders, true))); + return new ThreadContextStruct(requestHeaders, responseHeaders, transients, persistentHeaders, true); } private ThreadContextStruct( diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java index dac70b0e8124e..e8c12ae13d5eb 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java @@ -22,15 +22,41 @@ public interface ThreadContextStatePropagator { /** * Returns the list of transient headers that need to be propagated from current context to new thread context. - * @param source current context transient headers + * + * @param source current context transient headers + * @return the list of transient headers that need to be propagated from current context to new thread context */ + @Deprecated(since = "2.12.0", forRemoval = true) Map<String, Object> transients(Map<String, Object> source); + /** + * Returns the list of transient headers that need to be propagated from current context to new thread context. + * + * @param source current context transient headers + * @param isSystemContext whether the propagation is for the system context. + * @return the list of transient headers that need to be propagated from current context to new thread context + */ + default Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) { + return transients(source); + } +
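The pair of overloads above follows the standard recipe for evolving a widely implemented interface: the new two-argument method gets a default body that delegates to the legacy single-argument method, so existing implementers keep compiling while the legacy method is deprecated for removal. A minimal self-contained sketch of the pattern (hypothetical names, not part of this diff):

```java
import java.util.Map;

// Sketch: evolving an interface without breaking existing implementations.
// Old implementers only ever see the single-argument method; new callers can
// pass the extra flag, which defaults to the legacy behavior.
interface StatePropagator {
    @Deprecated
    Map<String, Object> transients(Map<String, Object> source);

    default Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) {
        return transients(source); // legacy behavior unless an implementer overrides
    }
}
```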
/** * Returns the list of request headers that need to be propagated from current context to request. - * @param source current context headers + * + * @param source current context headers + * @return the list of request headers that need to be propagated from current context to request */ + @Deprecated(since = "2.12.0", forRemoval = true) Map<String, String> headers(Map<String, Object> source); + + /** + * Returns the list of request headers that need to be propagated from current context to request. + * + * @param source current context headers + * @param isSystemContext whether the propagation is for the system context. + * @return the list of request headers that need to be propagated from current context to request + */ + default Map<String, String> headers(Map<String, Object> source, boolean isSystemContext) { + return headers(source); + } } diff --git a/server/src/main/java/org/opensearch/index/IndexModule.java b/server/src/main/java/org/opensearch/index/IndexModule.java index 545623287bae8..6ac10a221d49e 100644 --- a/server/src/main/java/org/opensearch/index/IndexModule.java +++ b/server/src/main/java/org/opensearch/index/IndexModule.java @@ -161,7 +161,7 @@ public final class IndexModule { /** Which lucene file extensions to load with the mmap directory when using hybridfs store. This setting is ignored if {@link #INDEX_STORE_HYBRID_NIO_EXTENSIONS} is set. * This is an expert setting. - * @see Lucene File Extensions. + * @see Lucene File Extensions. * * @deprecated This setting will be removed in OpenSearch 3.x. Use {@link #INDEX_STORE_HYBRID_NIO_EXTENSIONS} instead. */ @@ -206,7 +206,7 @@ public Iterator> settings() { /** Which lucene file extensions to load with nio. All others will default to mmap. Takes precedence over {@link #INDEX_STORE_HYBRID_MMAP_EXTENSIONS}. * This is an expert setting. - * @see Lucene File Extensions. + * @see Lucene File Extensions. */ public static final Setting<List<String>> INDEX_STORE_HYBRID_NIO_EXTENSIONS = Setting.listSetting( "index.store.hybrid.nio.extensions", diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index 775fc88b504f5..67f38536a0d11 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec.Mode; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec.Mode; import org.opensearch.common.Nullable; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.IndexSettings; @@ -68,10 +68,10 @@ public CodecService(@Nullable MapperService mapperService, IndexSettings indexSe final MapBuilder<String, Codec> codecs = MapBuilder.newMapBuilder(); assert null != indexSettings; if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene95Codec()); - codecs.put(LZ4, new Lucene95Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene95Codec(Mode.BEST_COMPRESSION)); - codecs.put(ZLIB, new Lucene95Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene99Codec()); + codecs.put(LZ4, new Lucene99Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Mode.BEST_COMPRESSION)); + codecs.put(ZLIB, new Lucene99Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); codecs.put(LZ4, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index d3207557273a5..dc28ad2d6dc07 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ 
b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -37,7 +37,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.mapper.CompletionFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -53,7 +53,7 @@ * * @opensearch.internal */ -public class PerFieldMappingPostingFormatCodec extends Lucene95Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene99Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java index 6a79679cc2f09..a63e1d418e3ba 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparatorSource; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -150,6 +151,13 @@ public void disableSkipping() { this.enableSkipping = false; } + protected Pruning filterPruning(Pruning pruning) { + if (this.enableSkipping) { + return pruning; + } + return Pruning.NONE; + } +
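For context: Lucene 9.9 replaced the boolean enableSkipping argument of comparators with the org.apache.lucene.search.Pruning enum, which is why every comparator source below now threads a Pruning value through and funnels it via the filterPruning helper above. A minimal sketch of that mapping, assuming only the Lucene 9.9 Pruning enum:

```java
import org.apache.lucene.search.Pruning;

// Sketch: the old boolean "enableSkipping" expressed as the new Pruning enum.
// Disabling skipping now means forcing Pruning.NONE, whatever was requested.
final class SkippingToPruning {
    static Pruning filterPruning(boolean enableSkipping, Pruning requested) {
        return enableSkipping ? requested : Pruning.NONE;
    }
}
```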
/** * Simple wrapper class around a filter that matches parent documents * and a filter that matches child documents. For every root document R, diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 3aa4d9cb782ca..4c6eba2f0c7ec 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -38,6 +38,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.TermOrdValComparator; @@ -94,7 +95,7 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx protected void setScorer(Scorable scorer, LeafReaderContext context) {} @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed; @@ -105,7 +106,7 @@ public FieldComparator newComparator(String fieldname, int numHits, boolean e indexFieldData.getFieldName(), sortMissingLast, reversed, - enableSkipping + filterPruning(pruning) ) { @Override diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 4f9f05addc8c7..fd4fac3877f2f 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; @@ -98,11 +99,11 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, doubl protected void setScorer(Scorable scorer, LeafReaderContext context) {} @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final double dMissingValue = (Double) missingObject(missingValue, reversed); - return new DoubleComparator(numHits, fieldname, dMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new DoubleComparator(numHits, fieldname, dMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 3fb8b5c6ce393..b106ba268bcb9 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.FloatComparator; @@ -91,11 +92,11 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); - return new FloatComparator(numHits, fieldname, fMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new FloatComparator(numHits, fieldname, fMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new FloatLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java index e0227083f4181..e2e56dcb14fdf 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.util.BitSet; import org.opensearch.index.fielddata.FieldData; import org.opensearch.index.fielddata.IndexNumericFieldData; @@ -42,11 +43,11 @@ public HalfFloatValuesComparatorSource( } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); - return new HalfFloatComparator(numHits, fieldname, fMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new HalfFloatComparator(numHits, fieldname, fMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new HalfFloatLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java index 3a3fdb85c3415..8f540cc6ae9d9 100644 --- 
a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.IntComparator; import org.apache.lucene.util.BitSet; @@ -70,11 +71,11 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, int miss } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final int iMissingValue = (Integer) missingObject(missingValue, reversed); - return new IntComparator(numHits, fieldname, iMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new IntComparator(numHits, fieldname, iMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new IntLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 57568aa84fa5c..3666cd8d6dfea 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.BitSet; @@ -114,11 +115,11 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, long mis } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final long lMissingValue = (Long) missingObject(missingValue, reversed); - return new LongComparator(numHits, fieldname, lMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new LongComparator(numHits, fieldname, lMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java index 9d1edde1fe08f..3714561b63e44 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java +++ 
b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.opensearch.common.Nullable; @@ -88,11 +89,11 @@ public Object missingObject(Object missingValue, boolean reversed) { } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final BigInteger ulMissingValue = (BigInteger) missingObject(missingValue, reversed); - return new UnsignedLongComparator(numHits, fieldname, ulMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new UnsignedLongComparator(numHits, fieldname, ulMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new UnsignedLongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java index e1413fd9b4bbe..a9d9f6cb35fcb 100644 --- a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -330,7 +330,7 @@ public ContextMappings getContextMappings() { */ public static synchronized PostingsFormat postingsFormat() { if (postingsFormat == null) { - postingsFormat = new Completion90PostingsFormat(); + postingsFormat = new Completion99PostingsFormat(); } return postingsFormat; } diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index 8ebb007787828..9b8fa7eec37b9 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -46,6 +46,7 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.Assertions; @@ -149,13 +150,45 @@ public enum MergeReason { "index.mapping.depth.limit", 20L, 1, + Long.MAX_VALUE, + limit -> { + // Make sure XContent constraints are not exceeded (otherwise content processing will fail) + if (limit > XContentContraints.DEFAULT_MAX_DEPTH) { + throw new IllegalArgumentException( + "The provided value " + + 
limit + + " of the index setting 'index.mapping.depth.limit' exceeds per-JVM configured limit of " + + XContentContraints.DEFAULT_MAX_DEPTH + + ". Please change the setting value or increase per-JVM limit " + + "using '" + + XContentContraints.DEFAULT_MAX_DEPTH_PROPERTY + + "' system property." + ); + } + }, Property.Dynamic, Property.IndexScope ); public static final Setting INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING = Setting.longSetting( "index.mapping.field_name_length.limit", - Long.MAX_VALUE, + 50000, 1L, + Long.MAX_VALUE, + limit -> { + // Make sure XContent constraints are not exceeded (otherwise content processing will fail) + if (limit > XContentContraints.DEFAULT_MAX_NAME_LEN) { + throw new IllegalArgumentException( + "The provided value " + + limit + + " of the index setting 'index.mapping.field_name_length.limit' exceeds per-JVM configured limit of " + + XContentContraints.DEFAULT_MAX_NAME_LEN + + ". Please change the setting value or increase per-JVM limit " + + "using '" + + XContentContraints.DEFAULT_MAX_NAME_LEN_PROPERTY + + "' system property." + ); + } + }, Property.Dynamic, Property.IndexScope ); diff --git a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java index 1162689a54689..4e73d87b07b7a 100644 --- a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -54,7 +54,9 @@ * @opensearch.internal */ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - public static final ParseField SPAN_FIELD_MASKING_FIELD = new ParseField("span_field_masking", "field_masking_span"); + + public static final String NAME = "span_field_masking"; + public static final ParseField SPAN_FIELD_MASKING_FIELD = new ParseField(NAME, "field_masking_span"); private static final ParseField FIELD_FIELD = new ParseField("field"); private static final ParseField QUERY_FIELD = new ParseField("query"); diff --git a/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java b/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java index 6244fa647b042..b2e2ba8001b88 100644 --- a/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java +++ b/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.comparators.NumericComparator; import java.io.IOException; @@ -25,8 +26,8 @@ public class HalfFloatComparator extends NumericComparator { protected float topValue; protected float bottom; - public HalfFloatComparator(int numHits, String field, Float missingValue, boolean reverse, boolean enableSkipping) { - super(field, missingValue != null ? missingValue : 0.0f, reverse, enableSkipping, HalfFloatPoint.BYTES); + public HalfFloatComparator(int numHits, String field, Float missingValue, boolean reverse, Pruning pruning) { + super(field, missingValue != null ? 
missingValue : 0.0f, reverse, pruning, HalfFloatPoint.BYTES); values = new float[numHits]; } diff --git a/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java b/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java index d46b34fe97356..2b6bd9933e553 100644 --- a/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java +++ b/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.sandbox.document.BigIntegerPoint; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.comparators.NumericComparator; import org.opensearch.common.Numbers; @@ -23,8 +24,8 @@ public class UnsignedLongComparator extends NumericComparator { protected BigInteger topValue; protected BigInteger bottom; - public UnsignedLongComparator(int numHits, String field, BigInteger missingValue, boolean reverse, boolean enableSkipping) { - super(field, missingValue != null ? missingValue : Numbers.MIN_UNSIGNED_LONG_VALUE, reverse, enableSkipping, BigIntegerPoint.BYTES); + public UnsignedLongComparator(int numHits, String field, BigInteger missingValue, boolean reverse, Pruning pruning) { + super(field, missingValue != null ? missingValue : Numbers.MIN_UNSIGNED_LONG_VALUE, reverse, pruning, BigIntegerPoint.BYTES); values = new BigInteger[numHits]; } diff --git a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java index 629cea102a8b2..6d5c23274dbd6 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java @@ -51,7 +51,12 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.shard.IndexShard; import java.io.Closeable; import java.io.IOException; @@ -60,8 +65,10 @@ import java.util.HashSet; import java.util.Iterator; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentMap; +import java.util.function.Function; /** * The indices request cache allows to cache a shard level request stage responses, helping with improving @@ -103,13 +110,16 @@ public final class IndicesRequestCache implements RemovalListener registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final Set keysToClean = ConcurrentCollections.newConcurrentSet(); private final ByteSizeValue size; private final TimeValue expire; private final Cache cache; + private final Function> cacheEntityLookup; - IndicesRequestCache(Settings settings) { + IndicesRequestCache(Settings settings, Function> cacheEntityFunction) { this.size = INDICES_CACHE_QUERY_SIZE.get(settings); this.expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? 
INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; long sizeInBytes = size.getBytes(); @@ -121,6 +131,7 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequestCache.Key, BytesReference> + this.cacheEntityLookup = cacheEntityFunction; } @Override public void onRemoval(RemovalNotification<Key, BytesReference> notification) { - notification.getKey().entity.onRemoval(notification); + // If this event is for an old shard, we can safely ignore it: old + // shards are no longer tracked as part of the request cache. + cacheEntityLookup.apply(notification.getKey().shardId).ifPresent(entity -> entity.onRemoval(notification)); } BytesReference getOrCompute( - CacheEntity cacheEntity, + IndicesService.IndexShardCacheEntity cacheEntity, CheckedSupplier<BytesReference, IOException> loader, DirectoryReader reader, BytesReference cacheKey ) throws Exception { assert reader.getReaderCacheHelper() != null; - final Key key = new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey); + assert reader.getReaderCacheHelper() instanceof OpenSearchDirectoryReader.DelegatingCacheHelper; + + OpenSearchDirectoryReader.DelegatingCacheHelper delegatingCacheHelper = (OpenSearchDirectoryReader.DelegatingCacheHelper) reader + .getReaderCacheHelper(); + String readerCacheKeyId = delegatingCacheHelper.getDelegatingCacheKey().getId(); + assert readerCacheKeyId != null; + final Key key = new Key(((IndexShard) cacheEntity.getCacheIdentity()).shardId(), cacheKey, readerCacheKeyId); Loader cacheLoader = new Loader(cacheEntity, loader); BytesReference value = cache.computeIfAbsent(key, cacheLoader); if (cacheLoader.isLoaded()) { - key.entity.onMiss(); + cacheEntity.onMiss(); // see if it's the first time we see this reader, and make sure to register a cleanup key - CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getReaderCacheHelper().getKey()); + CleanupKey cleanupKey = new CleanupKey(cacheEntity, readerCacheKeyId); if (!registeredClosedListeners.containsKey(cleanupKey)) { Boolean previous = registeredClosedListeners.putIfAbsent(cleanupKey, Boolean.TRUE); if (previous == null) { @@ -159,7 +178,7 @@ BytesReference getOrCompute( } } } else { - key.entity.onHit(); + cacheEntity.onHit(); } return value; } @@ -170,9 +189,14 @@ BytesReference getOrCompute( * @param reader the reader to invalidate the cache entry for * @param cacheKey the cache key to invalidate */ - void invalidate(CacheEntity cacheEntity, DirectoryReader reader, BytesReference cacheKey) { + void invalidate(IndicesService.IndexShardCacheEntity cacheEntity, DirectoryReader reader, BytesReference cacheKey) { assert reader.getReaderCacheHelper() != null; - cache.invalidate(new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey)); + String readerCacheKeyId = null; + if (reader instanceof OpenSearchDirectoryReader) { + IndexReader.CacheHelper cacheHelper = ((OpenSearchDirectoryReader) reader).getDelegatingCacheHelper(); + readerCacheKeyId = ((OpenSearchDirectoryReader.DelegatingCacheHelper) cacheHelper).getDelegatingCacheKey().getId(); + } + cache.invalidate(new Key(((IndexShard) cacheEntity.getCacheIdentity()).shardId(), cacheKey, readerCacheKeyId)); } /** @@ -240,6 +264,7 @@ interface CacheEntity extends Accountable { * Called when this entity instance is removed */ void onRemoval(RemovalNotification<Key, BytesReference> notification); + } /** @@ -247,22 +272,26 @@ interface CacheEntity extends Accountable { * * @opensearch.internal */ - static class Key implements Accountable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); - - public final CacheEntity entity; // use as identity equality - public final IndexReader.CacheKey readerCacheKey; + static class Key implements Accountable, Writeable { + private final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); + public final ShardId shardId; // use as identity equality + public final String readerCacheKeyId; public final BytesReference value; - Key(CacheEntity entity, IndexReader.CacheKey readerCacheKey, BytesReference value) { - this.entity = entity; - this.readerCacheKey = Objects.requireNonNull(readerCacheKey); + Key(ShardId shardId, BytesReference value, String readerCacheKeyId) { + this.shardId = shardId; this.value = value; + this.readerCacheKeyId = Objects.requireNonNull(readerCacheKeyId); + } + + Key(StreamInput in) throws IOException { + this.shardId = in.readOptionalWriteable(ShardId::new); + this.readerCacheKeyId = in.readOptionalString(); + this.value = in.readBytesReference(); } @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + entity.ramBytesUsed() + value.length(); + return BASE_RAM_BYTES_USED + shardId.getBaseRamBytesUsed() + value.length(); } @Override @@ -276,28 +305,35 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Key key = (Key) o; - if (Objects.equals(readerCacheKey, key.readerCacheKey) == false) return false; - if (!entity.getCacheIdentity().equals(key.entity.getCacheIdentity())) return false; + if (!Objects.equals(readerCacheKeyId, key.readerCacheKeyId)) return false; + if (!shardId.equals(key.shardId)) return false; if (!value.equals(key.value)) return false; return true; } @Override public int hashCode() { - int result = entity.getCacheIdentity().hashCode(); - result = 31 * result + readerCacheKey.hashCode(); + int result = shardId.hashCode(); + result = 31 * result + readerCacheKeyId.hashCode(); result = 31 * result + value.hashCode(); return result; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalWriteable(shardId); + out.writeOptionalString(readerCacheKeyId); + out.writeBytesReference(value); + } }
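Because Key now implements Writeable, it can round-trip through the stream serialization defined by writeTo and the Key(StreamInput) constructor, which is what makes the key usable beyond the local heap for the tiered-caching work. A hedged usage sketch (assumes same-package access, since Key is package-private, plus org.opensearch.common.io.stream.BytesStreamOutput and java.io.IOException; not part of this diff):

```java
// Hypothetical round trip: serialize a Key and read back an equal copy.
static Key roundTrip(Key key) throws IOException {
    BytesStreamOutput out = new BytesStreamOutput();
    key.writeTo(out);                            // shardId, readerCacheKeyId, value
    return new Key(out.bytes().streamInput());   // equals() and hashCode() match
}
```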
private class CleanupKey implements IndexReader.ClosedListener { final CacheEntity entity; - final IndexReader.CacheKey readerCacheKey; + final String readerCacheKeyId; - private CleanupKey(CacheEntity entity, IndexReader.CacheKey readerCacheKey) { + private CleanupKey(CacheEntity entity, String readerCacheKeyId) { this.entity = entity; - this.readerCacheKey = readerCacheKey; + this.readerCacheKeyId = readerCacheKeyId; } @Override @@ -315,7 +351,7 @@ public boolean equals(Object o) { return false; } CleanupKey that = (CleanupKey) o; - if (Objects.equals(readerCacheKey, that.readerCacheKey) == false) return false; + if (!Objects.equals(readerCacheKeyId, that.readerCacheKeyId)) return false; if (!entity.getCacheIdentity().equals(that.entity.getCacheIdentity())) return false; return true; } @@ -323,11 +359,14 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = entity.getCacheIdentity().hashCode(); - result = 31 * result + Objects.hashCode(readerCacheKey); + result = 31 * result + Objects.hashCode(readerCacheKeyId); return result; } } + /** + * Logic to clean up the in-memory cache. + */ synchronized void cleanCache() { final Set<CleanupKey> currentKeysToClean = new HashSet<>(); final Set<Object> currentFullClean = new HashSet<>(); @@ -336,9 +375,9 @@ synchronized void cleanCache() { for (Iterator<CleanupKey> iterator = keysToClean.iterator(); iterator.hasNext();) { CleanupKey cleanupKey = iterator.next(); iterator.remove(); - if (cleanupKey.readerCacheKey == null || cleanupKey.entity.isOpen() == false) { + if (cleanupKey.readerCacheKeyId == null || !cleanupKey.entity.isOpen()) { // null indicates full cleanup, as does a closed shard - currentFullClean.add(cleanupKey.entity.getCacheIdentity()); + currentFullClean.add(((IndexShard) cleanupKey.entity.getCacheIdentity()).shardId()); } else { currentKeysToClean.add(cleanupKey); } @@ -346,16 +385,18 @@ if (!currentKeysToClean.isEmpty() || !currentFullClean.isEmpty()) { for (Iterator<Key> iterator = cache.keys().iterator(); iterator.hasNext();) { Key key = iterator.next(); - if (currentFullClean.contains(key.entity.getCacheIdentity())) { + if (currentFullClean.contains(key.shardId)) { iterator.remove(); } else { - if (currentKeysToClean.contains(new CleanupKey(key.entity, key.readerCacheKey))) { + // If we reach here, an open shard should still be available on the node. + if (currentKeysToClean.contains( + new CleanupKey(cacheEntityLookup.apply(key.shardId).orElse(null), key.readerCacheKeyId) + )) { iterator.remove(); } } } } - cache.refresh(); } diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 5c3beaf8509bd..db5b93f073b03 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -409,7 +409,13 @@ public IndicesService( this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS)); this.analysisRegistry = analysisRegistry; this.indexNameExpressionResolver = indexNameExpressionResolver; - this.indicesRequestCache = new IndicesRequestCache(settings); + this.indicesRequestCache = new IndicesRequestCache(settings, (shardId -> { + IndexService indexService = this.indices.get(shardId.getIndex().getUUID()); + if (indexService == null) { + return Optional.empty(); + } + return Optional.of(new IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); this.indicesQueryCache = new IndicesQueryCache(settings); this.mapperRegistry = mapperRegistry; this.namedWriteableRegistry = namedWriteableRegistry; @@ -1744,7 +1750,6 @@ private BytesReference cacheShardLevelResult( BytesReference cacheKey, CheckedConsumer<StreamOutput, IOException> loader ) throws Exception { - IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard); CheckedSupplier<BytesReference, IOException> supplier = () -> { /* BytesStreamOutput allows to pass the expected size but by default uses * BigArrays.PAGE_SIZE_IN_BYTES which is 16k. A common cached result ie.
@@ -1761,7 +1766,7 @@ private BytesReference cacheShardLevelResult( return out.bytes(); } }; - return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey); + return indicesRequestCache.getOrCompute(new IndexShardCacheEntity(shard), supplier, reader, cacheKey); } /** @@ -1769,11 +1774,12 @@ private BytesReference cacheShardLevelResult( * * @opensearch.internal */ - static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity { + public static class IndexShardCacheEntity extends AbstractIndexShardCacheEntity { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IndexShardCacheEntity.class); private final IndexShard indexShard; - protected IndexShardCacheEntity(IndexShard indexShard) { + public IndexShardCacheEntity(IndexShard indexShard) { this.indexShard = indexShard; } diff --git a/server/src/main/java/org/opensearch/ingest/IngestDocument.java b/server/src/main/java/org/opensearch/ingest/IngestDocument.java index e0de0a9488ad9..10e9e64db561e 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/opensearch/ingest/IngestDocument.java @@ -33,6 +33,7 @@ package org.opensearch.ingest; import org.opensearch.core.common.Strings; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.VersionType; import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IndexFieldMapper; @@ -752,6 +753,7 @@ public Map getSourceAndMetadata() { @SuppressWarnings("unchecked") public static Map deepCopyMap(Map source) { + CollectionUtils.ensureNoSelfReferences(source, "IngestDocument: Self reference present in object."); return (Map) deepCopy(source); } diff --git a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java index e417a2eaa7cf4..600ba5b5a92d8 100644 --- a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -40,6 +40,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; @@ -77,7 +78,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { this.reverseMuls = new int[numFields]; for (int i = 0; i < numFields; i++) { SortField sortField = sort.getSort()[i]; - FieldComparator fieldComparator = sortField.getComparator(1, false); + FieldComparator fieldComparator = sortField.getComparator(1, Pruning.NONE); @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) fieldComparator; comparator.setTopValue(after.fields[i]); diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 4cbf8dc191a9d..8510122c39fcb 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -46,6 +46,8 @@ import org.opensearch.action.admin.cluster.snapshots.status.TransportNodesSnapshotsStatus; import org.opensearch.action.search.SearchExecutionStatsCollector; import org.opensearch.action.search.SearchPhaseController; +import org.opensearch.action.search.SearchRequestOperationsCompositeListenerFactory; +import 
org.opensearch.action.search.SearchRequestOperationsListener; import org.opensearch.action.search.SearchRequestSlowLog; import org.opensearch.action.search.SearchRequestStats; import org.opensearch.action.search.SearchTransportService; @@ -783,7 +785,7 @@ protected Node( threadPool ); - final SearchRequestStats searchRequestStats = new SearchRequestStats(); + final SearchRequestStats searchRequestStats = new SearchRequestStats(clusterService.getClusterSettings()); final SearchRequestSlowLog searchRequestSlowLog = new SearchRequestSlowLog(clusterService); remoteStoreStatsTrackerFactory = new RemoteStoreStatsTrackerFactory(clusterService, settings); @@ -879,6 +881,17 @@ protected Node( ) .collect(Collectors.toList()); + // register all standard SearchRequestOperationsListeners with the SearchRequestOperationsCompositeListenerFactory + final SearchRequestOperationsCompositeListenerFactory searchRequestOperationsCompositeListenerFactory = + new SearchRequestOperationsCompositeListenerFactory( + Stream.concat( + Stream.of(searchRequestStats, searchRequestSlowLog), + pluginComponents.stream() + .filter(p -> p instanceof SearchRequestOperationsListener) + .map(p -> (SearchRequestOperationsListener) p) + ).toArray(SearchRequestOperationsListener[]::new) + ); + ActionModule actionModule = new ActionModule( settings, clusterModule.getIndexNameExpressionResolver(), @@ -1275,6 +1288,7 @@ protected Node( b.bind(RemoteClusterStateService.class).toProvider(() -> remoteClusterStateService); b.bind(PersistedStateRegistry.class).toInstance(persistedStateRegistry); b.bind(SegmentReplicationStatsTracker.class).toInstance(segmentReplicationStatsTracker); + b.bind(SearchRequestOperationsCompositeListenerFactory.class).toInstance(searchRequestOperationsCompositeListenerFactory); }); injector = modules.createInjector(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java index 2e7c4659bcb00..223be3ba2d1ae 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java @@ -90,7 +90,7 @@ static class Entry { protected PackedLongValues.Builder docDeltasBuilder; protected PackedLongValues.Builder bucketsBuilder; protected long maxBucket = -1; - protected boolean finished = false; + protected boolean finished; protected LongHash selectedBuckets; /** @@ -101,6 +101,9 @@ static class Entry { public BestBucketsDeferringCollector(SearchContext context, boolean isGlobal) { this.searchContext = context; this.isGlobal = isGlobal; + // The IndexSearcher does not make a postCollection call when there are no segments, + // so in that case initialize the collector as finished.
+ this.finished = context.searcher().getLeafContexts().isEmpty(); } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index 36f94dc833a0b..822b8a6c4b118 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -423,8 +424,8 @@ public int hashCode() { } @Override - public FieldComparator getComparator(int numHits, boolean enableSkipping) { - return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), false) { + public FieldComparator getComparator(int numHits, Pruning pruning) { + return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java index 0e773291881cf..b8f9406ff55b9 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java @@ -225,6 +225,7 @@ public int hashCode() { protected final int requiredSize; protected final long minDocCount; protected final TermsAggregator.BucketCountThresholds bucketCountThresholds; + private boolean hasSliceLevelDocCountError = false; /** * Creates a new {@link InternalTerms} @@ -299,9 +300,7 @@ private BucketOrder getReduceOrder(List aggregations) { private long getDocCountError(InternalTerms terms, ReduceContext reduceContext) { int size = terms.getBuckets().size(); - // doc_count_error is always computed at the coordinator based on the buckets returned by the shards. This should be 0 during the - // shard level reduce as no buckets are being pruned at this stage. - if (reduceContext.isSliceLevel() || size == 0 || size < terms.getShardSize() || isKeyOrder(terms.order)) { + if (size == 0 || size < terms.getShardSize() || isKeyOrder(terms.order)) { return 0; } else if (InternalOrder.isCountDesc(terms.order)) { if (terms.getDocCountError() > 0) { @@ -398,6 +397,12 @@ public InternalAggregation reduce(List aggregations, Reduce for (InternalAggregation aggregation : aggregations) { @SuppressWarnings("unchecked") InternalTerms terms = (InternalTerms) aggregation; + // For Concurrent Segment Search the aggregation will have a computed doc count error coming from the shards. 
+ // We use the existence of this doc count error to determine whether the doc count error originated at the slice level, + // and if so we maintain the doc count error for the single-shard case at the coordinator level. + if (aggregations.size() == 1 && terms.getDocCountError() > 0) { + hasSliceLevelDocCountError = true; + } if (referenceTerms == null && aggregation.getClass().equals(UnmappedTerms.class) == false) { referenceTerms = terms; } @@ -500,7 +505,11 @@ For backward compatibility, we disable the merge sort and use ({@link InternalTe if (sumDocCountError == -1) { docCountError = -1; } else { - docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + if (hasSliceLevelDocCountError) { + docCountError = sumDocCountError; + } else { + docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + } } // Shards must return buckets sorted by key, so we apply the sort here in shard level reduce @@ -512,7 +521,7 @@ For backward compatibility, we disable the merge sort and use ({@link InternalTe @Override protected B reduceBucket(List<B> buckets, ReduceContext context) { - assert buckets.size() > 0; + assert !buckets.isEmpty(); long docCount = 0; // For the per term doc count error we add up the errors from the // shards that did not respond with the term. To do this we add up @@ -523,7 +532,7 @@ protected B reduceBucket(List buckets, ReduceContext context) { for (B bucket : buckets) { docCount += bucket.getDocCount(); if (docCountError != -1) { - if (bucket.showDocCountError() == false || bucket.getDocCountError() == -1) { + if (bucket.showDocCountError() == false) { docCountError = -1; } else { docCountError += bucket.getDocCountError(); diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index b042f3cf41d61..403b0b545c113 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -102,6 +102,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { * The interval at which we check for search cancellation when we cannot use * a {@link CancellableBulkScorer}. See {@link #intersectScorerAndBitSet}. */ + private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11; private AggregatedDfs aggregatedDfs; diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java index cc43f4e5d79fb..02837da64dafd 100644 --- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java @@ -86,8 +86,6 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; -import static org.opensearch.search.aggregations.bucket.BucketUtils.suggestShardSideQueueSize; /** * This class encapsulates the state needed to execute a search.
It holds a reference to the * shards point in time snapshot (IndexReader / ContextIndexSearcher) and allows passing on @@ -410,11 +408,10 @@ public boolean shouldUseConcurrentSearch() { * Returns local bucket count thresholds based on concurrent segment search status */ public LocalBucketCountThresholds asLocalBucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { - if (shouldUseConcurrentSearch()) { - return new LocalBucketCountThresholds(0, suggestShardSideQueueSize(bucketCountThresholds.getShardSize())); - } else { - return new LocalBucketCountThresholds(bucketCountThresholds.getShardMinDocCount(), bucketCountThresholds.getShardSize()); - } + return new LocalBucketCountThresholds( + shouldUseConcurrentSearch() ? 0 : bucketCountThresholds.getShardMinDocCount(), + bucketCountThresholds.getShardSize() + ); } /** diff --git a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java index c4bb54de2eedd..0499bba3245c6 100644 --- a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; import org.apache.lucene.util.BitSet; @@ -734,8 +735,8 @@ private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) th } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { - return new DoubleComparator(numHits, null, null, reversed, enableSkipping) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { + return new DoubleComparator(numHits, null, null, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java b/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java index bded2417ba6c1..10cc832fdb684 100644 --- a/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java +++ b/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.comparators.NumericComparator; @@ -44,13 +45,13 @@ public SortedWiderNumericSortField(String field, Type type, boolean reverse) { * Creates and return a comparator, which always converts Numeric to double * and compare to support multi type comparison between numeric values * @param numHits number of top hits the queue will store - * @param enableSkipping true if the comparator can skip documents via {@link + * @param pruning controls how the comparator skips documents via {@link * LeafFieldComparator#competitiveIterator()} * @return NumericComparator */ @Override - public FieldComparator getComparator(int numHits, boolean 
enableSkipping) { - return new NumericComparator<Number>(getField(), (Number) getMissingValue(), getReverse(), enableSkipping, Double.BYTES) { + public FieldComparator<?> getComparator(int numHits, Pruning pruning) { + return new NumericComparator<Number>(getField(), (Number) getMissingValue(), getReverse(), pruning, Double.BYTES) { @Override public int compare(int slot1, int slot2) { throw new UnsupportedOperationException(); diff --git a/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java b/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java index ed111b34f048f..99559e45aaaee 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java +++ b/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java @@ -20,7 +20,9 @@ * Propagates TASK_ID across thread contexts */ public class TaskThreadContextStatePropagator implements ThreadContextStatePropagator { + @Override + @SuppressWarnings("removal") public Map<String, Object> transients(Map<String, Object> source) { final Map<String, Object> transients = new HashMap<>(); @@ -32,7 +34,18 @@ public Map<String, Object> transients(Map<String, Object> source) { } @Override + public Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) { + return transients(source); + } + + @Override + @SuppressWarnings("removal") public Map<String, String> headers(Map<String, Object> source) { return Collections.emptyMap(); } + + @Override + public Map<String, String> headers(Map<String, Object> source, boolean isSystemContext) { + return headers(source); + } } diff --git a/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java b/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java index 863f56d9fbe94..908164d1935a7 100644 --- a/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java +++ b/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java @@ -12,6 +12,7 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.concurrent.ThreadContextStatePropagator; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -50,20 +51,29 @@ public void put(String key, Span span) { } @Override + @SuppressWarnings("removal") public Map<String, Object> transients(Map<String, Object> source) { final Map<String, Object> transients = new HashMap<>(); - if (source.containsKey(CURRENT_SPAN)) { final SpanReference current = (SpanReference) source.get(CURRENT_SPAN); if (current != null) { transients.put(CURRENT_SPAN, new SpanReference(current.getSpan())); } } - return transients; } @Override + public Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) { + if (isSystemContext) { + return Collections.emptyMap(); + } else { + return transients(source); + } + } + + @Override + @SuppressWarnings("removal") public Map<String, String> headers(Map<String, Object> source) { final Map<String, String> headers = new HashMap<>(); @@ -77,6 +87,11 @@ public Map<String, String> headers(Map<String, Object> source) { return headers; } + @Override + public Map<String, String> headers(Map<String, Object> source, boolean isSystemContext) { + return headers(source); + } + Span getCurrentSpan(String key) { SpanReference currentSpanRef = threadContext.getTransient(key); return (currentSpanRef == null) ?
null : currentSpanRef.getSpan(); diff --git a/server/src/main/java/org/opensearch/transport/TransportService.java b/server/src/main/java/org/opensearch/transport/TransportService.java index a1697b1898eeb..d50266d8c9e4a 100644 --- a/server/src/main/java/org/opensearch/transport/TransportService.java +++ b/server/src/main/java/org/opensearch/transport/TransportService.java @@ -868,19 +868,10 @@ public final void sendRequest( final TransportRequestOptions options, final TransportResponseHandler handler ) { - if (connection == localNodeConnection) { - // See please https://github.com/opensearch-project/OpenSearch/issues/10291 - sendRequestAsync(connection, action, request, options, handler); - } else { - final Span span = tracer.startSpan(SpanBuilder.from(action, connection)); - try (SpanScope spanScope = tracer.withSpanInScope(span)) { - TransportResponseHandler traceableTransportResponseHandler = TraceableTransportResponseHandler.create( - handler, - span, - tracer - ); - sendRequestAsync(connection, action, request, options, traceableTransportResponseHandler); - } + final Span span = tracer.startSpan(SpanBuilder.from(action, connection)); + try (SpanScope spanScope = tracer.withSpanInScope(span)) { + TransportResponseHandler traceableTransportResponseHandler = TraceableTransportResponseHandler.create(handler, span, tracer); + sendRequestAsync(connection, action, request, options, traceableTransportResponseHandler); } } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java index b8ab5c935fa34..a5ca08f141560 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -44,6 +44,8 @@ import org.opensearch.cluster.service.ClusterStateStats; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.metrics.OperationStats; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.indices.breaker.AllCircuitBreakerStats; @@ -961,7 +963,12 @@ public void apply(String action, AdmissionControlActionType admissionControlActi private static NodeIndicesStats getNodeIndicesStats(boolean remoteStoreStats) { NodeIndicesStats indicesStats = null; if (remoteStoreStats) { - indicesStats = new NodeIndicesStats(new CommonStats(CommonStatsFlags.ALL), new HashMap<>(), new SearchRequestStats()); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + indicesStats = new NodeIndicesStats( + new CommonStats(CommonStatsFlags.ALL), + new HashMap<>(), + new SearchRequestStats(clusterSettings) + ); RemoteSegmentStats remoteSegmentStats = indicesStats.getSegments().getRemoteSegmentStats(); remoteSegmentStats.addUploadBytesStarted(10L); remoteSegmentStats.addUploadBytesSucceeded(10L); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java b/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java index 8ba8d226715ed..d463782a70506 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java 
@@ -241,6 +241,7 @@ public void testParseRemove() throws IOException { String[] indices = generateRandomStringArray(10, 5, false, false); String[] aliases = generateRandomStringArray(10, 5, false, false); XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); + boolean mustExist = randomBoolean(); b.startObject(); { b.startObject("remove"); @@ -255,6 +256,9 @@ public void testParseRemove() throws IOException { } else { b.field("alias", aliases[0]); } + if (mustExist) { + b.field("must_exist", true); + } } b.endObject(); } @@ -265,6 +269,9 @@ public void testParseRemove() throws IOException { assertEquals(AliasActions.Type.REMOVE, action.actionType()); assertThat(action.indices(), equalTo(indices)); assertThat(action.aliases(), equalTo(aliases)); + if (mustExist) { + assertThat(action.mustExist(), equalTo(true)); + } } } diff --git a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java index e17fbab32a12e..76129341fc9a2 100644 --- a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java @@ -32,12 +32,15 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.opensearch.action.OriginalIndices; import org.opensearch.action.support.IndicesOptions; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.routing.GroupShardsIterator; import org.opensearch.common.UUIDs; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.AtomicArray; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.set.Sets; @@ -175,7 +178,7 @@ private AbstractSearchAsyncAction createAction( results, request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override protected SearchPhase getNextPhase(final SearchPhaseResults results, SearchPhaseContext context) { @@ -328,7 +331,8 @@ public void testSendSearchResponseDisallowPartialFailures() { } public void testOnPhaseFailureAndVerifyListeners() { - SearchRequestStats testListener = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); final List requestOperationListeners = new ArrayList<>(List.of(testListener)); SearchQueryThenFetchAsyncAction action = createSearchQueryThenFetchAsyncAction(requestOperationListeners); @@ -591,7 +595,8 @@ public void onFailure(Exception e) { } public void testOnPhaseListenersWithQueryAndThenFetchType() throws InterruptedException { - SearchRequestStats testListener = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); final List requestOperationListeners = new ArrayList<>(List.of(testListener)); long delay = (randomIntBetween(1, 5)); @@ -640,7 +645,8 @@ public void testOnPhaseListenersWithQueryAndThenFetchType() throws 
InterruptedEx } public void testOnPhaseListenersWithDfsType() throws InterruptedException { - SearchRequestStats testListener = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); final List requestOperationListeners = new ArrayList<>(List.of(testListener)); SearchDfsQueryThenFetchAsyncAction searchDfsQueryThenFetchAsyncAction = createSearchDfsQueryThenFetchAsyncAction( @@ -710,7 +716,10 @@ private SearchDfsQueryThenFetchAsyncAction createSearchDfsQueryThenFetchAsyncAct null, task, SearchResponse.Clusters.EMPTY, - new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger)) + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger), + searchRequest + ) ); } @@ -760,7 +769,10 @@ private SearchQueryThenFetchAsyncAction createSearchQueryThenFetchAsyncAction( null, task, SearchResponse.Clusters.EMPTY, - new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger)) + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger), + searchRequest + ) ) { @Override ShardSearchFailure[] buildShardFailures() { diff --git a/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java b/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java index 8042a7e296869..4f929a71429a6 100644 --- a/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java +++ b/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; @@ -264,7 +265,7 @@ private Object[] newDateNanoArray(String... 
values) { private TopFieldDocs createTopDocs(SortField sortField, int totalHits, Object[] values) { FieldDoc[] fieldDocs = new FieldDoc[values.length]; - FieldComparator<?> cmp = sortField.getComparator(1, false); + FieldComparator<?> cmp = sortField.getComparator(1, Pruning.NONE); for (int i = 0; i < values.length; i++) { fieldDocs[i] = new FieldDoc(i, Float.NaN, new Object[] { values[i] }); } diff --git a/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 4ed4797efe604..56dcf66d5607d 100644 --- a/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -31,6 +31,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.BytesRef; import org.opensearch.Version; import org.opensearch.action.OriginalIndices; @@ -137,7 +138,10 @@ public void run() throws IOException { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -229,7 +233,10 @@ public void run() throws IOException { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -320,7 +327,10 @@ public void sendCanMatch( new ArraySearchPhaseResults<>(iter.size()), randomIntBetween(1, 32), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ) { @Override @@ -348,7 +358,10 @@ protected void executePhaseOnShard( } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -433,7 +446,10 @@ public void run() { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -533,7 +549,10 @@ public void run() { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); diff --git a/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java index 7b4fa1d8387df..af7adc4e58fb8 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java @@ -31,6 +31,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.opensearch.Version; import org.opensearch.action.OriginalIndices; import org.opensearch.cluster.ClusterState; @@ -61,6 +62,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set;
import java.util.concurrent.ConcurrentHashMap; @@ -136,7 +138,7 @@ public void testSkipSearchShards() throws InterruptedException { new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override @@ -255,7 +257,7 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override @@ -373,7 +375,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { TestSearchResponse response = new TestSearchResponse(); @@ -496,7 +498,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { TestSearchResponse response = new TestSearchResponse(); @@ -610,7 +612,7 @@ public void testAllowPartialResults() throws InterruptedException { new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override protected void executePhaseOnShard( diff --git a/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java b/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java index a2e301143d694..17fa124890158 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java @@ -34,16 +34,19 @@ import java.util.Arrays; -import org.mockito.Mockito; +import org.mockito.ArgumentCaptor; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public final class SearchQueryCategorizerTests extends OpenSearchTestCase { + private static final String MULTI_TERMS_AGGREGATION = "multi_terms"; + private MetricsRegistry metricsRegistry; private SearchQueryCategorizer searchQueryCategorizer; @@ -71,7 +74,20 @@ public void testAggregationsQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d)); + verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d), any(Tags.class)); + + // capture the arguments passed to the aggCounter.add method + ArgumentCaptor valueCaptor = ArgumentCaptor.forClass(Double.class); + 
ArgumentCaptor tagsCaptor = ArgumentCaptor.forClass(Tags.class); + + // Verify that aggCounter.add was called with the expected arguments + verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(valueCaptor.capture(), tagsCaptor.capture()); + + double actualValue = valueCaptor.getValue(); + String actualTag = (String) tagsCaptor.getValue().getTagsMap().get("type"); + + assertEquals(1.0d, actualValue, 0.0001); + assertEquals(MULTI_TERMS_AGGREGATION, actualTag); } public void testBoolQuery() { @@ -81,8 +97,8 @@ public void testBoolQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); } public void testFunctionScoreQuery() { @@ -92,7 +108,7 @@ public void testFunctionScoreQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.functionScoreCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.functionScoreCounter).add(eq(1.0d), any(Tags.class)); } public void testMatchQuery() { @@ -102,7 +118,7 @@ public void testMatchQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); } public void testMatchPhraseQuery() { @@ -112,7 +128,7 @@ public void testMatchPhraseQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchPhrasePrefixCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchPhrasePrefixCounter).add(eq(1.0d), any(Tags.class)); } public void testMultiMatchQuery() { @@ -122,7 +138,7 @@ public void testMultiMatchQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.multiMatchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.multiMatchCounter).add(eq(1.0d), any(Tags.class)); } public void testOtherQuery() { @@ -136,8 +152,8 @@ public void testOtherQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.otherQueryCounter, times(2)).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.otherQueryCounter, times(1)).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); } public void testQueryStringQuery() { @@ -148,7 +164,7 @@ public void testQueryStringQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.queryStringQueryCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.queryStringCounter).add(eq(1.0d), any(Tags.class)); } public void testRangeQuery() { @@ -160,7 +176,7 @@ public void testRangeQuery() { searchQueryCategorizer.categorize(sourceBuilder); - 
Mockito.verify(searchQueryCategorizer.searchQueryCounters.rangeCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.rangeCounter).add(eq(1.0d), any(Tags.class)); } public void testRegexQuery() { @@ -169,7 +185,7 @@ public void testRegexQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.regexCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.regexpCounter).add(eq(1.0d), any(Tags.class)); } public void testSortQuery() { @@ -180,8 +196,8 @@ public void testSortQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.sortCounter, times(2)).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.sortCounter, times(2)).add(eq(1.0d), any(Tags.class)); } public void testTermQuery() { @@ -191,7 +207,7 @@ public void testTermQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); } public void testWildcardQuery() { @@ -201,7 +217,7 @@ public void testWildcardQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.wildcardCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.wildcardCounter).add(eq(1.0d), any(Tags.class)); } public void testComplexQuery() { @@ -219,10 +235,10 @@ public void testComplexQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.regexCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d)); + verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.regexpCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d), any(Tags.class)); } } diff --git a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java index a8c0c43ac5080..faf6f86c69c27 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -32,6 +32,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.SortField; import 
org.apache.lucene.search.TopFieldDocs; @@ -63,6 +64,7 @@ import org.opensearch.transport.Transport; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; @@ -215,7 +217,10 @@ public void sendExecuteQuery( null, task, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ) { @Override protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java new file mode 100644 index 0000000000000..78c5ba4412c68 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java @@ -0,0 +1,131 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.List; + +public class SearchRequestOperationsCompositeListenerFactoryTests extends OpenSearchTestCase { + public void testAddAndGetListeners() { + SearchRequestOperationsListener testListener = createTestSearchRequestOperationsListener(); + SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory( + testListener + ); + assertEquals(1, requestListeners.getListeners().size()); + assertEquals(testListener, requestListeners.getListeners().get(0)); + } + + public void testStandardListenersEnabled() { + SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener(); + SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener(); + testListener1.setEnabled(false); + SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory( + testListener1, + testListener2 + ); + SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); + SearchRequest searchRequest = new SearchRequest().source(source); + SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener( + searchRequest, + logger + ); + List listeners = compositeListener.getListeners(); + assertEquals(1, listeners.size()); + assertEquals(testListener2, listeners.get(0)); + assertEquals(2, requestListeners.getListeners().size()); + assertEquals(testListener1, requestListeners.getListeners().get(0)); + assertEquals(testListener2, requestListeners.getListeners().get(1)); + } + + public void testStandardListenersAndPerRequestListener() { + SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener(); + SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory( + testListener1 + ); + SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener(); + testListener1.setEnabled(true); + 
testListener2.setEnabled(true); + SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); + SearchRequest searchRequest = new SearchRequest().source(source); + searchRequest.setPhaseTook(true); + SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener( + searchRequest, + logger, + testListener2 + ); + List listeners = compositeListener.getListeners(); + assertEquals(2, listeners.size()); + assertEquals(testListener1, listeners.get(0)); + assertEquals(testListener2, listeners.get(1)); + assertEquals(1, requestListeners.getListeners().size()); + assertEquals(testListener1, requestListeners.getListeners().get(0)); + } + + public void testStandardListenersDisabledAndPerRequestListener() { + SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener(); + testListener1.setEnabled(false); + SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory( + testListener1 + ); + SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener(); + SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); + SearchRequest searchRequest = new SearchRequest().source(source); + SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener( + searchRequest, + logger, + testListener2 + ); + List listeners = compositeListener.getListeners(); + assertEquals(1, listeners.size()); + assertEquals(testListener2, listeners.get(0)); + assertEquals(1, requestListeners.getListeners().size()); + assertEquals(testListener1, requestListeners.getListeners().get(0)); + assertFalse(requestListeners.getListeners().get(0).isEnabled()); + } + + public void testStandardListenerAndPerRequestListenerDisabled() { + SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener(); + SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory( + testListener1 + ); + testListener1.setEnabled(true); + SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener(); + testListener2.setEnabled(false); + + SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); + SearchRequest searchRequest = new SearchRequest().source(source); + searchRequest.setPhaseTook(false); + SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener( + searchRequest, + logger, + testListener2 + ); + List listeners = compositeListener.getListeners(); + assertEquals(1, listeners.size()); + assertEquals(testListener1, listeners.get(0)); + assertEquals(1, requestListeners.getListeners().size()); + assertEquals(testListener1, requestListeners.getListeners().get(0)); + } + + public SearchRequestOperationsListener createTestSearchRequestOperationsListener() { + return new SearchRequestOperationsListener() { + @Override + void onPhaseStart(SearchPhaseContext context) {} + + @Override + void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + + @Override + void onPhaseFailure(SearchPhaseContext context) {} + }; + } +} diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java index 
58a4c4a4e555d..0f737e00478cb 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java +++ b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java @@ -8,6 +8,10 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; + +import java.util.List; + /** * Helper interface to access package protected {@link SearchRequestOperationsListener} from test cases. */ @@ -17,6 +21,12 @@ default void onPhaseStart(SearchRequestOperationsListener listener, SearchPhaseC } default void onPhaseEnd(SearchRequestOperationsListener listener, SearchPhaseContext context) { - listener.onPhaseEnd(context, new SearchRequestContext()); + listener.onPhaseEnd( + context, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); } } diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java index e23f08c9415eb..f009988ffae17 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java @@ -104,6 +104,7 @@ public void testMultipleSlowLoggersUseSingleLog4jLogger() { new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null ); + SearchRequestOperationsCompositeListenerFactory searchRequestListeners = new SearchRequestOperationsCompositeListenerFactory(); SearchRequestSlowLog searchRequestSlowLog2 = new SearchRequestSlowLog(clusterService2); int numberOfLoggersAfter = context.getLoggers().size(); @@ -175,7 +176,8 @@ public void testConcurrentOnRequestEnd() throws InterruptedException { ArrayList searchRequestContexts = new ArrayList<>(); for (int i = 0; i < numRequests; i++) { SearchRequestContext searchRequestContext = new SearchRequestContext( - new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger) + new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger), + searchRequest ); searchRequestContext.setAbsoluteStartNanos((i < numRequestsLogged) ? 
0 : System.nanoTime()); searchRequestContexts.add(searchRequestContext); @@ -204,7 +206,10 @@ public void testSearchRequestSlowLogHasJsonFields_EmptySearchRequestContext() th SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); SearchRequest searchRequest = new SearchRequest().source(source); SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest); - SearchRequestContext searchRequestContext = new SearchRequestContext(); + SearchRequestContext searchRequestContext = new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ); SearchRequestSlowLog.SearchRequestSlowLogMessage p = new SearchRequestSlowLog.SearchRequestSlowLogMessage( searchPhaseContext, 10, @@ -225,7 +230,10 @@ public void testSearchRequestSlowLogHasJsonFields_NotEmptySearchRequestContext() SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); SearchRequest searchRequest = new SearchRequest().source(source); SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest); - SearchRequestContext searchRequestContext = new SearchRequestContext(); + SearchRequestContext searchRequestContext = new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ); searchRequestContext.updatePhaseTookMap(SearchPhaseName.FETCH.getName(), 10L); searchRequestContext.updatePhaseTookMap(SearchPhaseName.QUERY.getName(), 50L); searchRequestContext.updatePhaseTookMap(SearchPhaseName.EXPAND.getName(), 5L); @@ -251,7 +259,10 @@ public void testSearchRequestSlowLogHasJsonFields_PartialContext() throws IOExce SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); SearchRequest searchRequest = new SearchRequest().source(source); SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest); - SearchRequestContext searchRequestContext = new SearchRequestContext(); + SearchRequestContext searchRequestContext = new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ); searchRequestContext.updatePhaseTookMap(SearchPhaseName.FETCH.getName(), 10L); searchRequestContext.updatePhaseTookMap(SearchPhaseName.QUERY.getName(), 50L); searchRequestContext.updatePhaseTookMap(SearchPhaseName.EXPAND.getName(), 5L); @@ -277,7 +288,10 @@ public void testSearchRequestSlowLogSearchContextPrinterToLog() throws IOExcepti SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); SearchRequest searchRequest = new SearchRequest().source(source); SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest); - SearchRequestContext searchRequestContext = new SearchRequestContext(); + SearchRequestContext searchRequestContext = new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ); searchRequestContext.updatePhaseTookMap(SearchPhaseName.FETCH.getName(), 10L); searchRequestContext.updatePhaseTookMap(SearchPhaseName.QUERY.getName(), 50L); searchRequestContext.updatePhaseTookMap(SearchPhaseName.EXPAND.getName(), 5L); diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java index 
93cf77933fdd5..377ccebbfd418 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java @@ -8,9 +8,13 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchTestCase; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Phaser; @@ -22,7 +26,8 @@ public class SearchRequestStatsTests extends OpenSearchTestCase { public void testSearchRequestPhaseFailure() { - SearchRequestStats testRequestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings); SearchPhaseContext ctx = mock(SearchPhaseContext.class); SearchPhase mockSearchPhase = mock(SearchPhase.class); when(ctx.getCurrentPhase()).thenReturn(mockSearchPhase); @@ -37,7 +42,8 @@ public void testSearchRequestPhaseFailure() { } public void testSearchRequestStats() { - SearchRequestStats testRequestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings); SearchPhaseContext ctx = mock(SearchPhaseContext.class); SearchPhase mockSearchPhase = mock(SearchPhase.class); @@ -50,7 +56,13 @@ public void testSearchRequestStats() { long startTime = System.nanoTime() - TimeUnit.MILLISECONDS.toNanos(tookTimeInMillis); when(mockSearchPhase.getStartTimeInNanos()).thenReturn(startTime); assertEquals(1, testRequestStats.getPhaseCurrent(searchPhaseName)); - testRequestStats.onPhaseEnd(ctx, new SearchRequestContext()); + testRequestStats.onPhaseEnd( + ctx, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertEquals(0, testRequestStats.getPhaseCurrent(searchPhaseName)); assertEquals(1, testRequestStats.getPhaseTotal(searchPhaseName)); assertThat(testRequestStats.getPhaseMetric(searchPhaseName), greaterThanOrEqualTo(tookTimeInMillis)); @@ -58,7 +70,8 @@ public void testSearchRequestStats() { } public void testSearchRequestStatsOnPhaseStartConcurrently() throws InterruptedException { - SearchRequestStats testRequestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings); int numTasks = randomIntBetween(5, 50); Thread[] threads = new Thread[numTasks * SearchPhaseName.values().length]; Phaser phaser = new Phaser(numTasks * SearchPhaseName.values().length + 1); @@ -85,7 +98,8 @@ public void testSearchRequestStatsOnPhaseStartConcurrently() throws InterruptedE } public void testSearchRequestStatsOnPhaseEndConcurrently() throws InterruptedException { - SearchRequestStats testRequestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings); int numTasks = randomIntBetween(5, 50); Thread[] threads = new 
Thread[numTasks * SearchPhaseName.values().length]; Phaser phaser = new Phaser(numTasks * SearchPhaseName.values().length + 1); @@ -102,7 +116,13 @@ public void testSearchRequestStatsOnPhaseEndConcurrently() throws InterruptedExc for (int i = 0; i < numTasks; i++) { threads[i] = new Thread(() -> { phaser.arriveAndAwaitAdvance(); - testRequestStats.onPhaseEnd(ctx, new SearchRequestContext()); + testRequestStats.onPhaseEnd( + ctx, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); countDownLatch.countDown(); }); threads[i].start(); @@ -121,7 +141,8 @@ public void testSearchRequestStatsOnPhaseEndConcurrently() throws InterruptedExc } public void testSearchRequestStatsOnPhaseFailureConcurrently() throws InterruptedException { - SearchRequestStats testRequestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings); int numTasks = randomIntBetween(5, 50); Thread[] threads = new Thread[numTasks * SearchPhaseName.values().length]; Phaser phaser = new Phaser(numTasks * SearchPhaseName.values().length + 1); diff --git a/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java index 1004965c0d50e..ce4d5ca4f7091 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java @@ -32,6 +32,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; import org.opensearch.OpenSearchException; @@ -132,7 +133,13 @@ public void testMergeTookInMillis() throws InterruptedException { addResponse(merger, searchResponse); } awaitResponsesAdded(); - SearchResponse searchResponse = merger.getMergedResponse(SearchResponse.Clusters.EMPTY); + SearchResponse searchResponse = merger.getMergedResponse( + SearchResponse.Clusters.EMPTY, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis()); } @@ -184,7 +191,13 @@ public void testMergeShardFailures() throws InterruptedException { awaitResponsesAdded(); assertEquals(numResponses, merger.numResponses()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); + SearchResponse mergedResponse = merger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, mergedResponse.getClusters()); assertEquals(numResponses, mergedResponse.getTotalShards()); assertEquals(numResponses, mergedResponse.getSuccessfulShards()); @@ -235,7 +248,13 @@ public void testMergeShardFailuresNullShardTarget() throws InterruptedException awaitResponsesAdded(); assertEquals(numResponses, merger.numResponses()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); + SearchResponse mergedResponse = 
merger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, mergedResponse.getClusters()); assertEquals(numResponses, mergedResponse.getTotalShards()); assertEquals(numResponses, mergedResponse.getSuccessfulShards()); @@ -281,7 +300,13 @@ public void testMergeShardFailuresNullShardId() throws InterruptedException { } awaitResponsesAdded(); assertEquals(numResponses, merger.numResponses()); - ShardSearchFailure[] shardFailures = merger.getMergedResponse(SearchResponse.Clusters.EMPTY).getShardFailures(); + ShardSearchFailure[] shardFailures = merger.getMergedResponse( + SearchResponse.Clusters.EMPTY, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ).getShardFailures(); assertThat(Arrays.asList(shardFailures), containsInAnyOrder(expectedFailures.toArray(ShardSearchFailure.EMPTY_ARRAY))); } @@ -315,7 +340,13 @@ public void testMergeProfileResults() throws InterruptedException { awaitResponsesAdded(); assertEquals(numResponses, merger.numResponses()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); + SearchResponse mergedResponse = merger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, mergedResponse.getClusters()); assertEquals(numResponses, mergedResponse.getTotalShards()); assertEquals(numResponses, mergedResponse.getSuccessfulShards()); @@ -377,7 +408,13 @@ public void testMergeCompletionSuggestions() throws InterruptedException { awaitResponsesAdded(); assertEquals(numResponses, searchResponseMerger.numResponses()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, mergedResponse.getClusters()); assertEquals(numResponses, mergedResponse.getTotalShards()); assertEquals(numResponses, mergedResponse.getSuccessfulShards()); @@ -449,7 +486,13 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedException awaitResponsesAdded(); assertEquals(numResponses, searchResponseMerger.numResponses()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, mergedResponse.getClusters()); assertEquals(numResponses, mergedResponse.getTotalShards()); assertEquals(numResponses, mergedResponse.getSuccessfulShards()); @@ -523,7 +566,13 @@ public void testMergeAggs() throws InterruptedException { awaitResponsesAdded(); assertEquals(numResponses, searchResponseMerger.numResponses()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = 
searchResponseMerger.getMergedResponse(clusters); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, mergedResponse.getClusters()); assertEquals(numResponses, mergedResponse.getTotalShards()); assertEquals(numResponses, mergedResponse.getSuccessfulShards()); @@ -680,7 +729,13 @@ public void testMergeSearchHits() throws InterruptedException { awaitResponsesAdded(); assertEquals(numResponses, searchResponseMerger.numResponses()); final SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse searchResponse = searchResponseMerger.getMergedResponse(clusters); + SearchResponse searchResponse = searchResponseMerger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis()); assertEquals(expectedTotal, searchResponse.getTotalShards()); @@ -740,7 +795,13 @@ public void testMergeNoResponsesAdded() { SearchResponseMerger merger = new SearchResponseMerger(0, 10, Integer.MAX_VALUE, timeProvider, emptyReduceContextBuilder()); SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); assertEquals(0, merger.numResponses()); - SearchResponse response = merger.getMergedResponse(clusters); + SearchResponse response = merger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertSame(clusters, response.getClusters()); assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), response.getTook().millis()); assertEquals(0, response.getTotalShards()); @@ -813,7 +874,13 @@ public void testMergeEmptySearchHitsWithNonEmpty() { merger.add(searchResponse); } assertEquals(2, merger.numResponses()); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); + SearchResponse mergedResponse = merger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertEquals(10, mergedResponse.getHits().getTotalHits().value); assertEquals(10, mergedResponse.getHits().getHits().length); assertEquals(2, mergedResponse.getTotalShards()); @@ -855,7 +922,13 @@ public void testMergeOnlyEmptyHits() { ); merger.add(searchResponse); } - SearchResponse mergedResponse = merger.getMergedResponse(clusters); + SearchResponse mergedResponse = merger.getMergedResponse( + clusters, + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + new SearchRequest() + ) + ); assertEquals(expectedTotalHits, mergedResponse.getHits().getTotalHits()); } diff --git a/server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java b/server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java deleted file mode 100644 index 4d8a44417a3ee..0000000000000 --- a/server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 
license or a - * compatible open source license. - */ - -package org.opensearch.action.search; - -import org.opensearch.test.OpenSearchTestCase; - -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SearchTimeProviderTests extends OpenSearchTestCase { - - public void testSearchTimeProviderPhaseFailure() { - TransportSearchAction.SearchTimeProvider testTimeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0); - SearchPhaseContext ctx = mock(SearchPhaseContext.class); - SearchPhase mockSearchPhase = mock(SearchPhase.class); - when(ctx.getCurrentPhase()).thenReturn(mockSearchPhase); - - for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) { - when(mockSearchPhase.getSearchPhaseName()).thenReturn(searchPhaseName); - testTimeProvider.onPhaseStart(ctx); - assertNull(testTimeProvider.getPhaseTookTime(searchPhaseName)); - testTimeProvider.onPhaseFailure(ctx); - assertNull(testTimeProvider.getPhaseTookTime(searchPhaseName)); - } - } - - public void testSearchTimeProviderPhaseEnd() { - TransportSearchAction.SearchTimeProvider testTimeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0); - - SearchPhaseContext ctx = mock(SearchPhaseContext.class); - SearchPhase mockSearchPhase = mock(SearchPhase.class); - when(ctx.getCurrentPhase()).thenReturn(mockSearchPhase); - - for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) { - when(mockSearchPhase.getSearchPhaseName()).thenReturn(searchPhaseName); - long tookTimeInMillis = randomIntBetween(1, 100); - testTimeProvider.onPhaseStart(ctx); - long startTime = System.nanoTime() - TimeUnit.MILLISECONDS.toNanos(tookTimeInMillis); - when(mockSearchPhase.getStartTimeInNanos()).thenReturn(startTime); - assertNull(testTimeProvider.getPhaseTookTime(searchPhaseName)); - testTimeProvider.onPhaseEnd(ctx, new SearchRequestContext()); - assertThat(testTimeProvider.getPhaseTookTime(searchPhaseName), greaterThanOrEqualTo(tookTimeInMillis)); - } - } -} diff --git a/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java index c4bf8a5d87172..da19c839f3826 100644 --- a/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java @@ -32,6 +32,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.TotalHits; import org.opensearch.Version; import org.opensearch.action.LatchedActionListener; @@ -483,7 +484,11 @@ public void testCCSRemoteReduceMergeFails() throws Exception { remoteClusterService, threadPool, listener, - (r, l) -> setOnce.set(Tuple.tuple(r, l)) + (r, l) -> setOnce.set(Tuple.tuple(r, l)), + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); if (localIndices == null) { assertNull(setOnce.get()); @@ -541,7 +546,11 @@ public void testCCSRemoteReduce() throws Exception { remoteClusterService, threadPool, listener, - (r, l) -> setOnce.set(Tuple.tuple(r, l)) + (r, l) -> setOnce.set(Tuple.tuple(r, l)), + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); if (localIndices == null) { assertNull(setOnce.get()); @@ -578,7 
+587,11 @@ public void testCCSRemoteReduce() throws Exception { remoteClusterService, threadPool, listener, - (r, l) -> setOnce.set(Tuple.tuple(r, l)) + (r, l) -> setOnce.set(Tuple.tuple(r, l)), + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); if (localIndices == null) { assertNull(setOnce.get()); @@ -636,7 +649,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti remoteClusterService, threadPool, listener, - (r, l) -> setOnce.set(Tuple.tuple(r, l)) + (r, l) -> setOnce.set(Tuple.tuple(r, l)), + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); if (localIndices == null) { assertNull(setOnce.get()); @@ -676,7 +693,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti remoteClusterService, threadPool, listener, - (r, l) -> setOnce.set(Tuple.tuple(r, l)) + (r, l) -> setOnce.set(Tuple.tuple(r, l)), + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); if (localIndices == null) { assertNull(setOnce.get()); @@ -727,7 +748,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti remoteClusterService, threadPool, listener, - (r, l) -> setOnce.set(Tuple.tuple(r, l)) + (r, l) -> setOnce.set(Tuple.tuple(r, l)), + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); if (localIndices == null) { assertNull(setOnce.get()); diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index bf66f577e182b..9fb4551f106ec 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -40,6 +40,7 @@ import org.opensearch.common.util.set.Sets; import org.opensearch.core.index.Index; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.rest.action.admin.indices.AliasesNotFoundException; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; @@ -164,11 +165,11 @@ public void testMustExist() { // Show that removing non-existing alias with mustExist == true fails final ClusterState finalCS = after; - final IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, + final AliasesNotFoundException iae = expectThrows( + AliasesNotFoundException.class, () -> service.applyAliasActions(finalCS, singletonList(new AliasAction.Remove(index, "test_2", true))) ); - assertThat(iae.getMessage(), containsString("required alias [test_2] does not exist")); + assertThat(iae.getMessage(), containsString("aliases [test_2] missing")); } public void testMultipleIndices() { diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java new file mode 100644 index 0000000000000..eb75244c6f8b1 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java @@ -0,0 +1,786 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + 
* this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.store.OpenSearchOnHeapCache; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; +import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +public class TieredSpilloverCacheTests extends OpenSearchTestCase { + + public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + randomIntBetween(1, 4), + eventListener, + 0 + ); + int numOfItems1 = randomIntBetween(1, onHeapCacheSize / 2 - 1); + List<String> keys = new ArrayList<>(); + // Put values in cache. + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + keys.add(key); + LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); + + // Try to hit cache again with some randomization. + int numOfItems2 = randomIntBetween(1, onHeapCacheSize / 2 - 1); + int cacheHit = 0; + int cacheMiss = 0; + for (int iter = 0; iter < numOfItems2; iter++) { + if (randomBoolean()) { + // Hit cache with a stored key. + cacheHit++; + int index = randomIntBetween(0, keys.size() - 1); + tieredSpilloverCache.computeIfAbsent(keys.get(index), getLoadAwareCacheLoader()); + } else { + // Hit cache with a randomized key, which is expected to always miss the cache.
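+ // (UUID keys are effectively unique, so a fresh random key never matches a previously stored one.)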
+ tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), getLoadAwareCacheLoader()); + cacheMiss++; + } + } + assertEquals(cacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); + assertEquals(numOfItems1 + cacheMiss, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); + } + + public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>(); + StoreAwareCacheBuilder<String, String> cacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>().setMaximumWeightInBytes( + onHeapCacheSize * 50 + ).setWeigher((k, v) -> 50); // Will support onHeapCacheSize entries. + + StoreAwareCacheBuilder<String, String> diskCacheBuilder = new MockOnDiskCache.Builder<String, String>().setMaxSize(diskCacheSize) + .setDeliberateDelay(0); + + TieredSpilloverCache<String, String> tieredSpilloverCache = new TieredSpilloverCache.Builder<String, String>() + .setOnHeapCacheBuilder(cacheBuilder) + .setOnDiskCacheBuilder(diskCacheBuilder) + .setListener(eventListener) + .build(); + + // Put more values into the cache than its size to cause evictions from onHeap. + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List<String> onHeapKeys = new ArrayList<>(); + List<String> diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + long actualDiskCacheSize = tieredSpilloverCache.getOnDiskCache().get().count(); + assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); + assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); + + assertEquals( + eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count(), + eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count() + ); + assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count()); + + tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add); + tieredSpilloverCache.getOnDiskCache().get().keys().forEach(diskTierKeys::add); + + assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size()); + assertEquals(tieredSpilloverCache.getOnDiskCache().get().count(), diskTierKeys.size()); + + // Try to hit cache again with some randomization. + int numOfItems2 = randomIntBetween(50, 200); + int onHeapCacheHit = 0; + int diskCacheHit = 0; + int cacheMiss = 0; + for (int iter = 0; iter < numOfItems2; iter++) { + if (randomBoolean()) { // Hit cache with key stored in onHeap cache. + onHeapCacheHit++; + int index = randomIntBetween(0, onHeapKeys.size() - 1); + LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(onHeapKeys.get(index), loadAwareCacheLoader); + assertFalse(loadAwareCacheLoader.isLoaded()); + } else { // Hit cache with key stored in disk cache.
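+ // (A disk-tier hit still counts as an on-heap miss: the on-heap tier is always consulted first.)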
+ diskCacheHit++; + int index = randomIntBetween(0, diskTierKeys.size() - 1); + LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(diskTierKeys.get(index), loadAwareCacheLoader); + assertFalse(loadAwareCacheLoader.isLoaded()); + } + } + for (int iter = 0; iter < randomIntBetween(50, 200); iter++) { + // Hit cache with a randomized key, which is expected to always miss the cache. + LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); + cacheMiss++; + } + // On-heap cache misses also include diskCacheHits, since a disk hit means the on-heap cache missed first. + assertEquals(numOfItems1 + cacheMiss + diskCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); + assertEquals(onHeapCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); + assertEquals(cacheMiss + numOfItems1, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count()); + assertEquals(diskCacheHit, eventListener.enumMap.get(CacheStoreType.DISK).hitCount.count()); + } + + public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + + int numOfItems = randomIntBetween(totalSize + 1, totalSize * 3); + for (int iter = 0; iter < numOfItems; iter++) { + LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); + } + assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0); + assertTrue(eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count() > 0); + } + + public void testGetAndCount() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List<String> onHeapKeys = new ArrayList<>(); + List<String> diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + if (iter > (onHeapCacheSize - 1)) { + // Beyond the on-heap capacity, entries start spilling over to the disk-based cache.
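+ // (The assertions below only rely on every key being present in some tier; get() consults both.)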
+ diskTierKeys.add(key); + } else { + onHeapKeys.add(key); + } + LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader); + } + + for (int iter = 0; iter < numOfItems1; iter++) { + if (randomBoolean()) { + if (randomBoolean()) { + int index = randomIntBetween(0, onHeapKeys.size() - 1); + assertNotNull(tieredSpilloverCache.get(onHeapKeys.get(index))); + } else { + int index = randomIntBetween(0, diskTierKeys.size() - 1); + assertNotNull(tieredSpilloverCache.get(diskTierKeys.get(index))); + } + } else { + assertNull(tieredSpilloverCache.get(UUID.randomUUID().toString())); + } + } + assertEquals(numOfItems1, tieredSpilloverCache.count()); + } + + public void testWithDiskTierNull() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>(); + + StoreAwareCacheBuilder<String, String> onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>() + .setMaximumWeightInBytes(onHeapCacheSize * 20) + .setWeigher((k, v) -> 20); // Will support up to onHeapCacheSize entries + TieredSpilloverCache<String, String> tieredSpilloverCache = new TieredSpilloverCache.Builder<String, String>() + .setOnHeapCacheBuilder(onHeapCacheBuilder) + .setListener(eventListener) + .build(); + + int numOfItems = randomIntBetween(onHeapCacheSize + 1, onHeapCacheSize * 3); + for (int iter = 0; iter < numOfItems; iter++) { + LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), loadAwareCacheLoader); + } + assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count()); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count()); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count()); + } + + public void testPut() { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + String key = UUID.randomUUID().toString(); + String value = UUID.randomUUID().toString(); + tieredSpilloverCache.put(key, value); + assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).cachedCount.count()); + assertEquals(1, tieredSpilloverCache.count()); + } + + public void testPutAndVerifyNewItemsArePresentOnHeapCache() throws Exception { + int onHeapCacheSize = randomIntBetween(200, 400); + int diskCacheSize = randomIntBetween(450, 800); + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + + for (int i = 0; i < onHeapCacheSize; i++) { + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), new LoadAwareCacheLoader<>() { + @Override + public boolean isLoaded() { + return false; + } + + @Override + public String load(String key) throws Exception { + return UUID.randomUUID().toString(); + } + }); + } + + assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count()); + assertEquals(0, tieredSpilloverCache.getOnDiskCache().get().count()); + + // Again, put an onHeap-cache-capacity amount of new items.
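+ // (Each new put should evict one existing on-heap entry to the disk tier, leaving only the new keys on heap.)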
+ List<String> newKeyList = new ArrayList<>(); + for (int i = 0; i < onHeapCacheSize; i++) { + newKeyList.add(UUID.randomUUID().toString()); + } + + for (int i = 0; i < newKeyList.size(); i++) { + tieredSpilloverCache.computeIfAbsent(newKeyList.get(i), new LoadAwareCacheLoader<>() { + @Override + public boolean isLoaded() { + return false; + } + + @Override + public String load(String key) { + return UUID.randomUUID().toString(); + } + }); + } + + // Verify that new items are part of onHeap cache. + List<String> actualOnHeapCacheKeys = new ArrayList<>(); + tieredSpilloverCache.getOnHeapCache().keys().forEach(actualOnHeapCacheKeys::add); + + assertEquals(newKeyList.size(), actualOnHeapCacheKeys.size()); + for (int i = 0; i < actualOnHeapCacheKeys.size(); i++) { + assertTrue(newKeyList.contains(actualOnHeapCacheKeys.get(i))); + } + + assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count()); + assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnDiskCache().get().count()); + } + + public void testInvalidate() { + int onHeapCacheSize = 1; + int diskCacheSize = 10; + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + String key = UUID.randomUUID().toString(); + String value = UUID.randomUUID().toString(); + // First try to invalidate without the key present in cache. + tieredSpilloverCache.invalidate(key); + assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); + + // Now try to invalidate with the key present in onHeap cache. + tieredSpilloverCache.put(key, value); + tieredSpilloverCache.invalidate(key); + assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); + assertEquals(0, tieredSpilloverCache.count()); + + tieredSpilloverCache.put(key, value); + // Put another key/value so that one of the items is evicted to the disk cache. + String key2 = UUID.randomUUID().toString(); + tieredSpilloverCache.put(key2, UUID.randomUUID().toString()); + assertEquals(2, tieredSpilloverCache.count()); + // Invalidate the older key again. + tieredSpilloverCache.invalidate(key); + assertEquals(1, eventListener.enumMap.get(CacheStoreType.DISK).invalidationMetric.count()); + assertEquals(1, tieredSpilloverCache.count()); + } + + public void testCacheKeys() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + List<String> onHeapKeys = new ArrayList<>(); + List<String> diskTierKeys = new ArrayList<>(); + // During the first round, add onHeapCacheSize entries. These go to the onHeap cache initially. + for (int i = 0; i < onHeapCacheSize; i++) { + String key = UUID.randomUUID().toString(); + diskTierKeys.add(key); + tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader()); + } + // In another round, add another onHeapCacheSize entries. These will go to the onHeap tier, and the + // earlier ones will be evicted to the onDisk tier.
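+ // (This relies on the on-heap tier being exactly full after the first round, so each new put triggers one spill.)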
+ for (int i = 0; i < onHeapCacheSize; i++) { + String key = UUID.randomUUID().toString(); + onHeapKeys.add(key); + tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader()); + } + + List<String> actualOnHeapKeys = new ArrayList<>(); + List<String> actualOnDiskKeys = new ArrayList<>(); + Iterable<String> onHeapIterable = tieredSpilloverCache.getOnHeapCache().keys(); + Iterable<String> onDiskIterable = tieredSpilloverCache.getOnDiskCache().get().keys(); + onHeapIterable.iterator().forEachRemaining(actualOnHeapKeys::add); + onDiskIterable.iterator().forEachRemaining(actualOnDiskKeys::add); + for (String onHeapKey : onHeapKeys) { + assertTrue(actualOnHeapKeys.contains(onHeapKey)); + } + for (String onDiskKey : actualOnDiskKeys) { + assertTrue(diskTierKeys.contains(onDiskKey)); + } + + // Testing keys() which returns all keys. + List<String> actualMergedKeys = new ArrayList<>(); + List<String> expectedMergedKeys = new ArrayList<>(); + expectedMergedKeys.addAll(onHeapKeys); + expectedMergedKeys.addAll(diskTierKeys); + + Iterable<String> mergedIterable = tieredSpilloverCache.keys(); + mergedIterable.iterator().forEachRemaining(actualMergedKeys::add); + + assertEquals(expectedMergedKeys.size(), actualMergedKeys.size()); + for (String key : expectedMergedKeys) { + assertTrue(actualMergedKeys.contains(key)); + } + } + + public void testRefresh() { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + tieredSpilloverCache.refresh(); + } + + public void testInvalidateAll() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + // Put more values into the cache than its size to cause evictions from onHeap. + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List<String> onHeapKeys = new ArrayList<>(); + List<String> diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + if (iter > (onHeapCacheSize - 1)) { + // Beyond the on-heap capacity, entries start spilling over to the disk-based cache.
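+ // (Tier placement does not matter here; invalidateAll() must clear both tiers regardless.)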
+ diskTierKeys.add(key); + } else { + onHeapKeys.add(key); + } + LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + assertEquals(numOfItems1, tieredSpilloverCache.count()); + tieredSpilloverCache.invalidateAll(); + assertEquals(0, tieredSpilloverCache.count()); + } + + public void testComputeIfAbsentConcurrently() throws Exception { + int onHeapCacheSize = randomIntBetween(100, 300); + int diskCacheSize = randomIntBetween(200, 400); + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + + TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + eventListener, + 0 + ); + + int numberOfSameKeys = randomIntBetween(10, onHeapCacheSize - 1); + String key = UUID.randomUUID().toString(); + String value = UUID.randomUUID().toString(); + + Thread[] threads = new Thread[numberOfSameKeys]; + Phaser phaser = new Phaser(numberOfSameKeys + 1); + CountDownLatch countDownLatch = new CountDownLatch(numberOfSameKeys); // To wait for all threads to finish. + + List<LoadAwareCacheLoader<String, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); + + for (int i = 0; i < numberOfSameKeys; i++) { + threads[i] = new Thread(() -> { + try { + LoadAwareCacheLoader<String, String> loadAwareCacheLoader = new LoadAwareCacheLoader<String, String>() { + boolean isLoaded = false; + + @Override + public boolean isLoaded() { + return isLoaded; + } + + @Override + public String load(String key) throws Exception { + isLoaded = true; + return value; + } + }; + loadAwareCacheLoaderList.add(loadAwareCacheLoader); + phaser.arriveAndAwaitAdvance(); + tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader); + } catch (Exception e) { + throw new RuntimeException(e); + } + countDownLatch.countDown(); + }); + threads[i].start(); + } + phaser.arriveAndAwaitAdvance(); + countDownLatch.await(); // Wait for all threads to finish. + int numberOfTimesKeyLoaded = 0; + assertEquals(numberOfSameKeys, loadAwareCacheLoaderList.size()); + for (int i = 0; i < loadAwareCacheLoaderList.size(); i++) { + LoadAwareCacheLoader<String, String> loader = loadAwareCacheLoaderList.get(i); + if (loader.isLoaded()) { + numberOfTimesKeyLoaded++; + } + } + assertEquals(1, numberOfTimesKeyLoaded); // It should be loaded only once. + } + + public void testConcurrencyForEvictionFlow() throws Exception { + int diskCacheSize = randomIntBetween(450, 800); + + MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>(); + + StoreAwareCacheBuilder<String, String> cacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>().setMaximumWeightInBytes( + 200 + ).setWeigher((k, v) -> 150); + + StoreAwareCacheBuilder<String, String> diskCacheBuilder = new MockOnDiskCache.Builder<String, String>().setMaxSize(diskCacheSize) + .setDeliberateDelay(500); + + TieredSpilloverCache<String, String> tieredSpilloverCache = new TieredSpilloverCache.Builder<String, String>() + .setOnHeapCacheBuilder(cacheBuilder) + .setOnDiskCacheBuilder(diskCacheBuilder) + .setListener(eventListener) + .build(); + + String keyToBeEvicted = "key1"; + String secondKey = "key2"; + + // Put first key on tiered cache. Will go into onHeap cache. + tieredSpilloverCache.computeIfAbsent(keyToBeEvicted, new LoadAwareCacheLoader<>() { + @Override + public boolean isLoaded() { + return false; + } + + @Override + public String load(String key) { + return UUID.randomUUID().toString(); + } + }); + CountDownLatch countDownLatch = new CountDownLatch(1); + CountDownLatch countDownLatch1 = new CountDownLatch(1); + // Put second key on tiered cache.
This will cause the eviction of the first key from the onHeap cache; it should go into + // the disk cache. + LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader(); + Thread thread = new Thread(() -> { + try { + tieredSpilloverCache.computeIfAbsent(secondKey, loadAwareCacheLoader); + countDownLatch1.countDown(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + thread.start(); + assertBusy(() -> { assertTrue(loadAwareCacheLoader.isLoaded()); }, 100, TimeUnit.MILLISECONDS); // We wait for the new key to be loaded, + // after which the eviction flow is + // guaranteed to occur. + StoreAwareCache<String, String> onDiskCache = tieredSpilloverCache.getOnDiskCache().get(); + + // Now, on a different thread, try to get the key above (the one which got evicted) from the tiered cache. We expect this + // to return a non-null value, as the key should be present on the diskCache. + AtomicReference<String> actualValue = new AtomicReference<>(); + Thread thread1 = new Thread(() -> { + try { + actualValue.set(tieredSpilloverCache.get(keyToBeEvicted)); + } catch (Exception e) { + throw new RuntimeException(e); + } + countDownLatch.countDown(); + }); + thread1.start(); + countDownLatch.await(); + assertNotNull(actualValue.get()); + countDownLatch1.await(); + assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); + assertEquals(1, tieredSpilloverCache.getOnHeapCache().count()); + assertEquals(1, onDiskCache.count()); + assertNotNull(onDiskCache.get(keyToBeEvicted)); + } + + class MockCacheEventListener<K, V> implements StoreAwareCacheEventListener<K, V> { + + EnumMap<CacheStoreType, TestStatsHolder> enumMap = new EnumMap<>(CacheStoreType.class); + + MockCacheEventListener() { + for (CacheStoreType cacheStoreType : CacheStoreType.values()) { + enumMap.put(cacheStoreType, new TestStatsHolder()); + } + } + + @Override + public void onMiss(K key, CacheStoreType cacheStoreType) { + enumMap.get(cacheStoreType).missCount.inc(); + } + + @Override + public void onRemoval(StoreAwareCacheRemovalNotification<K, V> notification) { + if (notification.getRemovalReason().equals(RemovalReason.EVICTED)) { + enumMap.get(notification.getCacheStoreType()).evictionsMetric.inc(); + } else if (notification.getRemovalReason().equals(RemovalReason.INVALIDATED)) { + enumMap.get(notification.getCacheStoreType()).invalidationMetric.inc(); + } + } + + @Override + public void onHit(K key, V value, CacheStoreType cacheStoreType) { + enumMap.get(cacheStoreType).hitCount.inc(); + } + + @Override + public void onCached(K key, V value, CacheStoreType cacheStoreType) { + enumMap.get(cacheStoreType).cachedCount.inc(); + } + + class TestStatsHolder { + final CounterMetric evictionsMetric = new CounterMetric(); + final CounterMetric hitCount = new CounterMetric(); + final CounterMetric missCount = new CounterMetric(); + final CounterMetric cachedCount = new CounterMetric(); + final CounterMetric invalidationMetric = new CounterMetric(); + } + } + + private LoadAwareCacheLoader<String, String> getLoadAwareCacheLoader() { + return new LoadAwareCacheLoader<String, String>() { + boolean isLoaded = false; + + @Override + public String load(String key) { + isLoaded = true; + return UUID.randomUUID().toString(); + } + + @Override + public boolean isLoaded() { + return isLoaded; + } + }; + } + + private TieredSpilloverCache<String, String> initializeTieredSpilloverCache( + int onHeapCacheSize, + int diskCacheSize, + StoreAwareCacheEventListener<String, String> eventListener, + long diskDeliberateDelay + ) { + StoreAwareCacheBuilder<String, String> diskCacheBuilder = new MockOnDiskCache.Builder<String, String>().setMaxSize(diskCacheSize) + .setDeliberateDelay(diskDeliberateDelay); +
StoreAwareCacheBuilder<String, String> onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>() + .setMaximumWeightInBytes(onHeapCacheSize * 20) + .setWeigher((k, v) -> 20); // Will support up to onHeapCacheSize entries + return new TieredSpilloverCache.Builder<String, String>().setOnHeapCacheBuilder(onHeapCacheBuilder) + .setOnDiskCacheBuilder(diskCacheBuilder) + .setListener(eventListener) + .build(); + } +} + +class MockOnDiskCache<K, V> implements StoreAwareCache<K, V> { + + Map<K, V> cache; + int maxSize; + + long delay; + StoreAwareCacheEventListener<K, V> eventListener; + + MockOnDiskCache(int maxSize, StoreAwareCacheEventListener<K, V> eventListener, long delay) { + this.maxSize = maxSize; + this.eventListener = eventListener; + this.delay = delay; + this.cache = new ConcurrentHashMap<>(); + } + + @Override + public V get(K key) { + V value = cache.get(key); + if (value != null) { + eventListener.onHit(key, value, CacheStoreType.DISK); + } else { + eventListener.onMiss(key, CacheStoreType.DISK); + } + return value; + } + + @Override + public void put(K key, V value) { + if (this.cache.size() >= maxSize) { // For simplification + eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, value, RemovalReason.EVICTED, CacheStoreType.DISK)); + return; + } + try { + Thread.sleep(delay); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + this.cache.put(key, value); + eventListener.onCached(key, value, CacheStoreType.DISK); + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader<K, V> loader) throws Exception { + V value = cache.computeIfAbsent(key, key1 -> { + try { + return loader.load(key); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + if (!loader.isLoaded()) { + eventListener.onHit(key, value, CacheStoreType.DISK); + } else { + eventListener.onMiss(key, CacheStoreType.DISK); + eventListener.onCached(key, value, CacheStoreType.DISK); + } + return value; + } + + @Override + public void invalidate(K key) { + if (this.cache.containsKey(key)) { + eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, null, RemovalReason.INVALIDATED, CacheStoreType.DISK)); + } + this.cache.remove(key); + } + + @Override + public void invalidateAll() { + this.cache.clear(); + } + + @Override + public Iterable<K> keys() { + return this.cache.keySet(); + } + + @Override + public long count() { + return this.cache.size(); + } + + @Override + public void refresh() {} + + @Override + public CacheStoreType getTierType() { + return CacheStoreType.DISK; + } + + public static class Builder<K, V> extends StoreAwareCacheBuilder<K, V> { + + int maxSize; + long delay; + + @Override + public StoreAwareCache<K, V> build() { + return new MockOnDiskCache<K, V>(maxSize, this.getEventListener(), delay); + } + + public Builder<K, V> setMaxSize(int maxSize) { + this.maxSize = maxSize; + return this; + } + + public Builder<K, V> setDeliberateDelay(long millis) { + this.delay = millis; + return this; + } + } +} diff --git a/server/src/test/java/org/opensearch/common/logging/LoggersTests.java b/server/src/test/java/org/opensearch/common/logging/LoggersTests.java index 17c4f9d0fe13d..d9db57aef15b6 100644 --- a/server/src/test/java/org/opensearch/common/logging/LoggersTests.java +++ b/server/src/test/java/org/opensearch/common/logging/LoggersTests.java @@ -53,40 +53,45 @@ public void testParameterizedMessageLambda() throws Exception { appender.start(); final Logger testLogger = LogManager.getLogger(LoggersTests.class); Loggers.addAppender(testLogger, appender); - Loggers.setLevel(testLogger, Level.TRACE); + try { + Loggers.setLevel(testLogger,
Level.TRACE); - Throwable ex = randomException(); - testLogger.error(() -> new ParameterizedMessage("an error message"), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.ERROR)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an error message")); + Throwable ex = randomException(); + testLogger.error(() -> new ParameterizedMessage("an error message"), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.ERROR)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an error message")); - ex = randomException(); - testLogger.warn(() -> new ParameterizedMessage("a warn message: [{}]", "long gc"), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.WARN)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a warn message: [long gc]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining("long gc")); + ex = randomException(); + testLogger.warn(() -> new ParameterizedMessage("a warn message: [{}]", "long gc"), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.WARN)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a warn message: [long gc]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining("long gc")); - testLogger.info(() -> new ParameterizedMessage("an info message a=[{}], b=[{}], c=[{}]", 1, 2, 3)); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.INFO)); - assertThat(appender.lastEvent.getThrown(), nullValue()); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an info message a=[1], b=[2], c=[3]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(1, 2, 3)); + testLogger.info(() -> new ParameterizedMessage("an info message a=[{}], b=[{}], c=[{}]", 1, 2, 3)); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.INFO)); + assertThat(appender.lastEvent.getThrown(), nullValue()); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an info message a=[1], b=[2], c=[3]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(1, 2, 3)); - ex = randomException(); - testLogger.debug(() -> new ParameterizedMessage("a debug message options = {}", Arrays.asList("yes", "no")), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.DEBUG)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a debug message options = [yes, no]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(Arrays.asList("yes", "no"))); + ex = randomException(); + testLogger.debug(() -> new ParameterizedMessage("a debug message options = {}", Arrays.asList("yes", "no")), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.DEBUG)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a debug message options = [yes, no]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(Arrays.asList("yes", "no"))); - ex = randomException(); - testLogger.trace(() -> new ParameterizedMessage("a 
trace message; element = [{}]", new Object[] { null }), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.TRACE)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a trace message; element = [null]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(new Object[] { null })); + ex = randomException(); + testLogger.trace(() -> new ParameterizedMessage("a trace message; element = [{}]", new Object[] { null }), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.TRACE)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a trace message; element = [null]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(new Object[] { null })); + } finally { + Loggers.removeAppender(testLogger, appender); + appender.stop(); + } } private Throwable randomException() { diff --git a/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java b/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java index b256ab956f963..9358013826a1c 100644 --- a/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java +++ b/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java @@ -25,7 +25,7 @@ public class InsecureSettingTests extends OpenSearchTestCase { private List<String> rootLogMsgs = new ArrayList<>(); private AbstractAppender rootAppender; - protected void assertSettingWarning() { + private void assertSettingWarning() { assertWarnings( "[setting.name] setting was deprecated in OpenSearch and will be removed in a future release! See the breaking changes documentation for the next major version."
); @@ -50,13 +50,14 @@ public void append(LogEvent event) { InsecureSettingTests.this.rootLogMsgs.add(message); } }; - Loggers.addAppender(LogManager.getRootLogger(), rootAppender); rootAppender.start(); + Loggers.addAppender(LogManager.getLogger(SecureSetting.class), rootAppender); } @After public void removeInsecureSettingsAppender() { - Loggers.removeAppender(LogManager.getRootLogger(), rootAppender); + Loggers.removeAppender(LogManager.getLogger(SecureSetting.class), rootAppender); + rootAppender.stop(); } public void testShouldRaiseExceptionByDefault() { diff --git a/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java index a0531c76bf897..10669ca1a805b 100644 --- a/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java @@ -44,6 +44,8 @@ import java.util.Map; import java.util.function.Supplier; +import org.mockito.Mockito; + import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -740,6 +742,71 @@ public void testMarkAsSystemContext() throws IOException { assertFalse(threadContext.isSystemContext()); } + public void testSystemContextWithPropagator() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + Map<String, Object> transientHeaderMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, Object> transientHeaderTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, String> headerMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, String> headerTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + ThreadContext threadContext = new ThreadContext(build); + ThreadContextStatePropagator mockPropagator = Mockito.mock(ThreadContextStatePropagator.class); + Mockito.when(mockPropagator.transients(transientHeaderMap, true)).thenReturn(Collections.emptyMap()); + Mockito.when(mockPropagator.transients(transientHeaderMap, false)).thenReturn(transientHeaderTransformedMap); + + Mockito.when(mockPropagator.headers(headerMap, true)).thenReturn(headerTransformedMap); + Mockito.when(mockPropagator.headers(headerMap, false)).thenReturn(headerTransformedMap); + threadContext.registerThreadContextStatePropagator(mockPropagator); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("test_transient_propagation_key", 1); + assertEquals(Integer.valueOf(1), threadContext.getTransient("test_transient_propagation_key")); + assertEquals("bar", threadContext.getHeader("foo")); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + threadContext.markAsSystemContext(); + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("test_transient_propagation_key")); + assertEquals("1", threadContext.getHeader("default")); + } + + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("test_transient_propagation_key")); + assertEquals("1", threadContext.getHeader("default")); + } + + public void testSerializeSystemContext() throws IOException { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + Map<String, Object> transientHeaderMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, Object>
transientHeaderTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, String> headerMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, String> headerTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + ThreadContext threadContext = new ThreadContext(build); + ThreadContextStatePropagator mockPropagator = Mockito.mock(ThreadContextStatePropagator.class); + Mockito.when(mockPropagator.transients(transientHeaderMap, true)).thenReturn(Collections.emptyMap()); + Mockito.when(mockPropagator.transients(transientHeaderMap, false)).thenReturn(transientHeaderTransformedMap); + + Mockito.when(mockPropagator.headers(headerMap, true)).thenReturn(headerTransformedMap); + Mockito.when(mockPropagator.headers(headerMap, false)).thenReturn(headerTransformedMap); + threadContext.registerThreadContextStatePropagator(mockPropagator); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("test_transient_propagation_key", "test"); + BytesStreamOutput out = new BytesStreamOutput(); + BytesStreamOutput outFromSystemContext = new BytesStreamOutput(); + threadContext.writeTo(out); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + assertEquals("test", threadContext.getTransient("test_transient_propagation_key")); + threadContext.markAsSystemContext(); + threadContext.writeTo(outFromSystemContext); + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("test_transient_propagation_key")); + threadContext.readHeaders(outFromSystemContext.bytes().streamInput()); + assertNull(threadContext.getHeader("test_transient_propagation_key")); + } + assertEquals("test", threadContext.getTransient("test_transient_propagation_key")); + threadContext.readHeaders(out.bytes().streamInput()); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("test", threadContext.getHeader("test_transient_propagation_key")); + assertEquals("1", threadContext.getHeader("default")); + } + public void testPutHeaders() { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index 6f9ff78aa0143..b31edd79411d0 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -68,28 +68,28 @@ public class CodecTests extends OpenSearchTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(false); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene95Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService(false).codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED,
codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService(false).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); } public void testLZ4() throws Exception { Codec codec = createCodecService(false).codec("lz4"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); assert codec instanceof PerFieldMappingPostingFormatCodec; } public void testZlib() throws Exception { Codec codec = createCodecService(false).codec("zlib"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); assert codec instanceof PerFieldMappingPostingFormatCodec; } @@ -125,12 +125,12 @@ public void testLuceneCodecsWithCompressionLevel() { public void testDefaultMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); } public void testBestCompressionMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); } public void testExceptionCodecNull() { @@ -142,11 +142,11 @@ public void testExceptionIndexSettingsNull() { } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { SegmentReader sr = getSegmentReader(actual); String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene95Codec.Mode.valueOf(v)); + assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); } private CodecService createCodecService(boolean isMapperServiceNull) throws IOException { diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java index b746d0ba8a56d..0c87c384e0749 100644 --- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java @@ -32,14 +32,14 @@ package org.opensearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import 
org.apache.lucene.store.Directory; import org.opensearch.OpenSearchException; @@ -69,8 +69,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion90PostingsFormat(); - indexWriterConfig.setCodec(new Lucene95Codec() { + final PostingsFormat postingsFormat = new Completion99PostingsFormat(); + indexWriterConfig.setCodec(new Lucene99Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java index f0f34dff0a38f..bb3f2be8ea748 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java @@ -36,6 +36,7 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; @@ -65,6 +66,7 @@ import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -158,6 +160,26 @@ public void testMappingDepthExceedsLimit() throws Throwable { assertThat(e.getMessage(), containsString("Limit of mapping depth [1] has been exceeded")); } + public void testMappingDepthExceedsXContentLimit() throws Throwable { + final IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> createIndex( + "test1", + Settings.builder() + .put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), XContentContraints.DEFAULT_MAX_DEPTH + 1) + .build() + ) + ); + + assertThat( + ex.getMessage(), + is( + "The provided value 1001 of the index setting 'index.mapping.depth.limit' exceeds per-JVM configured limit of 1000. " + + "Please change the setting value or increase per-JVM limit using 'opensearch.xcontent.depth.max' system property." + ) + ); + } + public void testUnmappedFieldType() { MapperService mapperService = createIndex("index").mapperService(); assertThat(mapperService.unmappedFieldType("keyword"), instanceOf(KeywordFieldType.class)); @@ -300,6 +322,26 @@ public void testTotalFieldsLimitWithFieldAlias() throws Throwable { assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] has been exceeded", e.getMessage()); } + public void testFieldNameLengthExceedsXContentLimit() throws Throwable { + final IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> createIndex( + "test1", + Settings.builder() + .put(MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING.getKey(), XContentContraints.DEFAULT_MAX_NAME_LEN + 1) + .build() + ) + ); + + assertThat( + ex.getMessage(), + is( + "The provided value 50001 of the index setting 'index.mapping.field_name_length.limit' exceeds per-JVM configured limit of 50000. 
" + + "Please change the setting value or increase per-JVM limit using 'opensearch.xcontent.name.length.max' system property." + ) + ); + } + public void testFieldNameLengthLimit() throws Throwable { int maxFieldNameLength = randomIntBetween(25, 30); String testString = new String(new char[maxFieldNameLength + 1]).replace("\0", "a"); diff --git a/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java index 755d77c6ae392..00b48240d0567 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java @@ -277,12 +277,12 @@ public void testDateRangeQueryUsingMappingFormat() { RangeFieldType fieldType = new RangeFieldType("field", formatter); final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); - assertEquals("field:", query.toString()); + assertEquals("field:", ((IndexOrDocValuesQuery) query).getIndexQuery().toString()); // compare lower and upper bounds with what we would get on a `date` field DateFieldType dateFieldType = new DateFieldType("field", DateFieldMapper.Resolution.MILLISECONDS, formatter); final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); - assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString()); + assertEquals("field:[1465975790000 TO 1466062190999]", ((IndexOrDocValuesQuery) queryOnDateField).getIndexQuery().toString()); } /** diff --git a/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java index 1b62fef30d255..0dfe47e83fedc 100644 --- a/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java @@ -358,7 +358,10 @@ private void assertGeoDistanceRangeQuery(String query, double lat, double lon, d Query parsedQuery = parseQuery(query).toQuery(createShardContext()); // The parsedQuery contains IndexOrDocValuesQuery, which wraps LatLonPointDistanceQuery which in turn has default visibility, // so we cannot access its fields directly to check and have to use toString() here instead. 
- assertEquals(parsedQuery.toString(), "mapped_geo_point:" + lat + "," + lon + " +/- " + distanceUnit.toMeters(distance) + " meters"); + assertEquals( + ((IndexOrDocValuesQuery) parsedQuery).getIndexQuery().toString(), + "mapped_geo_point:" + lat + "," + lon + " +/- " + distanceUnit.toMeters(distance) + " meters" + ); } public void testFromJson() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java b/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java index 52b272094cd86..5656b77445772 100644 --- a/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java +++ b/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java @@ -37,6 +37,8 @@ import org.opensearch.action.search.SearchPhaseName; import org.opensearch.action.search.SearchRequestOperationsListenerSupport; import org.opensearch.action.search.SearchRequestStats; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.index.search.stats.SearchStats.Stats; import org.opensearch.test.OpenSearchTestCase; @@ -77,7 +79,8 @@ public void testShardLevelSearchGroupStats() throws Exception { long paramValue = randomIntBetween(2, 50); // Testing for request stats - SearchRequestStats testRequestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings); SearchPhaseContext ctx = mock(SearchPhaseContext.class); for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) { SearchPhase mockSearchPhase = mock(SearchPhase.class); diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java index dd92bfb47afdb..57a561bc8f2a3 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.util.Version; +import org.opensearch.action.StepListener; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; @@ -371,7 +372,6 @@ public void testPrimaryRestart() throws Exception { * prevent FileAlreadyExistsException. It does so by only copying files in the first round of segment replication, without * committing locally, so that in the next round of segment replication those files are not considered for download again */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/10885") public void testSegRepSucceedsOnPreviousCopiedFiles() throws Exception { try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { shards.startAll(); @@ -388,6 +388,7 @@ public void testSegRepSucceedsOnPreviousCopiedFiles() throws Exception { ); CountDownLatch latch = new CountDownLatch(1); + logger.info("--> Starting first round of replication"); // Start the first round of segment replication. This should fail with a simulated error, but leave the replica + // with files in its local store that are not in the active reader.
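+ // (Those stale, uncommitted files are exactly what used to trigger FileAlreadyExistsException in the next round.)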
final SegmentReplicationTarget target = targetService.startReplication( @@ -427,6 +428,7 @@ public void onReplicationFailure( // Start next round of segment replication and not throwing exception resulting in commit on replica when(sourceFactory.get(any())).thenReturn(getRemoteStoreReplicationSource(replica, () -> {})); CountDownLatch waitForSecondRound = new CountDownLatch(1); + logger.info("--> Starting second round of replication"); final SegmentReplicationTarget newTarget = targetService.startReplication( replica, primary.getLatestReplicationCheckpoint(), @@ -463,7 +465,6 @@ public void onReplicationFailure( * blocking update of reader. Once this is done, it corrupts one segment file and ensures that the file is deleted in the next * round of segment replication by checking the doc count. */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/10885") public void testNoFailuresOnFileReads() throws Exception { try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { shards.startAll(); @@ -560,8 +561,19 @@ public void getSegmentFiles( BiConsumer<String, Long> fileProgressTracker, ActionListener<GetSegmentFilesResponse> listener ) { - super.getSegmentFiles(replicationId, checkpoint, filesToFetch, indexShard, (fileName, bytesRecovered) -> {}, listener); - postGetFilesRunnable.run(); + StepListener<GetSegmentFilesResponse> waitForCopyFilesListener = new StepListener<>(); + super.getSegmentFiles( + replicationId, + checkpoint, + filesToFetch, + indexShard, + (fileName, bytesRecovered) -> {}, + waitForCopyFilesListener + ); + waitForCopyFilesListener.whenComplete(response -> { + postGetFilesRunnable.run(); + listener.onResponse(response); + }, listener::onFailure); } @Override diff --git a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java index 8494259c8fd8a..73728aec12e51 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java @@ -52,23 +52,36 @@ import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.common.bytes.AbstractBytesReference; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentHelper; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.IndexService; +import org.opensearch.index.cache.request.RequestCacheStats; import org.opensearch.index.cache.request.ShardRequestCache; import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.index.shard.IndexShardState; +import org.opensearch.test.OpenSearchSingleNodeTestCase; import java.io.IOException; import java.util.Arrays; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Optional; +import java.util.UUID; -public class IndicesRequestCacheTests extends OpenSearchTestCase { +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class IndicesRequestCacheTests extends OpenSearchSingleNodeTestCase { public void testBasicOperationsCache() throws Exception { - ShardRequestCache requestCacheStats = new ShardRequestCache(); - IndicesRequestCache cache = new
IndicesRequestCache(Settings.EMPTY); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache( + Settings.EMPTY, + (shardId -> Optional.of(new IndicesService.IndexShardCacheEntity(indexShard))) + ); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -76,13 +89,13 @@ public void testBasicOperationsCache() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - AtomicBoolean indexShard = new AtomicBoolean(true); // initial cache - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + ShardRequestCache requestCacheStats = indexShard.requestCache(); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -90,10 +103,11 @@ public void testBasicOperationsCache() throws Exception { assertEquals(1, cache.count()); // cache hit - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = indexShard.requestCache(); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -106,7 +120,7 @@ public void testBasicOperationsCache() throws Exception { if (randomBoolean()) { reader.close(); } else { - indexShard.set(false); // closed shard but reader is still open + indexShard.close("test", true, true); // closed shard but reader is still open cache.clear(entity); } cache.cleanCache(); @@ -122,9 +136,17 @@ public void testBasicOperationsCache() throws Exception { } public void testCacheDifferentReaders() throws Exception { - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); - AtomicBoolean indexShard = new AtomicBoolean(true); - ShardRequestCache requestCacheStats = new ShardRequestCache(); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY, (shardId -> { + IndexService indexService = null; + try { + indexService = indicesService.indexServiceSafe(shardId.getIndex()); + } catch (IndexNotFoundException ex) { + return Optional.empty(); + } + return Optional.of(new IndicesService.IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -141,9 +163,10 @@ public void testCacheDifferentReaders() throws Exception { DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); // initial cache - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + 
IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); + ShardRequestCache requestCacheStats = entity.stats(); assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); @@ -155,9 +178,10 @@ public void testCacheDifferentReaders() throws Exception { assertEquals(1, cache.numRegisteredCloseListeners()); // cache the second - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(secondReader, 0); value = cache.getOrCompute(entity, loader, secondReader, termBytes); + requestCacheStats = entity.stats(); assertEquals("bar", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); @@ -167,9 +191,10 @@ public void testCacheDifferentReaders() throws Exception { assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > cacheSize + value.length()); assertEquals(2, cache.numRegisteredCloseListeners()); - secondEntity = new TestEntity(requestCacheStats, indexShard); + secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(secondReader, 0); value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes); + requestCacheStats = entity.stats(); assertEquals("bar", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); @@ -177,10 +202,11 @@ public void testCacheDifferentReaders() throws Exception { assertTrue(loader.loadedFromCache); assertEquals(2, cache.count()); - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = entity.stats(); assertEquals(2, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -201,7 +227,7 @@ public void testCacheDifferentReaders() throws Exception { if (randomBoolean()) { secondReader.close(); } else { - indexShard.set(false); // closed shard but reader is still open + indexShard.close("test", true, true); // closed shard but reader is still open cache.clear(secondEntity); } cache.cleanCache(); @@ -218,9 +244,11 @@ public void testCacheDifferentReaders() throws Exception { public void testEviction() throws Exception { final ByteSizeValue size; { - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); - AtomicBoolean indexShard = new AtomicBoolean(true); - ShardRequestCache requestCacheStats = new ShardRequestCache(); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache( + Settings.EMPTY, + (shardId -> Optional.of(new IndicesService.IndexShardCacheEntity(indexShard))) + ); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -228,26 +256,26 @@ public void testEviction() throws Exception { DirectoryReader reader = 
OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader secondLoader = new Loader(secondReader, 0); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); - size = requestCacheStats.stats().getMemorySize(); + size = indexShard.requestCache().stats().getMemorySize(); IOUtils.close(reader, secondReader, writer, dir, cache); } + IndexShard indexShard = createIndex("test1").getShard(0); IndicesRequestCache cache = new IndicesRequestCache( - Settings.builder().put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.getBytes() + 1 + "b").build() + Settings.builder().put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.getBytes() + 1 + "b").build(), + (shardId -> Optional.of(new IndicesService.IndexShardCacheEntity(indexShard))) ); - AtomicBoolean indexShard = new AtomicBoolean(true); - ShardRequestCache requestCacheStats = new ShardRequestCache(); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -255,36 +283,44 @@ public void testEviction() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader secondLoader = new Loader(secondReader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); DirectoryReader thirdReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity thirddEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader thirdLoader = new Loader(thirdReader, 0); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = 
cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); - logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); + logger.info("Memory size: {}", indexShard.requestCache().stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); assertEquals("baz", value3.streamInput().readString()); assertEquals(2, cache.count()); - assertEquals(1, requestCacheStats.stats().getEvictions()); + assertEquals(1, indexShard.requestCache().stats().getEvictions()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); } public void testClearAllEntityIdentity() throws Exception { - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); - AtomicBoolean indexShard = new AtomicBoolean(true); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY, (shardId -> { + IndexService indexService = null; + try { + indexService = indicesService.indexServiceSafe(shardId.getIndex()); + } catch (IndexNotFoundException ex) { + return Optional.empty(); + } + return Optional.of(new IndicesService.IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); - ShardRequestCache requestCacheStats = new ShardRequestCache(); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -292,36 +328,39 @@ public void testClearAllEntityIdentity() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader secondLoader = new Loader(secondReader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); DirectoryReader thirdReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - AtomicBoolean differentIdentity = new AtomicBoolean(true); - TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity); + IndicesService.IndexShardCacheEntity thirddEntity = new IndicesService.IndexShardCacheEntity(createIndex("test1").getShard(0)); Loader thirdLoader = new Loader(thirdReader, 0); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); - logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); + logger.info("Memory size: {}", indexShard.requestCache().stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, 
thirdReader, termBytes); assertEquals("baz", value3.streamInput().readString()); assertEquals(3, cache.count()); - final long hitCount = requestCacheStats.stats().getHitCount(); + RequestCacheStats requestCacheStats = entity.stats().stats(); + requestCacheStats.add(thirddEntity.stats().stats()); + final long hitCount = requestCacheStats.getHitCount(); // clear all for the indexShard identity even though it's still open cache.clear(randomFrom(entity, secondEntity)); cache.cleanCache(); assertEquals(1, cache.count()); // third has not been validated since it's a different identity value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); - assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount()); + requestCacheStats = entity.stats().stats(); + requestCacheStats.add(thirddEntity.stats().stats()); + assertEquals(hitCount + 1, requestCacheStats.getHitCount()); assertEquals("baz", value3.streamInput().readString()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); @@ -365,8 +404,17 @@ public BytesReference get() { } public void testInvalidate() throws Exception { - ShardRequestCache requestCacheStats = new ShardRequestCache(); - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY, (shardId -> { + IndexService indexService = null; + try { + indexService = indicesService.indexServiceSafe(shardId.getIndex()); + } catch (IndexNotFoundException ex) { + return Optional.empty(); + } + return Optional.of(new IndicesService.IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -374,13 +422,13 @@ public void testInvalidate() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - AtomicBoolean indexShard = new AtomicBoolean(true); // initial cache - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + ShardRequestCache requestCacheStats = entity.stats(); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -388,10 +436,11 @@ public void testInvalidate() throws Exception { assertEquals(1, cache.count()); // cache hit - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = entity.stats(); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -401,11 +450,12 @@ public void testInvalidate() throws Exception { assertEquals(1,
cache.numRegisteredCloseListeners()); // load again after invalidate - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); cache.invalidate(entity, reader, termBytes); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = entity.stats(); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -418,7 +468,7 @@ public void testInvalidate() throws Exception { if (randomBoolean()) { reader.close(); } else { - indexShard.set(false); // closed shard but reader is still open + indexShard.close("test", true, true); // closed shard but reader is still open cache.clear(entity); } cache.cleanCache(); @@ -433,22 +483,25 @@ public void testInvalidate() throws Exception { } public void testEqualsKey() throws IOException { - AtomicBoolean trueBoolean = new AtomicBoolean(true); - AtomicBoolean falseBoolean = new AtomicBoolean(false); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(); IndexWriter writer = new IndexWriter(dir, config); - IndexReader reader1 = DirectoryReader.open(writer); - IndexReader.CacheKey rKey1 = reader1.getReaderCacheHelper().getKey(); + ShardId shardId = new ShardId("foo", "bar", 1); + ShardId shardId1 = new ShardId("foo1", "bar1", 2); + IndexReader reader1 = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), shardId); + String rKey1 = ((OpenSearchDirectoryReader) reader1).getDelegatingCacheHelper().getDelegatingCacheKey().getId(); writer.addDocument(new Document()); - IndexReader reader2 = DirectoryReader.open(writer); - IndexReader.CacheKey rKey2 = reader2.getReaderCacheHelper().getKey(); + IndexReader reader2 = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), shardId); + String rKey2 = ((OpenSearchDirectoryReader) reader2).getDelegatingCacheHelper().getDelegatingCacheKey().getId(); IOUtils.close(reader1, reader2, writer, dir); - IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey2, new TestBytesReference(1)); - IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(2)); + IndexShard indexShard = mock(IndexShard.class); + when(indexShard.state()).thenReturn(IndexShardState.STARTED); + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(shardId, new TestBytesReference(1), rKey1); + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(shardId, new TestBytesReference(1), rKey1); + IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(shardId1, new TestBytesReference(1), rKey1); + IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(shardId, new TestBytesReference(1), rKey2); + IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(shardId, new TestBytesReference(2), rKey2); String s = "Some other random object"; 
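Editor's note: an illustrative sketch (not part of the patch) of the contract the assertions below exercise — two keys are equal only when shardId, request bytes, and readerCacheKeyId all agree; the helper name is hypothetical:

    private static void assertKeysEqual(IndicesRequestCache.Key a, IndicesRequestCache.Key b) {
        assertEquals(a, b);
        assertEquals(a.hashCode(), b.hashCode()); // equal keys must also hash identically
    }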
assertEquals(key1, key1); assertEquals(key1, key2); @@ -459,6 +512,29 @@ public void testEqualsKey() throws IOException { assertNotEquals(key1, key5); } + public void testSerializationDeserializationOfCacheKey() throws Exception { + TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); + BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); + IndexService indexService = createIndex("test"); + IndexShard indexShard = indexService.getShard(0); + IndicesService.IndexShardCacheEntity shardCacheEntity = new IndicesService.IndexShardCacheEntity(indexShard); + String readerCacheKeyId = UUID.randomUUID().toString(); + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(indexShard.shardId(), termBytes, readerCacheKeyId); + BytesReference bytesReference = null; + try (BytesStreamOutput out = new BytesStreamOutput()) { + key1.writeTo(out); + bytesReference = out.bytes(); + } + StreamInput in = bytesReference.streamInput(); + + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(in); + + assertEquals(readerCacheKeyId, key2.readerCacheKeyId); + assertEquals(((IndexShard) shardCacheEntity.getCacheIdentity()).shardId(), key2.shardId); + assertEquals(termBytes, key2.value); + + } + private class TestBytesReference extends AbstractBytesReference { int dummyValue; @@ -509,34 +585,4 @@ public boolean isFragment() { return false; } } - - private class TestEntity extends AbstractIndexShardCacheEntity { - private final AtomicBoolean standInForIndexShard; - private final ShardRequestCache shardRequestCache; - - private TestEntity(ShardRequestCache shardRequestCache, AtomicBoolean standInForIndexShard) { - this.standInForIndexShard = standInForIndexShard; - this.shardRequestCache = shardRequestCache; - } - - @Override - protected ShardRequestCache stats() { - return shardRequestCache; - } - - @Override - public boolean isOpen() { - return standInForIndexShard.get(); - } - - @Override - public Object getCacheIdentity() { - return standInForIndexShard; - } - - @Override - public long ramBytesUsed() { - return 42; - } - } } diff --git a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java index add1dfc348a8b..8a00cd2db21c9 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java @@ -44,18 +44,15 @@ import org.apache.lucene.search.Weight; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.routing.allocation.DiskThresholdSettings; -import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexService; import org.opensearch.index.engine.Engine; import org.opensearch.index.shard.IndexShard; -import org.opensearch.indices.IndicesRequestCache.Key; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.node.MockNode; import org.opensearch.node.Node; @@ -373,15 +370,12 @@ public void testCloseWhileOngoingRequestUsesRequestCache() throws Exception { assertEquals(1, indicesService.indicesRefCount.refCount()); assertEquals(0L, 
cache.count()); - IndicesRequestCache.CacheEntity cacheEntity = new IndicesRequestCache.CacheEntity() { + IndicesService.IndexShardCacheEntity cacheEntity = new IndicesService.IndexShardCacheEntity(shard) { @Override public long ramBytesUsed() { return 42; } - @Override - public void onCached(Key key, BytesReference value) {} - @Override public boolean isOpen() { return true; @@ -389,17 +383,8 @@ public boolean isOpen() { @Override public Object getCacheIdentity() { - return this; + return shard; } - - @Override - public void onHit() {} - - @Override - public void onMiss() {} - - @Override - public void onRemoval(RemovalNotification<Key, BytesReference> notification) {} }; cache.getOrCompute(cacheEntity, () -> new BytesArray("bar"), searcher.getDirectoryReader(), new BytesArray("foo")); assertEquals(1L, cache.count()); diff --git a/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java b/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java index 6f36d22b7e17b..2424e38636466 100644 --- a/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java +++ b/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java @@ -34,6 +34,8 @@ import org.opensearch.action.admin.indices.stats.CommonStats; import org.opensearch.action.search.SearchRequestStats; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchTestCase; @@ -46,7 +48,8 @@ public class NodeIndicesStatsTests extends OpenSearchTestCase { public void testInvalidLevel() { CommonStats oldStats = new CommonStats(); - SearchRequestStats requestStats = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats requestStats = new SearchRequestStats(clusterSettings); final NodeIndicesStats stats = new NodeIndicesStats(oldStats, Collections.emptyMap(), requestStats); final String level = randomAlphaOfLength(16); final ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("level", level)); diff --git a/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java index 8358dadf9cc3a..be035bc6ef7ea 100644 --- a/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java @@ -95,6 +95,12 @@ public void setTestIngestDocument() { ingestDocument = new IngestDocument("index", "id", null, null, null, document); } + public void testSelfReferencingSource() { + Map<String, Object> value = new HashMap<>(); + value.put("foo", value); + expectThrows(IllegalArgumentException.class, () -> IngestDocument.deepCopyMap(value)); + } + public void testSimpleGetFieldValue() { assertThat(ingestDocument.getFieldValue("foo", String.class), equalTo("bar")); assertThat(ingestDocument.getFieldValue("int", Integer.class), equalTo(123)); diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java index 1ae201666bc54..d68b0911d3d01 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java @@ -8,10 +8,13 @@ package org.opensearch.search.aggregations; +import
org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.opensearch.index.engine.Engine; import org.opensearch.search.aggregations.bucket.global.GlobalAggregator; import org.opensearch.search.internal.ContextIndexSearcher; import org.opensearch.search.profile.query.CollectorResult; @@ -20,14 +23,17 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.List; +import java.util.concurrent.ExecutorService; import org.mockito.ArgumentMatchers; import static org.mockito.ArgumentMatchers.nullable; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public class AggregationProcessorTests extends AggregationSetupTests { private final AggregationProcessor testAggregationProcessor = new ConcurrentAggregationProcessor(); @@ -152,9 +158,31 @@ private void testPostProcessCommon( globalCollectors.add(context.queryCollectorManagers().get(GlobalAggCollectorManager.class).newCollector()); } } - final ContextIndexSearcher testSearcher = mock(ContextIndexSearcher.class); final IndexSearcher.LeafSlice[] slicesToReturn = new IndexSearcher.LeafSlice[numSlices]; - when(testSearcher.getSlices()).thenReturn(slicesToReturn); + + // Build a ContextIndexSearcher that stubs slices to return slicesToReturn. slices() is protected in IndexSearcher, + // so this builds a real object. The DirectoryReader fetched to build the object is not used for any searches.
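Editor's note: a minimal sketch (not part of the patch) of the override-then-spy workaround used below, reduced to plain Lucene types — because IndexSearcher.slices(...) is protected, Mockito's when(...) cannot stub it on a mock, so the test subclasses a real searcher and overrides the method directly:

    IndexSearcher stubbed = new IndexSearcher(reader) {
        @Override
        protected LeafSlice[] slices(List<LeafReaderContext> leaves) {
            return new LeafSlice[0]; // canned slices, independent of the actual leaves
        }
    };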
+ final DirectoryReader reader; + try (Engine.Searcher searcher = context.indexShard().acquireSearcher("test")) { + reader = searcher.getDirectoryReader(); + } + ContextIndexSearcher testSearcher = spy( + new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), + randomBoolean(), + mock(ExecutorService.class), + context + ) { + @Override + protected LeafSlice[] slices(List<LeafReaderContext> leaves) { + return slicesToReturn; + } + } + ); + ((TestSearchContext) context).setSearcher(testSearcher); AggregationCollectorManager collectorManager; if (expectedNonGlobalAggsPerSlice > 0) { @@ -164,8 +192,8 @@ private void testPostProcessCommon( if (expectedGlobalAggs > 0) { collectorManager = (AggregationCollectorManager) context.queryCollectorManagers().get(GlobalAggCollectorManager.class); ReduceableSearchResult result = collectorManager.reduce(globalCollectors); - when(testSearcher.search(nullable(Query.class), ArgumentMatchers.<CollectorManager<Collector, ReduceableSearchResult>>any())) - .thenReturn(result); + doReturn(result).when(testSearcher) + .search(nullable(Query.class), ArgumentMatchers.<CollectorManager<Collector, ReduceableSearchResult>>any()); } assertTrue(context.queryResult().hasAggs()); if (withProfilers) { diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java index 9358fa568db6a..01e259f84660d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; @@ -356,7 +357,7 @@ private Comparator<ScoreDoc> sortFieldsComparator(SortField[] sortFields) { FieldComparator[] comparators = new FieldComparator[sortFields.length]; for (int i = 0; i < sortFields.length; i++) { // Values passed to getComparator shouldn't matter - comparators[i] = sortFields[i].getComparator(0, false); + comparators[i] = sortFields[i].getComparator(0, Pruning.NONE); } return (lhs, rhs) -> { FieldDoc l = (FieldDoc) lhs; diff --git a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java index b1f70dfce176c..a707c8b34e0a4 100644 --- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java @@ -393,8 +393,8 @@ public void testGetSlicesWithNonNullExecutorButCSDisabled() throws Exception { null, searchContext ); - // Case 1: Verify getSlices return null when concurrent segment search is disabled - assertNull(searcher.getSlices()); + // Case 1: Verify getSlices returns a single default slice (not null) when concurrent segment search is disabled + assertEquals(1, searcher.getSlices().length); // Case 2: Verify the slice count when custom max slice computation is used searcher = new ContextIndexSearcher( diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java index 39126a607f968..d0e01c5461c79 100644 ---
a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java @@ -69,6 +69,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; @@ -659,7 +660,7 @@ public void testIndexSortScrollOptimization() throws Exception { @SuppressWarnings("unchecked") FieldComparator<Object> comparator = (FieldComparator<Object>) searchSortAndFormat.sort.getSort()[i].getComparator( 1, - false + Pruning.NONE ); int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]); if (cmp == 0) { diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index 48ac2d3b5a804..6af04e15acef0 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -1086,7 +1087,7 @@ public void testIndexSortScrollOptimization() throws Exception { @SuppressWarnings("unchecked") FieldComparator<Object> comparator = (FieldComparator<Object>) searchSortAndFormat.sort.getSort()[i].getComparator( i, - randomBoolean() + randomFrom(Pruning.values()) ); int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]); if (cmp == 0) { diff --git a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java index cc5c205f7f9fe..53c07674f5bd7 100644 --- a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; @@ -279,7 +280,7 @@ public SortField.Type reducedType() { } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { return null; } diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 9fe1f8294fc74..9bb1f51c51cf6 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -90,7 +90,7 @@ import org.opensearch.action.search.SearchExecutionStatsCollector; import org.opensearch.action.search.SearchPhaseController; import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchRequestSlowLog; +import
org.opensearch.action.search.SearchRequestOperationsCompositeListenerFactory; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchTransportService; import org.opensearch.action.search.TransportSearchAction; @@ -2285,6 +2285,8 @@ public void onFailure(final Exception e) { writableRegistry(), searchService::aggReduceContextBuilder ); + SearchRequestOperationsCompositeListenerFactory searchRequestOperationsCompositeListenerFactory = + new SearchRequestOperationsCompositeListenerFactory(); actions.put( SearchAction.INSTANCE, new TransportSearchAction( @@ -2310,9 +2312,8 @@ public void onFailure(final Exception e) { List.of(), client ), - null, - new SearchRequestSlowLog(clusterService), - NoopMetricsRegistry.INSTANCE + NoopMetricsRegistry.INSTANCE, + searchRequestOperationsCompositeListenerFactory ) ); actions.put( diff --git a/server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java b/server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java new file mode 100644 index 0000000000000..bfa0d566aabd7 --- /dev/null +++ b/server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.tasks; + +import org.opensearch.test.OpenSearchTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; + +public class TaskThreadContextStatePropagatorTests extends OpenSearchTestCase { + private final TaskThreadContextStatePropagator taskThreadContextStatePropagator = new TaskThreadContextStatePropagator(); + + public void testTransient() { + Map<String, Object> transientHeader = new HashMap<>(); + transientHeader.put(TASK_ID, "t_1"); + Map<String, Object> transientPropagatedHeader = taskThreadContextStatePropagator.transients(transientHeader, false); + assertEquals("t_1", transientPropagatedHeader.get(TASK_ID)); + } + + public void testTransientForSystemContext() { + Map<String, Object> transientHeader = new HashMap<>(); + transientHeader.put(TASK_ID, "t_1"); + Map<String, Object> transientPropagatedHeader = taskThreadContextStatePropagator.transients(transientHeader, true); + assertEquals("t_1", transientPropagatedHeader.get(TASK_ID)); + } +} diff --git a/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java b/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java index ee816aa5f596d..bf11bcaf39a96 100644 --- a/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java +++ b/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java @@ -252,4 +252,20 @@ public void run() { assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue()))); assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue())); } + + public void testSpanNotPropagatedToChildSystemThreadContext() { + final Span span = tracer.startSpan(SpanCreationContext.internal().name("test")); + + try (SpanScope scope = tracer.withSpanInScope(span)) { + try (StoredContext ignored = threadContext.stashContext()) {
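+                // The stashed child context still carries the parent span; marking the thread as a
+                // system context below drops transient state so spans do not leak into system operations.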
assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue()))); + assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(span)); + threadContext.markAsSystemContext(); + assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue())); + } + } + + assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue()))); + assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue())); + } } diff --git a/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java b/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java index ce862d2b69d8f..fb9db9a578b4a 100644 --- a/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java +++ b/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java @@ -102,6 +102,11 @@ public static AliasActions randomAliasAction(boolean useStringAsFilter) { action.isHidden(randomBoolean()); } } + if (action.actionType() == AliasActions.Type.REMOVE) { + if (randomBoolean()) { + action.mustExist(randomBoolean()); + } + } return action; }
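Editor's note: a hedged usage sketch of the mustExist flag this generator now randomizes, using the IndicesAliasesRequest.AliasActions builder (the index and alias names are illustrative):

    AliasActions removeAction = AliasActions.remove()
        .index("logs-2024")
        .alias("logs-current")
        .mustExist(true); // with must_exist, removing a missing alias fails instead of silently succeeding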