diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 6a5db93053e3b..793f9cc7125ee 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -28,3 +28,4 @@ BWC_VERSION:
   - "2.11.1"
   - "2.11.2"
   - "2.12.0"
+  - "2.13.0"
diff --git a/.github/ISSUE_TEMPLATE/meta.yml b/.github/ISSUE_TEMPLATE/meta.yml
new file mode 100644
index 0000000000000..0ef42688474c3
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/meta.yml
@@ -0,0 +1,28 @@
+name: ✨ Meta Issue
+description: An issue that collects other issues together to describe a larger project or activity.
+title: '[META] '
+labels: ['Meta, untriaged']
+body:
+  - type: textarea
+    attributes:
+      label: Please describe the end goal of this project
+      description: A clear and concise description of this project/endeavor. This should be understandable to someone with no context.
+      placeholder: Ex. Views is a way to project indices in OpenSearch, these views act as a focal point for describing the underlying data and how the data is accessed. It allows for restricting the scope and filtering the response consistently.
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Supporting References
+      description: Please provide links (and descriptions!) to RFCs, design docs, etc
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Issues
+      description: Please create a list of issues that should be tracked by this meta issue, including a short description. The purpose is to provide everyone on the project with an "at a glance" update of the state of the work being tracked. If you use the format "- [ ]" it will put your list into a checklist.
+      placeholder: Ex. - [ ] https://github.com/opensearch-project/security/issues/3888 Add views to the cluster metadata schema
+    validations:
+      required: true
+
+
diff --git a/.github/workflows/create-documentation-issue.yml b/.github/workflows/create-documentation-issue.yml
index df63847f8afca..b45e053cc25c2 100644
--- a/.github/workflows/create-documentation-issue.yml
+++ b/.github/workflows/create-documentation-issue.yml
@@ -29,7 +29,7 @@ jobs:
     - name: Create Issue From File
       id: create-issue
-      uses: peter-evans/create-issue-from-file@v4
+      uses: peter-evans/create-issue-from-file@v5
       with:
         title: Add documentation related to new feature
         content-filepath: ./ci/documentation/issue.md
diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml
index 2714d45bd108f..61962c91b4903 100644
--- a/.github/workflows/links.yml
+++ b/.github/workflows/links.yml
@@ -13,7 +13,7 @@ jobs:
     - uses: actions/checkout@v4
     - name: lychee Link Checker
       id: lychee
-      uses: lycheeverse/lychee-action@v1.9.0
+      uses: lycheeverse/lychee-action@v1.9.1
      with:
        args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes
        fail: true
diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml
index 34e8f57cc1878..fdc2bf16937b4 100644
--- a/.github/workflows/maintainer-approval.yml
+++ b/.github/workflows/maintainer-approval.yml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: find-maintainers
-        uses: actions/github-script@v7
+        uses: actions/github-script@v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          result-encoding: string
diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml
index c305818bdb0a9..83bf4926a8c2d 100644
--- a/.github/workflows/triage.yml
+++ b/.github/workflows/triage.yml
@@ -9,7 +9,7 @@ jobs:
     if: github.repository == 'opensearch-project/OpenSearch'
     runs-on: ubuntu-latest
    steps:
-      - uses: actions/github-script@v7
+      - uses: actions/github-script@v7.0.1
        with:
          script: |
            const { issue, repository } = context.payload;
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index a20c671c137b2..be2a89ac931e9 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -1,28 +1,32 @@
 name: Increment Version

 on:
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: 'the tag'
+        required: true
+        type: string
   push:
     tags:
       - '*.*.*'

-permissions: {}
+permissions:
+  contents: write
+  issues: write
+  pull-requests: write
+
 jobs:
   build:
     if: github.repository == 'opensearch-project/OpenSearch'
     runs-on: ubuntu-latest
     steps:
-      - name: GitHub App token
-        id: github_app_token
-        uses: tibdex/github-app-token@v2.1.0
-        with:
-          app_id: ${{ secrets.APP_ID }}
-          private_key: ${{ secrets.APP_PRIVATE_KEY }}
-          installation_id: 22958780
-
-      - uses: actions/checkout@v4
-      - name: Fetch Tag and Version Information
+      - name: Fetch tag and version information
        run: |
          TAG=$(echo "${GITHUB_REF#refs/*/}")
+          if [ -n "${{ github.event.inputs.tag }}" ]; then
+            TAG=${{ github.event.inputs.tag }}
+          fi
          CURRENT_VERSION_ARRAY=($(echo "$TAG" | tr . '\n'))
          BASE=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:2}")
          BASE_X=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:1}.x")
@@ -44,24 +48,22 @@ jobs:
          echo "NEXT_VERSION=$NEXT_VERSION" >> $GITHUB_ENV
          echo "NEXT_VERSION_UNDERSCORE=$NEXT_VERSION_UNDERSCORE" >> $GITHUB_ENV
          echo "NEXT_VERSION_ID=$NEXT_VERSION_ID" >> $GITHUB_ENV
+      - uses: actions/checkout@v4
        with:
          ref: ${{ env.BASE }}
-          token: ${{ steps.github_app_token.outputs.token }}

-      - name: Increment Patch Version
-        run: |
-          echo Incrementing $CURRENT_VERSION to $NEXT_VERSION
-          echo " - \"$CURRENT_VERSION\"" >> .ci/bwcVersions
-          sed -i "s/opensearch = $CURRENT_VERSION/opensearch = $NEXT_VERSION/g" buildSrc/version.properties
-          echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
-          sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
-          sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" libs/core/src/main/java/org/opensearch/Version.java
+      - name: Increment Patch Version on Major.Minor branch
+        uses: peternied/opensearch-core-version-updater@v1
+        with:
+          previous-version: ${{ env.CURRENT_VERSION }}
+          new-version: ${{ env.NEXT_VERSION }}
+          update-current: true

-      - name: Create Pull Request
+      - name: Create PR for BASE
+        id: base_pr
        uses: peter-evans/create-pull-request@v5
        with:
-          token: ${{ steps.github_app_token.outputs.token }}
          base: ${{ env.BASE }}
          branch: 'create-pull-request/patch-${{ env.BASE }}'
          commit-message: Increment version to ${{ env.NEXT_VERSION }}
@@ -76,19 +78,18 @@ jobs:
      - uses: actions/checkout@v4
        with:
          ref: ${{ env.BASE_X }}
-          token: ${{ steps.github_app_token.outputs.token }}

-      - name: Add bwc version to .X branch
-        run: |
-          echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
-          sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
-          echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
-          sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+      - name: Add Patch Version on Major.X branch
+        uses: peternied/opensearch-core-version-updater@v1
+        with:
+          previous-version: ${{ env.CURRENT_VERSION }}
+          new-version: ${{ env.NEXT_VERSION }}
+          update-current: false

-      - name: Create Pull Request
+      - name: Create PR for BASE_X
+        id: base_x_pr
        uses: peter-evans/create-pull-request@v5
        with:
-          token: ${{ steps.github_app_token.outputs.token }}
          base: ${{ env.BASE_X }}
          branch: 'create-pull-request/patch-${{ env.BASE_X }}'
          commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -103,19 +104,18 @@ jobs:
      - uses: actions/checkout@v4
        with:
          ref: main
-          token: ${{ steps.github_app_token.outputs.token }}

-      - name: Add bwc version to main branch
-        run: |
-          echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
-          sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
-          echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
-          sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+      - name: Add Patch Version on main branch
+        uses: peternied/opensearch-core-version-updater@v1
+        with:
+          previous-version: ${{ env.CURRENT_VERSION }}
+          new-version: ${{ env.NEXT_VERSION }}
+          update-current: false

-      - name: Create Pull Request
+      - name: Create PR for main
+        id: main_pr
        uses: peter-evans/create-pull-request@v5
        with:
-          token: ${{ steps.github_app_token.outputs.token }}
          base: main
          branch: 'create-pull-request/patch-main'
          commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -126,3 +126,32 @@ jobs:
          title: '[AUTO] [main] Add bwc version ${{ env.NEXT_VERSION }}.'
          body: |
            I've noticed that a new tag ${{ env.TAG }} was pushed, and added a bwc version ${{ env.NEXT_VERSION }}.
+
+      - name: Create tracking issue
+        id: create-issue
+        uses: actions/github-script@v7.0.1
+        with:
+          script: |
+            const body = `
+            ### Description
+            A new version of OpenSearch was released; to prepare for the next release, new version numbers need to be updated in all active branches of development.
+
+            ### Exit Criteria
+            Review and merge the following pull requests
+            - [ ] ${{ steps.base_pr.outputs.pull-request-url }}
+            - [ ] ${{ steps.base_x_pr.outputs.pull-request-url }}
+            - [ ] ${{ steps.main_pr.outputs.pull-request-url }}
+
+            ### Additional Context
+            See project wide guidance on branching and versions [[link]](https://github.com/opensearch-project/.github/blob/main/RELEASING.md).
+            `
+            const { data: issue } = await github.rest.issues.create({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              labels: ["Build"],
+              title: "Increment version for ${{ env.NEXT_VERSION }}",
+              body: body
+            });
+            console.error(JSON.stringify(issue));
+            return issue.number;
+          result-encoding: string
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c46f5362549b8..6fdb3893145dd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,6 +15,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - GHA to verify checklist items completion in PR descriptions ([#10800](https://github.com/opensearch-project/OpenSearch/pull/10800))
 - Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) 
([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625)) - [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887)) +- [S3 Repository] Add setting to control connection count for sync client ([#12028](https://github.com/opensearch-project/OpenSearch/pull/12028)) ### Dependencies - Bump `log4j-core` from 2.18.0 to 2.19.0 @@ -84,148 +85,27 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439)) - Fix typo in API annotation check message ([11836](https://github.com/opensearch-project/OpenSearch/pull/11836)) - Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872)) +- [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035)) +- Fix Span operation names generated from RestActions ([#12005](https://github.com/opensearch-project/OpenSearch/pull/12005)) ### Security ## [Unreleased 2.x] ### Added -- [Admission control] Add Resource usage collector service and resource usage tracker ([#9890](https://github.com/opensearch-project/OpenSearch/pull/9890)) -- [Admission control] Add enhancements to FS stats to include read/write time, queue size and IO time ([#10541](https://github.com/opensearch-project/OpenSearch/pull/10541)) -- [Remote cluster state] Change file names for remote cluster state ([#10557](https://github.com/opensearch-project/OpenSearch/pull/10557)) -- [Search Pipelines] Add request-scoped state shared between processors (and three new processors) ([#9405](https://github.com/opensearch-project/OpenSearch/pull/9405)) -- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) -- [Remote Store] Add repository stats for remote store([#10567](https://github.com/opensearch-project/OpenSearch/pull/10567)) -- [Remote cluster state] Upload global metadata in cluster state to remote store([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404)) -- [Remote cluster state] Download functionality of global metadata from remote store ([#10535](https://github.com/opensearch-project/OpenSearch/pull/10535)) -- [Remote cluster state] Restore global metadata from remote store when local state is lost after quorum loss ([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404)) -- [Remote cluster state] Make index and global metadata upload timeout dynamic cluster settings ([#10814](https://github.com/opensearch-project/OpenSearch/pull/10814)) -- Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) -- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) -- Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) -- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275)) -- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753)) -- [Remote cluster state] Restore cluster state version during remote state auto restore 
([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) -- Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247)) -- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248)) -- Introduce ConcurrentQueryProfiler to profile query using concurrent segment search path and support concurrency during rewrite and create weight ([10352](https://github.com/opensearch-project/OpenSearch/pull/10352)) -- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679)) -- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618)) -- [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672)) -- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210)) -- Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024)) -- Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650)) -- Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040)) -- Allowing pipeline processors to access index mapping info by passing ingest service ref as part of the processor factory parameters ([#10307](https://github.com/opensearch-project/OpenSearch/pull/10307)) -- Make number of segment metadata files in remote segment store configurable ([#11329](https://github.com/opensearch-project/OpenSearch/pull/11329)) -- Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317)) -- Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069)) -- [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175)) -- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378)) -- Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170)) -- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) -- Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) -- Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) -- Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- Add support for dependencies in plugin descriptor properties with semver range ([#11441](https://github.com/opensearch-project/OpenSearch/pull/11441)) +- Add community_id ingest processor ([#12121](https://github.com/opensearch-project/OpenSearch/pull/12121)) +- 
Introduce query level setting `index.query.max_nested_depth` limiting nested queries ([#3268](https://github.com/opensearch-project/OpenSearch/issues/3268))

### Dependencies
-- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
-- Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
-- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796))
-- Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298))
-- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630))
-- Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506))
-- Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508))
-- Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639))
-- Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297))
-- Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635))
-- Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637), [#11632](https://github.com/opensearch-project/OpenSearch/pull/11632))
-- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.1 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270), [#11695](https://github.com/opensearch-project/OpenSearch/pull/11695))
-- Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504))
-- Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171))
-- Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271))
-- Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273))
-- Bump `netty` from 4.1.100.Final to 4.1.104.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775))
-- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692))
-- Bump 
`com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861)) -- Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344)) -- Bump `reactor-netty-core` from 1.1.12 to 1.1.13 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350)) -- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.1 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629)) -- Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447)) -- Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450)) -- Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445)) -- Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448)) -- Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521)) -- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539)) -- Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555)) -- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556)) -- Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557)) -- Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671)) -- Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996)) -- Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) -- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) -- Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) -- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) -- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.0 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795)) -- Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) ### Changed -- Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) -- Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036)) -- Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562)), ([#11751](https://github.com/opensearch-project/OpenSearch/pull/11751)) -- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107)) -- Search pipelines now support asynchronous request and response 
processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598)) -- [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524)) -- [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642)) -- Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395)) -- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558)) -- Add instrumentation for indexing in transport bulk action and transport shard bulk action. ([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273)) -- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895), [#11607](https://github.com/opensearch-project/OpenSearch/pull/11607)) -- Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057)) -- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023)) -- Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083)) -- Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087)) -- Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528)) -- Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209)) -- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312)) -- Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308)) -- Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362)) -- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) -- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) -- Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) -- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582)) -- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732)) -- Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526)) -- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890)) ### Deprecated ### Removed -- Remove deprecated classes for Rounding ([#10956](https://github.com/opensearch-project/OpenSearch/issues/10956)) ### Fixed -- Fix failure in dissect ingest processor parsing empty 
brackets ([#9225](https://github.com/opensearch-project/OpenSearch/pull/9255)) -- Fix `class_cast_exception` when passing int to `_version` and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101)) -- Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([10370](https://github.com/opensearch-project/OpenSearch/pull/10370)) -- Fix some test methods in SimulatePipelineRequestParsingTests never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496)) -- Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737)) -- Delegating CachingWeightWrapper#count to internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543)) -- Fix per request latency last phase not tracked ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934)) -- Fix SuggestSearch.testSkipDuplicates by forcing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068)) -- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873)) -- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249)) -- Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316)) -- Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369)) -- Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) -- Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425)) -- Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) - Fix for deserilization bug in weighted round-robin metadata ([#11679](https://github.com/opensearch-project/OpenSearch/pull/11679)) -- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485)) -- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) -- Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) -- Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) -- Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711)) -- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490)) -- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815)) ### Security diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 21adbb0305ab1..f0851fc58d444 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -348,7 +348,7 @@ Please follow these formatting guidelines: * Wildcard imports (`import foo.bar.baz.*`) are forbidden and will 
cause the build to fail.
 * If *absolutely* necessary, you can disable formatting for regions of code with the `// tag::NAME` and `// end::NAME` directives, but note that these are intended for use in documentation, so please make it clear what you have done, and only do this where the benefit clearly outweighs the decrease in consistency.
 * Note that JavaDoc and block comments i.e. `/* ... */` are not formatted, but line comments i.e `// ...` are.
-* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, if might get called out in PR reviews as something to change.
+* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, it might get called out in PR reviews as something to change.

 ## Adding Dependencies
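As an aside on the `foo == false` rule in the hunk above, here is a minimal side-by-side sketch of the two forms (hypothetical names, not part of the patch):

```java
/** Standalone sketch of the negative-boolean style convention. */
class NegationStyleExample {
    static void maybeFlush(boolean closed) {
        if (!closed) {                 // discouraged: the lone '!' is easy to miss
            System.out.println("flush");
        }
        if (closed == false) {         // preferred: the negation reads explicitly
            System.out.println("flush");
        }
    }

    public static void main(String[] args) {
        maybeFlush(false);
    }
}
```

The explicit comparison stays legible even inside long compound conditions, which is the rationale the guide gives.

diff --git a/NOTICE.txt b/NOTICE.txt
index 6c7dc983f8c7a..d463b8f28561f 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -10,3 +10,6 @@ Foundation (http://www.apache.org/).

 This product includes software developed by
 Joda.org (http://www.joda.org/).
+
+This product includes software developed by
+Morten Haraldsen (ethlo) (https://github.com/ethlo) under the Apache License, version 2.0.
diff --git a/TRIAGING.md b/TRIAGING.md
new file mode 100644
index 0000000000000..47cb44a4f5ba2
--- /dev/null
+++ b/TRIAGING.md
@@ -0,0 +1,83 @@
+<img src="https://opensearch.org/assets/img/opensearch-logo-themed.svg" height="64px">
+
+The maintainers of the OpenSearch Repo seek to promote an inclusive and engaged community of contributors. In order to facilitate this, weekly triage meetings are open-to-all and attendance is encouraged for anyone who hopes to contribute, discuss an issue, or learn more about the project. To learn more about contributing to the OpenSearch Repo visit the [Contributing](./CONTRIBUTING.md) documentation.
+
+### Do I need to attend for my issue to be addressed/triaged?
+
+Attendance is not required for your issue to be triaged or addressed. If not accepted, the issue will be updated with a comment for next steps. All new issues are triaged weekly.
+
+You can track if your issue was triaged by watching your GitHub notifications for updates.
+
+### What happens if my issue does not get covered this time?
+
+Each meeting we seek to address all new issues. However, should we run out of time before your issue is discussed, you are always welcome to attend the next meeting or to follow up on the issue post itself.
+
+### How do I join the Triage meeting?
+
+Meetings are hosted regularly at 10:00a - 10:55a Central Time every Wednesday and can be joined via [Chime](https://aws.amazon.com/chime/), with this [meeting link](https://chime.aws/1988437365).
+
+After joining the Chime meeting, you can enable your video / voice to join the discussion. If you do not have a webcam or microphone available, you can still join in via the text chat.
+
+If you have an issue you'd like to bring forth please prepare a link to the issue so it can be presented and viewed by everyone in the meeting.
+
+### Is there an agenda for each week?
+
+Yes, each 55-minute meeting follows this structure: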
+1. **Initial Gathering:** Feel free to turn on your video and engage in informal conversation. Shortly, a volunteer triage [facilitator](#what-is-the-role-of-the-facilitator) will begin the meeting and share their screen.
+2. **Record Attendees:** The facilitator will request attendees to share their GitHub profile links. These links will be collected and assembled into a [tag](#how-do-triage-facilitators-tag-comments-during-the-triage-meeting) to annotate comments during the meeting.
+3. **Announcements:** Any announcements will be made at the beginning of the meeting.
+4. **Review of New Issues:** We start by reviewing all untriaged [issues](https://github.com/search?q=label%3Auntriaged+is%3Aopen++repo%3Aopensearch-project%2FOpenSearch+&type=issues&ref=advsearch&s=created&o=desc) for the OpenSearch repo.
+5. **Attendee Requests:** An opportunity for any meeting member to request consideration of an issue or pull request.
+6. **Open Discussion:** Attendees can bring up any topics not already covered by filed issues or pull requests.
+
+### What is the role of the facilitator?
+
+The facilitator is crucial in driving the meeting, ensuring a smooth flow of issues into OpenSearch for future contributions. They maintain the meeting's agenda, solicit input from attendees, and record outcomes using the triage tag as items are discussed.
+
+### Do I need to have already contributed to the project to attend a triage meeting?
+
+No prior contributions are required. All interested individuals are welcome and encouraged to attend. Triage meetings offer a fantastic opportunity for new contributors to understand the project and explore various contribution avenues.
+
+### What if I have an issue that is almost a duplicate, should I open a new one to be triaged?
+
+You can always open an [issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose) including one that you think may be a duplicate. If you believe your issue is similar but distinct from an existing one, you are encouraged to file it and explain the differences during the triage meeting.
+
+### What if I have follow-up questions on an issue?
+
+If you have an existing issue you would like to discuss, you can always comment on the issue itself. Alternatively, you are welcome to come to the triage meeting to discuss.
+
+### Is this meeting a good place to get help setting up features on my OpenSearch instance?
+
+While we are always happy to help the community, the best resource for implementation questions is [the OpenSearch forum](https://forum.opensearch.org/).
+
+There you can find answers to many common questions as well as speak with implementation experts.
+
+### What are the issue labels associated with triaging?
+
+There are several labels that are used to identify the 'state' of issues filed in OpenSearch.
+
+| Label | When Applied | Meaning |
+|---------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| `Untriaged` | When issues are created or re-opened. | Issues labeled as 'Untriaged' require the attention of the repository maintainers and may need to be prioritized for quicker resolution. It's crucial to keep the count of 'Untriaged' labels low to ensure all potential security issues are addressed in a timely manner. See [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) for more details on handling these issues. |
+| `Help Wanted` | Anytime. | Issues marked as 'Help Wanted' signal that they are actionable and not the current focus of the project maintainers. Community contributions are especially encouraged for these issues. |
+| `Good First Issue` | Anytime. | Issues labeled as 'Good First Issue' are small in scope and can be resolved with a single pull request. These are recommended starting points for newcomers looking to make their first contributions. |
+
+### What are the typical outcomes of a triaged issue?
+
+| Outcome | Label | Description | Canned Response |
+|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------|
+| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." |
+| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to re-open after addressing the reason." |
+| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up. | "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" |
+| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. |
+
+### Is this where I should bring up potential security vulnerabilities?
+
+Due to the sensitive nature of security vulnerabilities, please report all potential vulnerabilities directly by following the steps outlined on the [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) document.
+
+### How do triage facilitators tag comments during the triage meeting?
+
+During the triage meeting, facilitators should use the tag _[Triage - attendees [1](#Profile_link) [2](#Profile_link)]_ to indicate a collective decision. This ensures contributors know the decision came from the meeting rather than an individual and identifies participants for any follow-up queries.
+
+This tag should not be used outside triage meetings.
diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle
index 6b4634c7e791c..be4579b4e5324 100644
--- a/benchmarks/build.gradle
+++ b/benchmarks/build.gradle
@@ -84,3 +84,45 @@ spotless {
     targetExclude 'src/main/generated/**/*.java'
   }
 }
+
+if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) {
+  // Add support for incubator modules on supported Java versions.
+ run.jvmArgs += ['--add-modules=jdk.incubator.vector'] + run.classpath += files(jar.archiveFile) + run.classpath -= sourceSets.main.output + evaluationDependsOn(':libs:opensearch-common') + + sourceSets { + java20 { + java { + srcDirs = ['src/main/java20'] + } + } + } + + configurations { + java20Implementation.extendsFrom(implementation) + } + + dependencies { + java20Implementation sourceSets.main.output + java20Implementation project(':libs:opensearch-common').sourceSets.java20.output + java20AnnotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh" + } + + compileJava20Java { + targetCompatibility = JavaVersion.VERSION_20 + options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"]) + } + + jar { + metaInf { + into 'versions/20' + from sourceSets.java20.output + } + manifest.attributes('Multi-Release': 'true') + } + + // classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes + disableTasks('forbiddenApisJava20') +} diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java new file mode 100644 index 0000000000000..4e995f5a5067c --- /dev/null +++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java @@ -0,0 +1,67 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.benchmark.index.codec.fuzzy; + +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.UUIDs; +import org.opensearch.index.codec.fuzzy.FuzzySet; +import org.opensearch.index.codec.fuzzy.FuzzySetFactory; +import org.opensearch.index.codec.fuzzy.FuzzySetParameters; +import org.opensearch.index.mapper.IdFieldMapper; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +@Fork(3) +@Warmup(iterations = 2) +@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +public class FilterConstructionBenchmark { + + private List<BytesRef> items; + + @Param({ "1000000", "10000000", "50000000" }) + private int numIds; + + @Param({ "0.0511", "0.1023", "0.2047" }) + private double fpp; + + private FuzzySetFactory fuzzySetFactory; + private String fieldName; + + @Setup + public void setupIds() { + this.fieldName = IdFieldMapper.NAME; + this.items = IntStream.range(0, numIds).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList()); + FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp); + this.fuzzySetFactory = new FuzzySetFactory(Map.of(fieldName, 
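parameters));
+    }
+
+    @Benchmark
+    public FuzzySet buildFilter() throws IOException {
+        return fuzzySetFactory.createFuzzySet(items.size(), fieldName, () -> items.iterator());
+    }
+}

An aside on the `numIds`/`fpp` parameters above: they mirror standard Bloom-filter sizing, where the element count and the target false-positive probability determine the bit count and hash count. A self-contained sketch of that math, assuming a classic Bloom filter rather than the actual `FuzzySetFactory` internals:

```java
// Standard Bloom filter sizing for n elements at false-positive
// probability p: m = -n*ln(p)/(ln 2)^2 bits, k = (m/n)*ln 2 hashes.
// Illustration only; not taken from the OpenSearch FuzzySet code.
final class BloomSizing {
    static long optimalBits(long n, double p) {
        return (long) Math.ceil(-n * Math.log(p) / (Math.log(2) * Math.log(2)));
    }

    static int optimalHashes(long n, long m) {
        return Math.max(1, (int) Math.round((double) m / n * Math.log(2)));
    }

    public static void main(String[] args) {
        long n = 1_000_000;   // smallest @Param numIds above
        double p = 0.0511;    // smallest @Param fpp above
        long m = optimalBits(n, p);
        // ~6.2 bits per element here, i.e. roughly 755 KiB with k = 4
        System.out.println(m + " bits, k = " + optimalHashes(n, m));
    }
}
```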
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
new file mode 100644
index 0000000000000..383539219830e
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterLookupBenchmark {
+
+    @Param({ "50000000", "1000000" })
+    private int numItems;
+
+    @Param({ "1000000" })
+    private int searchKeyCount;
+
+    @Param({ "0.0511", "0.1023", "0.2047" })
+    private double fpp;
+
+    private FuzzySet fuzzySet;
+    private List<BytesRef> items;
+    private Random random = new Random();
+
+    @Setup
+    public void setupFilter() throws IOException {
+        String fieldName = IdFieldMapper.NAME;
+        items = IntStream.range(0, numItems).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+        FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+        fuzzySet = new FuzzySetFactory(Map.of(fieldName, parameters)).createFuzzySet(numItems, fieldName, () -> items.iterator());
+    }
+
+    @Benchmark
+    public void contains_withExistingKeys(Blackhole blackhole) throws IOException {
+        for (int i = 0; i < searchKeyCount; i++) {
+            blackhole.consume(fuzzySet.contains(items.get(random.nextInt(items.size()))) == FuzzySet.Result.MAYBE);
+        }
+    }
+
+    @Benchmark
+    public void contains_withRandomKeys(Blackhole blackhole) throws IOException {
+        for (int i = 0; i < searchKeyCount; i++) {
+            blackhole.consume(fuzzySet.contains(new BytesRef(UUIDs.base64UUID())));
+        }
+    }
+}
diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
index 4e07af452968b..3909a3f4eb8fc 100644
--- 
a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java +++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java @@ -21,7 +21,6 @@ import org.openjdk.jmh.infra.Blackhole; import java.util.Random; -import java.util.function.Supplier; @Fork(value = 3) @Warmup(iterations = 3, time = 1) @@ -83,17 +82,17 @@ public static class Options { "256" }) public Integer size; - @Param({ "binary", "linear" }) + @Param({ "binary", "linear", "btree" }) public String type; @Param({ "uniform", "skewed_edge", "skewed_center" }) public String distribution; public long[] queries; - public Supplier<Roundable> supplier; + public RoundableSupplier supplier; @Setup - public void setup() { + public void setup() throws ClassNotFoundException { Random random = new Random(size); long[] values = new long[size]; for (int i = 1; i < values.length; i++) { @@ -128,16 +127,7 @@ public void setup() { throw new IllegalArgumentException("invalid distribution: " + distribution); } - switch (type) { - case "binary": - supplier = () -> new BinarySearcher(values, size); - break; - case "linear": - supplier = () -> new BidirectionalLinearSearcher(values, size); - break; - default: - throw new IllegalArgumentException("invalid type: " + type); - } + supplier = new RoundableSupplier(type, values, size); } private static long nextPositiveLong(Random random) { diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java new file mode 100644 index 0000000000000..44ac42810996f --- /dev/null +++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.round; + +import java.util.function.Supplier; + +public class RoundableSupplier implements Supplier<Roundable> { + private final Supplier<Roundable> delegate; + + RoundableSupplier(String type, long[] values, int size) throws ClassNotFoundException { + switch (type) { + case "binary": + delegate = () -> new BinarySearcher(values, size); + break; + case "linear": + delegate = () -> new BidirectionalLinearSearcher(values, size); + break; + case "btree": + throw new ClassNotFoundException("BtreeSearcher is not supported below JDK 20"); + default: + throw new IllegalArgumentException("invalid type: " + type); + } + } + + @Override + public Roundable get() { + return delegate.get(); + } +} diff --git a/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java new file mode 100644 index 0000000000000..e81c1b137bd30 --- /dev/null +++ b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java @@ -0,0 +1,36 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.round; + +import java.util.function.Supplier; + +public class RoundableSupplier implements Supplier<Roundable> { + private final Supplier<Roundable> delegate; + + RoundableSupplier(String type, long[] values, int size) { + switch (type) { + case "binary": + delegate = () -> new BinarySearcher(values, size); + break; + case "linear": + delegate = () -> new BidirectionalLinearSearcher(values, size); + break; + case "btree": + delegate = () -> new BtreeSearcher(values, size); + break; + default: + throw new IllegalArgumentException("invalid type: " + type); + } + } + + @Override + public Roundable get() { + return delegate.get(); + } +} diff --git a/build.gradle b/build.gradle index 296c30391af09..6f9aa0ea9e439 100644 --- a/build.gradle +++ b/build.gradle @@ -54,8 +54,8 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.23.2" apply false - id "org.gradle.test-retry" version "1.5.4" apply false + id "com.diffplug.spotless" version "6.25.0" apply false + id "org.gradle.test-retry" version "1.5.8" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' } @@ -545,6 +545,7 @@ subprojects { includeClasses.add("org.opensearch.snapshots.SnapshotStatusApisIT") includeClasses.add("org.opensearch.test.rest.ClientYamlTestSuiteIT") includeClasses.add("org.opensearch.upgrade.DetectEsInstallationTaskTests") + includeClasses.add("org.opensearch.cluster.MinimumClusterManagerNodesIT") } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 3c846b48549fb..0562ecc6ee61b 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -118,7 +118,7 @@ dependencies { api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6' api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 'org.apache.maven:maven-model:3.9.6' - api 'com.networknt:json-schema-validator:1.0.86' + api 'com.networknt:json-schema-validator:1.2.0' api 'org.jruby.jcodings:jcodings:1.0.58' api 'org.jruby.joni:joni:2.2.1' api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}" diff --git a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java index 8ecfbf40b6c62..0c901b9726992 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java @@ -94,7 +94,7 @@ public static void configureRepositories(Project project) { String revision = matcher.group(1); MavenArtifactRepository luceneRepo = repos.maven(repo -> { repo.setName("lucene-snapshots"); - repo.setUrl("https://artifacts.opensearch.org/snapshots/lucene/"); + repo.setUrl("https://ci.opensearch.org/ci/dbc/snapshots/lucene/"); }); repos.exclusiveContent(exclusiveRepo -> { exclusiveRepo.filter( diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index 1ad7e056b6ae6..bc44f81a81aff 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -77,9 +77,9 @@ import java.util.stream.Stream; public class DistroTestPlugin implements Plugin<Project> { - private static final String SYSTEM_JDK_VERSION = "17.0.9+9"; + private static final String SYSTEM_JDK_VERSION = "21.0.2+13"; private static final String 
SYSTEM_JDK_VENDOR = "adoptium"; - private static final String GRADLE_JDK_VERSION = "17.0.9+9"; + private static final String GRADLE_JDK_VERSION = "21.0.2+13"; private static final String GRADLE_JDK_VENDOR = "adoptium"; // all distributions used by distro tests. this is temporary until tests are per distribution diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 3813750507f18..95ae1ddb578a1 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,8 +1,8 @@ opensearch = 3.0.0 -lucene = 9.9.1 +lucene = 9.9.2 bundled_jdk_vendor = adoptium -bundled_jdk = 21.0.1+12 +bundled_jdk = 21.0.2+13 # optional dependencies spatial4j = 0.7 @@ -26,12 +26,12 @@ jakarta_annotation = 1.3.5 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.13.0 -netty = 4.1.104.Final +netty = 4.1.106.Final joda = 2.12.2 # project reactor -reactor_netty = 1.1.13 -reactor = 3.5.11 +reactor_netty = 1.1.15 +reactor = 3.5.14 # client dependencies httpclient5 = 5.2.1 @@ -55,7 +55,7 @@ bouncycastle=1.76 randomizedrunner = 2.7.1 junit = 4.13.2 hamcrest = 2.1 -mockito = 5.5.0 +mockito = 5.10.0 objenesis = 3.2 bytebuddy = 1.14.7 @@ -70,5 +70,5 @@ jzlib = 1.1.3 resteasy = 6.2.4.Final # opentelemetry dependencies -opentelemetry = 1.32.0 +opentelemetry = 1.34.1 opentelemetrysemconv = 1.23.1-alpha diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java index adddb3bda725c..f609fae4e3c81 100644 --- a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java +++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java @@ -305,6 +305,7 @@ public void shutdown() { } } + @SuppressWarnings("removal") static class SnifferThreadFactory implements ThreadFactory { private final AtomicInteger threadNumber = new AtomicInteger(1); private final String namePrefix; diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java index 88f667549f3e8..faef1441d0a02 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java @@ -51,6 +51,7 @@ protected Matcher<String> nodeNameMatcher() { return is("integTest-0"); } + @SuppressWarnings("removal") @Override protected BufferedReader openReader(Path logFile) { assumeFalse("Skipping test because it is being run against an external cluster.", diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index ededa7bff34d8..43c38c5ad0c67 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -63,7 +63,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.6.0" + id "com.netflix.nebula.ospackage-base" version "11.8.0" } void addProcessFilesTask(String type, boolean jdk) { diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml index b7ab2e1c2309b..ebffdde0f3699 100644 --- a/distribution/src/config/opensearch.yml +++ b/distribution/src/config/opensearch.yml @@ -117,12 +117,6 @@ ${path.logs} #opensearch.experimental.feature.extensions.enabled: false # # -# Gates the concurrent segment search feature. 
This feature enables concurrent segment search in a separate -# index searcher threadpool. -# -#opensearch.experimental.feature.concurrent_segment_search.enabled: false -# -# # Gates the optimization of datetime formatters caching along with change in default datetime formatter # Once there is no observed impact on performance, this feature flag can be removed. # diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java index d269603656114..9ca42ac5f4ec1 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java @@ -78,15 +78,14 @@ private void printPlugin(Environment env, Terminal terminal, Path plugin, String PluginInfo info = PluginInfo.readFromProperties(env.pluginsDir().resolve(plugin)); terminal.println(Terminal.Verbosity.SILENT, prefix + info.getName()); terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix)); - if (info.getOpenSearchVersion().equals(Version.CURRENT) == false) { + if (!PluginsService.isPluginVersionCompatible(info, Version.CURRENT)) { terminal.errorPrintln( "WARNING: plugin [" + info.getName() + "] was built for OpenSearch version " - + info.getVersion() - + " but version " + + info.getOpenSearchVersionRangesString() + + " and is not compatible with " + Version.CURRENT - + " is required" ); } } diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java index f4532f5f83cc4..c264788df20e8 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java @@ -70,8 +70,10 @@ import org.opensearch.core.util.FileSystemUtils; import org.opensearch.env.Environment; import org.opensearch.env.TestEnvironment; +import org.opensearch.semver.SemverRange; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.PosixPermissionsResetter; +import org.opensearch.test.VersionUtils; import org.junit.After; import org.junit.Before; @@ -284,6 +286,35 @@ static void writePlugin(String name, Path structure, String... additionalProps) writeJar(structure.resolve("plugin.jar"), className); } + static void writePlugin(String name, Path structure, SemverRange opensearchVersionRange, String... additionalProps) throws IOException { + String[] properties = Stream.concat( + Stream.of( + "description", + "fake desc", + "name", + name, + "version", + "1.0", + "dependencies", + "{opensearch:\"" + opensearchVersionRange + "\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ), + Arrays.stream(additionalProps) + ).toArray(String[]::new); + PluginTestUtil.writePluginProperties(structure, properties); + String className = name.substring(0, 1).toUpperCase(Locale.ENGLISH) + name.substring(1) + "Plugin"; + writeJar(structure.resolve("plugin.jar"), className); + } + + static Path createPlugin(String name, Path structure, SemverRange opensearchVersionRange, String... 
additionalProps) + throws IOException { + writePlugin(name, structure, opensearchVersionRange, additionalProps); + return writeZip(structure, null); + } + static void writePluginSecurityPolicy(Path pluginDir, String... permissions) throws IOException { StringBuilder securityPolicyContent = new StringBuilder("grant {\n "); for (String permission : permissions) { @@ -867,6 +898,32 @@ public void testInstallMisspelledOfficialPlugins() throws Exception { assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin")); } + public void testInstallPluginWithCompatibleDependencies() throws Exception { + Tuple<Path, Environment> env = createEnv(fs, temp); + Path pluginDir = createPluginDir(temp); + String pluginZip = createPlugin("fake", pluginDir, SemverRange.fromString("~" + Version.CURRENT.toString())).toUri() + .toURL() + .toString(); + skipJarHellCommand.execute(terminal, Collections.singletonList(pluginZip), false, env.v2()); + assertThat(terminal.getOutput(), containsString("100%")); + } + + public void testInstallPluginWithIncompatibleDependencies() throws Exception { + Tuple<Path, Environment> env = createEnv(fs, temp); + Path pluginDir = createPluginDir(temp); + // Core version is behind plugin version by one w.r.t patch, hence incompatible + Version coreVersion = Version.CURRENT; + Version pluginVersion = VersionUtils.getVersion(coreVersion.major, coreVersion.minor, (byte) (coreVersion.revision + 1)); + String pluginZip = createPlugin("fake", pluginDir, SemverRange.fromString("~" + pluginVersion.toString())).toUri() + .toURL() + .toString(); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> skipJarHellCommand.execute(terminal, Collections.singletonList(pluginZip), false, env.v2()) + ); + assertThat(e.getMessage(), containsString("Plugin [fake] was built for OpenSearch version ~" + pluginVersion)); + } + public void testBatchFlag() throws Exception { MockTerminal terminal = new MockTerminal(); installPlugin(terminal, true); diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java index 7bbced38c7adb..6878efce4c804 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java @@ -278,7 +278,7 @@ public void testExistingIncompatiblePlugin() throws Exception { buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); MockTerminal terminal = listPlugins(home); - String message = "plugin [fake_plugin1] was built for OpenSearch version 1.0 but version " + Version.CURRENT + " is required"; + String message = "plugin [fake_plugin1] was built for OpenSearch version 5.0.0 and is not compatible with " + Version.CURRENT; assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput()); assertEquals("WARNING: " + message + "\n", terminal.getErrorOutput()); @@ -286,4 +286,41 @@ public void testExistingIncompatiblePlugin() throws Exception { terminal = listPlugins(home, params); assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput()); } + + public void testPluginWithDependencies() throws Exception { + PluginTestUtil.writePluginProperties( + env.pluginsDir().resolve("fake_plugin1"), + "description", + "fake desc 1", + "name", + "fake_plugin1", + "version", + "1.0", + "dependencies", + "{opensearch:\"" + Version.CURRENT + "\"}", + 
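+ // Note: a bare version with no operator maps to the default exact-match operator, so this plugin declares compatibility with the current core version only.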
"java.version", + System.getProperty("java.specification.version"), + "classname", + "org.fake1" + ); + String[] params = { "-v" }; + MockTerminal terminal = listPlugins(home, params); + assertEquals( + buildMultiline( + "Plugins directory: " + env.pluginsDir(), + "fake_plugin1", + "- Plugin information:", + "Name: fake_plugin1", + "Description: fake desc 1", + "Version: 1.0", + "OpenSearch Version: " + Version.CURRENT.toString(), + "Java Version: " + System.getProperty("java.specification.version"), + "Native Controller: false", + "Extended Plugins: []", + " * Classname: org.fake1", + "Folder name: null" + ), + terminal.getOutput() + ); + } } diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle index 822b471e2e034..3ca6b1fe84ea7 100644 --- a/gradle/code-coverage.gradle +++ b/gradle/code-coverage.gradle @@ -13,7 +13,7 @@ repositories { gradlePluginPortal() // TODO: Find the way to use the repositories from RepositoriesSetupPlugin maven { - url = "https://artifacts.opensearch.org/snapshots/lucene/" + url = "https://ci.opensearch.org/ci/dbc/snapshots/lucene/" } } @@ -37,7 +37,7 @@ tasks.withType(JacocoReport).configureEach { if (System.getProperty("tests.coverage")) { reporting { reports { - testCodeCoverageReport(JacocoCoverageReport) { + testCodeCoverageReport(JacocoCoverageReport) { testType = TestSuiteType.UNIT_TEST } } @@ -45,6 +45,6 @@ if (System.getProperty("tests.coverage")) { // Attach code coverage report task to Gradle check task project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure { - dependsOn tasks.named('testCodeCoverageReport', JacocoReport) + dependsOn tasks.named('testCodeCoverageReport', JacocoReport) } } diff --git a/gradle/formatting.gradle b/gradle/formatting.gradle index 93e1127c97a56..f3a4bf5cc765b 100644 --- a/gradle/formatting.gradle +++ b/gradle/formatting.gradle @@ -99,7 +99,9 @@ allprojects { } } format 'misc', { - target '*.md', '*.gradle', '**/*.yaml', '**/*.yml', '**/*.svg' + target '*.md', '*.gradle', '**/*.json', '**/*.yaml', '**/*.yml', '**/*.svg' + + targetExclude '**/simple-bulk11.json', '**/simple-msearch5.json' trimTrailingWhitespace() endWithNewline() diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index f1d76d80bbfa3..82a4add334a7d 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -11,7 +11,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b +distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d diff --git a/libs/common/build.gradle b/libs/common/build.gradle index 4f89b81636420..60bf488833393 100644 --- a/libs/common/build.gradle +++ b/libs/common/build.gradle @@ -43,3 +43,64 @@ tasks.named('forbiddenApisMain').configure { // TODO: Need to decide how we want to handle for forbidden signatures with the changes to server replaceSignatureFiles 'jdk-signatures' } + +// Add support for incubator modules on supported Java versions. 
+if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) { + sourceSets { + java20 { + java { + srcDirs = ['src/main/java20'] + } + } + } + + configurations { + java20Implementation.extendsFrom(implementation) + } + + dependencies { + java20Implementation sourceSets.main.output + } + + compileJava20Java { + targetCompatibility = JavaVersion.VERSION_20 + options.compilerArgs += ['--add-modules', 'jdk.incubator.vector'] + options.compilerArgs -= '-Werror' // use of incubator modules is reported as a warning + } + + jar { + metaInf { + into 'versions/20' + from sourceSets.java20.output + } + manifest.attributes('Multi-Release': 'true') + } + + tasks.withType(Test).configureEach { + // Relying on the convention for Test.classpath in custom Test tasks has been deprecated + // and scheduled to be removed in Gradle 9.0. Below lines are added from the migration guide: + // https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#test_task_default_classpath + testClassesDirs = testing.suites.test.sources.output.classesDirs + classpath = testing.suites.test.sources.runtimeClasspath + + // Adds the multi-release JAR to the classpath when executing tests. + // This allows newer sources to be picked up at test runtime (if supported). + classpath += files(jar.archiveFile) + // Removes the "main" sources from the classpath to avoid JarHell problems as + // the multi-release JAR already contains those classes. + classpath -= sourceSets.main.output + } + + tasks.register('roundableSimdTest', Test) { + group 'verification' + include '**/RoundableTests.class' + systemProperty 'opensearch.experimental.feature.simd.rounding.enabled', 'forced' + } + + check.dependsOn(roundableSimdTest) + + forbiddenApisJava20 { + failOnMissingClasses = false + ignoreSignaturesOfMissingClasses = true + } +} diff --git a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java index 0f289c09bbae2..60c0717a28f05 100644 --- a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java +++ b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java @@ -52,7 +52,7 @@ public static boolean isInetAddress(String ipString) { return ipStringToBytes(ipString) != null; } - private static byte[] ipStringToBytes(String ipString) { + public static byte[] ipStringToBytes(String ipString) { // Make a first pass to categorize the characters in this string. boolean hasColon = false; boolean hasDot = false; diff --git a/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java new file mode 100644 index 0000000000000..626fb6e6b810e --- /dev/null +++ b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java @@ -0,0 +1,100 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.round; + +import org.opensearch.common.annotation.InternalApi; + +import jdk.incubator.vector.LongVector; +import jdk.incubator.vector.Vector; +import jdk.incubator.vector.VectorOperators; +import jdk.incubator.vector.VectorSpecies; + +/** + * It uses vectorized B-tree search to find the round-down point. 
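+ * Values are laid out in a flat array, one block of 'lanes' values per node, so that an entire node can be compared against the search key with a single vector comparison. For illustration, on a platform with 4 long lanes (shift = 2), the root block occupies indices 1 to 4 and, by the child-offset formula documented on the build method below, its five children begin at offsets 5, 9, 13, 17 and 21.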
+ * + * @opensearch.internal + */ +@InternalApi +class BtreeSearcher implements Roundable { + private static final VectorSpecies<Long> LONG_VECTOR_SPECIES = LongVector.SPECIES_PREFERRED; + private static final int LANES = LONG_VECTOR_SPECIES.length(); + private static final int SHIFT = log2(LANES); + + private final long[] values; + private final long minValue; + + BtreeSearcher(long[] values, int size) { + if (size <= 0) { + throw new IllegalArgumentException("at least one value must be present"); + } + + int blocks = (size + LANES - 1) / LANES; // number of blocks + int length = 1 + blocks * LANES; // size of the backing array (1-indexed) + + this.minValue = values[0]; + this.values = new long[length]; + build(values, 0, size, this.values, 1); + } + + /** + * Builds the B-tree memory layout. + * It builds the tree recursively, following an in-order traversal. + * + * <p> + * Each block stores 'lanes' values at indices {@code i, i + 1, ..., i + lanes - 1} where {@code i} is the + * starting offset. The starting offset of the root block is 1. The branching factor is (1 + lanes) so each + * block can have these many children. Given the starting offset {@code i} of a block, the starting offset + * of its k-th child (ranging from {@code 0, 1, ..., k}) can be computed as {@code i + ((i + k) << shift)}. + * + * @param src is the sorted input array + * @param i is the index in the input array to read the value from + * @param size the number of values in the input array + * @param dst is the output array + * @param j is the index in the output array to write the value to + * @return the next index 'i' + */ + private static int build(long[] src, int i, int size, long[] dst, int j) { + if (j < dst.length) { + for (int k = 0; k < LANES; k++) { + i = build(src, i, size, dst, j + ((j + k) << SHIFT)); + + // Fills the B-tree as a complete tree, i.e., all levels are completely filled, + // except the last level which is filled from left to right. + // The trick is to fill the destination array between indices 1...size (inclusive / 1-indexed) + // and pad the remaining array with +infinity. + dst[j + k] = (j + k <= size) ? src[i++] : Long.MAX_VALUE; + } + i = build(src, i, size, dst, j + ((j + LANES) << SHIFT)); + } + return i; + } + + @Override + public long floor(long key) { + Vector<Long> keyVector = LongVector.broadcast(LONG_VECTOR_SPECIES, key); + int i = 1, result = 1; + + while (i < values.length) { + Vector<Long> valuesVector = LongVector.fromArray(LONG_VECTOR_SPECIES, values, i); + int j = i + valuesVector.compare(VectorOperators.GT, keyVector).firstTrue(); + result = (j > i) ? j : result; + i += (j << SHIFT); + } + + assert result > 1 : "key must be greater than or equal to " + minValue; + return values[result - 1]; + } + + private static int log2(int num) { + if ((num <= 0) || ((num & (num - 1)) != 0)) { + throw new IllegalArgumentException(num + " is not a positive power of 2"); + } + return 32 - Integer.numberOfLeadingZeros(num - 1); + } +} diff --git a/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java new file mode 100644 index 0000000000000..0709ed4374227 --- /dev/null +++ b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java @@ -0,0 +1,75 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.round; + +import org.opensearch.common.annotation.InternalApi; + +/** + * Factory class to create and return the fastest implementation of {@link Roundable}. + * + * @opensearch.internal + */ +@InternalApi +public final class RoundableFactory { + /** + * The maximum limit up to which linear search is used, otherwise binary or B-tree search is used. + * This is because linear search is much faster on small arrays. + * Benchmark results: <a href="https://github.com/opensearch-project/OpenSearch/pull/9727">PR #9727</a> + */ + private static final int LINEAR_SEARCH_MAX_SIZE = 64; + + /** + * Indicates whether the vectorized (SIMD) B-tree search implementation is to be used. + * It is true when either: + * 1. The feature flag is set to "forced", or + * 2. The platform has a minimum of 4 long vector lanes and the feature flag is set to "true". + */ + private static final boolean USE_BTREE_SEARCHER; + + /** + * This class is initialized only when: + * - JDK-20+ + * - jdk.incubator.vector.LongVector is available (--add-modules=jdk.incubator.vector is passed) + */ + private static final class VectorCheck { + final static int SPECIES_PREFERRED = jdk.incubator.vector.LongVector.SPECIES_PREFERRED.length(); + } + + static { + String simdRoundingFeatureFlag = System.getProperty("opensearch.experimental.feature.simd.rounding.enabled"); + boolean useBtreeSearcher = false; + + try { + final Class<?> incubator = Class.forName("jdk.incubator.vector.LongVector"); + + useBtreeSearcher = "forced".equalsIgnoreCase(simdRoundingFeatureFlag) + || (VectorCheck.SPECIES_PREFERRED >= 4 && "true".equalsIgnoreCase(simdRoundingFeatureFlag)); + + } catch (final ClassNotFoundException ex) { + /* do not use BtreeSearcher */ + } + + USE_BTREE_SEARCHER = useBtreeSearcher; + } + + private RoundableFactory() {} + + /** + * Creates and returns the fastest implementation of {@link Roundable}. + */ + public static Roundable create(long[] values, int size) { + if (size <= LINEAR_SEARCH_MAX_SIZE) { + return new BidirectionalLinearSearcher(values, size); + } else if (USE_BTREE_SEARCHER) { + return new BtreeSearcher(values, size); + } else { + return new BinarySearcher(values, size); + } + } +} diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java index dcf8dd7945012..c8fdb3333a714 100644 --- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java +++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java @@ -43,6 +43,7 @@ default CompilerResult compile(String name, String... names) { return compileWithPackage(ApiAnnotationProcessorTests.class.getPackageName(), name, names); } + @SuppressWarnings("removal") default CompilerResult compileWithPackage(String pck, String name, String... 
names) { final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); final DiagnosticCollector<JavaFileObject> collector = new DiagnosticCollector<>(); diff --git a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java index ae9f629c59024..ad19f456b0df4 100644 --- a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java +++ b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java @@ -12,15 +12,31 @@ public class RoundableTests extends OpenSearchTestCase { - public void testFloor() { - int size = randomIntBetween(1, 256); - long[] values = new long[size]; - for (int i = 1; i < values.length; i++) { - values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1; - } + public void testRoundingEmptyArray() { + Throwable throwable = assertThrows(IllegalArgumentException.class, () -> RoundableFactory.create(new long[0], 0)); + assertEquals("at least one value must be present", throwable.getMessage()); + } + + public void testRoundingSmallArray() { + int size = randomIntBetween(1, 64); + long[] values = randomArrayOfSortedValues(size); + Roundable roundable = RoundableFactory.create(values, size); + + assertEquals("BidirectionalLinearSearcher", roundable.getClass().getSimpleName()); + assertRounding(roundable, values, size); + } - Roundable[] impls = { new BinarySearcher(values, size), new BidirectionalLinearSearcher(values, size) }; + public void testRoundingLargeArray() { + int size = randomIntBetween(65, 256); + long[] values = randomArrayOfSortedValues(size); + Roundable roundable = RoundableFactory.create(values, size); + boolean useBtreeSearcher = "forced".equalsIgnoreCase(System.getProperty("opensearch.experimental.feature.simd.rounding.enabled")); + assertEquals(useBtreeSearcher ? "BtreeSearcher" : "BinarySearcher", roundable.getClass().getSimpleName()); + assertRounding(roundable, values, size); + } + + private void assertRounding(Roundable roundable, long[] values, int size) { for (int i = 0; i < 100000; i++) { // Index of the expected round-down point. int idx = randomIntBetween(0, size - 1); @@ -35,23 +51,21 @@ public void testFloor() { // round-down point, which will still floor to the same value. long key = expected + (randomNonNegativeLong() % delta); - for (Roundable roundable : impls) { - assertEquals(expected, roundable.floor(key)); - } + assertEquals(expected, roundable.floor(key)); } + + Throwable throwable = assertThrows(AssertionError.class, () -> roundable.floor(values[0] - 1)); + assertEquals("key must be greater than or equal to " + values[0], throwable.getMessage()); } - public void testFailureCases() { - Throwable throwable; + private static long[] randomArrayOfSortedValues(int size) { + int capacity = size + randomInt(20); // May be slightly more than the size. 
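+ // Deliberately over-allocating exercises implementations against trailing unused slots: only the first 'size' entries are meaningful, which is why every Roundable takes an explicit size argument alongside the backing array.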
+ long[] values = new long[capacity]; - throwable = assertThrows(IllegalArgumentException.class, () -> new BinarySearcher(new long[0], 0)); - assertEquals("at least one value must be present", throwable.getMessage()); - throwable = assertThrows(IllegalArgumentException.class, () -> new BidirectionalLinearSearcher(new long[0], 0)); - assertEquals("at least one value must be present", throwable.getMessage()); + for (int i = 1; i < size; i++) { + values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1; + } - throwable = assertThrows(AssertionError.class, () -> new BinarySearcher(new long[] { 100 }, 1).floor(50)); - assertEquals("key must be greater than or equal to 100", throwable.getMessage()); - throwable = assertThrows(AssertionError.class, () -> new BidirectionalLinearSearcher(new long[] { 100 }, 1).floor(50)); - assertEquals("key must be greater than or equal to 100", throwable.getMessage()); + return values; } } diff --git a/libs/core/licenses/lucene-core-9.9.1.jar.sha1 b/libs/core/licenses/lucene-core-9.9.1.jar.sha1 deleted file mode 100644 index ae596196d9e6a..0000000000000 --- a/libs/core/licenses/lucene-core-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -55249fa9a0ed321adcf8283c6f3b649a6812b0a9 \ No newline at end of file diff --git a/libs/core/licenses/lucene-core-9.9.2.jar.sha1 b/libs/core/licenses/lucene-core-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..2d03d69369b9f --- /dev/null +++ b/libs/core/licenses/lucene-core-9.9.2.jar.sha1 @@ -0,0 +1 @@ +7699f80220fc80b08413902560904623b88beb9f \ No newline at end of file diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index 6a92993f5dd42..5038bb90cc18d 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -98,8 +98,9 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0); - public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_1); - public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_1); + public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_2); + public static final Version V_2_13_0 = new Version(2130099, org.apache.lucene.util.Version.LUCENE_9_9_2); + public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_2); public static final Version CURRENT = V_3_0_0; public static Version fromId(int id) { diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java index 3e996bdee83a2..ea23b3d81a775 100644 --- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java +++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java @@ -56,6 +56,7 @@ import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.semver.SemverRange; import java.io.ByteArrayInputStream; import 
java.io.EOFException; @@ -750,6 +751,8 @@ public Object readGenericValue() throws IOException { return readCollection(StreamInput::readGenericValue, HashSet::new, Collections.emptySet()); case 26: return readBigInteger(); + case 27: + return readSemverRange(); default: throw new IOException("Can't read unknown type [" + type + "]"); } @@ -1090,6 +1093,10 @@ public Version readVersion() throws IOException { return Version.fromId(readVInt()); } + public SemverRange readSemverRange() throws IOException { + return SemverRange.fromString(readString()); + } + /** Reads the {@link Build} from the input stream */ public Build readBuild() throws IOException { // the following is new for opensearch: we write the distribution to support any "forks" diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java index 2d69e1c686df3..b7599265aece3 100644 --- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java +++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java @@ -54,6 +54,7 @@ import org.opensearch.core.common.settings.SecureString; import org.opensearch.core.common.text.Text; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; +import org.opensearch.semver.SemverRange; import java.io.EOFException; import java.io.FileNotFoundException; @@ -784,6 +785,10 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep o.writeByte((byte) 26); o.writeString(v.toString()); }); + writers.put(SemverRange.class, (o, v) -> { + o.writeByte((byte) 27); + o.writeSemverRange((SemverRange) v); + }); WRITERS = Collections.unmodifiableMap(writers); } @@ -1101,6 +1106,10 @@ public void writeVersion(final Version version) throws IOException { writeVInt(version.id); } + public void writeSemverRange(final SemverRange range) throws IOException { + writeString(range.toString()); + } + /** Writes the OpenSearch {@link Build} information to the output stream */ public void writeBuild(final Build build) throws IOException { // the following is new for opensearch: we write the distribution name to support any "forks" of the code diff --git a/libs/core/src/main/java/org/opensearch/semver/SemverRange.java b/libs/core/src/main/java/org/opensearch/semver/SemverRange.java new file mode 100644 index 0000000000000..da87acc7124aa --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/SemverRange.java @@ -0,0 +1,170 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver; + +import org.opensearch.Version; +import org.opensearch.common.Nullable; +import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.semver.expr.Caret; +import org.opensearch.semver.expr.Equal; +import org.opensearch.semver.expr.Expression; +import org.opensearch.semver.expr.Tilde; + +import java.io.IOException; +import java.util.Objects; +import java.util.Optional; + +import static java.util.Arrays.stream; + +/** + * Represents a single semver range that allows for specifying which {@code org.opensearch.Version}s satisfy the range. + * It is composed of a range version and a range operator.
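+ * <p> + * For example (usage mirroring the unit tests added below): + * <pre>{@code + * SemverRange range = SemverRange.fromString("~2.3.4"); + * range.isSatisfiedBy("2.3.5"); // true: patch-level drift is allowed + * range.isSatisfiedBy("2.4.0"); // false: the next minor version is excluded + * }</pre> + *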
Following are the supported operators: + * <ul> + * <li>'=' Requires exact match with the range version. For example, =1.2.3 range would match only 1.2.3</li> + * <li>'~' Allows for patch version variability starting from the range version. For example, ~1.2.3 range would match versions greater than or equal to 1.2.3 but less than 1.3.0</li> + * <li>'^' Allows for patch and minor version variability starting from the range version. For example, ^1.2.3 range would match versions greater than or equal to 1.2.3 but less than 2.0.0</li> + * </ul> + */ +public class SemverRange implements ToXContentFragment { + + private final Version rangeVersion; + private final RangeOperator rangeOperator; + + public SemverRange(final Version rangeVersion, final RangeOperator rangeOperator) { + this.rangeVersion = rangeVersion; + this.rangeOperator = rangeOperator; + } + + /** + * Constructs a {@code SemverRange} from its string representation. + * @param range given range + * @return a {@code SemverRange} + */ + public static SemverRange fromString(final String range) { + RangeOperator rangeOperator = RangeOperator.fromRange(range); + String version = range.replaceFirst(rangeOperator.asEscapedString(), ""); + if (!Version.stringHasLength(version)) { + throw new IllegalArgumentException("Version cannot be empty"); + } + return new SemverRange(Version.fromString(version), rangeOperator); + } + + /** + * Return the range operator for this range. + * @return range operator + */ + public RangeOperator getRangeOperator() { + return rangeOperator; + } + + /** + * Return the version for this range. + * @return the range version + */ + public Version getRangeVersion() { + return rangeVersion; + } + + /** + * Check if range is satisfied by given version string. + * + * @param versionToEvaluate version to check + * @return {@code true} if range is satisfied by version, {@code false} otherwise + */ + public boolean isSatisfiedBy(final String versionToEvaluate) { + return isSatisfiedBy(Version.fromString(versionToEvaluate)); + } + + /** + * Check if range is satisfied by given version. + * + * @param versionToEvaluate version to check + * @return {@code true} if range is satisfied by version, {@code false} otherwise + * @see #isSatisfiedBy(String) + */ + public boolean isSatisfiedBy(final Version versionToEvaluate) { + return this.rangeOperator.expression.evaluate(this.rangeVersion, versionToEvaluate); + } + + @Override + public boolean equals(@Nullable final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SemverRange range = (SemverRange) o; + return Objects.equals(rangeVersion, range.rangeVersion) && rangeOperator == range.rangeOperator; + } + + @Override + public int hashCode() { + return Objects.hash(rangeVersion, rangeOperator); + } + + @Override + public String toString() { + return rangeOperator.asString() + rangeVersion; + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + return builder.value(toString()); + } + + /** + * A range operator. + */ + public enum RangeOperator { + + EQ("=", new Equal()), + TILDE("~", new Tilde()), + CARET("^", new Caret()), + DEFAULT("", new Equal()); + + private final String operator; + private final Expression expression; + + RangeOperator(final String operator, final Expression expression) { + this.operator = operator; + this.expression = expression; + } + + /** + * String representation of the range operator. 
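+ * For example, {@code TILDE.asString()} returns {@code "~"}, while {@code DEFAULT.asString()} returns the empty string.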
+ * + * @return range operator as string + */ + public String asString() { + return operator; + } + + /** + * Escaped string representation of the range operator, + * if operator is a regex character. + * + * @return range operator as escaped string, if operator is a regex character + */ + public String asEscapedString() { + if (Objects.equals(operator, "^")) { + return "\\^"; + } + return operator; + } + + public static RangeOperator fromRange(final String range) { + Optional<RangeOperator> rangeOperator = stream(values()).filter( + operator -> operator != DEFAULT && range.startsWith(operator.asString()) + ).findFirst(); + return rangeOperator.orElse(DEFAULT); + } + } +} diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java b/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java new file mode 100644 index 0000000000000..ce2b74dde0865 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java @@ -0,0 +1,32 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; + +/** + * Expression to evaluate version compatibility allowing for minor and patch version variability. + */ +public class Caret implements Expression { + + /** + * Checks if the given version is compatible with the range version allowing for minor and + * patch version variability. + * Allows all versions starting from the rangeVersion up to the next major version (exclusive). + * @param rangeVersion the version specified in range + * @param versionToEvaluate the version to evaluate + * @return {@code true} if the versions are compatible, {@code false} otherwise + */ + @Override + public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) { + Version lower = rangeVersion; + Version upper = Version.fromString((rangeVersion.major + 1) + ".0.0"); + return versionToEvaluate.onOrAfter(lower) && versionToEvaluate.before(upper); + } +} diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java b/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java new file mode 100644 index 0000000000000..d3e1d63060b77 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java @@ -0,0 +1,29 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; + +/** + * Expression to evaluate equality of versions. + */ +public class Equal implements Expression { + + /** + * Checks if a given version matches a certain range version.
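+ * For example, the range {@code =1.2.3} is satisfied by version {@code 1.2.3} only, not by {@code 1.2.4}.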
+ * + * @param rangeVersion the version specified in range + * @param versionToEvaluate the version to evaluate + * @return {@code true} if the versions are equal, {@code false} otherwise + */ + @Override + public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) { + return versionToEvaluate.equals(rangeVersion); + } +} diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java b/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java new file mode 100644 index 0000000000000..68bb4e249836a --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; + +/** + * An evaluation expression. + */ +public interface Expression { + + /** + * Evaluates an expression. + * + * @param rangeVersion the version specified in range + * @param versionToEvaluate the version to evaluate + * @return the result of the expression evaluation + */ + boolean evaluate(final Version rangeVersion, final Version versionToEvaluate); +} diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java b/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java new file mode 100644 index 0000000000000..5f62ffe62ddeb --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java @@ -0,0 +1,31 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; + +/** + * Expression to evaluate version compatibility allowing patch version variability. + */ +public class Tilde implements Expression { + + /** + * Checks if the given version is compatible with a range version allowing for patch version variability. + * Allows all versions starting from the rangeVersion up to the next minor version (exclusive). + * @param rangeVersion the version specified in range + * @param versionToEvaluate the version to evaluate + * @return {@code true} if the versions are compatible, {@code false} otherwise + */ + @Override + public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) { + Version lower = rangeVersion; + Version upper = Version.fromString(rangeVersion.major + "." + (rangeVersion.minor + 1) + "." + 0); + return versionToEvaluate.onOrAfter(lower) && versionToEvaluate.before(upper); + } +} diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java b/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java new file mode 100644 index 0000000000000..06cf9feaaaf8f --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java @@ -0,0 +1,9 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ +/** Expressions library module */ +package org.opensearch.semver.expr; diff --git a/libs/core/src/main/java/org/opensearch/semver/package-info.java b/libs/core/src/main/java/org/opensearch/semver/package-info.java new file mode 100644 index 0000000000000..ada935582d408 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/semver/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Semver library module */ +package org.opensearch.semver; diff --git a/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java b/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java new file mode 100644 index 0000000000000..af1d95b2561b7 --- /dev/null +++ b/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java @@ -0,0 +1,105 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver; + +import org.opensearch.test.OpenSearchTestCase; + +public class SemverRangeTests extends OpenSearchTestCase { + + public void testRangeWithEqualsOperator() { + SemverRange range = SemverRange.fromString("=1.2.3"); + assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.EQ); + assertTrue(range.isSatisfiedBy("1.2.3")); + assertFalse(range.isSatisfiedBy("1.2.4")); + assertFalse(range.isSatisfiedBy("1.3.3")); + assertFalse(range.isSatisfiedBy("2.2.3")); + } + + public void testRangeWithDefaultOperator() { + SemverRange range = SemverRange.fromString("1.2.3"); + assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.DEFAULT); + assertTrue(range.isSatisfiedBy("1.2.3")); + assertFalse(range.isSatisfiedBy("1.2.4")); + assertFalse(range.isSatisfiedBy("1.3.3")); + assertFalse(range.isSatisfiedBy("2.2.3")); + } + + public void testRangeWithTildeOperator() { + SemverRange range = SemverRange.fromString("~2.3.4"); + assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.TILDE); + assertTrue(range.isSatisfiedBy("2.3.4")); + assertTrue(range.isSatisfiedBy("2.3.5")); + assertTrue(range.isSatisfiedBy("2.3.12")); + + assertFalse(range.isSatisfiedBy("2.3.0")); + assertFalse(range.isSatisfiedBy("2.3.3")); + assertFalse(range.isSatisfiedBy("2.4.0")); + assertFalse(range.isSatisfiedBy("3.0.0")); + } + + public void testRangeWithCaretOperator() { + SemverRange range = SemverRange.fromString("^2.3.4"); + assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.CARET); + assertTrue(range.isSatisfiedBy("2.3.4")); + assertTrue(range.isSatisfiedBy("2.3.5")); + assertTrue(range.isSatisfiedBy("2.4.12")); + + assertFalse(range.isSatisfiedBy("2.3.3")); + assertFalse(range.isSatisfiedBy("3.0.0")); + } + + public void testInvalidRanges() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("")); + assertEquals("Version cannot be empty", ex.getMessage()); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("1")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("1.2")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and 
revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=")); + assertEquals("Version cannot be empty", ex.getMessage()); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=1")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=1.2")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~")); + assertEquals("Version cannot be empty", ex.getMessage()); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~1")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~1.2")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^")); + assertEquals("Version cannot be empty", ex.getMessage()); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^1")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^1.2")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$1")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$1.2")); + assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build")); + + expectThrows(NumberFormatException.class, () -> SemverRange.fromString("$1.2.3")); + } +} diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java new file mode 100644 index 0000000000000..3cb168d42cda0 --- /dev/null +++ b/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java @@ -0,0 +1,30 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; +import org.opensearch.test.OpenSearchTestCase; + +public class CaretTests extends OpenSearchTestCase { + + public void testMinorAndPatchVersionVariability() { + Caret caretExpr = new Caret(); + Version rangeVersion = Version.fromString("1.2.3"); + + // Compatible versions + assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.3"))); + assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.4"))); + assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.3.3"))); + assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.9.9"))); + + // Incompatible versions + assertFalse(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.2"))); + assertFalse(caretExpr.evaluate(rangeVersion, Version.fromString("2.0.0"))); + } +} diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java new file mode 100644 index 0000000000000..fb090865157ed --- /dev/null +++ b/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java @@ -0,0 +1,22 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; +import org.opensearch.test.OpenSearchTestCase; + +public class EqualTests extends OpenSearchTestCase { + + public void testEquality() { + Equal equalExpr = new Equal(); + Version rangeVersion = Version.fromString("1.2.3"); + assertTrue(equalExpr.evaluate(rangeVersion, Version.fromString("1.2.3"))); + assertFalse(equalExpr.evaluate(rangeVersion, Version.fromString("1.2.4"))); + } +} diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java new file mode 100644 index 0000000000000..8666611645c3a --- /dev/null +++ b/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java @@ -0,0 +1,29 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.semver.expr; + +import org.opensearch.Version; +import org.opensearch.test.OpenSearchTestCase; + +public class TildeTests extends OpenSearchTestCase { + + public void testPatchVersionVariability() { + Tilde tildeExpr = new Tilde(); + Version rangeVersion = Version.fromString("1.2.3"); + + assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.3"))); + assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.4"))); + assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.9"))); + + assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.0"))); + assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.2"))); + assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.3.0"))); + assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("2.0.0"))); + } +} diff --git a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java index 898ce7e4e913b..ab48cc2357e7f 100644 --- a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java +++ b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java @@ -129,6 +129,7 @@ private void configureSocket(ServerSocket socket) throws IOException { socket.setReuseAddress(config.tcpReuseAddress()); } + @SuppressWarnings("removal") protected static SocketChannel accept(ServerSocketChannel serverSocketChannel) throws IOException { try { assert serverSocketChannel.isBlocking() == false; diff --git a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java index 3df8e42fe4f14..530aa1d86afc7 100644 --- a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java +++ b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java @@ -388,6 +388,7 @@ private void configureSocket(Socket socket, boolean isConnectComplete) throws IO } } + @SuppressWarnings("removal") private static void connect(SocketChannel socketChannel, InetSocketAddress remoteAddress) throws IOException { try { AccessController.doPrivileged((PrivilegedExceptionAction<Boolean>) () -> socketChannel.connect(remoteAddress)); diff --git a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java index 4a200a5dfa9bd..969fa91b50538 100644 --- a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java +++ b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java @@ -65,6 +65,7 @@ protected Class<?> findClass(String name) throws ClassNotFoundException { /** * Return a new classloader across the parent and extended loaders. 
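+ * The privileged {@code AccessController} call below is deprecated for removal on recent JDKs (JEP 411), which is what the {@code @SuppressWarnings("removal")} added here silences.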
*/ + @SuppressWarnings("removal") public static ExtendedPluginsClassLoader create(ClassLoader parent, List<ClassLoader> extendedLoaders) { return AccessController.doPrivileged( (PrivilegedAction<ExtendedPluginsClassLoader>) () -> new ExtendedPluginsClassLoader(parent, extendedLoaders) diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java index f41c49844997d..a2531f4a9156e 100644 --- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java +++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java @@ -76,6 +76,7 @@ * @see <a href="http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html"> * http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html</a> */ +@SuppressWarnings("removal") public class SecureSM extends SecurityManager { private final String[] classesThatCanExit; diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java index fe239fea8129e..3c8e78a902fcb 100644 --- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java +++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java @@ -18,6 +18,7 @@ import java.util.concurrent.ForkJoinPool.ForkJoinWorkerThreadFactory; import java.util.concurrent.ForkJoinWorkerThread; +@SuppressWarnings("removal") public class SecuredForkJoinWorkerThreadFactory implements ForkJoinWorkerThreadFactory { static AccessControlContext contextWithPermissions(Permission... perms) { Permissions permissions = new Permissions(); diff --git a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java index 026ffb080ee61..fd666c70cfebb 100644 --- a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java +++ b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java @@ -41,6 +41,7 @@ import junit.framework.TestCase; /** Simple tests for SecureSM */ +@SuppressWarnings("removal") public class SecureSMTests extends TestCase { static { // install a mock security policy: diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java index d57def9406b17..f38fdd6412d79 100644 --- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java @@ -34,6 +34,11 @@ public Counter createUpDownCounter(String name, String description, String unit) return metricsTelemetry.createUpDownCounter(name, description, unit); } + @Override + public Histogram createHistogram(String name, String description, String unit) { + return metricsTelemetry.createHistogram(name, description, unit); + } + @Override public void close() throws IOException { metricsTelemetry.close(); diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java new file mode 100644 index 0000000000000..95ada626e21ee --- /dev/null +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: 
Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.telemetry.metrics; + +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.telemetry.metrics.tags.Tags; + +/** + * Histogram records the value for an existing metric. + * {@opensearch.experimental} + */ +@ExperimentalApi +public interface Histogram { + + /** + * record value. + * @param value value to be added. + */ + void record(double value); + + /** + * record value along with the attributes. + * + * @param value value to be added. + * @param tags attributes/dimensions of the metric. + */ + void record(double value, Tags tags); + +} diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java index 61b3df089928b..94d19bda31f34 100644 --- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java @@ -36,4 +36,15 @@ public interface MetricsRegistry extends Closeable { * @return counter. */ Counter createUpDownCounter(String name, String description, String unit); + + /** + * Creates the histogram type of Metric. Implementation framework will take care + * of the bucketing strategy. + * + * @param name name of the histogram. + * @param description any description about the metric. + * @param unit unit of the metric. + * @return histogram. + */ + Histogram createHistogram(String name, String description, String unit); } diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java new file mode 100644 index 0000000000000..20e72bccad899 --- /dev/null +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java @@ -0,0 +1,38 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Histogram;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * No-op {@link Histogram}
+ * {@opensearch.internal}
+ */
+@InternalApi
+public class NoopHistogram implements Histogram {
+
+    /**
+     * No-op Histogram instance
+     */
+    public static final NoopHistogram INSTANCE = new NoopHistogram();
+
+    private NoopHistogram() {}
+
+    @Override
+    public void record(double value) {
+
+    }
+
+    @Override
+    public void record(double value, Tags tags) {
+
+    }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 640c6842a8960..d3dda68cfae71 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -10,6 +10,7 @@
 
 import org.opensearch.common.annotation.InternalApi;
 import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.Histogram;
 import org.opensearch.telemetry.metrics.MetricsRegistry;
 
 import java.io.IOException;
@@ -38,6 +39,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
         return NoopCounter.INSTANCE;
     }
 
+    @Override
+    public Histogram createHistogram(String name, String description, String unit) {
+        return NoopHistogram.INSTANCE;
+    }
+
     @Override
     public void close() throws IOException {
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 6171641db5f07..02f126075845b 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -48,4 +48,15 @@ public void testUpDownCounter() {
         assertSame(mockCounter, counter);
     }
 
+    public void testHistogram() {
+        Histogram mockHistogram = mock(Histogram.class);
+        when(defaultMeterRegistry.createHistogram(any(String.class), any(String.class), any(String.class))).thenReturn(mockHistogram);
+        Histogram histogram = defaultMeterRegistry.createHistogram(
+            "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testHistogram",
+            "test histogram",
+            "ms"
+        );
+        assertSame(mockHistogram, histogram);
+    }
+
 }
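To make the intended call pattern concrete, a minimal usage sketch of the new histogram API follows. It is illustrative only: the registry injection and the Tags.create().addTag(...) builder are assumptions based on how the existing Counter API is consumed elsewhere in the telemetry module, and the metric name is made up.

// Hypothetical consumer; a MetricsRegistry instance is assumed to be provided by the telemetry framework.
import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;
import org.opensearch.telemetry.metrics.tags.Tags;

public final class QueryLatencyMetrics {
    private final Histogram latencyHistogram;

    public QueryLatencyMetrics(MetricsRegistry metricsRegistry) {
        // bucketing is left to the implementing framework, as the MetricsRegistry Javadoc above notes
        this.latencyHistogram = metricsRegistry.createHistogram("search.query.latency", "latency of search queries", "ms");
    }

    public void onQueryCompleted(long tookMillis, String indexName) {
        // record the raw observation; dimensions are attached as tags
        latencyHistogram.record((double) tookMillis, Tags.create().addTag("index", indexName));
    }
}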
diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
index 71af708f2e1dc..648536f9136a8 100644
--- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
+++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
@@ -36,10 +36,9 @@
 
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.Operator;
 import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
 
@@ -49,10 +48,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 
-public class QueryStringWithAnalyzersIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryStringWithAnalyzersIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public QueryStringWithAnalyzersIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public QueryStringWithAnalyzersIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
@@ -63,11 +62,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CommonAnalysisModulePlugin.class);
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
index 26f4acb2b1e6a..e55c1c69b2e40 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -36,7 +36,6 @@
 
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.IndexSettings;
@@ -44,7 +43,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -68,10 +67,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.startsWith;
 
-public class HighlighterWithAnalyzersTests extends ParameterizedOpenSearchIntegTestCase {
+public class HighlighterWithAnalyzersTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public HighlighterWithAnalyzersTests(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public HighlighterWithAnalyzersTests(Settings staticSettings) {
+        super(staticSettings);
    }
 
     @ParametersFactory
@@ -82,11 +81,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<?
extends Plugin>> nodePlugins() { return Arrays.asList(CommonAnalysisModulePlugin.class); diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java index c38b29502e282..6afd5c4ca75c1 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java @@ -11,12 +11,11 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.geometry.utils.StandardValidator; import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.index.mapper.GeoShapeFieldMapper; import org.opensearch.plugins.Plugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.TestGeoShapeFieldMapperPlugin; import java.util.Arrays; @@ -29,14 +28,14 @@ * This is the base class for all the Geo related integration tests. Use this class to add the features and settings * for the test cluster on which integration tests are running. */ -public abstract class GeoModulePluginIntegTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class GeoModulePluginIntegTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { protected static final double GEOHASH_TOLERANCE = 1E-5D; protected static final WellKnownText WKT = new WellKnownText(true, new StandardValidator(true)); - public GeoModulePluginIntegTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public GeoModulePluginIntegTestCase(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -47,11 +46,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this * geo plugin is not getting packaged in a zip, we need to load it before the tests run. 
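The suites above all migrate from ParameterizedOpenSearchIntegTestCase to ParameterizedStaticSettingsOpenSearchIntegTestCase and drop their featureFlagSettings() overrides. A hedged sketch of the resulting pattern follows; the concrete settings inside parameters() are elided by the hunks above, so the cluster-level concurrent segment search key used here is an assumption:

// Illustrative only: a suite parameterized over static (node-level) settings.
// Imports match those of the migrated suites above (Settings, Arrays, Collection, ParametersFactory).
public class MyAnalyzerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

    public MyAnalyzerIT(Settings staticSettings) {
        super(staticSettings); // applied once at cluster start, not toggled dynamically
    }

    @ParametersFactory
    public static Collection<Object[]> parameters() {
        // one test-cluster configuration per entry; the setting key is an assumption
        return Arrays.asList(
            new Object[] { Settings.builder().put("search.concurrent_segment_search.enabled", false).build() },
            new Object[] { Settings.builder().put("search.concurrent_segment_search.enabled", true).build() }
        );
    }
}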
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
index 7344903fd5220..9e7ce0d3c7980 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
@@ -44,8 +44,8 @@ public class MissingValueIT extends GeoModulePluginIntegTestCase {
     private GeoPoint bottomRight;
     private GeoPoint topLeft;
 
-    public MissingValueIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MissingValueIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
new file mode 100644
index 0000000000000..c968fb2f6c2da
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
@@ -0,0 +1,647 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.hash.MessageDigests;
+import org.opensearch.common.network.InetAddresses;
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+
+import java.security.MessageDigest;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Locale;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
+/**
+ * Processor that generates the community ID flow hash for network flow tuples; the algorithm is defined in
+ * <a href="https://github.com/corelight/community-id-spec">Community ID Flow Hashing</a>.
+ */
+public class CommunityIdProcessor extends AbstractProcessor {
+    public static final String TYPE = "community_id";
+    // the version of the community id flow hashing algorithm
+    private static final String COMMUNITY_ID_HASH_VERSION = "1";
+    // 0 byte for padding
+    private static final byte PADDING_BYTE = 0;
+    // the maximum code number for network protocol, ICMP message type and code as defined by IANA
+    private static final int IANA_COMMON_MAX_NUMBER = 255;
+    // the minimum code number for network protocol, ICMP message type and code as defined by IANA
+    private static final int IANA_COMMON_MIN_NUMBER = 0;
+    // the minimum seed for generating hash
+    private static final int MIN_SEED = 0;
+    // the maximum seed for generating hash
+    private static final int MAX_SEED = 65535;
+    // the minimum port number in transport layer
+    private static final int MIN_PORT = 0;
+    // the maximum port number in transport layer
+    private static final int MAX_PORT = 65535;
+    private static final String ICMP_MESSAGE_TYPE = "type";
+    private static final String ICMP_MESSAGE_CODE = "code";
+    private final String sourceIPField;
+    private final String sourcePortField;
+    private final String destinationIPField;
+    private final String destinationPortField;
+    private final String ianaProtocolNumberField;
+    private final String protocolField;
+    private final String icmpTypeField;
+    private final String icmpCodeField;
+    private final int seed;
+    private final String targetField;
+    private final boolean ignoreMissing;
+
+    CommunityIdProcessor(
+        String tag,
+        String description,
+        String sourceIPField,
+        String sourcePortField,
+        String destinationIPField,
+        String destinationPortField,
+        String ianaProtocolNumberField,
+        String protocolField,
+        String icmpTypeField,
+        String icmpCodeField,
+        int seed,
+        String targetField,
+        boolean ignoreMissing
+    ) {
+        super(tag, description);
+        this.sourceIPField = sourceIPField;
+        this.sourcePortField = sourcePortField;
+        this.destinationIPField = destinationIPField;
+        this.destinationPortField = destinationPortField;
+        this.ianaProtocolNumberField = ianaProtocolNumberField;
+        this.protocolField = protocolField;
+        this.icmpTypeField = icmpTypeField;
+        this.icmpCodeField = icmpCodeField;
+        this.seed = seed;
+        this.targetField = targetField;
+        this.ignoreMissing = ignoreMissing;
+    }
+
+    public String getSourceIPField() {
+        return sourceIPField;
+    }
+
+    public String getSourcePortField() {
+        return sourcePortField;
+    }
+
+    public String getDestinationIPField() {
+        return destinationIPField;
+    }
+
+    public String getDestinationPortField() {
+        return destinationPortField;
+    }
+
+    public String getIANAProtocolNumberField() {
+        return ianaProtocolNumberField;
+    }
+
+    public String getProtocolField() {
+        return protocolField;
+    }
+
+    public String getIcmpTypeField() {
+        return icmpTypeField;
+    }
+
+    public String getIcmpCodeField() {
+        return icmpCodeField;
+    }
+
+    public int getSeed() {
+        return seed;
+    }
+
+    public String getTargetField() {
+        return targetField;
+    }
+
+    public boolean isIgnoreMissing() {
+        return ignoreMissing;
+    }
+
+    @Override
+    public IngestDocument execute(IngestDocument document) {
+        // resolve the protocol first
+        Protocol protocol = resolveProtocol(document);
+        // exit quietly if the protocol cannot be resolved and ignore_missing is true
+        if (protocol == null) {
+            return document;
+        }
+
+        // then resolve the IPs; exit quietly if either the source or the destination IP cannot be resolved and ignore_missing is true
+        byte[] sourceIPByteArray = resolveIP(document, sourceIPField);
+        if (sourceIPByteArray == null) {
+            return document;
+        }
+        byte[] destIPByteArray = resolveIP(document, destinationIPField);
+        if (destIPByteArray == null) {
+            return document;
+        }
+        // source ip and destination ip must have the same format, either ipv4 or ipv6
+        if (sourceIPByteArray.length != destIPByteArray.length) {
+            throw new IllegalArgumentException("source ip and destination ip must have same format");
+        }
+
+        // resolve source port and destination port for transport protocols,
+        // exit quietly if either source port or destination port cannot be resolved and ignore_missing is true
+        Integer sourcePort = null;
+        Integer destinationPort = null;
+        if (protocol.isTransportProtocol()) {
+            sourcePort = resolvePort(document, sourcePortField);
+            if (sourcePort == null) {
+                return document;
+            }
+
+            destinationPort = resolvePort(document, destinationPortField);
+            if (destinationPort == null) {
+                return document;
+            }
+        }
+
+        // resolve ICMP message type and code, support both ipv4 and ipv6
+        // set source port to icmp type, and set dest port to icmp code, so that we can have a generic way to handle
+        // all protocols
+        boolean isOneway = true;
+        final boolean isICMPProtocol = Protocol.ICMP == protocol || Protocol.ICMP_V6 == protocol;
+        if (isICMPProtocol) {
+            Integer icmpType = resolveICMP(document, icmpTypeField, ICMP_MESSAGE_TYPE);
+            if (icmpType == null) {
+                return document;
+            } else {
+                sourcePort = icmpType;
+            }
+
+            // for the message types which don't have a code, fetch the equivalent code from the pre-defined mapper,
+            // and they can be considered a two-way flow
+            Byte equivalentCode = Protocol.ICMP.getProtocolCode() == protocol.getProtocolCode()
+                ? ICMPType.getEquivalentCode(icmpType.byteValue())
+                : ICMPv6Type.getEquivalentCode(icmpType.byteValue());
+            if (equivalentCode != null) {
+                isOneway = false;
+                // for IPv6-ICMP, the pre-defined code is a negative byte,
+                // we need to convert it to a positive integer for later comparison
+                destinationPort = Protocol.ICMP.getProtocolCode() == protocol.getProtocolCode()
+                    ? Integer.valueOf(equivalentCode)
+                    : Byte.toUnsignedInt(equivalentCode);
+            } else {
+                // get icmp code from the document if we cannot get the equivalent code from the pre-defined mapper
+                Integer icmpCode = resolveICMP(document, icmpCodeField, ICMP_MESSAGE_CODE);
+                if (icmpCode == null) {
+                    return document;
+                } else {
+                    destinationPort = icmpCode;
+                }
+            }
+        }
+
+        assert (sourcePort != null && destinationPort != null);
+        boolean isLess = compareIPAndPort(sourceIPByteArray, sourcePort, destIPByteArray, destinationPort);
+        // swap ip and port to remove directionality in the flow tuple, smaller ip:port tuple comes first
+        // but for ICMP and IPv6-ICMP, if it's a one-way flow, the flow tuple is considered to be ordered
+        if (!isLess && (!isICMPProtocol || !isOneway)) {
+            byte[] byteArray = sourceIPByteArray;
+            sourceIPByteArray = destIPByteArray;
+            destIPByteArray = byteArray;
+
+            int tempPort = sourcePort;
+            sourcePort = destinationPort;
+            destinationPort = tempPort;
+        }
+
+        // generate flow hash
+        String digest = generateCommunityIDHash(
+            protocol.getProtocolCode(),
+            sourceIPByteArray,
+            destIPByteArray,
+            sourcePort,
+            destinationPort,
+            seed
+        );
+        document.setFieldValue(targetField, digest);
+        return document;
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    /**
+     * Resolve network protocol
+     * @param document the ingesting document
+     * @return the resolved protocol, or null if the protocol cannot be resolved and ignore_missing is true
+     * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+     * or if the field that is found at the provided path is not of the expected type.
+     */
+    private Protocol resolveProtocol(IngestDocument document) {
+        Protocol protocol = null;
+        Integer ianaProtocolNumber = null;
+        String protocolName = null;
+        if (!Strings.isNullOrEmpty(ianaProtocolNumberField)) {
+            ianaProtocolNumber = document.getFieldValue(ianaProtocolNumberField, Integer.class, true);
+        }
+        if (!Strings.isNullOrEmpty(protocolField)) {
+            protocolName = document.getFieldValue(protocolField, String.class, true);
+        }
+        // prefer the iana protocol number; resolve the protocol name only if the number is not specified
+        if (ianaProtocolNumber != null) {
+            if (ianaProtocolNumber >= IANA_COMMON_MIN_NUMBER
+                && ianaProtocolNumber <= IANA_COMMON_MAX_NUMBER
+                && Protocol.protocolCodeMap.containsKey(ianaProtocolNumber.byteValue())) {
+                protocol = Protocol.protocolCodeMap.get(ianaProtocolNumber.byteValue());
+            } else {
+                throw new IllegalArgumentException("unsupported iana protocol number [" + ianaProtocolNumber + "]");
+            }
+        } else if (protocolName != null) {
+            Protocol protocolFromName = Protocol.fromProtocolName(protocolName);
+            if (protocolFromName != null) {
+                protocol = protocolFromName;
+            } else {
+                throw new IllegalArgumentException("unsupported protocol [" + protocolName + "]");
+            }
+        }
+
+        // return null if protocol cannot be resolved and ignore_missing is true
+        if (protocol == null) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException(
+                    "cannot resolve protocol by neither iana protocol number field ["
+                        + ianaProtocolNumberField
+                        + "] nor protocol name field ["
+                        + protocolField
+                        + "]"
+                );
+            }
+        }
+        return protocol;
+    }
+
+    /**
+     * Resolve ip address
+     * @param document the ingesting document
+     * @param fieldName the ip field to be resolved
+     * @return the byte array of the resolved ip
+     * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+     * or if the field that is found at the provided path is not of the expected type.
+     */
+    private byte[] resolveIP(IngestDocument document, String fieldName) {
+        if (Strings.isNullOrEmpty(fieldName)) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException("both source ip field path and destination ip field path cannot be null nor empty");
+            }
+        }
+
+        String ipAddress = document.getFieldValue(fieldName, String.class, true);
+        if (Strings.isNullOrEmpty(ipAddress)) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException("ip address in the field [" + fieldName + "] is null or empty");
+            }
+        }
+
+        byte[] byteArray = InetAddresses.ipStringToBytes(ipAddress);
+        if (byteArray == null) {
+            throw new IllegalArgumentException(
+                "ip address [" + ipAddress + "] in the field [" + fieldName + "] is not a valid ipv4/ipv6 address"
+            );
+        } else {
+            return byteArray;
+        }
+    }
+
+    /**
+     * Resolve port for transport protocols
+     * @param document the ingesting document
+     * @param fieldName the port field to be resolved
+     * @return the resolved port number, null if the resolved port is null and ignoreMissing is true
+     * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+     * or if the field that is found at the provided path is not of the expected type.
+     */
+    private Integer resolvePort(IngestDocument document, String fieldName) {
+        Integer port;
+        if (Strings.isNullOrEmpty(fieldName)) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException("both source port and destination port field path cannot be null nor empty");
+            }
+        } else {
+            port = document.getFieldValue(fieldName, Integer.class, true);
+        }
+
+        if (port == null) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException(
+                    "both source port and destination port cannot be null, but port in the field path [" + fieldName + "] is null"
+                );
+            }
+        } else if (port < MIN_PORT || port > MAX_PORT) {
+            throw new IllegalArgumentException(
+                "both source port and destination port must be between 0 and 65535, but port in the field path ["
+                    + fieldName
+                    + "] is ["
+                    + port
+                    + "]"
+            );
+        }
+        return port;
+    }
+
+    /**
+     * Resolve ICMP's message type and code field
+     * @param document the ingesting document
+     * @param fieldName name of the type or the code field
+     * @param fieldType type or code
+     * @return the resolved value of the specified field, or null if ignore_missing is true and the field doesn't exist or is null
+     * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+     * or if the field that is found at the provided path is not of the expected type.
+     */
+    private Integer resolveICMP(IngestDocument document, String fieldName, String fieldType) {
+        if (Strings.isNullOrEmpty(fieldName)) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException("icmp message " + fieldType + " field path cannot be null nor empty");
+            }
+        }
+        Integer fieldValue = document.getFieldValue(fieldName, Integer.class, true);
+        if (fieldValue == null) {
+            if (ignoreMissing) {
+                return null;
+            } else {
+                throw new IllegalArgumentException("icmp message " + fieldType + " cannot be null");
+            }
+        } else if (fieldValue < IANA_COMMON_MIN_NUMBER || fieldValue > IANA_COMMON_MAX_NUMBER) {
+            throw new IllegalArgumentException("invalid icmp message " + fieldType + " [" + fieldValue + "]");
+        } else {
+            return fieldValue;
+        }
+    }
+
+    /**
+     * Generate the community id flow hash for the normalized flow tuple
+     * @param protocolCode byte of the protocol number
+     * @param sourceIPByteArray bytes of the source ip in the network flow tuple
+     * @param destIPByteArray bytes of the destination ip in the network flow tuple
+     * @param sourcePort source port in the network flow tuple
+     * @param destinationPort destination port in the network flow tuple
+     * @param seed seed for generating hash
+     * @return the generated hash value, using SHA-1
+     */
+    private String generateCommunityIDHash(
+        byte protocolCode,
+        byte[] sourceIPByteArray,
+        byte[] destIPByteArray,
+        Integer sourcePort,
+        Integer destinationPort,
+        int seed
+    ) {
+        MessageDigest messageDigest = MessageDigests.sha1();
+        messageDigest.update(intToTwoByteArray(seed));
+        messageDigest.update(sourceIPByteArray);
+        messageDigest.update(destIPByteArray);
+        messageDigest.update(protocolCode);
+        messageDigest.update(PADDING_BYTE);
+        messageDigest.update(intToTwoByteArray(sourcePort));
+        messageDigest.update(intToTwoByteArray(destinationPort));
+
+        return COMMUNITY_ID_HASH_VERSION + ":" + Base64.getEncoder().encodeToString(messageDigest.digest());
+    }
+
+    /**
+     * Convert an integer to a two-byte array
+     * @param val the integer which will be consumed to produce a two-byte array
+     * @return the two-byte array
+     */
+    private byte[] intToTwoByteArray(Integer val) {
+        byte[] byteArray = new byte[2];
+        byteArray[0] = Integer.valueOf(val >>> 8).byteValue();
+        byteArray[1] = val.byteValue();
+        return byteArray;
+    }
+
+    /**
+     * Compare the ip and port, return true if the flow tuple is ordered
+     * @param sourceIPByteArray bytes of the source ip in the network flow tuple
+     * @param destIPByteArray bytes of the destination ip in the network flow tuple
+     * @param sourcePort source port in the network flow tuple
+     * @param destinationPort destination port in the network flow tuple
+     * @return true if sourceIP is less than destinationIP, or if sourceIP equals destinationIP
+     * but sourcePort is less than destinationPort
+     */
+    private boolean compareIPAndPort(byte[] sourceIPByteArray, int sourcePort, byte[] destIPByteArray, int destinationPort) {
+        int compareResult = compareByteArray(sourceIPByteArray, destIPByteArray);
+        return compareResult < 0 || compareResult == 0 && sourcePort < destinationPort;
+    }
+
+    /**
+     * Compare two byte arrays which have the same length
+     * @param byteArray1 the first byte array to compare
+     * @param byteArray2 the second byte array to compare
+     * @return 0 if every byte in the two arrays is the same, a value less than 0 if the first unequal byte in the first array
+     * is less than the corresponding byte in the second array, and a value greater than 0 if it is greater
+     */
+    private int compareByteArray(byte[] byteArray1, byte[] byteArray2) {
+        assert
(byteArray1.length == byteArray2.length); + int i = 0; + int j = 0; + while (i < byteArray1.length && j < byteArray2.length) { + int isLess = Byte.compareUnsigned(byteArray1[i], byteArray2[j]); + if (isLess == 0) { + i++; + j++; + } else { + return isLess; + } + } + return 0; + } + + /** + * Mapping ICMP's message type and code into a port-like notion for ordering the request or response + */ + enum ICMPType { + ECHO_REPLY((byte) 0, (byte) 8), + ECHO((byte) 8, (byte) 0), + RTR_ADVERT((byte) 9, (byte) 10), + RTR_SOLICIT((byte) 10, (byte) 9), + TSTAMP((byte) 13, (byte) 14), + TSTAMP_REPLY((byte) 14, (byte) 13), + INFO((byte) 15, (byte) 16), + INFO_REPLY((byte) 16, (byte) 15), + MASK((byte) 17, (byte) 18), + MASK_REPLY((byte) 18, (byte) 17); + + private final byte type; + private final byte code; + + ICMPType(byte type, byte code) { + this.type = type; + this.code = code; + } + + private static final Map<Byte, Byte> ICMPTypeMapper = Arrays.stream(values()).collect(Collectors.toMap(t -> t.type, t -> t.code)); + + /** + * Takes the message type of ICMP and derives equivalent message code + * @param type the message type of ICMP + * @return the equivalent message code + */ + public static Byte getEquivalentCode(int type) { + return ICMPTypeMapper.get(Integer.valueOf(type).byteValue()); + } + } + + /** + * Mapping IPv6-ICMP's message type and code into a port-like notion for ordering the request or response + */ + enum ICMPv6Type { + ECHO_REQUEST((byte) 128, (byte) 129), + ECHO_REPLY((byte) 129, (byte) 128), + MLD_LISTENER_QUERY((byte) 130, (byte) 131), + MLD_LISTENER_REPORT((byte) 131, (byte) 130), + ND_ROUTER_SOLICIT((byte) 133, (byte) 134), + ND_ROUTER_ADVERT((byte) 134, (byte) 133), + ND_NEIGHBOR_SOLICIT((byte) 135, (byte) 136), + ND_NEIGHBOR_ADVERT((byte) 136, (byte) 135), + WRU_REQUEST((byte) 139, (byte) 140), + WRU_REPLY((byte) 140, (byte) 139), + HAAD_REQUEST((byte) 144, (byte) 145), + HAAD_REPLY((byte) 145, (byte) 144); + + private final byte type; + private final byte code; + + ICMPv6Type(byte type, byte code) { + this.type = type; + this.code = code; + } + + private static final Map<Byte, Byte> ICMPTypeMapper = Arrays.stream(values()).collect(Collectors.toMap(t -> t.type, t -> t.code)); + + /** + * Takes the message type of IPv6-ICMP and derives equivalent message code + * @param type the message type of IPv6-ICMP + * @return the equivalent message code + */ + public static Byte getEquivalentCode(int type) { + return ICMPTypeMapper.get(Integer.valueOf(type).byteValue()); + } + } + + /** + * An enumeration of the supported network protocols + */ + enum Protocol { + ICMP((byte) 1, false), + TCP((byte) 6, true), + UDP((byte) 17, true), + ICMP_V6((byte) 58, false), + SCTP((byte) 132, true); + + private final byte protocolCode; + private final boolean isTransportProtocol; + + Protocol(int ianaNumber, boolean isTransportProtocol) { + this.protocolCode = Integer.valueOf(ianaNumber).byteValue(); + this.isTransportProtocol = isTransportProtocol; + } + + public static final Map<Byte, Protocol> protocolCodeMap = Arrays.stream(values()) + .collect(Collectors.toMap(Protocol::getProtocolCode, p -> p)); + + public static Protocol fromProtocolName(String protocolName) { + String name = protocolName.toUpperCase(Locale.ROOT); + if (name.equals("IPV6-ICMP")) { + return Protocol.ICMP_V6; + } + try { + return valueOf(name); + } catch (IllegalArgumentException e) { + return null; + } + } + + public byte getProtocolCode() { + return this.protocolCode; + } + + public boolean isTransportProtocol() { + return 
this.isTransportProtocol;
+        }
+    }
+
+    public static class Factory implements Processor.Factory {
+        @Override
+        public CommunityIdProcessor create(
+            Map<String, Processor.Factory> registry,
+            String processorTag,
+            String description,
+            Map<String, Object> config
+        ) throws Exception {
+            String sourceIPField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "source_ip_field");
+            String sourcePortField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "source_port_field");
+            String destinationIPField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "destination_ip_field");
+            String destinationPortField = ConfigurationUtils.readOptionalStringProperty(
+                TYPE,
+                processorTag,
+                config,
+                "destination_port_field"
+            );
+            String ianaProtocolNumberField = ConfigurationUtils.readOptionalStringProperty(
+                TYPE,
+                processorTag,
+                config,
+                "iana_protocol_number_field"
+            );
+            String protocolField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "protocol_field");
+            String icmpTypeField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "icmp_type_field");
+            String icmpCodeField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "icmp_code_field");
+            int seed = ConfigurationUtils.readIntProperty(TYPE, processorTag, config, "seed", 0);
+            if (seed < MIN_SEED || seed > MAX_SEED) {
+                throw newConfigurationException(TYPE, processorTag, "seed", "seed must be between 0 and 65535");
+            }
+
+            String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", "community_id");
+            boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
+
+            return new CommunityIdProcessor(
+                processorTag,
+                description,
+                sourceIPField,
+                sourcePortField,
+                destinationIPField,
+                destinationPortField,
+                ianaProtocolNumberField,
+                protocolField,
+                icmpTypeField,
+                icmpCodeField,
+                seed,
+                targetField,
+                ignoreMissing
+            );
+        }
+    }
+}
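For orientation before the next file, a hedged sketch of how the processor above can be constructed and run; it mirrors the factory tests further down, and the document variable is assumed to be an IngestDocument carrying the flow fields:

// Illustrative wiring only, shown as the body of a helper that declares `throws Exception`;
// the keys match the config properties read by Factory.create above.
Map<String, Object> config = new HashMap<>();
config.put("source_ip_field", "source_ip");
config.put("source_port_field", "source_port");
config.put("destination_ip_field", "destination_ip");
config.put("destination_port_field", "destination_port");
config.put("protocol_field", "protocol"); // alternatively, iana_protocol_number_field
// seed defaults to 0 and target_field defaults to "community_id"
CommunityIdProcessor processor = new CommunityIdProcessor.Factory().create(null, "tag", null, config);
processor.execute(document); // sets the target field to "1:" + Base64(SHA-1 of the normalized tuple)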
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java
new file mode 100644
index 0000000000000..dec69df275130
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java
@@ -0,0 +1,161 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.script.ScriptService;
+import org.opensearch.script.TemplateScript;
+
+import java.util.Map;
+
+public final class CopyProcessor extends AbstractProcessor {
+    public static final String TYPE = "copy";
+
+    private final TemplateScript.Factory sourceField;
+    private final TemplateScript.Factory targetField;
+
+    private final boolean ignoreMissing;
+
+    private final boolean removeSource;
+
+    private final boolean overrideTarget;
+
+    CopyProcessor(String tag, String description, TemplateScript.Factory sourceField, TemplateScript.Factory targetField) {
+        this(tag, description, sourceField, targetField, false, false, false);
+    }
+
+    CopyProcessor(
+        String tag,
+        String description,
+        TemplateScript.Factory sourceField,
+        TemplateScript.Factory targetField,
+        boolean ignoreMissing,
+        boolean removeSource,
+        boolean overrideTarget
+    ) {
+        super(tag, description);
+        this.sourceField = sourceField;
+        this.targetField = targetField;
+        this.ignoreMissing = ignoreMissing;
+        this.removeSource = removeSource;
+        this.overrideTarget = overrideTarget;
+    }
+
+    public TemplateScript.Factory getSourceField() {
+        return sourceField;
+    }
+
+    public TemplateScript.Factory getTargetField() {
+        return targetField;
+    }
+
+    public boolean isIgnoreMissing() {
+        return ignoreMissing;
+    }
+
+    public boolean isRemoveSource() {
+        return removeSource;
+    }
+
+    public boolean isOverrideTarget() {
+        return overrideTarget;
+    }
+
+    @Override
+    public IngestDocument execute(IngestDocument document) {
+        String source = document.renderTemplate(sourceField);
+        final boolean sourceFieldPathIsNullOrEmpty = Strings.isNullOrEmpty(source);
+        if (sourceFieldPathIsNullOrEmpty || document.hasField(source, true) == false) {
+            if (ignoreMissing) {
+                return document;
+            } else if (sourceFieldPathIsNullOrEmpty) {
+                throw new IllegalArgumentException("source field path cannot be null nor empty");
+            } else {
+                throw new IllegalArgumentException("source field [" + source + "] doesn't exist");
+            }
+        }
+
+        String target = document.renderTemplate(targetField);
+        if (Strings.isNullOrEmpty(target)) {
+            throw new IllegalArgumentException("target field path cannot be null nor empty");
+        }
+        if (source.equals(target)) {
+            throw new IllegalArgumentException("source field path and target field path cannot be the same");
+        }
+
+        if (overrideTarget || document.hasField(target, true) == false || document.getFieldValue(target, Object.class) == null) {
+            Object sourceValue = document.getFieldValue(source, Object.class);
+            document.setFieldValue(target, IngestDocument.deepCopy(sourceValue));
+        } else {
+            throw new IllegalArgumentException("target field [" + target + "] already exists");
+        }
+
+        if (removeSource) {
+            document.removeField(source);
+        }
+
+        return document;
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    public static final class Factory implements Processor.Factory {
+
+        private final ScriptService scriptService;
+
+        public Factory(ScriptService scriptService) {
+            this.scriptService = scriptService;
+        }
+
+        @Override
+        public CopyProcessor create(
+            Map<String, Processor.Factory> registry,
+            String processorTag,
+            String description,
+            Map<String, Object> config
+        ) throws Exception {
+            String sourceField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "source_field");
+            TemplateScript.Factory sourceFieldTemplate = ConfigurationUtils.compileTemplate(
+                TYPE,
+                processorTag,
+                "source_field",
+                sourceField,
+                scriptService
+            );
+            String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field");
+            TemplateScript.Factory targetFieldTemplate = ConfigurationUtils.compileTemplate(
+                TYPE,
+                processorTag,
+                "target_field",
+                targetField,
+                scriptService
+            );
+            boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
+            boolean removeSource = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "remove_source", false);
+            boolean overrideTarget = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override_target", false);
+
+            return new CopyProcessor(
+                processorTag,
+                description,
+                sourceFieldTemplate,
+                targetFieldTemplate,
+                ignoreMissing,
+                removeSource,
+                overrideTarget
+            );
+        }
+    }
+}
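For reference, a hedged sketch of how the copy processor above might be configured; the scriptService instance is assumed to come from Processor.Parameters, as the IngestCommonModulePlugin hunk below wires it:

// Illustrative only, shown as the body of a helper that declares `throws Exception`;
// keys match what Factory.create above reads from the config map.
Map<String, Object> config = new HashMap<>();
config.put("source_field", "message");  // compiled as a template, so dynamic field paths are possible
config.put("target_field", "message_copy");
config.put("remove_source", true);      // copy followed by removal behaves like a rename
config.put("override_target", false);   // a non-null existing target raises an error
CopyProcessor copy = new CopyProcessor.Factory(scriptService).create(null, "tag", null, config);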
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
index a2a51d968e078..0f8b248fd5af8 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
@@ -106,6 +106,9 @@ public Map<String, Processor.Factory> getProcessors(Processor.Parameters paramet
         processors.put(DropProcessor.TYPE, new DropProcessor.Factory());
         processors.put(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory());
         processors.put(CsvProcessor.TYPE, new CsvProcessor.Factory());
+        processors.put(CopyProcessor.TYPE, new CopyProcessor.Factory(parameters.scriptService));
+        processors.put(RemoveByPatternProcessor.TYPE, new RemoveByPatternProcessor.Factory());
+        processors.put(CommunityIdProcessor.TYPE, new CommunityIdProcessor.Factory());
         return Collections.unmodifiableMap(processors);
     }
 
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java
new file mode 100644
index 0000000000000..da87f5201db72
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java
@@ -0,0 +1,180 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.Nullable;
+import org.opensearch.common.ValidationException;
+import org.opensearch.common.regex.Regex;
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
+/**
+ * Processor that removes existing fields matching the given field patterns, or, when exclude patterns
+ * are given, removes all fields not matching them.
+ */
+public final class RemoveByPatternProcessor extends AbstractProcessor {
+
+    public static final String TYPE = "remove_by_pattern";
+    private final List<String> fieldPatterns;
+    private final List<String> excludeFieldPatterns;
+
+    RemoveByPatternProcessor(
+        String tag,
+        String description,
+        @Nullable List<String> fieldPatterns,
+        @Nullable List<String> excludeFieldPatterns
+    ) {
+        super(tag, description);
+        if (fieldPatterns != null && excludeFieldPatterns != null || fieldPatterns == null && excludeFieldPatterns == null) {
+            throw new IllegalArgumentException("either fieldPatterns or excludeFieldPatterns must be set");
+        }
+        if (fieldPatterns == null) {
+            this.fieldPatterns = null;
+            this.excludeFieldPatterns = new ArrayList<>(excludeFieldPatterns);
+        } else {
+            this.fieldPatterns = new ArrayList<>(fieldPatterns);
+            this.excludeFieldPatterns = null;
+        }
+    }
+
+    public List<String> getFieldPatterns() {
+        return fieldPatterns;
+    }
+
+    public List<String> getExcludeFieldPatterns() {
+        return excludeFieldPatterns;
+    }
+
+    @Override
+    public IngestDocument execute(IngestDocument document) {
+        Set<String> existingFields = new HashSet<>(document.getSourceAndMetadata().keySet());
+        Set<String> metadataFields = document.getMetadata()
+            .keySet()
+            .stream()
+            .map(IngestDocument.Metadata::getFieldName)
+            .collect(Collectors.toSet());
+
+        if (fieldPatterns != null && !fieldPatterns.isEmpty()) {
+            existingFields.forEach(field -> {
+                // ignore metadata fields such as _index, _id, etc.
+                if (!metadataFields.contains(field)) {
+                    final boolean matched = fieldPatterns.stream().anyMatch(pattern -> Regex.simpleMatch(pattern, field));
+                    if (matched) {
+                        document.removeField(field);
+                    }
+                }
+            });
+        }
+
+        if (excludeFieldPatterns != null && !excludeFieldPatterns.isEmpty()) {
+            existingFields.forEach(field -> {
+                // ignore metadata fields such as _index, _id, etc.
+ if (!metadataFields.contains(field)) { + final boolean matched = excludeFieldPatterns.stream().anyMatch(pattern -> Regex.simpleMatch(pattern, field)); + if (!matched) { + document.removeField(field); + } + } + }); + } + + return document; + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory implements Processor.Factory { + + public Factory() {} + + @Override + public RemoveByPatternProcessor create( + Map<String, Processor.Factory> registry, + String processorTag, + String description, + Map<String, Object> config + ) throws Exception { + final List<String> fieldPatterns = new ArrayList<>(); + final List<String> excludeFieldPatterns = new ArrayList<>(); + final Object fieldPattern = ConfigurationUtils.readOptionalObject(config, "field_pattern"); + final Object excludeFieldPattern = ConfigurationUtils.readOptionalObject(config, "exclude_field_pattern"); + + if (fieldPattern == null && excludeFieldPattern == null || fieldPattern != null && excludeFieldPattern != null) { + throw newConfigurationException( + TYPE, + processorTag, + "field_pattern", + "either field_pattern or exclude_field_pattern must be set" + ); + } + + if (fieldPattern != null) { + if (fieldPattern instanceof List) { + @SuppressWarnings("unchecked") + List<String> fieldPatternList = (List<String>) fieldPattern; + fieldPatterns.addAll(fieldPatternList); + } else { + fieldPatterns.add((String) fieldPattern); + } + validateFieldPatterns(processorTag, fieldPatterns, "field_pattern"); + return new RemoveByPatternProcessor(processorTag, description, fieldPatterns, null); + } else { + if (excludeFieldPattern instanceof List) { + @SuppressWarnings("unchecked") + List<String> excludeFieldPatternList = (List<String>) excludeFieldPattern; + excludeFieldPatterns.addAll(excludeFieldPatternList); + } else { + excludeFieldPatterns.add((String) excludeFieldPattern); + } + validateFieldPatterns(processorTag, excludeFieldPatterns, "exclude_field_pattern"); + return new RemoveByPatternProcessor(processorTag, description, null, excludeFieldPatterns); + } + } + + private void validateFieldPatterns(String processorTag, List<String> patterns, String patternKey) { + List<String> validationErrors = new ArrayList<>(); + for (String fieldPattern : patterns) { + if (fieldPattern.contains("#")) { + validationErrors.add(patternKey + " [" + fieldPattern + "] must not contain a '#'"); + } + if (fieldPattern.contains(":")) { + validationErrors.add(patternKey + " [" + fieldPattern + "] must not contain a ':'"); + } + if (fieldPattern.startsWith("_")) { + validationErrors.add(patternKey + " [" + fieldPattern + "] must not start with '_'"); + } + if (Strings.validFileNameExcludingAstrix(fieldPattern) == false) { + validationErrors.add( + patternKey + " [" + fieldPattern + "] must not contain the following characters " + Strings.INVALID_FILENAME_CHARS + ); + } + } + + if (validationErrors.size() > 0) { + ValidationException validationException = new ValidationException(); + validationException.addValidationErrors(validationErrors); + throw newConfigurationException(TYPE, processorTag, patternKey, validationException.getMessage()); + } + } + } +} diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java index a48cfd87b78c3..e6d151aec9be1 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java +++ 
b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java @@ -32,6 +32,7 @@ package org.opensearch.ingest.common; +import org.opensearch.common.Nullable; import org.opensearch.core.common.Strings; import org.opensearch.index.VersionType; import org.opensearch.ingest.AbstractProcessor; @@ -42,11 +43,15 @@ import org.opensearch.script.TemplateScript; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; +import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException; + /** * Processor that removes existing fields. Nothing happens if the field is not present. */ @@ -55,11 +60,28 @@ public final class RemoveProcessor extends AbstractProcessor { public static final String TYPE = "remove"; private final List<TemplateScript.Factory> fields; + private final List<TemplateScript.Factory> excludeFields; private final boolean ignoreMissing; - RemoveProcessor(String tag, String description, List<TemplateScript.Factory> fields, boolean ignoreMissing) { + RemoveProcessor( + String tag, + String description, + @Nullable List<TemplateScript.Factory> fields, + @Nullable List<TemplateScript.Factory> excludeFields, + boolean ignoreMissing + ) { super(tag, description); - this.fields = new ArrayList<>(fields); + if (fields == null && excludeFields == null || fields != null && excludeFields != null) { + throw new IllegalArgumentException("either fields or excludeFields must be set"); + } + if (fields != null) { + this.fields = new ArrayList<>(fields); + this.excludeFields = null; + } else { + this.fields = null; + this.excludeFields = new ArrayList<>(excludeFields); + } + this.ignoreMissing = ignoreMissing; } @@ -67,42 +89,76 @@ public List<TemplateScript.Factory> getFields() { return fields; } + public List<TemplateScript.Factory> getExcludeFields() { + return excludeFields; + } + @Override public IngestDocument execute(IngestDocument document) { - fields.forEach(field -> { - String path = document.renderTemplate(field); - final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path); - if (fieldPathIsNullOrEmpty || document.hasField(path) == false) { - if (ignoreMissing) { - return; - } else if (fieldPathIsNullOrEmpty) { - throw new IllegalArgumentException("field path cannot be null nor empty"); - } else { - throw new IllegalArgumentException("field [" + path + "] doesn't exist"); + if (fields != null && !fields.isEmpty()) { + fields.forEach(field -> { + String path = document.renderTemplate(field); + final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path); + if (fieldPathIsNullOrEmpty || document.hasField(path) == false) { + if (ignoreMissing) { + return; + } else if (fieldPathIsNullOrEmpty) { + throw new IllegalArgumentException("field path cannot be null nor empty"); + } else { + throw new IllegalArgumentException("field [" + path + "] doesn't exist"); + } } - } - // cannot remove _index, _version and _version_type. 
- if (path.equals(IngestDocument.Metadata.INDEX.getFieldName()) - || path.equals(IngestDocument.Metadata.VERSION.getFieldName()) - || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { - throw new IllegalArgumentException("cannot remove metadata field [" + path + "]"); - } - // removing _id is disallowed when there's an external version specified in the request - if (path.equals(IngestDocument.Metadata.ID.getFieldName()) - && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { - String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class); - if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) { - Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true); - throw new IllegalArgumentException( - "cannot remove metadata field [_id] when specifying external version for the document, version: " - + version - + ", version_type: " - + versionType - ); + + // cannot remove _index, _version and _version_type. + if (path.equals(IngestDocument.Metadata.INDEX.getFieldName()) + || path.equals(IngestDocument.Metadata.VERSION.getFieldName()) + || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { + throw new IllegalArgumentException("cannot remove metadata field [" + path + "]"); } + // removing _id is disallowed when there's an external version specified in the request + if (path.equals(IngestDocument.Metadata.ID.getFieldName()) + && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { + String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class); + if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) { + Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true); + throw new IllegalArgumentException( + "cannot remove metadata field [_id] when specifying external version for the document, version: " + + version + + ", version_type: " + + versionType + ); + } + } + document.removeField(path); + }); + } + + if (excludeFields != null && !excludeFields.isEmpty()) { + Set<String> excludeFieldSet = new HashSet<>(); + excludeFields.forEach(field -> { + String path = document.renderTemplate(field); + // ignore the empty or null field path + if (!Strings.isNullOrEmpty(path)) { + excludeFieldSet.add(path); + } + }); + + if (!excludeFieldSet.isEmpty()) { + Set<String> existingFields = new HashSet<>(document.getSourceAndMetadata().keySet()); + Set<String> metadataFields = document.getMetadata() + .keySet() + .stream() + .map(IngestDocument.Metadata::getFieldName) + .collect(Collectors.toSet()); + existingFields.forEach(field -> { + // ignore metadata fields such as _index, _id, etc. 
+ if (!metadataFields.contains(field) && !excludeFieldSet.contains(field)) { + document.removeField(field); + } + }); } - document.removeField(path); - }); + } + return document; } @@ -127,20 +183,41 @@ public RemoveProcessor create( Map<String, Object> config ) throws Exception { final List<String> fields = new ArrayList<>(); - final Object field = ConfigurationUtils.readObject(TYPE, processorTag, config, "field"); - if (field instanceof List) { - @SuppressWarnings("unchecked") - List<String> stringList = (List<String>) field; - fields.addAll(stringList); - } else { - fields.add((String) field); + final List<String> excludeFields = new ArrayList<>(); + final Object field = ConfigurationUtils.readOptionalObject(config, "field"); + final Object excludeField = ConfigurationUtils.readOptionalObject(config, "exclude_field"); + + if (field == null && excludeField == null || field != null && excludeField != null) { + throw newConfigurationException(TYPE, processorTag, "field", "either field or exclude_field must be set"); } - final List<TemplateScript.Factory> compiledTemplates = fields.stream() - .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService)) - .collect(Collectors.toList()); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new RemoveProcessor(processorTag, description, compiledTemplates, ignoreMissing); + + if (field != null) { + if (field instanceof List) { + @SuppressWarnings("unchecked") + List<String> stringList = (List<String>) field; + fields.addAll(stringList); + } else { + fields.add((String) field); + } + List<TemplateScript.Factory> fieldCompiledTemplates = fields.stream() + .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService)) + .collect(Collectors.toList()); + return new RemoveProcessor(processorTag, description, fieldCompiledTemplates, null, ignoreMissing); + } else { + if (excludeField instanceof List) { + @SuppressWarnings("unchecked") + List<String> stringList = (List<String>) excludeField; + excludeFields.addAll(stringList); + } else { + excludeFields.add((String) excludeField); + } + List<TemplateScript.Factory> excludeFieldCompiledTemplates = excludeFields.stream() + .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "exclude_field", f, scriptService)) + .collect(Collectors.toList()); + return new RemoveProcessor(processorTag, description, null, excludeFieldCompiledTemplates, ignoreMissing); + } } } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorFactoryTests.java new file mode 100644 index 0000000000000..5edb44b8c64f2 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorFactoryTests.java @@ -0,0 +1,117 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.ingest.common; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchParseException; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class CommunityIdProcessorFactoryTests extends OpenSearchTestCase { + private CommunityIdProcessor.Factory factory; + + @Before + public void init() { + factory = new CommunityIdProcessor.Factory(); + } + + public void testCreate() throws Exception { + boolean ignoreMissing = randomBoolean(); + int seed = randomIntBetween(0, 65535); + Map<String, Object> config = new HashMap<>(); + config.put("source_ip_field", "source_ip"); + config.put("source_port_field", "source_port"); + config.put("destination_ip_field", "destination_ip"); + config.put("destination_port_field", "destination_port"); + config.put("iana_protocol_number_field", "iana_protocol_number"); + config.put("protocol_field", "protocol"); + config.put("icmp_type_field", "icmp_type"); + config.put("icmp_code_field", "icmp_code"); + config.put("seed", seed); + config.put("target_field", "community_id_hash"); + config.put("ignore_missing", ignoreMissing); + String processorTag = randomAlphaOfLength(10); + CommunityIdProcessor communityIDProcessor = factory.create(null, processorTag, null, config); + assertThat(communityIDProcessor.getTag(), equalTo(processorTag)); + assertThat(communityIDProcessor.getSourceIPField(), equalTo("source_ip")); + assertThat(communityIDProcessor.getSourcePortField(), equalTo("source_port")); + assertThat(communityIDProcessor.getDestinationIPField(), equalTo("destination_ip")); + assertThat(communityIDProcessor.getDestinationPortField(), equalTo("destination_port")); + assertThat(communityIDProcessor.getIANAProtocolNumberField(), equalTo("iana_protocol_number")); + assertThat(communityIDProcessor.getProtocolField(), equalTo("protocol")); + assertThat(communityIDProcessor.getIcmpTypeField(), equalTo("icmp_type")); + assertThat(communityIDProcessor.getIcmpCodeField(), equalTo("icmp_code")); + assertThat(communityIDProcessor.getSeed(), equalTo(seed)); + assertThat(communityIDProcessor.getTargetField(), equalTo("community_id_hash")); + assertThat(communityIDProcessor.isIgnoreMissing(), equalTo(ignoreMissing)); + } + + public void testCreateWithSourceIPField() throws Exception { + Map<String, Object> config = new HashMap<>(); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[source_ip_field] required property is missing")); + } + + config.put("source_ip_field", null); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[source_ip_field] required property is missing")); + } + } + + public void testCreateWithDestinationIPField() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("source_ip_field", "source_ip"); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[destination_ip_field] required property is missing")); + } + + config.put("source_ip_field", "source_ip"); + config.put("destination_ip_field", null); + try { + factory.create(null, null, null, config); + fail("factory create should 
have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[destination_ip_field] required property is missing")); + } + } + + public void testInvalidSeed() throws Exception { + Map<String, Object> config = new HashMap<>(); + int seed; + if (randomBoolean()) { + seed = -1; + } else { + seed = 65536; + } + config.put("source_ip_field", "source_ip"); + config.put("destination_ip_field", "destination_ip"); + config.put("seed", seed); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchException e) { + assertThat(e.getMessage(), equalTo("[seed] seed must be between 0 and 65535")); + } + } + +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorTests.java new file mode 100644 index 0000000000000..2bda9db80dbcc --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorTests.java @@ -0,0 +1,910 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.ingest.common; + +import org.opensearch.ingest.IngestDocument; +import org.opensearch.ingest.Processor; +import org.opensearch.ingest.RandomDocumentPicks; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class CommunityIdProcessorTests extends OpenSearchTestCase { + + public void testResolveProtocol() throws Exception { + Map<String, Object> source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + + String targetFieldName = randomAlphaOfLength(100); + boolean ignore_missing = randomBoolean(); + Processor processor = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + null, + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "cannot resolve protocol by neither iana protocol number field [iana_protocol_number] nor protocol name field [protocol]", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + String protocol = randomAlphaOfLength(10); + source.put("protocol", protocol); + IngestDocument ingestDocumentWithProtocol = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithProtocol = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + assertThrows( + "unsupported protocol [" + protocol + "]", + IllegalArgumentException.class, + () -> 
processorWithProtocol.execute(ingestDocumentWithProtocol) + ); + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + int ianaProtocolNumber = randomIntBetween(1000, 10000); + source.put("iana_protocol_number", ianaProtocolNumber); + IngestDocument ingestDocumentWithProtocolNumber = RandomDocumentPicks.randomIngestDocument(random(), source); + + Processor processorWithProtocolNumber = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + null, + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + assertThrows( + "unsupported iana protocol number [" + ianaProtocolNumber + "]", + IllegalArgumentException.class, + () -> processorWithProtocolNumber.execute(ingestDocumentWithProtocolNumber) + ); + } + + public void testResolveIPAndPort() throws Exception { + Map<String, Object> source = new HashMap<>(); + source.put("source_ip", ""); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + + String targetFieldName = randomAlphaOfLength(100); + boolean ignore_missing = randomBoolean(); + Processor processor = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + null, + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "ip address in the field [source_ip] is null or empty", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + IngestDocument ingestDocumentWithInvalidSourceIP = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithInvalidSourceIP = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + + assertThrows( + "ip address in the field [source_ip] is not a valid ipv4/ipv6 address", + IllegalArgumentException.class, + () -> processorWithInvalidSourceIP.execute(ingestDocumentWithInvalidSourceIP) + ); + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", ""); + source.put("source_port", 1000); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + ignore_missing = randomBoolean(); + IngestDocument ingestDocumentWithEmptyDestIP = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithEmptyDestIP = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + processorWithEmptyDestIP.execute(ingestDocumentWithEmptyDestIP); + 
assertThat(ingestDocumentWithEmptyDestIP.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "ip address in the field [destination_ip] is null or empty", + IllegalArgumentException.class, + () -> processorWithEmptyDestIP.execute(ingestDocumentWithEmptyDestIP) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + IngestDocument ingestDocumentWithInvalidDestIP = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithInvalidDestIP = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + assertThrows( + "ip address in the field [destination_ip] is not a valid ipv4/ipv6 address", + IllegalArgumentException.class, + () -> processorWithInvalidDestIP.execute(ingestDocumentWithInvalidDestIP) + ); + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + ignore_missing = randomBoolean(); + IngestDocument normalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithEmptySourceIPFieldPath = createCommunityIdProcessor( + "", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + processorWithEmptySourceIPFieldPath.execute(normalIngestDocument); + assertThat(normalIngestDocument.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "both source ip field path and destination ip field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptySourceIPFieldPath.execute(normalIngestDocument) + ); + } + ignore_missing = randomBoolean(); + Processor processorWithEmptyDestIPFieldPath = createCommunityIdProcessor( + "source_ip", + "source_port", + "", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + processorWithEmptyDestIPFieldPath.execute(normalIngestDocument); + assertThat(normalIngestDocument.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "both source ip field path and destination ip field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptyDestIPFieldPath.execute(normalIngestDocument) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", null); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + ignore_missing = randomBoolean(); + IngestDocument ingestDocumentWithEmptySourcePort = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithEmptySourcePort = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + 
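+            // with ignore_missing set, a null source port is tolerated: the processor
+            // exits without computing a hash, so no target field is written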
processorWithEmptySourcePort.execute(ingestDocumentWithEmptySourcePort); + assertThat(ingestDocumentWithEmptySourcePort.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "both source port and destination port field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptySourcePort.execute(ingestDocumentWithEmptySourcePort) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 65536); + source.put("destination_port", 2000); + source.put("protocol", "tcp"); + IngestDocument ingestDocumentWithInvalidSourcePort = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithInvalidSourcePort = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + assertThrows( + "both source port and destination port must be between 0 and 65535, but port in the field path [source_port] is [65536]", + IllegalArgumentException.class, + () -> processorWithInvalidSourcePort.execute(ingestDocumentWithInvalidSourcePort) + ); + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", null); + source.put("protocol", "tcp"); + ignore_missing = randomBoolean(); + IngestDocument ingestDocumentWithEmptyDestPort = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithEmptyDestPort = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignore_missing + ); + if (ignore_missing) { + processorWithEmptyDestPort.execute(ingestDocumentWithEmptyDestPort); + assertThat(ingestDocumentWithEmptyDestPort.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "both source port and destination port cannot be null, but port in the field path [destination_port] is null", + IllegalArgumentException.class, + () -> processorWithEmptyDestPort.execute(ingestDocumentWithEmptyDestPort) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", -1); + source.put("protocol", "tcp"); + IngestDocument ingestDocumentWithInvalidDestPort = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithInvalidDestPort = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + assertThrows( + "both source port and destination port cannot be null, but port in the field path [destination_port] is [-1]", + IllegalArgumentException.class, + () -> processorWithInvalidDestPort.execute(ingestDocumentWithInvalidDestPort) + ); + } + + public void testResolveICMPTypeAndCode() throws Exception { + Map<String, Object> source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + int protocolNumber = randomFrom(1, 58); + source.put("iana_protocol_number", protocolNumber); + IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), source); + String targetFieldName = randomAlphaOfLength(100); + boolean ignoreMissing = randomBoolean(); + Processor processor = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + null, + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + ignoreMissing + ); + if (ignoreMissing) { + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "icmp message type field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + protocolNumber = randomFrom(1, 58); + source.put("iana_protocol_number", protocolNumber); + source.put("icmp_type", null); + IngestDocument ingestDocumentWithNullType = RandomDocumentPicks.randomIngestDocument(random(), source); + ignoreMissing = randomBoolean(); + Processor processorWithNullType = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + "icmp_type", + null, + randomIntBetween(0, 65535), + targetFieldName, + ignoreMissing + ); + if (ignoreMissing) { + processorWithNullType.execute(ingestDocumentWithNullType); + assertThat(ingestDocumentWithNullType.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "icmp message type cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithNullType.execute(ingestDocumentWithNullType) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + protocolNumber = randomFrom(1, 58); + source.put("iana_protocol_number", protocolNumber); + int icmpType; + if (randomBoolean()) { + icmpType = randomIntBetween(256, 1000); + } else { + icmpType = randomIntBetween(-100, -1); + } + source.put("icmp_type", icmpType); + IngestDocument ingestDocumentWithInvalidICMPType = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithInvalidICMPType = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_protocol_number", + "protocol", + "icmp_type", + null, + randomIntBetween(0, 65535), + targetFieldName, + false + ); + assertThrows( + "invalid icmp message type [" + icmpType + "]", + IllegalArgumentException.class, + () -> processorWithInvalidICMPType.execute(ingestDocumentWithInvalidICMPType) + ); + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + protocolNumber = randomFrom(1, 58); + source.put("iana_protocol_number", protocolNumber); + if (protocolNumber == 1) { + icmpType = randomIntBetween(3, 6); + } else { + icmpType = randomIntBetween(146, 161); + } + source.put("icmp_type", icmpType); + IngestDocument ingestDocumentWithNoCode = RandomDocumentPicks.randomIngestDocument(random(), source); + ignoreMissing = randomBoolean(); + Processor processorWithNoCode = createCommunityIdProcessor( + "source_ip", + null, + "destination_ip", + null, + "iana_protocol_number", + "protocol", + "icmp_type", + null, + randomIntBetween(0, 65535), + targetFieldName, + ignoreMissing + ); + if (ignoreMissing) { + processorWithNoCode.execute(ingestDocumentWithNoCode); + assertThat(ingestDocumentWithNoCode.hasField(targetFieldName), 
equalTo(false)); + } else { + assertThrows( + "icmp message code field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithNoCode.execute(ingestDocumentWithNoCode) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + protocolNumber = randomFrom(1, 58); + source.put("iana_protocol_number", protocolNumber); + if (protocolNumber == 1) { + icmpType = randomIntBetween(3, 6); + } else { + icmpType = randomIntBetween(146, 161); + } + source.put("icmp_type", icmpType); + source.put("icmp_code", null); + IngestDocument ingestDocumentWithNullCode = RandomDocumentPicks.randomIngestDocument(random(), source); + ignoreMissing = randomBoolean(); + Processor processorWithNullCode = createCommunityIdProcessor( + "source_ip", + null, + "destination_ip", + null, + "iana_protocol_number", + "protocol", + "icmp_type", + "icmp_code", + randomIntBetween(0, 65535), + targetFieldName, + ignoreMissing + ); + if (ignoreMissing) { + processorWithNullCode.execute(ingestDocumentWithNullCode); + assertThat(ingestDocumentWithNullCode.hasField(targetFieldName), equalTo(false)); + } else { + assertThrows( + "icmp message code cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithNullCode.execute(ingestDocumentWithNullCode) + ); + } + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + protocolNumber = randomFrom(1, 58); + source.put("iana_protocol_number", protocolNumber); + if (protocolNumber == 1) { + icmpType = randomIntBetween(3, 6); + } else { + icmpType = randomIntBetween(146, 161); + } + source.put("icmp_type", icmpType); + int icmpCode; + if (randomBoolean()) { + icmpCode = randomIntBetween(256, 1000); + } else { + icmpCode = randomIntBetween(-100, -1); + } + source.put("icmp_code", icmpCode); + IngestDocument ingestDocumentWithInvalidCode = RandomDocumentPicks.randomIngestDocument(random(), source); + Processor processorWithInvalidCode = createCommunityIdProcessor( + "source_ip", + null, + "destination_ip", + null, + "iana_protocol_number", + null, + "icmp_type", + "icmp_code", + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + assertThrows( + "invalid icmp message code [" + icmpCode + "]", + IllegalArgumentException.class, + () -> processorWithInvalidCode.execute(ingestDocumentWithInvalidCode) + ); + } + + public void testTransportProtocols() throws Exception { + Map<String, Object> source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + source.put("source_port", 1000); + source.put("destination_port", 2000); + boolean isProtocolNameSpecified = randomBoolean(); + if (isProtocolNameSpecified) { + source.put("protocol", randomFrom("tcp", "udp", "sctp")); + } else { + source.put("iana_number", randomFrom(6, 17, 132)); + } + + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + + String targetFieldName = randomAlphaOfLength(100); + Processor processor; + if (isProtocolNameSpecified) { + processor = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + null, + "protocol", + null, + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + } else { + processor = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_number", + null, + null, + null, + randomIntBetween(0, 65535), + 
targetFieldName, + randomBoolean() + ); + } + + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + String communityIDHash = ingestDocument.getFieldValue(targetFieldName, String.class); + assertThat(communityIDHash.startsWith("1:"), equalTo(true)); + } + + public void testICMP() throws Exception { + Map<String, Object> source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + boolean isICMP = randomBoolean(); + if (isICMP) { + source.put("protocol", "icmp"); + source.put("type", randomFrom(0, 8, 9, 10, 13, 15, 17, 18)); + } else { + source.put("protocol", "ipv6-icmp"); + source.put("type", randomFrom(128, 129, 130, 131, 133, 134, 135, 136, 139, 140, 144, 145)); + } + + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + + String targetFieldName = randomAlphaOfLength(100); + Processor processor = createCommunityIdProcessor( + "source_ip", + null, + "destination_ip", + null, + null, + "protocol", + "type", + null, + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(targetFieldName, String.class).startsWith("1:"), equalTo(true)); + + source = new HashMap<>(); + source.put("source_ip", "1.1.1.1"); + source.put("destination_ip", "2.2.2.2"); + isICMP = randomBoolean(); + if (isICMP) { + source.put("protocol", "icmp"); + // see https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml#icmp-parameters-codes-5 + source.put("type", randomIntBetween(3, 6)); + source.put("code", 0); + } else { + source.put("protocol", "ipv6-icmp"); + // see https://www.iana.org/assignments/icmpv6-parameters/icmpv6-parameters.xhtml#icmpv6-parameters-codes-23 + source.put("type", randomIntBetween(146, 161)); + source.put("code", 0); + } + + IngestDocument ingestDocumentWithOnewayFlow = RandomDocumentPicks.randomIngestDocument(random(), source); + + targetFieldName = randomAlphaOfLength(100); + Processor processorWithOnewayFlow = createCommunityIdProcessor( + "source_ip", + null, + "destination_ip", + null, + null, + "protocol", + "type", + "code", + randomIntBetween(0, 65535), + targetFieldName, + randomBoolean() + ); + + processorWithOnewayFlow.execute(ingestDocumentWithOnewayFlow); + assertThat(ingestDocumentWithOnewayFlow.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocumentWithOnewayFlow.getFieldValue(targetFieldName, String.class).startsWith("1:"), equalTo(true)); + } + + // test that the hash result is consistent with the known value + public void testHashResult() throws Exception { + int index = randomIntBetween(0, CommunityIdHashInstance.values().length - 1); + CommunityIdHashInstance instance = CommunityIdHashInstance.values()[index]; + final boolean isTransportProtocol = instance.name().equals("TCP") + || instance.name().equals("UDP") + || instance.name().equals("SCTP"); + Map<String, Object> source = new HashMap<>(); + source.put("source_ip", instance.getSourceIp()); + source.put("destination_ip", instance.getDestIP()); + if (isTransportProtocol) { + source.put("source_port", instance.getSourcePort()); + source.put("destination_port", instance.getDestPort()); + source.put("iana_number", instance.getProtocolNumber()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + + String targetFieldName = randomAlphaOfLength(100); + 
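+            // the expected hashes in CommunityIdHashInstance are the published
+            // Community ID v1 test vectors, which are defined for seed 0, hence
+            // the processor below is created with a fixed seed of 0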
boolean ignore_missing = randomBoolean(); + Processor processor = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_number", + null, + null, + null, + 0, + targetFieldName, + ignore_missing + ); + + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(targetFieldName, String.class), equalTo(instance.getHash())); + + // test the flow tuple in reversed direction, the hash result should be the same value + source = new HashMap<>(); + source.put("source_ip", instance.getDestIP()); + source.put("destination_ip", instance.getSourceIp()); + source.put("source_port", instance.getDestPort()); + source.put("destination_port", instance.getSourcePort()); + source.put("iana_number", instance.getProtocolNumber()); + IngestDocument ingestDocumentWithReversedDirection = RandomDocumentPicks.randomIngestDocument(random(), source); + + targetFieldName = randomAlphaOfLength(100); + Processor processorWithReversedDirection = createCommunityIdProcessor( + "source_ip", + "source_port", + "destination_ip", + "destination_port", + "iana_number", + null, + null, + null, + 0, + targetFieldName, + randomBoolean() + ); + + processorWithReversedDirection.execute(ingestDocumentWithReversedDirection); + assertThat(ingestDocumentWithReversedDirection.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocumentWithReversedDirection.getFieldValue(targetFieldName, String.class), equalTo(instance.getHash())); + } else { + source.put("type", instance.getSourcePort()); + source.put("code", instance.getDestPort()); + source.put("iana_number", instance.getProtocolNumber()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source); + + String targetFieldName = randomAlphaOfLength(100); + boolean ignore_missing = randomBoolean(); + Processor processor = createCommunityIdProcessor( + "source_ip", + null, + "destination_ip", + null, + "iana_number", + null, + "type", + "code", + 0, + targetFieldName, + ignore_missing + ); + + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(targetFieldName, String.class), equalTo(instance.getHash())); + } + } + + private enum CommunityIdHashInstance { + TCP("66.35.250.204", "128.232.110.120", 6, 80, 34855, "1:LQU9qZlK+B5F3KDmev6m5PMibrg="), + UDP("8.8.8.8", "192.168.1.52", 17, 53, 54585, "1:d/FP5EW3wiY1vCndhwleRRKHowQ="), + SCTP("192.168.170.8", "192.168.170.56", 132, 7, 7, "1:MP2EtRCAUIZvTw6MxJHLV7N7JDs="), + ICMP("192.168.0.89", "192.168.0.1", 1, 8, 0, "1:X0snYXpgwiv9TZtqg64sgzUn6Dk="), + ICMP_V6("fe80::260:97ff:fe07:69ea", "ff02::1", 58, 134, 0, "1:pkvHqCL88/tg1k4cPigmZXUtL00="); + + private final String sourceIp; + private final String destIP; + private final int protocolNumber; + private final int sourcePort; + private final int destPort; + private final String hash; + + CommunityIdHashInstance(String sourceIp, String destIP, int protocolNumber, int sourcePort, int destPort, String hash) { + this.sourceIp = sourceIp; + this.destIP = destIP; + this.protocolNumber = protocolNumber; + this.sourcePort = sourcePort; + this.destPort = destPort; + this.hash = hash; + } + + private String getSourceIp() { + return this.sourceIp; + } + + private String getDestIP() { + return this.destIP; + } + + private int getProtocolNumber() { + return this.protocolNumber; + } + + private int getSourcePort() { + return 
this.sourcePort; + } + + private int getDestPort() { + return this.destPort; + } + + private String getHash() { + return this.hash; + } + } + + private static Processor createCommunityIdProcessor( + String sourceIPField, + String sourcePortField, + String destinationIPField, + String destinationPortField, + String ianaProtocolNumberField, + String protocolField, + String icmpTypeField, + String icmpCodeField, + int seed, + String targetField, + boolean ignoreMissing + ) { + return new CommunityIdProcessor( + randomAlphaOfLength(10), + null, + sourceIPField, + sourcePortField, + destinationIPField, + destinationPortField, + ianaProtocolNumberField, + protocolField, + icmpTypeField, + icmpCodeField, + seed, + targetField, + ignoreMissing + ); + } +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java new file mode 100644 index 0000000000000..c1ca86a49e334 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java @@ -0,0 +1,101 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.ingest.common; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchParseException; +import org.opensearch.ingest.TestTemplateService; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class CopyProcessorFactoryTests extends OpenSearchTestCase { + + private CopyProcessor.Factory factory; + + @Before + public void init() { + factory = new CopyProcessor.Factory(TestTemplateService.instance()); + } + + public void testCreate() throws Exception { + boolean ignoreMissing = randomBoolean(); + boolean removeSource = randomBoolean(); + boolean overrideTarget = randomBoolean(); + Map<String, Object> config = new HashMap<>(); + config.put("source_field", "source"); + config.put("target_field", "target"); + config.put("ignore_missing", ignoreMissing); + config.put("remove_source", removeSource); + config.put("override_target", overrideTarget); + String processorTag = randomAlphaOfLength(10); + CopyProcessor copyProcessor = factory.create(null, processorTag, null, config); + assertThat(copyProcessor.getTag(), equalTo(processorTag)); + assertThat(copyProcessor.getSourceField().newInstance(Collections.emptyMap()).execute(), equalTo("source")); + assertThat(copyProcessor.getTargetField().newInstance(Collections.emptyMap()).execute(), equalTo("target")); + assertThat(copyProcessor.isIgnoreMissing(), equalTo(ignoreMissing)); + assertThat(copyProcessor.isRemoveSource(), equalTo(removeSource)); + assertThat(copyProcessor.isOverrideTarget(), equalTo(overrideTarget)); + } + + public void testCreateWithSourceField() throws Exception { + Map<String, Object> config = new HashMap<>(); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[source_field] required property is missing")); + } + + config.put("source_field", null); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch 
(OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[source_field] required property is missing")); + } + } + + public void testCreateWithTargetField() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("source_field", "source"); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); + } + + config.put("source_field", "source"); + config.put("target_field", null); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); + } + } + + public void testInvalidMustacheTemplate() throws Exception { + CopyProcessor.Factory factory = new CopyProcessor.Factory(TestTemplateService.instance(true)); + Map<String, Object> config = new HashMap<>(); + config.put("source_field", "{{source}}"); + config.put("target_field", "target"); + String processorTag = randomAlphaOfLength(10); + OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config)); + assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); + assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); + } + +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java new file mode 100644 index 0000000000000..3259ba85ef340 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java @@ -0,0 +1,145 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.ingest.common; + +import org.opensearch.ingest.IngestDocument; +import org.opensearch.ingest.Processor; +import org.opensearch.ingest.RandomDocumentPicks; +import org.opensearch.ingest.TestTemplateService; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class CopyProcessorTests extends OpenSearchTestCase { + + public void testCopyExistingField() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String sourceFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + String targetFieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, false, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldName, Object.class), sourceValue); + + Processor processorWithEmptyTarget = createCopyProcessor(sourceFieldName, "", false, false, false); + assertThrows( + "target field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptyTarget.execute(ingestDocument) + ); + + Processor processorWithSameSourceAndTarget = createCopyProcessor(sourceFieldName, sourceFieldName, false, false, false); + assertThrows( + "source field path and target field path cannot be same", + IllegalArgumentException.class, + () -> processorWithSameSourceAndTarget.execute(ingestDocument) + ); + } + + public void testCopyWithIgnoreMissing() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String targetFieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = createCopyProcessor("non-existing-field", targetFieldName, false, false, false); + assertThrows( + "source field [non-existing-field] doesn't exist", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + + Processor processorWithEmptyFieldName = createCopyProcessor("", targetFieldName, false, false, false); + assertThrows( + "source field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptyFieldName.execute(ingestDocument) + ); + + Processor processorWithIgnoreMissing = createCopyProcessor("non-existing-field", targetFieldName, true, false, false); + processorWithIgnoreMissing.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(false)); + } + + public void testCopyWithRemoveSource() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String sourceFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + String targetFieldName = RandomDocumentPicks.randomFieldName(random()); + Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class); + + Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, true, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldName, Object.class), sourceValue); + assertThat(ingestDocument.hasField(sourceFieldName), equalTo(false)); + } + + public void 
testCopyToExistingField() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String targetFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Object sourceValue = RandomDocumentPicks.randomFieldValue(random()); + String sourceFieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, sourceValue); + + Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, false, false); + assertThrows( + "target field [" + targetFieldName + "] already exists", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + + // if override_target is false but target field's value is null, copy can execute successfully + String targetFieldWithNullValue = RandomDocumentPicks.addRandomField(random(), ingestDocument, null); + Processor processorWithTargetNullValue = createCopyProcessor(sourceFieldName, targetFieldWithNullValue, false, false, false); + processorWithTargetNullValue.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldWithNullValue), equalTo(true)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldWithNullValue, Object.class), sourceValue); + + Processor processorWithOverrideTargetIsTrue = createCopyProcessor(sourceFieldName, targetFieldName, false, false, true); + processorWithOverrideTargetIsTrue.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldName, Object.class), sourceValue); + } + + @SuppressWarnings("unchecked") + private static void assertDeepCopiedObjectEquals(Object expected, Object actual) { + if (expected instanceof Map) { + Map<String, Object> expectedMap = (Map<String, Object>) expected; + Map<String, Object> actualMap = (Map<String, Object>) actual; + assertEquals(expectedMap.size(), actualMap.size()); + for (Map.Entry<String, Object> expectedEntry : expectedMap.entrySet()) { + assertDeepCopiedObjectEquals(expectedEntry.getValue(), actualMap.get(expectedEntry.getKey())); + } + } else if (expected instanceof List) { + assertArrayEquals(((List<?>) expected).toArray(), ((List<?>) actual).toArray()); + } else if (expected instanceof byte[]) { + assertArrayEquals((byte[]) expected, (byte[]) actual); + } else { + assertEquals(expected, actual); + } + } + + private static Processor createCopyProcessor( + String sourceFieldName, + String targetFieldName, + boolean ignoreMissing, + boolean removeSource, + boolean overrideTarget + ) { + return new CopyProcessor( + randomAlphaOfLength(10), + null, + new TestTemplateService.MockTemplateScript.Factory(sourceFieldName), + new TestTemplateService.MockTemplateScript.Factory(targetFieldName), + ignoreMissing, + removeSource, + overrideTarget + ); + } +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java new file mode 100644 index 0000000000000..09ba97ebb4595 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java @@ -0,0 +1,114 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.ingest.common; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchParseException; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class RemoveByPatternProcessorFactoryTests extends OpenSearchTestCase { + + private RemoveByPatternProcessor.Factory factory; + + @Before + public void init() { + factory = new RemoveByPatternProcessor.Factory(); + } + + public void testCreateFieldPatterns() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("field_pattern", "field1*"); + String processorTag = randomAlphaOfLength(10); + RemoveByPatternProcessor removeByPatternProcessor = factory.create(null, processorTag, null, config); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getFieldPatterns().get(0), equalTo("field1*")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("field_pattern", List.of("field1*", "field2*")); + removeByPatternProcessor = factory.create(null, processorTag, null, config2); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getFieldPatterns().get(0), equalTo("field1*")); + assertThat(removeByPatternProcessor.getFieldPatterns().get(1), equalTo("field2*")); + + Map<String, Object> config3 = new HashMap<>(); + List<String> patterns = Arrays.asList("foo*", "*", " ", ",", "#", ":", "_"); + config3.put("field_pattern", patterns); + Exception exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config3)); + assertThat( + exception.getMessage(), + equalTo( + "[field_pattern] Validation Failed: " + + "1: field_pattern [ ] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "2: field_pattern [,] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "3: field_pattern [#] must not contain a '#';" + + "4: field_pattern [:] must not contain a ':';" + + "5: field_pattern [_] must not start with '_';" + ) + ); + } + + public void testCreateExcludeFieldPatterns() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("exclude_field_pattern", "field1*"); + String processorTag = randomAlphaOfLength(10); + RemoveByPatternProcessor removeByPatternProcessor = factory.create(null, processorTag, null, config); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(0), equalTo("field1*")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("exclude_field_pattern", List.of("field1*", "field2*")); + removeByPatternProcessor = factory.create(null, processorTag, null, config2); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(0), equalTo("field1*")); + assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(1), equalTo("field2*")); + + Map<String, Object> config3 = new HashMap<>(); + List<String> patterns = Arrays.asList("foo*", "*", " ", ",", "#", ":", "_"); + config3.put("exclude_field_pattern", patterns); + Exception exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config3)); + assertThat( + exception.getMessage(), + 
equalTo( + "[exclude_field_pattern] Validation Failed: " + + "1: exclude_field_pattern [ ] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "2: exclude_field_pattern [,] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "3: exclude_field_pattern [#] must not contain a '#';" + + "4: exclude_field_pattern [:] must not contain a ':';" + + "5: exclude_field_pattern [_] must not start with '_';" + ) + ); + } + + public void testCreatePatternsFailed() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("field_pattern", List.of("foo*")); + config.put("exclude_field_pattern", List.of("bar*")); + String processorTag = randomAlphaOfLength(10); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> factory.create(null, processorTag, null, config) + ); + assertThat(exception.getMessage(), equalTo("[field_pattern] either field_pattern or exclude_field_pattern must be set")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("field_pattern", null); + config2.put("exclude_field_pattern", null); + + exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2)); + assertThat(exception.getMessage(), equalTo("[field_pattern] either field_pattern or exclude_field_pattern must be set")); + } +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java new file mode 100644 index 0000000000000..82ff93de1f44e --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java @@ -0,0 +1,96 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.ingest.RandomDocumentPicks;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class RemoveByPatternProcessorTests extends OpenSearchTestCase {
+
+    public void testRemoveWithFieldPatterns() throws Exception {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        ingestDocument.setFieldValue("foo_1", "value");
+        ingestDocument.setFieldValue("foo_2", "value");
+        ingestDocument.setFieldValue("bar_1", "value");
+        ingestDocument.setFieldValue("bar_2", "value");
+        List<String> fieldPatterns = new ArrayList<>();
+        fieldPatterns.add("foo*");
+        fieldPatterns.add("_index*");
+        fieldPatterns.add("_id*");
+        fieldPatterns.add("_version*");
+        Processor processor = new RemoveByPatternProcessor(randomAlphaOfLength(10), null, fieldPatterns, null);
+        processor.execute(ingestDocument);
+        assertThat(ingestDocument.hasField("foo_1"), equalTo(false));
+        assertThat(ingestDocument.hasField("foo_2"), equalTo(false));
+        assertThat(ingestDocument.hasField("bar_1"), equalTo(true));
+        assertThat(ingestDocument.hasField("bar_2"), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.INDEX.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName()), equalTo(true));
+    }
+
+    public void testRemoveWithExcludeFieldPatterns() throws Exception {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        ingestDocument.setFieldValue("foo_1", "value");
+        ingestDocument.setFieldValue("foo_2", "value");
+        ingestDocument.setFieldValue("foo_3", "value");
+        List<String> excludeFieldPatterns = new ArrayList<>();
+        excludeFieldPatterns.add("foo_3*");
+        Processor processorWithExcludeFieldsAndPatterns = new RemoveByPatternProcessor(
+            randomAlphaOfLength(10),
+            null,
+            null,
+            excludeFieldPatterns
+        );
+        processorWithExcludeFieldsAndPatterns.execute(ingestDocument);
+        assertThat(ingestDocument.hasField("foo_1"), equalTo(false));
+        assertThat(ingestDocument.hasField("foo_2"), equalTo(false));
+        assertThat(ingestDocument.hasField("foo_3"), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.INDEX.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName()), equalTo(true));
+    }
+
+    public void testCreateRemoveByPatternProcessorWithBothFieldsAndExcludeFields() throws Exception {
+        assertThrows(
+            "either fieldPatterns or excludeFieldPatterns must be set",
+            IllegalArgumentException.class,
+            () -> new RemoveByPatternProcessor(randomAlphaOfLength(10), null, null, null)
+        );
+
+        final List<String> fieldPatterns;
+        if (randomBoolean()) {
+            fieldPatterns = new ArrayList<>();
+        } else {
+            fieldPatterns = List.of("foo_1*");
+        }
+
+        final List<String> excludeFieldPatterns;
+        if (randomBoolean()) {
+            excludeFieldPatterns = new ArrayList<>();
+        } else {
+            excludeFieldPatterns = List.of("foo_2*");
+        }
+
+        assertThrows(
+            "either fieldPatterns or excludeFieldPatterns must be set",
+            IllegalArgumentException.class,
+            () -> new RemoveByPatternProcessor(randomAlphaOfLength(10), null, fieldPatterns, excludeFieldPatterns)
+        );
+    }
+}
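For orientation between the pattern tests above and the factory tests that follow: the patterns are simple wildcards, and the exclude variant inverts the match while always sparing ingest metadata. A minimal sketch of the exclude semantics is shown below; the helper name is hypothetical and this is not necessarily the shipped implementation, and it assumes OpenSearch's org.opensearch.common.regex.Regex.simpleMatch wildcard utility plus java.util.Set/List imports.

    // Hypothetical sketch: a non-metadata field survives only if its name
    // matches at least one exclude pattern; metadata is always preserved.
    static void applyExcludePatterns(IngestDocument document, Set<String> metadataFields, List<String> excludePatterns) {
        for (String field : Set.copyOf(document.getSourceAndMetadata().keySet())) {
            if (metadataFields.contains(field)) {
                continue; // _index, _id, _version, _version_type are never removed
            }
            if (excludePatterns.stream().noneMatch(p -> Regex.simpleMatch(p, field))) {
                document.removeField(field);
            }
        }
    }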
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java
index 66ca888a0d39f..6332eeafc387c 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java
@@ -41,6 +41,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
 
@@ -79,16 +80,6 @@ public void testCreateMultipleFields() throws Exception {
         );
     }
 
-    public void testCreateMissingField() throws Exception {
-        Map<String, Object> config = new HashMap<>();
-        try {
-            factory.create(null, null, null, config);
-            fail("factory create should have failed");
-        } catch (OpenSearchParseException e) {
-            assertThat(e.getMessage(), equalTo("[field] required property is missing"));
-        }
-    }
-
     public void testInvalidMustacheTemplate() throws Exception {
         RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true));
         Map<String, Object> config = new HashMap<>();
@@ -98,4 +89,31 @@ public void testInvalidMustacheTemplate() throws Exception {
         assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
         assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
     }
+
+    public void testCreateWithExcludeField() throws Exception {
+        Map<String, Object> config = new HashMap<>();
+        String processorTag = randomAlphaOfLength(10);
+        OpenSearchException exception = expectThrows(
+            OpenSearchParseException.class,
+            () -> factory.create(null, processorTag, null, config)
+        );
+        assertThat(exception.getMessage(), equalTo("[field] either field or exclude_field must be set"));
+
+        Map<String, Object> config2 = new HashMap<>();
+        config2.put("field", "field1");
+        config2.put("exclude_field", "field2");
+        exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2));
+        assertThat(exception.getMessage(), equalTo("[field] either field or exclude_field must be set"));
+
+        Map<String, Object> config6 = new HashMap<>();
+        config6.put("exclude_field", "exclude_field");
+        RemoveProcessor removeProcessor = factory.create(null, processorTag, null, config6);
+        assertThat(
+            removeProcessor.getExcludeFields()
+                .stream()
+                .map(template -> template.newInstance(Collections.emptyMap()).execute())
+                .collect(Collectors.toList()),
+            equalTo(List.of("exclude_field"))
+        );
+    }
 }
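One subtlety worth noting in the factory tests above: the ConfigurationUtils read helpers remove each key they consume from the config map, which is why the tests build a fresh map (config, config2, config6) for every create() attempt. A sketch of the pitfall this avoids is shown below; the processor tags are arbitrary, and the failure message is the one asserted by the tests in this patch.

    Map<String, Object> config = new HashMap<>();
    config.put("exclude_field", "bar");
    factory.create(null, "tag-1", null, config);   // succeeds and drains the map
    // "exclude_field" has been consumed, so the same map now has neither option
    // set, and a second create() fails with:
    //   "[field] either field or exclude_field must be set"
    factory.create(null, "tag-2", null, config);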
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
index c138ad606d2e5..7fc1d3f2f0a3c 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
@@ -38,8 +38,10 @@
 import org.opensearch.ingest.Processor;
 import org.opensearch.ingest.RandomDocumentPicks;
 import org.opensearch.ingest.TestTemplateService;
+import org.opensearch.script.TemplateScript;
 import org.opensearch.test.OpenSearchTestCase;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -57,12 +59,28 @@ public void testRemoveFields() throws Exception {
             randomAlphaOfLength(10),
             null,
             Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field)),
+            null,
             false
         );
         processor.execute(ingestDocument);
         assertThat(ingestDocument.hasField(field), equalTo(false));
     }
 
+    public void testRemoveByExcludeFields() throws Exception {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        ingestDocument.setFieldValue("foo_1", "value");
+        ingestDocument.setFieldValue("foo_2", "value");
+        ingestDocument.setFieldValue("foo_3", "value");
+        List<TemplateScript.Factory> excludeFields = new ArrayList<>();
+        excludeFields.add(new TestTemplateService.MockTemplateScript.Factory("foo_1"));
+        excludeFields.add(new TestTemplateService.MockTemplateScript.Factory("foo_2"));
+        Processor processor = new RemoveProcessor(randomAlphaOfLength(10), null, null, excludeFields, false);
+        processor.execute(ingestDocument);
+        assertThat(ingestDocument.hasField("foo_1"), equalTo(true));
+        assertThat(ingestDocument.hasField("foo_2"), equalTo(true));
+        assertThat(ingestDocument.hasField("foo_3"), equalTo(false));
+    }
+
     public void testRemoveNonExistingField() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
         String fieldName = RandomDocumentPicks.randomFieldName(random());
@@ -183,6 +201,34 @@ public void testRemoveMetadataField() throws Exception {
         }
     }
 
+    public void testCreateRemoveProcessorWithBothFieldsAndExcludeFields() throws Exception {
+        assertThrows(
+            "either fields or excludeFields must be set",
+            IllegalArgumentException.class,
+            () -> new RemoveProcessor(randomAlphaOfLength(10), null, null, null, false)
+        );
+
+        final List<TemplateScript.Factory> fields;
+        if (randomBoolean()) {
+            fields = new ArrayList<>();
+        } else {
+            fields = List.of(new TestTemplateService.MockTemplateScript.Factory("foo_1"));
+        }
+
+        final List<TemplateScript.Factory> excludeFields;
+        if (randomBoolean()) {
+            excludeFields = new ArrayList<>();
+        } else {
+            excludeFields = List.of(new TestTemplateService.MockTemplateScript.Factory("foo_2"));
+        }
+
+        assertThrows(
+            "either fields or excludeFields must be set",
+            IllegalArgumentException.class,
+            () -> new RemoveProcessor(randomAlphaOfLength(10), null, fields, excludeFields, false)
+        );
+    }
+
     public void testRemoveDocumentId() throws Exception {
         Map<String, Object> config = new HashMap<>();
         config.put("field", IngestDocument.Metadata.ID.getFieldName());
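The YAML REST tests that follow rely on the suite's skip convention: version: " - 2.11.99" skips the test on every version up to and including 2.11.99, so each test only runs where the feature actually shipped (2.12.0 and later, or 2.13.0 and later for the community_id check).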
introduced in 2.12.0 and contains is a newly added assertion" + - do: + cluster.state: {} + + # Get cluster-manager node id + - set: { cluster_manager_node: cluster_manager } + + - do: + nodes.info: {} + + - contains: { nodes.$cluster_manager.ingest.processors: { type: copy } } + +--- +"Remove_by_pattern processor exists": + - skip: + version: " - 2.11.99" + features: contains + reason: "remove_by_pattern processor was introduced in 2.12.0 and contains is a newly added assertion" + - do: + cluster.state: {} + + # Get cluster-manager node id + - set: { cluster_manager_node: cluster_manager } + + - do: + nodes.info: {} + + - contains: { nodes.$cluster_manager.ingest.processors: { type: remove_by_pattern } } + +--- +"Community_id processor exists": + - skip: + version: " - 2.12.99" + features: contains + reason: "community_id processor was introduced in 2.13.0 and contains is a newly added assertion" + - do: + cluster.state: {} + + # Get cluster-manager node id + - set: { cluster_manager_node: cluster_manager } + + - do: + nodes.info: {} + - contains: { nodes.$cluster_manager.ingest.processors: { type: community_id } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml index 6668b468f8edc..e120a865052b0 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml @@ -319,3 +319,43 @@ teardown: } - match: { docs.0.error.type: "illegal_argument_exception" } - match: { docs.0.error.reason: "cannot remove metadata field [_id] when specifying external version for the document, version: 1, version_type: external_gte" } + +# Related issue: https://github.com/opensearch-project/OpenSearch/issues/1578 +--- +"Test remove processor with exclude_field": + - skip: + version: " - 2.11.99" + reason: "exclude_field is introduced in 2.12" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove" : { + "exclude_field": "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: { + foo1: "bar", + foo2: "bar", + bar: "zoo", + zoo: "bar" + } + + - do: + get: + index: test + id: 1 + - match: { _source: { bar: "zoo"}} diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml new file mode 100644 index 0000000000000..0203b62ba67d6 --- /dev/null +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml @@ -0,0 +1,374 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "1" + ignore: 404 + +--- +"Test create copy processor": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + catch: /\[target\_field\] required property is missing/ + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "source" + } + } + ] + } + - do: + catch: /\[source\_field\] required property is missing/ + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "target_field" : "target" + } + } + ] + } + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : {
"source_field" : "source", + "target_field" : "target", + "ignore_missing" : true, + "remove_source" : true, + "override_target" : true + } + } + ] + } + - match: { acknowledged: true } + +--- +"Test copy processor with ignore_missing": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "unknown_field", + "target_field" : "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /source field \[unknown\_field\] doesn\'t exist/ + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "unknown_field", + "target_field" : "bar", + "ignore_missing" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + - do: + get: + index: test + id: 1 + - match: { _source: { foo: "hello" } } + +--- +"Test copy processor with remove_source": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + - do: + get: + index: test + id: 1 + - match: { _source: { foo: "hello", bar: "hello" } } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar", + "remove_source" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + - do: + get: + index: test + id: 1 + - match: { _source: { bar: "hello" } } + +--- +"Test copy processor with override_target": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /target field \[bar\] already exists/ + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar", + "override_target" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello", + bar: "world" + } + - do: + get: + index: test + id: 1 + - match: { _source: { foo: "hello", bar: "hello" } } + +--- +"Test copy processor with template snippets": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /source field path cannot be null nor empty/ + index: + index: test + id: 1 + pipeline: "1" + body: { + target: "bar", + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /target field path cannot be null nor empty/ + 
index: + index: test + id: 1 + pipeline: "1" + body: { + source: "foo", + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /source field path and target field path cannot be same/ + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "foo", + target: "foo", + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}", + "override_target" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "foo", + target: "bar", + foo: "hello", + bar: "world" + } + - do: + get: + index: test + id: 1 + - match: { _source: { source: "foo", target: "bar", foo: "hello", bar: "hello" } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml new file mode 100644 index 0000000000000..397eb8f7b6033 --- /dev/null +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml @@ -0,0 +1,146 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test creating remove_by_pattern processor failed": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + catch: /\[field\_pattern\] either field\_pattern or exclude\_field\_pattern must be set/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "field_pattern" : "foo*", + "exclude_field_pattern" : "bar*" + } + } + ] + } + + - do: + catch: /\[field\_pattern\] either field\_pattern or exclude\_field\_pattern must be set/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + } + } + ] + } + +--- +"Test remove_by_pattern processor with field_pattern": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "field_pattern" : ["foo*", "*a*b"] + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: { + foo1: "bar", + foo2: "bar", + zoo: "bar", + ab: "bar", + aabb: "bar" + } + + - do: + get: + index: test + id: 1 + - match: { _source: {zoo: "bar" }} + +--- +"Test remove_by_pattern processor with exclude_field_pattern": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "exclude_field_pattern": ["foo*", "a*b*"] + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: { + foo1: "bar", + foo2: "bar", + bar: "zoo", + zoo: "bar", + ab: "bar", + aabb: "bar" + } + + - do: + get: + index: test + id: 1 + - match: { _source: { foo1: "bar", foo2: "bar", ab: "bar", aabb: "bar"}} + + +--- +"Test cannot remove metadata fields by remove_by_pattern 
processor": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + catch: /field\_pattern \[\_id\] must not start with \'\_\'\;/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "field_pattern": "_id" + } + } + ] + } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/320_community_id_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/320_community_id_processor.yml new file mode 100644 index 0000000000000..6de5371bb49f7 --- /dev/null +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/320_community_id_processor.yml @@ -0,0 +1,370 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "1" + ignore: 404 + +--- +"Test creat community_id processor": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + catch: /\[source\_ip\_field\] required property is missing/ + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "destination_ip_field" : "dest" + } + } + ] + } + - do: + catch: /\[destination\_ip\_field\] required property is missing/ + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "src" + } + } + ] + } + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "source_port_field" : "srcPort", + "destination_port_field" : "destPort", + "iana_protocol_number_field" : "iana_number", + "protocol_field" : "protocol", + "icmp_type_field" : "icmp", + "icmp_code_field" : "code", + "seed" : 0, + "target_field" : "community_id", + "ignore_missing" : false + } + } + ] + } + - match: { acknowledged: true } + +--- +"Test community_id processor with ignore_missing": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "source_port_field" : "srcPort", + "destination_port_field" : "destPort", + "protocol_field" : "protocol" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /ip address in the field \[source\] is null or empty/ + index: + index: test + id: 1 + pipeline: "1" + body: { + dest: "1.1.1.1", + protocol: "tcp" + } + + - do: + catch: /ip address in the field \[dest\] is null or empty/ + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "1.1.1.1", + protocol: "tcp" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "source_port_field" : "srcPort", + "destination_port_field" : "destPort", + "protocol_field" : "protocol", + "ignore_missing" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "1.1.1.1", + protocol: "tcp" + } + - do: + get: + index: test + id: 1 + - match: { _source: { source: "1.1.1.1", protocol: "tcp" } } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + dest: "2.2.2.2", + protocol: "tcp" + } + - do: + get: + index: test + id: 1 + - match: { _source: { dest: "2.2.2.2", protocol: "tcp" } } + +--- +"Test community_id processor for tcp": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + 
ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "source_port_field" : "srcPort", + "destination_port_field" : "destPort", + "protocol_field" : "protocol" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "66.35.250.204", + dest: "128.232.110.120", + protocol: "tcp", + srcPort: 80, + destPort: 34855 + } + - do: + get: + index: test + id: 1 + - match: { _source.community_id: "1:LQU9qZlK+B5F3KDmev6m5PMibrg=" } + +--- +"Test community_id processor for udp": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "source_port_field" : "srcPort", + "destination_port_field" : "destPort", + "protocol_field" : "protocol" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "8.8.8.8", + dest: "192.168.1.52", + protocol: "udp", + srcPort: 53, + destPort: 54585 + } + - do: + get: + index: test + id: 1 + - match: { _source.community_id: "1:d/FP5EW3wiY1vCndhwleRRKHowQ=" } + +--- +"Test community_id processor for sctp": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "source_port_field" : "srcPort", + "destination_port_field" : "destPort", + "protocol_field" : "protocol" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "192.168.170.8", + dest: "192.168.170.56", + protocol: "sctp", + srcPort: 7, + destPort: 7 + } + - do: + get: + index: test + id: 1 + - match: { _source.community_id: "1:MP2EtRCAUIZvTw6MxJHLV7N7JDs=" } + +--- +"Test community_id processor for icmp": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "icmp_type_field" : "type", + "icmp_code_field" : "code", + "protocol_field" : "protocol" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "192.168.0.89", + dest: "192.168.0.1", + protocol: "icmp", + type: 8, + code: 0 + } + - do: + get: + index: test + id: 1 + - match: { _source.community_id: "1:X0snYXpgwiv9TZtqg64sgzUn6Dk=" } + +--- +"Test community_id processor for icmp-v6": + - skip: + version: " - 2.12.99" + reason: "introduced in 2.13" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "community_id" : { + "source_ip_field" : "source", + "destination_ip_field" : "dest", + "icmp_type_field" : "type", + "icmp_code_field" : "code", + "protocol_field" : "protocol" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "fe80::260:97ff:fe07:69ea", + dest: "ff02::1", + protocol: "ipv6-icmp", + type: 134, + code: 0 + } + - do: + get: + index: test + id: 1 + - match: { _source.community_id: "1:pkvHqCL88/tg1k4cPigmZXUtL00=" } diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java 
b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java index c7cf27b2e6493..b27c0f9fe0b31 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java @@ -218,6 +218,7 @@ Set<Property> getProperties() { return properties; } + @SuppressWarnings("removal") private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) { SpecialPermission.check(); CityResponse response = AccessController.doPrivileged( @@ -305,6 +306,7 @@ private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) { return geoData; } + @SuppressWarnings("removal") private Map<String, Object> retrieveCountryGeoData(InetAddress ipAddress) { SpecialPermission.check(); CountryResponse response = AccessController.doPrivileged( @@ -351,6 +353,7 @@ private Map<String, Object> retrieveCountryGeoData(InetAddress ipAddress) { return geoData; } + @SuppressWarnings("removal") private Map<String, Object> retrieveAsnGeoData(InetAddress ipAddress) { SpecialPermission.check(); AsnResponse response = AccessController.doPrivileged( diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 deleted file mode 100644 index 402cc36ba3d68..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1782a69d0e83af9cc3c65db0dcd2e7e7c1e5f90e \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..e073455415e24 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1 @@ -0,0 +1 @@ +045df3828b6460c032a6551040e31ea432b0aad6 \ No newline at end of file diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 8ca28a905f216..8e15488900e5f 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -41,7 +41,6 @@ import org.opensearch.action.update.UpdateRequestBuilder; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilders; @@ -57,7 +56,7 @@ import org.opensearch.search.aggregations.pipeline.SimpleValue; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.util.Arrays; @@ -80,10 +79,10 @@ import static org.hamcrest.Matchers.notNullValue; // TODO: please convert to unit tests! 
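The @SuppressWarnings("removal") annotations threaded through these modules address the JDK's deprecation-for-removal of the Security Manager APIs (JEP 411): AccessController, AccessControlContext, and the doPrivileged blocks guarded by SpecialPermission still compile and run, but they emit "removal" warnings that fail builds run with warnings-as-errors. A minimal sketch of the pattern being annotated — the class and property names below are illustrative, not taken from this diff:

import java.security.AccessController;
import java.security.PrivilegedAction;

final class PrivilegedExample {

    private PrivilegedExample() {}

    // AccessController is deprecated for removal since JDK 17 (JEP 411);
    // the annotation silences only the removal warning, it does not change behavior.
    @SuppressWarnings("removal")
    static String readProperty(final String key) {
        return AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty(key));
    }
}
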
-public class MoreExpressionIT extends ParameterizedOpenSearchIntegTestCase { +public class MoreExpressionIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MoreExpressionIT(Settings dynamicSettings) { - super(dynamicSettings); + public MoreExpressionIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -94,11 +93,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(ExpressionModulePlugin.class); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java index b1cb5356a4405..7465fa1e5ddbe 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java @@ -35,7 +35,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.plugins.Plugin; @@ -43,7 +42,7 @@ import org.opensearch.script.ScriptType; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -54,10 +53,10 @@ import static org.hamcrest.Matchers.containsString; //TODO: please convert to unit tests! 
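The recurring rename in these integration tests is one migration applied repeatedly: ParameterizedOpenSearchIntegTestCase treated the injected settings as dynamic cluster settings and each class force-enabled the concurrent segment search feature flag via featureFlagSettings(), whereas ParameterizedStaticSettingsOpenSearchIntegTestCase applies the injected settings statically at node startup, making the per-class feature-flag override redundant — hence it is deleted alongside each rename. A sketch of the resulting shape, with the caveat that MyFeatureIT is a hypothetical class and the setting constant is assumed to be SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING as used elsewhere in OpenSearch:

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.opensearch.common.settings.Settings;
import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
import java.util.Arrays;
import java.util.Collection;
import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;

public class MyFeatureIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

    public MyFeatureIT(Settings staticSettings) {
        super(staticSettings); // applied as static node settings, not dynamic cluster settings
    }

    @ParametersFactory
    public static Collection<Object[]> parameters() {
        // run the whole suite once with concurrent segment search disabled and once enabled
        return Arrays.asList(
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
        );
    }
}
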
-public class StoredExpressionIT extends ParameterizedOpenSearchIntegTestCase { +public class StoredExpressionIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public StoredExpressionIT(Settings dynamicSettings) { - super(dynamicSettings); + public StoredExpressionIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -68,11 +67,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java index 5629b3b4a6972..0520177b72b62 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java @@ -170,6 +170,7 @@ public String getType() { return NAME; } + @SuppressWarnings("removal") @Override public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) { // classloader created here diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java index e480fbbd22ad2..f7abc220e75d8 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java @@ -37,11 +37,10 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexNotFoundException; import org.opensearch.plugins.Plugin; import org.opensearch.script.ScriptType; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -58,10 +57,10 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.Is.is; -public class MultiSearchTemplateIT extends ParameterizedOpenSearchIntegTestCase { +public class MultiSearchTemplateIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MultiSearchTemplateIT(Settings dynamicSettings) { - super(dynamicSettings); + public MultiSearchTemplateIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -72,11 +71,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(MustacheModulePlugin.class); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java index ec84475b70bb6..842353fdba336 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java @@ -128,6 +128,7 @@ private class MustacheExecutableScript extends TemplateScript { this.params = params; } + @SuppressWarnings("removal") @Override public String execute() { final StringWriter writer = new StringWriter(); diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java index 632fee9187eba..f18a7fb3ba1a9 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java @@ -513,6 +513,7 @@ public static Allowlist loadFromResourceFiles(Class<?> resource, Map<String, All } } + @SuppressWarnings("removal") ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>) resource::getClassLoader); return new Allowlist(loader, allowlistClasses, allowlistStatics, allowlistClassBindings, Collections.emptyList()); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java b/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java index d0af4651d2d3b..2bf70882a501b 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java @@ -501,6 +501,7 @@ private static void endLambdaClass(ClassWriter cw) { * Defines the {@link Class} for the lambda class using the same {@link Compiler.Loader} * that originally defined the class for the Painless script. */ + @SuppressWarnings("removal") private static Class<?> createLambdaClass(Compiler.Loader loader, ClassWriter cw, Type lambdaClassType) { byte[] classBytes = cw.toByteArray(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java index e9edfb73c740c..257687bfb98c5 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java @@ -67,6 +67,7 @@ /** * Implementation of a ScriptEngine for the Painless language. 
*/ +@SuppressWarnings("removal") public final class PainlessScriptEngine implements ScriptEngine { /** diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java index e79eda975f417..e155a890c03d1 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java @@ -2189,6 +2189,7 @@ private void generateBridgeMethod(PainlessClassBuilder painlessClassBuilder, Pai bridgeClassWriter.visitEnd(); try { + @SuppressWarnings("removal") BridgeLoader bridgeLoader = AccessController.doPrivileged(new PrivilegedAction<BridgeLoader>() { @Override public BridgeLoader run() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java index 3418bcf01e19f..691e84176dce3 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java @@ -48,6 +48,7 @@ public class DocFieldsPhaseTests extends ScriptTestCase { PainlessLookup lookup = PainlessLookupBuilder.buildFromAllowlists(Allowlist.BASE_ALLOWLISTS); + @SuppressWarnings("removal") ScriptScope compile(String script) { Compiler compiler = new Compiler( MockDocTestScript.CONTEXT.instanceClazz, diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java index 5fc0a202ae45e..ab74463382aaa 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java @@ -41,7 +41,6 @@ import org.opensearch.client.Requests; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.SearchHit; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.InternalAggregation; @@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testChildrenAggs() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") .setQuery(matchQuery("randomized", true)) diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java index 04703a65aa19d..4a6157e388777 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import 
org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.terms.Terms; @@ -76,11 +75,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleParentAgg() throws Exception { final SearchRequestBuilder searchRequest = client().prepareSearch("test") .setSize(10000) diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index c43d6352b26f8..99527c3273c4b 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -44,7 +44,6 @@ import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.IdsQueryBuilder; @@ -118,11 +117,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testMultiLevelChild() throws Exception { assertAcked( prepareCreate("test").setMapping( diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java index 39da86c7fd726..4b5470d17c100 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexSettings; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.InnerHitBuilder; @@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { ArrayList<Class<? 
extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java index 8c19c0aafe763..9c0f96cf382a6 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java @@ -41,7 +41,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -51,10 +51,10 @@ import java.util.Map; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public abstract class ParentChildTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class ParentChildTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ParentChildTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public ParentChildTestCase(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index c8763c2f3f749..01436404e8a85 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.DistanceUnit; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.bytes.BytesArray; @@ -57,7 +56,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -90,10 +89,10 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsNull.notNullValue; -public class PercolatorQuerySearchIT extends ParameterizedOpenSearchIntegTestCase { +public class PercolatorQuerySearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public PercolatorQuerySearchIT(Settings dynamicSettings) { - super(dynamicSettings); + public PercolatorQuerySearchIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -104,11 +103,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected boolean addMockGeoShapeFieldMapper() { return false; diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java 
b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java index cdc3cac1a1f06..488c2e33648e7 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -47,7 +46,7 @@ import org.opensearch.indices.IndexClosedException; import org.opensearch.plugins.Plugin; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.Before; import java.util.ArrayList; @@ -62,14 +61,14 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.instanceOf; -public class RankEvalRequestIT extends ParameterizedOpenSearchIntegTestCase { +public class RankEvalRequestIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String TEST_INDEX = "test"; private static final String INDEX_ALIAS = "alias0"; private static final int RELEVANT_RATING_1 = 1; - public RankEvalRequestIT(Settings dynamicSettings) { - super(dynamicSettings); + public RankEvalRequestIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -80,11 +79,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(RankEvalModulePlugin.class); diff --git a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java index b13a4d5a39a5b..02e858cb8d1f2 100644 --- a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java +++ b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java @@ -157,6 +157,7 @@ public void writeBlobAtomic(String blobName, InputStream inputStream, long blobS throw new UnsupportedOperationException("URL repository doesn't support this operation"); } + @SuppressWarnings("removal") @SuppressForbidden(reason = "We call connect in doPrivileged and provide SocketPermission") private static InputStream getInputStream(URL url) throws IOException { try { diff --git a/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java b/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java index 93e2e28718d51..05c6222d3d89a 100644 --- a/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java +++ b/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java @@ -40,6 +40,7 @@ /** * Provides access to the native method sd_notify from libsystemd. 
*/ +@SuppressWarnings("removal") class Libsystemd { static { diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ 
b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 deleted file mode 100644 index dde9b7c100dc7..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -147cb42a90a29501d9ca6094ea0db1d213f3076a \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..b318a2d89db7d --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1 @@ -0,0 +1 @@ +f7e549fdac07140f4cd379a0f517c38434165e95 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 deleted file mode 100644 index b70a22e9db096..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b034dd3a975763e083c7e11b5d0f7d516ab72590 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..87510efa881bc --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1 @@ -0,0 +1 @@ +eb19738fd3ca85071fef96978a056a7c94d8e793 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 deleted file mode 100644 index 323f165c62790..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c405f2f7d0fc127d88dfbadd753469b2028fdf52 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..fb3746eb27840 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1 @@ -0,0 +1 @@ +7e83a83741155ececf9193a4f967e570e170236d \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 deleted file mode 100644 index dd659ddf4de95..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -970e5775876c2d7e1b9af7421a4b17d96f63faf4 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..e88299f106bb2 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1 @@ -0,0 +1 @@ +dabdea96c7a6d00363b0093a580e7d783efa69a4 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 deleted file mode 100644 index ed0e81d8f1f75..0000000000000 --- 
a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2421e5238e9b8484929291744d709dd743c01da1 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..05dd3328b032d --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1 @@ -0,0 +1 @@ +fb551d9ae6a1bf12bc90a4d26dd8fa3eefac8cb9 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 deleted file mode 100644 index fd8e000088180..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a23e7de4cd9ae7af285c89dc1c55e0ac3f157fd3 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..b47a1e4052407 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1 @@ -0,0 +1 @@ +0d495b478861b2d29e0a58d273ca0e6e755292e9 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 deleted file mode 100644 index d0e7a3b0c751c..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8d9bce1ea51db279878c51091dd9aefc7b335da4 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..b7d54cb230445 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1 @@ -0,0 +1 @@ +9958d813d4cfdd890b4611c679ed36775480fa0d \ No newline at end of file diff --git a/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java b/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java index 5b026c30017ca..f3d0f278c7ce7 100644 --- a/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java +++ b/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java @@ -19,6 +19,7 @@ * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in * {@link AccessController#doPrivileged(PrivilegedAction)} blocks. 
*/ +@SuppressWarnings("removal") public final class SocketAccess { private SocketAccess() {} diff --git a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index ad622e68f5ccb..a4b733ec7d894 100644 --- a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -296,6 +296,7 @@ private static SSLContext getSSLContext() throws Exception { * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK */ + @SuppressWarnings("removal") private static String getProtocol() { if (Runtime.version().compareTo(Version.parse("12")) < 0) { return "TLSv1.2"; diff --git a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java index 1bac80e576199..6e21feca7f5fb 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java @@ -112,6 +112,7 @@ private static String getRequiredSetting(Settings settings, Setting<String> sett return value; } + @SuppressWarnings("removal") @Override public HostedServiceGetDetailedResponse getServiceDetails() { SpecialPermission.check(); diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java index c6605002c4462..0125ae4d19c3e 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java @@ -46,6 +46,7 @@ * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in * {@link AccessController#doPrivileged(PrivilegedAction)} blocks. 
*/ +@SuppressWarnings("removal") final class SocketAccess { private SocketAccess() {} diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java index b4ed613c0d8dd..9518fac442111 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java @@ -89,6 +89,7 @@ public static void startHttp() throws Exception { httpServer.start(); } + @SuppressWarnings("removal") @Before public void setup() { // redirect EC2 metadata service to httpServer @@ -116,6 +117,7 @@ public void testNetworkHostEc2() throws IOException { /** * Test for network.host: _ec2_ */ + @SuppressWarnings("removal") public void testNetworkHostUnableToResolveEc2() { // redirect EC2 metadata service to unknown location AccessController.doPrivileged( diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index c8b52d3afcd45..85efcc43fd65a 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -24,7 +24,7 @@ versions << [ dependencies { api "com.google.apis:google-api-services-compute:v1-rev160-${versions.google}" api "com.google.api-client:google-api-client:${versions.google}" - api "com.google.oauth-client:google-oauth-client:1.34.1" + api "com.google.oauth-client:google-oauth-client:1.35.0" api "com.google.http-client:google-http-client:${versions.google}" api "com.google.http-client:google-http-client-jackson2:${versions.google}" api 'com.google.code.findbugs:jsr305:3.0.2' diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 deleted file mode 100644 index a8434bd380761..0000000000000 --- a/plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4a4f88c5e13143f882268c98239fb85c3b2c6cb2 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 new file mode 100644 index 0000000000000..a52e79088c7ca --- /dev/null +++ b/plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 @@ -0,0 +1 @@ +2f52003156e40ba8be5f349a2716a77428896e69 \ No newline at end of file diff --git a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java index 1401f7ca26ce6..c46bfedbd8507 100644 --- a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java +++ b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java @@ -48,6 +48,7 @@ * {@code connect}. This class wraps the operations requiring access in * {@link AccessController#doPrivileged(PrivilegedAction)} blocks. 
*/ +@SuppressWarnings("removal") public final class Access { private Access() {} diff --git a/plugins/identity-shiro/build.gradle b/plugins/identity-shiro/build.gradle index baa3464d0a98e..1548780aaa566 100644 --- a/plugins/identity-shiro/build.gradle +++ b/plugins/identity-shiro/build.gradle @@ -17,7 +17,7 @@ opensearchplugin { } dependencies { - implementation 'org.apache.shiro:shiro-core:1.11.0' + implementation 'org.apache.shiro:shiro-core:1.13.0' // Needed for shiro implementation "org.slf4j:slf4j-api:${versions.slf4j}" diff --git a/plugins/identity-shiro/licenses/shiro-core-1.11.0.jar.sha1 b/plugins/identity-shiro/licenses/shiro-core-1.11.0.jar.sha1 deleted file mode 100644 index 67c33e15ec689..0000000000000 --- a/plugins/identity-shiro/licenses/shiro-core-1.11.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -033a70c87e91968a299f1ee00f4e95050312346d \ No newline at end of file diff --git a/plugins/identity-shiro/licenses/shiro-core-1.13.0.jar.sha1 b/plugins/identity-shiro/licenses/shiro-core-1.13.0.jar.sha1 new file mode 100644 index 0000000000000..25bd4d9acd166 --- /dev/null +++ b/plugins/identity-shiro/licenses/shiro-core-1.13.0.jar.sha1 @@ -0,0 +1 @@ +7e542e3d614b197bf10005e98e19f9f19cb943e7 \ No newline at end of file diff --git a/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java index ce7ceb5e3d776..fe783e5ddb675 100644 --- a/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java +++ b/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java @@ -112,6 +112,7 @@ final class TikaImpl { /** * parses with tika, throwing any exception hit while parsing the document */ + @SuppressWarnings("removal") static String parse(final byte content[], final Metadata metadata, final int limit) throws TikaException, IOException { // check that its not unprivileged code like a script SpecialPermission.check(); @@ -136,6 +137,7 @@ static String parse(final byte content[], final Metadata metadata, final int lim // apply additional containment for parsers, this is intersected with the current permissions // its hairy, but worth it so we don't have some XML flaw reading random crap from the FS + @SuppressWarnings("removal") private static final AccessControlContext RESTRICTED_CONTEXT = new AccessControlContext( new ProtectionDomain[] { new ProtectionDomain(null, getRestrictedPermissions()) } ); diff --git a/plugins/query-insights/build.gradle b/plugins/query-insights/build.gradle new file mode 100644 index 0000000000000..eabbd395bd3bd --- /dev/null +++ b/plugins/query-insights/build.gradle @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +opensearchplugin { + description 'OpenSearch Query Insights Plugin.' 
+ classname 'org.opensearch.plugin.insights.QueryInsightsPlugin' +} + +dependencies { +} diff --git a/plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java b/plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java new file mode 100644 index 0000000000000..04e715444f50a --- /dev/null +++ b/plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java @@ -0,0 +1,274 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights; + +import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; +import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugins.Plugin; +import org.opensearch.plugins.PluginInfo; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.junit.Assert; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED; +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE; +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +/** + * Transport Action tests for Query Insights Plugin + */ + +@OpenSearchIntegTestCase.ClusterScope(numDataNodes = 0, scope = OpenSearchIntegTestCase.Scope.TEST) +public class QueryInsightsPluginTransportIT extends OpenSearchIntegTestCase { + + private final int TOTAL_NUMBER_OF_NODES = 2; + private final int TOTAL_SEARCH_REQUESTS = 5; + + @Override + protected Collection<Class<? 
extends Plugin>> nodePlugins() {
+        return Arrays.asList(QueryInsightsPlugin.class);
+    }
+
+    /**
+     * Test Query Insights Plugin is installed
+     */
+    public void testQueryInsightPluginInstalled() {
+        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
+        nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName());
+        NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
+        List<PluginInfo> pluginInfos = nodesInfoResponse.getNodes()
+            .stream()
+            .flatMap(
+                (Function<NodeInfo, Stream<PluginInfo>>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream()
+            )
+            .collect(Collectors.toList());
+        Assert.assertTrue(
+            pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName().equals("org.opensearch.plugin.insights.QueryInsightsPlugin"))
+        );
+    }
+
+    /**
+     * Test get top queries when feature disabled
+     */
+    public void testGetTopQueriesWhenFeatureDisabled() {
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertNotEquals(0, response.failures().size());
+        Assert.assertEquals(
+            "Cannot get top n queries for [latency] when it is not enabled.",
+            response.failures().get(0).getCause().getCause().getMessage()
+        );
+    }
+
+    /**
+     * Test updating top query records when the feature is disabled, then enabled
+     */
+    public void testUpdateRecordWhenFeatureDisabledThenEnabled() throws ExecutionException, InterruptedException {
+        Settings commonSettings = Settings.builder().put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "false").build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+            prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2))
+        );
+        ensureStableCluster(2);
+        logger.info("--> creating indices for query insight testing");
+        for (int i = 0; i < 5; i++) {
+            IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get();
+            assertEquals("CREATED", response.status().toString());
+        }
+        // making search requests to get top queries
+        for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) {
+            SearchResponse searchResponse = internalCluster().client(randomFrom(nodes))
+                .prepareSearch()
+                .setQuery(QueryBuilders.matchAllQuery())
+                .get();
+            assertEquals(searchResponse.getFailedShards(), 0);
+        }
+
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertNotEquals(0, response.failures().size());
+        Assert.assertEquals(
+            "Cannot get top n queries for [latency] when it is not enabled.",
+            response.failures().get(0).getCause().getCause().getMessage()
+        );
+
+        ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest().persistentSettings(
+            Settings.builder().put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true").build()
+        );
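+        // Applying the dynamic setting below re-enables collection, so the same
+        // top_queries request that failed above is expected to succeed afterwards.
+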
assertAcked(internalCluster().client().admin().cluster().updateSettings(updateSettingsRequest).get());
+        TopQueriesRequest request2 = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response2 = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request2).actionGet();
+        Assert.assertEquals(0, response2.failures().size());
+        Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response2.getNodes().size());
+        for (int i = 0; i < TOTAL_NUMBER_OF_NODES; i++) {
+            Assert.assertEquals(0, response2.getNodes().get(i).getTopQueriesRecord().size());
+        }
+
+        internalCluster().stopAllNodes();
+    }
+
+    /**
+     * Test get top queries when feature enabled
+     */
+    public void testGetTopQueriesWhenFeatureEnabled() throws InterruptedException {
+        Settings commonSettings = Settings.builder()
+            .put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true")
+            .put(TOP_N_LATENCY_QUERIES_SIZE.getKey(), "100")
+            .put(TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "600s")
+            .build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+            prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2))
+        );
+        ensureStableCluster(2);
+        logger.info("--> creating indices for query insight testing");
+        for (int i = 0; i < 5; i++) {
+            IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get();
+            assertEquals("CREATED", response.status().toString());
+        }
+        // making search requests to get top queries
+        for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) {
+            SearchResponse searchResponse = internalCluster().client(randomFrom(nodes))
+                .prepareSearch()
+                .setQuery(QueryBuilders.matchAllQuery())
+                .get();
+            assertEquals(searchResponse.getFailedShards(), 0);
+        }
+        // Sleep to wait for the queue to drain into the top queries store
+        Thread.sleep(6000);
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertEquals(0, response.failures().size());
+        Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response.getNodes().size());
+        Assert.assertEquals(TOTAL_SEARCH_REQUESTS, response.getNodes().stream().mapToInt(o -> o.getTopQueriesRecord().size()).sum());
+
+        internalCluster().stopAllNodes();
+    }
+
+    /**
+     * Test get top queries with small top n size
+     */
+    public void testGetTopQueriesWithSmallTopN() throws InterruptedException {
+        Settings commonSettings = Settings.builder()
+            .put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true")
+            .put(TOP_N_LATENCY_QUERIES_SIZE.getKey(), "1")
+            .put(TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "600s")
+            .build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+
prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2)) + ); + ensureStableCluster(2); + logger.info("--> creating indices for query insight testing"); + for (int i = 0; i < 5; i++) { + IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get(); + assertEquals("CREATED", response.status().toString()); + } + // making search requests to get top queries + for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) { + SearchResponse searchResponse = internalCluster().client(randomFrom(nodes)) + .prepareSearch() + .setQuery(QueryBuilders.matchAllQuery()) + .get(); + assertEquals(searchResponse.getFailedShards(), 0); + } + Thread.sleep(6000); + TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY); + TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet(); + Assert.assertEquals(0, response.failures().size()); + Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response.getNodes().size()); + Assert.assertEquals(2, response.getNodes().stream().mapToInt(o -> o.getTopQueriesRecord().size()).sum()); + + internalCluster().stopAllNodes(); + } + + /** + * Test get top queries with small window size + */ + public void testGetTopQueriesWithSmallWindowSize() throws InterruptedException { + Settings commonSettings = Settings.builder() + .put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true") + .put(TOP_N_LATENCY_QUERIES_SIZE.getKey(), "100") + .put(TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "1m") + .build(); + + logger.info("--> starting nodes for query insight testing"); + List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build()); + + logger.info("--> waiting for nodes to form a cluster"); + ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet(); + assertFalse(health.isTimedOut()); + + assertAcked( + prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2)) + ); + ensureStableCluster(2); + logger.info("--> creating indices for query insight testing"); + for (int i = 0; i < 5; i++) { + IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get(); + assertEquals("CREATED", response.status().toString()); + } + // making search requests to get top queries + for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) { + SearchResponse searchResponse = internalCluster().client(randomFrom(nodes)) + .prepareSearch() + .setQuery(QueryBuilders.matchAllQuery()) + .get(); + assertEquals(searchResponse.getFailedShards(), 0); + } + + TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY); + TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet(); + Assert.assertEquals(0, response.failures().size()); + Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response.getNodes().size()); + Thread.sleep(6000); + internalCluster().stopAllNodes(); + } +} diff --git a/plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java b/plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java new file mode 100644 index 0000000000000..57dea6ad8d5ff --- /dev/null +++ b/plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java @@ -0,0 +1,107 @@ 
+/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights; + +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.junit.Assert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; + +/** + * Rest Action tests for Query Insights + */ +public class TopQueriesRestIT extends OpenSearchRestTestCase { + + /** + * test Query Insights is installed + * @throws IOException IOException + */ + @SuppressWarnings("unchecked") + public void testQueryInsightsPluginInstalled() throws IOException { + Request request = new Request("GET", "/_cat/plugins?s=component&h=name,component,version,description&format=json"); + Response response = client().performRequest(request); + List<Object> pluginsList = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).list(); + Assert.assertTrue( + pluginsList.stream().map(o -> (Map<String, Object>) o).anyMatch(plugin -> plugin.get("component").equals("query-insights")) + ); + } + + /** + * test enabling top queries + * @throws IOException IOException + */ + public void testTopQueriesResponses() throws IOException { + // Enable Top N Queries feature + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity(defaultTopQueriesSettings()); + Response response = client().performRequest(request); + + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + + // Create documents for search + request = new Request("POST", "/my-index-0/_doc"); + request.setJsonEntity(createDocumentsBody()); + response = client().performRequest(request); + + Assert.assertEquals(201, response.getStatusLine().getStatusCode()); + + // Do Search + request = new Request("GET", "/my-index-0/_search?size=20&pretty"); + request.setJsonEntity(searchBody()); + response = client().performRequest(request); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + response = client().performRequest(request); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + + // Get Top Queries + request = new Request("GET", "/_insights/top_queries?pretty"); + response = client().performRequest(request); + + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + String top_requests = new String(response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); + Assert.assertTrue(top_requests.contains("top_queries")); + Assert.assertEquals(2, top_requests.split("searchType", -1).length - 1); + } + + private String defaultTopQueriesSettings() { + return "{\n" + + " \"persistent\" : {\n" + + " \"search.top_n_queries.latency.enabled\" : \"true\",\n" + + " \"search.top_n_queries.latency.window_size\" : \"600s\",\n" + + " \"search.top_n_queries.latency.top_n_size\" : 5\n" + + " }\n" + + "}"; + } + + private String createDocumentsBody() { + return "{\n" + + " \"@timestamp\": \"2099-11-15T13:12:00\",\n" + + " \"message\": \"this is document 1\",\n" + + " \"user\": {\n" + + " \"id\": \"cyji\"\n" + + " }\n" + + "}"; 
+ } + + private String searchBody() { + return "{}"; + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java new file mode 100644 index 0000000000000..4d7e0d486068a --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights; + +import org.opensearch.action.ActionRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.IndexScopedSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsFilter; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.Environment; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.plugin.insights.core.listener.QueryInsightsListener; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction; +import org.opensearch.plugin.insights.rules.resthandler.top_queries.RestTopQueriesAction; +import org.opensearch.plugin.insights.rules.transport.top_queries.TransportTopQueriesAction; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.plugins.Plugin; +import org.opensearch.repositories.RepositoriesService; +import org.opensearch.rest.RestController; +import org.opensearch.rest.RestHandler; +import org.opensearch.script.ScriptService; +import org.opensearch.threadpool.ExecutorBuilder; +import org.opensearch.threadpool.ScalingExecutorBuilder; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.watcher.ResourceWatcherService; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +/** + * Plugin class for Query Insights. 
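+ *
+ * <p>Wires the feature together: it creates the {@code QueryInsightsService} and its request
+ * listener, registers a dedicated scaling executor for async processing, and exposes the top
+ * queries API through a REST handler and a node-level transport action (see the overrides below).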
+ */ +public class QueryInsightsPlugin extends Plugin implements ActionPlugin { + /** + * Default constructor + */ + public QueryInsightsPlugin() {} + + @Override + public Collection<Object> createComponents( + final Client client, + final ClusterService clusterService, + final ThreadPool threadPool, + final ResourceWatcherService resourceWatcherService, + final ScriptService scriptService, + final NamedXContentRegistry xContentRegistry, + final Environment environment, + final NodeEnvironment nodeEnvironment, + final NamedWriteableRegistry namedWriteableRegistry, + final IndexNameExpressionResolver indexNameExpressionResolver, + final Supplier<RepositoriesService> repositoriesServiceSupplier + ) { + // create top n queries service + final QueryInsightsService queryInsightsService = new QueryInsightsService(threadPool); + return List.of(queryInsightsService, new QueryInsightsListener(clusterService, queryInsightsService)); + } + + @Override + public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) { + return List.of( + new ScalingExecutorBuilder( + QueryInsightsSettings.QUERY_INSIGHTS_EXECUTOR, + 1, + Math.min((OpenSearchExecutors.allocatedProcessors(settings) + 1) / 2, QueryInsightsSettings.MAX_THREAD_COUNT), + TimeValue.timeValueMinutes(5) + ) + ); + } + + @Override + public List<RestHandler> getRestHandlers( + final Settings settings, + final RestController restController, + final ClusterSettings clusterSettings, + final IndexScopedSettings indexScopedSettings, + final SettingsFilter settingsFilter, + final IndexNameExpressionResolver indexNameExpressionResolver, + final Supplier<DiscoveryNodes> nodesInCluster + ) { + return List.of(new RestTopQueriesAction()); + } + + @Override + public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { + return List.of(new ActionPlugin.ActionHandler<>(TopQueriesAction.INSTANCE, TransportTopQueriesAction.class)); + } + + @Override + public List<Setting<?>> getSettings() { + return List.of( + // Settings for top N queries + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED, + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE, + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE + ); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java new file mode 100644 index 0000000000000..705273f52a567 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java @@ -0,0 +1,147 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.insights.core.listener; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.search.SearchPhaseContext; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchRequestContext; +import org.opensearch.action.search.SearchRequestOperationsListener; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.rules.model.Attribute; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugin.insights.rules.model.SearchQueryRecord; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED; +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE; +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE; + +/** + * The listener for query insights services. + * It forwards query-related data to the appropriate query insights stores, + * either for each request or for each phase. + * + * @opensearch.internal + */ +public final class QueryInsightsListener extends SearchRequestOperationsListener { + private static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false")); + + private static final Logger log = LogManager.getLogger(QueryInsightsListener.class); + + private final QueryInsightsService queryInsightsService; + + /** + * Constructor for QueryInsightsListener + * + * @param clusterService The Node's cluster service. 
+     * @param queryInsightsService The QueryInsightsService associated with this listener
+     */
+    @Inject
+    public QueryInsightsListener(final ClusterService clusterService, final QueryInsightsService queryInsightsService) {
+        this.queryInsightsService = queryInsightsService;
+        clusterService.getClusterSettings()
+            .addSettingsUpdateConsumer(TOP_N_LATENCY_QUERIES_ENABLED, v -> this.setEnableTopQueries(MetricType.LATENCY, v));
+        clusterService.getClusterSettings()
+            .addSettingsUpdateConsumer(
+                TOP_N_LATENCY_QUERIES_SIZE,
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).setTopNSize(v),
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).validateTopNSize(v)
+            );
+        clusterService.getClusterSettings()
+            .addSettingsUpdateConsumer(
+                TOP_N_LATENCY_QUERIES_WINDOW_SIZE,
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).setWindowSize(v),
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).validateWindowSize(v)
+            );
+        this.setEnableTopQueries(MetricType.LATENCY, clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_ENABLED));
+        this.queryInsightsService.getTopQueriesService(MetricType.LATENCY)
+            .setTopNSize(clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_SIZE));
+        this.queryInsightsService.getTopQueriesService(MetricType.LATENCY)
+            .setWindowSize(clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_WINDOW_SIZE));
+    }
+
+    /**
+     * Enable or disable top queries insights collection for {@link MetricType}.
+     * This function will enable or disable the corresponding listeners
+     * and query insights services.
+     *
+     * @param metricType {@link MetricType}
+     * @param enabled boolean
+     */
+    public void setEnableTopQueries(final MetricType metricType, final boolean enabled) {
+        boolean isAllMetricsDisabled = !queryInsightsService.isEnabled();
+        this.queryInsightsService.enableCollection(metricType, enabled);
+        if (!enabled) {
+            // disable the QueryInsightsListener only if all metric collections are now disabled.
+            if (!queryInsightsService.isEnabled()) {
+                super.setEnabled(false);
+                this.queryInsightsService.stop();
+            }
+        } else {
+            super.setEnabled(true);
+            // restart the QueryInsightsListener only if no metric collection was enabled before.
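+            // (stop() followed by start() re-schedules the periodic drain task;
+            // see QueryInsightsService#doStart for the scheduling logic.)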
+            if (isAllMetricsDisabled) {
+                this.queryInsightsService.stop();
+                this.queryInsightsService.start();
+            }
+        }
+
+    }
+
+    @Override
+    public boolean isEnabled() {
+        return super.isEnabled();
+    }
+
+    @Override
+    public void onPhaseStart(SearchPhaseContext context) {}
+
+    @Override
+    public void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
+
+    @Override
+    public void onPhaseFailure(SearchPhaseContext context) {}
+
+    @Override
+    public void onRequestStart(SearchRequestContext searchRequestContext) {}
+
+    @Override
+    public void onRequestEnd(final SearchPhaseContext context, final SearchRequestContext searchRequestContext) {
+        final SearchRequest request = context.getRequest();
+        try {
+            Map<MetricType, Number> measurements = new HashMap<>();
+            if (queryInsightsService.isCollectionEnabled(MetricType.LATENCY)) {
+                measurements.put(
+                    MetricType.LATENCY,
+                    TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - searchRequestContext.getAbsoluteStartNanos())
+                );
+            }
+            Map<Attribute, Object> attributes = new HashMap<>();
+            attributes.put(Attribute.SEARCH_TYPE, request.searchType().toString().toLowerCase(Locale.ROOT));
+            attributes.put(Attribute.SOURCE, request.source().toString(FORMAT_PARAMS));
+            attributes.put(Attribute.TOTAL_SHARDS, context.getNumShards());
+            attributes.put(Attribute.INDICES, request.indices());
+            attributes.put(Attribute.PHASE_LATENCY_MAP, searchRequestContext.phaseTookMap());
+            SearchQueryRecord record = new SearchQueryRecord(request.getOrCreateAbsoluteStartMillis(), measurements, attributes);
+            queryInsightsService.addRecord(record);
+        } catch (Exception e) {
+            log.error(String.format(Locale.ROOT, "failed to ingest query insight data, error: %s", e));
+        }
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java
new file mode 100644
index 0000000000000..3cb9cacf7fd1c
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Listeners for Query Insights
+ */
+package org.opensearch.plugin.insights.core.listener;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
new file mode 100644
index 0000000000000..525ca0d4a3d33
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
@@ -0,0 +1,180 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.common.inject.Inject;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.threadpool.Scheduler;
+import org.opensearch.threadpool.ThreadPool;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+
+/**
+ * Service responsible for gathering, analyzing, storing and exporting
+ * information related to search queries
+ *
+ * @opensearch.internal
+ */
+public class QueryInsightsService extends AbstractLifecycleComponent {
+    /**
+     * The internal OpenSearch thread pool that executes async processing and exporting tasks
+     */
+    private final ThreadPool threadPool;
+
+    /**
+     * Services to capture top n queries for different metric types
+     */
+    private final Map<MetricType, TopQueriesService> topQueriesServices;
+
+    /**
+     * Flags for enabling insight data collection for different metric types
+     */
+    private final Map<MetricType, Boolean> enableCollect;
+
+    /**
+     * The internal thread-safe queue to ingest the search query data and subsequently forward to processors
+     */
+    private final LinkedBlockingQueue<SearchQueryRecord> queryRecordsQueue;
+
+    /**
+     * Holds a reference to the delayed operation {@link Scheduler.Cancellable} so it can be cancelled when
+     * the service is closed concurrently.
+     */
+    protected volatile Scheduler.Cancellable scheduledFuture;
+
+    /**
+     * Constructor of the QueryInsightsService
+     *
+     * @param threadPool The OpenSearch thread pool to run async tasks
+     */
+    @Inject
+    public QueryInsightsService(final ThreadPool threadPool) {
+        enableCollect = new HashMap<>();
+        queryRecordsQueue = new LinkedBlockingQueue<>(QueryInsightsSettings.QUERY_RECORD_QUEUE_CAPACITY);
+        topQueriesServices = new HashMap<>();
+        for (MetricType metricType : MetricType.allMetricTypes()) {
+            enableCollect.put(metricType, false);
+            topQueriesServices.put(metricType, new TopQueriesService(metricType));
+        }
+        this.threadPool = threadPool;
+    }
+
+    /**
+     * Ingest the query data into in-memory stores
+     *
+     * @param record the record to ingest
+     * @return whether the record was successfully queued for processing
+     */
+    public boolean addRecord(final SearchQueryRecord record) {
+        boolean shouldAdd = false;
+        for (Map.Entry<MetricType, TopQueriesService> entry : topQueriesServices.entrySet()) {
+            if (!enableCollect.get(entry.getKey())) {
+                continue;
+            }
+            List<SearchQueryRecord> currentSnapshot = entry.getValue().getTopQueriesCurrentSnapshot();
+            // skip adding to the top N queries store if the incoming record is smaller than the Nth record
+            if (currentSnapshot.size() < entry.getValue().getTopNSize()
+                || SearchQueryRecord.compare(record, currentSnapshot.get(0), entry.getKey()) > 0) {
+                shouldAdd = true;
+                break;
+            }
+        }
+        if (shouldAdd) {
+            return queryRecordsQueue.offer(record);
+        }
+        return false;
+    }
+
+    /**
+     * Drain the queryRecordsQueue into internal stores and services
+     */
+    public void drainRecords() {
+        final List<SearchQueryRecord> records = new ArrayList<>();
+        queryRecordsQueue.drainTo(records);
+        records.sort(Comparator.comparingLong(SearchQueryRecord::getTimestamp));
+        for (MetricType metricType : MetricType.allMetricTypes()) {
+            if (enableCollect.get(metricType)) {
+                // ingest the records into topQueriesService
+
topQueriesServices.get(metricType).consumeRecords(records); + } + } + } + + /** + * Get the top queries service based on metricType + * @param metricType {@link MetricType} + * @return {@link TopQueriesService} + */ + public TopQueriesService getTopQueriesService(final MetricType metricType) { + return topQueriesServices.get(metricType); + } + + /** + * Set flag to enable or disable Query Insights data collection + * + * @param metricType {@link MetricType} + * @param enable Flag to enable or disable Query Insights data collection + */ + public void enableCollection(final MetricType metricType, final boolean enable) { + this.enableCollect.put(metricType, enable); + this.topQueriesServices.get(metricType).setEnabled(enable); + } + + /** + * Get if the Query Insights data collection is enabled for a MetricType + * + * @param metricType {@link MetricType} + * @return if the Query Insights data collection is enabled + */ + public boolean isCollectionEnabled(final MetricType metricType) { + return this.enableCollect.get(metricType); + } + + /** + * Check if query insights service is enabled + * + * @return if query insights service is enabled + */ + public boolean isEnabled() { + for (MetricType t : MetricType.allMetricTypes()) { + if (isCollectionEnabled(t)) { + return true; + } + } + return false; + } + + @Override + protected void doStart() { + if (isEnabled()) { + scheduledFuture = threadPool.scheduleWithFixedDelay( + this::drainRecords, + QueryInsightsSettings.QUERY_RECORD_QUEUE_DRAIN_INTERVAL, + QueryInsightsSettings.QUERY_INSIGHTS_EXECUTOR + ); + } + } + + @Override + protected void doStop() { + if (scheduledFuture != null) { + scheduledFuture.cancel(); + } + } + + @Override + protected void doClose() {} +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java new file mode 100644 index 0000000000000..d2c30cbdf98e7 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java @@ -0,0 +1,282 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.temporal.ChronoUnit;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import java.util.PriorityQueue;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Service responsible for gathering and storing top N queries
+ * with high latency or resource usage
+ *
+ * @opensearch.internal
+ */
+public class TopQueriesService {
+    private boolean enabled;
+    /**
+     * The metric type to measure top n queries
+     */
+    private final MetricType metricType;
+    private int topNSize;
+    /**
+     * The window size to keep the top n queries
+     */
+    private TimeValue windowSize;
+    /**
+     * The current window start timestamp
+     */
+    private long windowStart;
+    /**
+     * The internal store that holds the top n queries insight data; it is only mutated by the
+     * scheduled drain task, so it needs no extra synchronization (readers use the snapshots below)
+     */
+    private final PriorityQueue<SearchQueryRecord> topQueriesStore;
+
+    /**
+     * The AtomicReference of a snapshot of the current window top queries for getters to consume
+     */
+    private final AtomicReference<List<SearchQueryRecord>> topQueriesCurrentSnapshot;
+
+    /**
+     * The AtomicReference of a snapshot of the last window top queries for getters to consume
+     */
+    private final AtomicReference<List<SearchQueryRecord>> topQueriesHistorySnapshot;
+
+    TopQueriesService(final MetricType metricType) {
+        this.enabled = false;
+        this.metricType = metricType;
+        this.topNSize = QueryInsightsSettings.DEFAULT_TOP_N_SIZE;
+        this.windowSize = QueryInsightsSettings.DEFAULT_WINDOW_SIZE;
+        this.windowStart = -1L;
+        topQueriesStore = new PriorityQueue<>(topNSize, (a, b) -> SearchQueryRecord.compare(a, b, metricType));
+        topQueriesCurrentSnapshot = new AtomicReference<>(new ArrayList<>());
+        topQueriesHistorySnapshot = new AtomicReference<>(new ArrayList<>());
+    }
+
+    /**
+     * Set the top N size for the TopQueriesService.
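+     * <p>Note: shrinking the size does not evict entries immediately; the store is only
+     * trimmed down to the new size during the next {@code consumeRecords} pass.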
+     *
+     * @param topNSize the top N size to set
+     */
+    public void setTopNSize(final int topNSize) {
+        this.topNSize = topNSize;
+    }
+
+    /**
+     * Get the current configured top n size
+     *
+     * @return top n size
+     */
+    public int getTopNSize() {
+        return topNSize;
+    }
+
+    /**
+     * Validate the top N size based on the internal constraints
+     *
+     * @param size the wanted top N size
+     */
+    public void validateTopNSize(final int size) {
+        if (size > QueryInsightsSettings.MAX_N_SIZE) {
+            throw new IllegalArgumentException(
+                "Top N size setting for ["
+                    + metricType
+                    + "]"
+                    + " should be smaller than max top N size ["
+                    + QueryInsightsSettings.MAX_N_SIZE
+                    + "], was ("
+                    + size
+                    + " > "
+                    + QueryInsightsSettings.MAX_N_SIZE
+                    + ")"
+            );
+        }
+    }
+
+    /**
+     * Set the enable flag for the service
+     * @param enabled boolean
+     */
+    public void setEnabled(final boolean enabled) {
+        this.enabled = enabled;
+    }
+
+    /**
+     * Set the window size for the top N queries service
+     *
+     * @param windowSize window size to set
+     */
+    public void setWindowSize(final TimeValue windowSize) {
+        this.windowSize = windowSize;
+        // reset the window start time since the window size has changed
+        this.windowStart = -1L;
+    }
+
+    /**
+     * Validate the window size, based on internal constraints.
+     *
+     * @param windowSize the window size to validate
+     */
+    public void validateWindowSize(final TimeValue windowSize) {
+        if (windowSize.compareTo(QueryInsightsSettings.MAX_WINDOW_SIZE) > 0
+            || windowSize.compareTo(QueryInsightsSettings.MIN_WINDOW_SIZE) < 0) {
+            throw new IllegalArgumentException(
+                "Window size setting for ["
+                    + metricType
+                    + "]"
+                    + " should be between ["
+                    + QueryInsightsSettings.MIN_WINDOW_SIZE
+                    + ","
+                    + QueryInsightsSettings.MAX_WINDOW_SIZE
+                    + "], was ("
+                    + windowSize
+                    + ")"
+            );
+        }
+        if (!(QueryInsightsSettings.VALID_WINDOW_SIZES_IN_MINUTES.contains(windowSize) || windowSize.getMinutes() % 60 == 0)) {
+            throw new IllegalArgumentException(
+                "Window size setting for ["
+                    + metricType
+                    + "]"
+                    + " should be a multiple of 1 hour, or one of "
+                    + QueryInsightsSettings.VALID_WINDOW_SIZES_IN_MINUTES
+                    + ", was ("
+                    + windowSize
+                    + ")"
+            );
+        }
+    }
+
+    /**
+     * Get all top queries records that are in the current top n queries store.
+     * Optionally include top N records from the last window.
+     *
+     * By default, return the records in sorted order.
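+     * Sorting is descending by this service's metric type, so for the latency metric
+     * the slowest queries come first.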
+     *
+     * @param includeLastWindow if the top N queries from the last window should be included
+     * @return List of the records that are in the query insight store
+     * @throws IllegalArgumentException if query insight is disabled in the cluster
+     */
+    public List<SearchQueryRecord> getTopQueriesRecords(final boolean includeLastWindow) throws IllegalArgumentException {
+        if (!enabled) {
+            throw new IllegalArgumentException(
+                String.format(Locale.ROOT, "Cannot get top n queries for [%s] when it is not enabled.", metricType.toString())
+            );
+        }
+        // read from window snapshots
+        final List<SearchQueryRecord> queries = new ArrayList<>(topQueriesCurrentSnapshot.get());
+        if (includeLastWindow) {
+            queries.addAll(topQueriesHistorySnapshot.get());
+        }
+        return Stream.of(queries)
+            .flatMap(Collection::stream)
+            .sorted((a, b) -> SearchQueryRecord.compare(a, b, metricType) * -1)
+            .collect(Collectors.toList());
+    }
+
+    /**
+     * Consume records into the top queries store
+     *
+     * @param records a list of {@link SearchQueryRecord}
+     */
+    void consumeRecords(final List<SearchQueryRecord> records) {
+        final long currentWindowStart = calculateWindowStart(System.currentTimeMillis());
+        List<SearchQueryRecord> recordsInLastWindow = new ArrayList<>();
+        List<SearchQueryRecord> recordsInThisWindow = new ArrayList<>();
+        for (SearchQueryRecord record : records) {
+            // skip the records that do not have the corresponding measurement
+            if (!record.getMeasurements().containsKey(metricType)) {
+                continue;
+            }
+            if (record.getTimestamp() < currentWindowStart) {
+                recordsInLastWindow.add(record);
+            } else {
+                recordsInThisWindow.add(record);
+            }
+        }
+        // add records in last window, if there are any, to the top n store
+        addToTopNStore(recordsInLastWindow);
+        // rotate window and reset window start if necessary
+        rotateWindowIfNecessary(currentWindowStart);
+        // add records in current window, if there are any, to the top n store
+        addToTopNStore(recordsInThisWindow);
+        // update the current window snapshot for getters to consume
+        final List<SearchQueryRecord> newSnapShot = new ArrayList<>(topQueriesStore);
+        newSnapShot.sort((a, b) -> SearchQueryRecord.compare(a, b, metricType));
+        topQueriesCurrentSnapshot.set(newSnapShot);
+    }
+
+    private void addToTopNStore(final List<SearchQueryRecord> records) {
+        topQueriesStore.addAll(records);
+        // poll the smallest records off the min-heap to cap the store at topNSize
+        while (topQueriesStore.size() > topNSize) {
+            topQueriesStore.poll();
+        }
+    }
+
+    /**
+     * Reset the current window and rotate the data to the history snapshot for top n queries.
+     * This function is invoked at most once per consumeRecords call.
+     *
+     * @param newWindowStart the new windowStart to set to
+     */
+    private void rotateWindowIfNecessary(final long newWindowStart) {
+        // reset window if the current window is outdated
+        if (windowStart < newWindowStart) {
+            final List<SearchQueryRecord> history = new ArrayList<>();
+            // rotate the current window to the history store only if the data belongs to the last window
+            if (windowStart == newWindowStart - windowSize.getMillis()) {
+                history.addAll(topQueriesStore);
+            }
+            topQueriesHistorySnapshot.set(history);
+            topQueriesStore.clear();
+            topQueriesCurrentSnapshot.set(new ArrayList<>());
+            windowStart = newWindowStart;
+        }
+    }
+
+    /**
+     * Calculate the window start for the given timestamp
+     *
+     * @param timestamp the given timestamp to calculate window start
+     * @return the start timestamp of the window that contains the given timestamp
+     */
+    private long calculateWindowStart(final long timestamp) {
+        final LocalDateTime currentTime =
LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp), ZoneId.of("UTC")); + LocalDateTime windowStartTime = currentTime.truncatedTo(ChronoUnit.HOURS); + while (!windowStartTime.plusMinutes(windowSize.getMinutes()).isAfter(currentTime)) { + windowStartTime = windowStartTime.plusMinutes(windowSize.getMinutes()); + } + return windowStartTime.toInstant(ZoneOffset.UTC).getEpochSecond() * 1000; + } + + /** + * Get the current top queries snapshot from the AtomicReference. + * + * @return a list of {@link SearchQueryRecord} + */ + public List<SearchQueryRecord> getTopQueriesCurrentSnapshot() { + return topQueriesCurrentSnapshot.get(); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java new file mode 100644 index 0000000000000..5068f28234f6d --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Service Classes for Query Insights + */ +package org.opensearch.plugin.insights.core.service; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java new file mode 100644 index 0000000000000..04d1f9bfff7e1 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Base Package of Query Insights + */ +package org.opensearch.plugin.insights; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java new file mode 100644 index 0000000000000..9b6b5856f7d27 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Transport Actions, Requests and Responses for Query Insights + */ +package org.opensearch.plugin.insights.rules.action; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java new file mode 100644 index 0000000000000..26cff82aae52e --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java @@ -0,0 +1,77 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.action.support.nodes.BaseNodeResponse; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.plugin.insights.rules.model.SearchQueryRecord; + +import java.io.IOException; +import java.util.List; + +/** + * Holds all top queries records by resource usage or latency on a node + * Mainly used in the top N queries node response workflow. + * + * @opensearch.internal + */ +public class TopQueries extends BaseNodeResponse implements ToXContentObject { + /** The store to keep the top queries records */ + private final List<SearchQueryRecord> topQueriesRecords; + + /** + * Create the TopQueries Object from StreamInput + * @param in A {@link StreamInput} object. + * @throws IOException IOException + */ + public TopQueries(final StreamInput in) throws IOException { + super(in); + topQueriesRecords = in.readList(SearchQueryRecord::new); + } + + /** + * Create the TopQueries Object + * @param node A node that is part of the cluster. + * @param searchQueryRecords A list of SearchQueryRecord associated in this TopQueries. + */ + public TopQueries(final DiscoveryNode node, final List<SearchQueryRecord> searchQueryRecords) { + super(node); + topQueriesRecords = searchQueryRecords; + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + if (topQueriesRecords != null) { + for (SearchQueryRecord record : topQueriesRecords) { + record.toXContent(builder, params); + } + } + return builder; + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeList(topQueriesRecords); + + } + + /** + * Get all top queries records + * + * @return the top queries records in this node response + */ + public List<SearchQueryRecord> getTopQueriesRecord() { + return topQueriesRecords; + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java new file mode 100644 index 0000000000000..b8ed69fa5692b --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java @@ -0,0 +1,32 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.action.ActionType; + +/** + * Transport action for cluster/node level top queries information. + * + * @opensearch.internal + */ +public class TopQueriesAction extends ActionType<TopQueriesResponse> { + + /** + * The TopQueriesAction Instance. 
+ */ + public static final TopQueriesAction INSTANCE = new TopQueriesAction(); + /** + * The name of this Action + */ + public static final String NAME = "cluster:admin/opensearch/insights/top_queries"; + + private TopQueriesAction() { + super(NAME, TopQueriesResponse::new); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java new file mode 100644 index 0000000000000..3bdff2c403161 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java @@ -0,0 +1,62 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.action.support.nodes.BaseNodesRequest; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.plugin.insights.rules.model.MetricType; + +import java.io.IOException; + +/** + * A request to get cluster/node level top queries information. + * + * @opensearch.internal + */ +public class TopQueriesRequest extends BaseNodesRequest<TopQueriesRequest> { + + final MetricType metricType; + + /** + * Constructor for TopQueriesRequest + * + * @param in A {@link StreamInput} object. + * @throws IOException if the stream cannot be deserialized. + */ + public TopQueriesRequest(final StreamInput in) throws IOException { + super(in); + this.metricType = MetricType.readFromStream(in); + } + + /** + * Get top queries from nodes based on the nodes ids specified. + * If none are passed, cluster level top queries will be returned. + * + * @param metricType {@link MetricType} + * @param nodesIds the nodeIds specified in the request + */ + public TopQueriesRequest(final MetricType metricType, final String... nodesIds) { + super(nodesIds); + this.metricType = metricType; + } + + /** + * Get the type of requested metrics + */ + public MetricType getMetricType() { + return metricType; + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(metricType.toString()); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java new file mode 100644 index 0000000000000..2e66bb7f77baf --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java @@ -0,0 +1,143 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.plugin.insights.rules.action.top_queries;
+
+import org.opensearch.action.FailedNodeException;
+import org.opensearch.action.support.nodes.BaseNodesResponse;
+import org.opensearch.cluster.ClusterName;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.xcontent.ToXContentFragment;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.plugin.insights.rules.model.Attribute;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Transport response for cluster/node level top queries information.
+ *
+ * @opensearch.internal
+ */
+public class TopQueriesResponse extends BaseNodesResponse<TopQueries> implements ToXContentFragment {
+
+    private static final String CLUSTER_LEVEL_RESULTS_KEY = "top_queries";
+    private final MetricType metricType;
+    private final int topNSize;
+
+    /**
+     * Constructor for TopQueriesResponse.
+     *
+     * @param in A {@link StreamInput} object.
+     * @throws IOException if the stream cannot be deserialized.
+     */
+    public TopQueriesResponse(final StreamInput in) throws IOException {
+        super(in);
+        topNSize = in.readInt();
+        metricType = in.readEnum(MetricType.class);
+    }
+
+    /**
+     * Constructor for TopQueriesResponse
+     *
+     * @param clusterName The current cluster name
+     * @param nodes A list that contains top queries results from all nodes
+     * @param failures A list that contains FailedNodeException
+     * @param topNSize The top N size to return to the user
+     * @param metricType the {@link MetricType} to be returned in this response
+     */
+    public TopQueriesResponse(
+        final ClusterName clusterName,
+        final List<TopQueries> nodes,
+        final List<FailedNodeException> failures,
+        final int topNSize,
+        final MetricType metricType
+    ) {
+        super(clusterName, nodes, failures);
+        this.topNSize = topNSize;
+        this.metricType = metricType;
+    }
+
+    @Override
+    protected List<TopQueries> readNodesFrom(final StreamInput in) throws IOException {
+        return in.readList(TopQueries::new);
+    }
+
+    @Override
+    protected void writeNodesTo(final StreamOutput out, final List<TopQueries> nodes) throws IOException {
+        out.writeList(nodes);
+    }
+
+    @Override
+    public void writeTo(final StreamOutput out) throws IOException {
+        // write the custom fields after the full base payload, mirroring the read order in the
+        // stream constructor (which consumes them after super(in)); writeInt matches the readInt
+        // on the deserialization side
+        super.writeTo(out);
+        out.writeInt(topNSize);
+        out.writeEnum(metricType);
+    }
+
+    @Override
+    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
+        final List<TopQueries> results = getNodes();
+        postProcess(results);
+        builder.startObject();
+        toClusterLevelResult(builder, params, results);
+        return builder.endObject();
+    }
+
+    @Override
+    public String toString() {
+        try {
+            final XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
+            // toXContent already opens and closes the root object
+            this.toXContent(builder, EMPTY_PARAMS);
+            return builder.toString();
+        } catch (IOException e) {
+            return "{ \"error\" : \"" + e.getMessage() + "\"}";
+        }
+    }
+
+    /**
+     * Post process the top queries results to add customized attributes
+     *
+     * @param results the top queries results
+     */
+    private void postProcess(final List<TopQueries> results) {
+        for (TopQueries topQueries : results) {
+            final String nodeId = topQueries.getNode().getId();
+            for (SearchQueryRecord record : topQueries.getTopQueriesRecord()) {
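+                // attach the originating node id so cluster-level results remain traceable to a node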
+                record.addAttribute(Attribute.NODE_ID, nodeId);
+            }
+        }
+    }
+
+    /**
+     * Merge top N queries results from nodes into cluster-level results in XContent format.
+     *
+     * @param builder XContent builder
+     * @param params serialization parameters
+     * @param results top queries results from all nodes
+     * @throws IOException if an error occurs
+     */
+    private void toClusterLevelResult(final XContentBuilder builder, final Params params, final List<TopQueries> results)
+        throws IOException {
+        final List<SearchQueryRecord> allRecords = results.stream()
+            .map(TopQueries::getTopQueriesRecord)
+            .flatMap(Collection::stream)
+            // descending order on the requested metric, keeping only the global top N
+            .sorted((a, b) -> SearchQueryRecord.compare(b, a, metricType))
+            .limit(topNSize)
+            .collect(Collectors.toList());
+        builder.startArray(CLUSTER_LEVEL_RESULTS_KEY);
+        for (SearchQueryRecord record : allRecords) {
+            record.toXContent(builder, params);
+        }
+        builder.endArray();
+    }
+
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java
new file mode 100644
index 0000000000000..3cc7900e5ce7d
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Transport Actions, Requests and Responses for Top N Queries
+ */
+package org.opensearch.plugin.insights.rules.action.top_queries;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java
new file mode 100644
index 0000000000000..c1d17edf9ff14
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java
@@ -0,0 +1,74 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */ + +package org.opensearch.plugin.insights.rules.model; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Locale; + +/** + * Valid attributes for a search query record + * + * @opensearch.internal + */ +public enum Attribute { + /** + * The search query type + */ + SEARCH_TYPE, + /** + * The search query source + */ + SOURCE, + /** + * Total shards queried + */ + TOTAL_SHARDS, + /** + * The indices involved + */ + INDICES, + /** + * The per phase level latency map for a search query + */ + PHASE_LATENCY_MAP, + /** + * The node id for this request + */ + NODE_ID; + + /** + * Read an Attribute from a StreamInput + * + * @param in the StreamInput to read from + * @return Attribute + * @throws IOException IOException + */ + static Attribute readFromStream(final StreamInput in) throws IOException { + return Attribute.valueOf(in.readString().toUpperCase(Locale.ROOT)); + } + + /** + * Write Attribute to a StreamOutput + * + * @param out the StreamOutput to write + * @param attribute the Attribute to write + * @throws IOException IOException + */ + static void writeTo(final StreamOutput out, final Attribute attribute) throws IOException { + out.writeString(attribute.toString()); + } + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java new file mode 100644 index 0000000000000..cdd090fbf4804 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java @@ -0,0 +1,121 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.plugin.insights.rules.model;
+
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Locale;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Valid metric types for a search query record
+ *
+ * @opensearch.internal
+ */
+public enum MetricType implements Comparator<Number> {
+    /**
+     * Latency metric type
+     */
+    LATENCY,
+    /**
+     * CPU usage metric type
+     */
+    CPU,
+    /**
+     * JVM heap usage metric type
+     */
+    JVM;
+
+    /**
+     * Read a MetricType from a StreamInput
+     *
+     * @param in the StreamInput to read from
+     * @return MetricType
+     * @throws IOException IOException
+     */
+    public static MetricType readFromStream(final StreamInput in) throws IOException {
+        return fromString(in.readString());
+    }
+
+    /**
+     * Create MetricType from String
+     *
+     * @param metricType the String representation of MetricType
+     * @return MetricType
+     */
+    public static MetricType fromString(final String metricType) {
+        return MetricType.valueOf(metricType.toUpperCase(Locale.ROOT));
+    }
+
+    /**
+     * Write MetricType to a StreamOutput
+     *
+     * @param out the StreamOutput to write
+     * @param metricType the MetricType to write
+     * @throws IOException IOException
+     */
+    static void writeTo(final StreamOutput out, final MetricType metricType) throws IOException {
+        out.writeString(metricType.toString());
+    }
+
+    @Override
+    public String toString() {
+        return this.name().toLowerCase(Locale.ROOT);
+    }
+
+    /**
+     * Get all valid metric types
+     *
+     * @return a set that contains all valid metric types
+     */
+    public static Set<MetricType> allMetricTypes() {
+        return Arrays.stream(values()).collect(Collectors.toSet());
+    }
+
+    /**
+     * Compare two numbers based on the metric type
+     *
+     * @param a the first Number to be compared.
+     * @param b the second Number to be compared.
+     * @return a negative integer, zero, or a positive integer as the first argument is less than, equal to, or greater than the second
+     */
+    public int compare(final Number a, final Number b) {
+        switch (this) {
+            case LATENCY:
+                return Long.compare(a.longValue(), b.longValue());
+            case JVM:
+            case CPU:
+                return Double.compare(a.doubleValue(), b.doubleValue());
+            default:
+                // unreachable: all metric types are handled above
+                return -1;
+        }
+    }
+
+    /**
+     * Parse a value with the correct type based on MetricType
+     *
+     * @param o the generic object to parse
+     * @return {@link Number}
+     */
+    Number parseValue(final Object o) {
+        switch (this) {
+            case LATENCY:
+                return (Long) o;
+            case JVM:
+            case CPU:
+                return (Double) o;
+            default:
+                return (Number) o;
+        }
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java
new file mode 100644
index 0000000000000..060711edb5580
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java
@@ -0,0 +1,176 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.model;
+
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * SearchQueryRecord represents a minimal atomic record stored in the Query Insights Framework,
+ * which contains extensive information related to a search query.
+ *
+ * @opensearch.internal
+ */
+public class SearchQueryRecord implements ToXContentObject, Writeable {
+    private final long timestamp;
+    private final Map<MetricType, Number> measurements;
+    private final Map<Attribute, Object> attributes;
+
+    /**
+     * Constructor of SearchQueryRecord
+     *
+     * @param in the StreamInput to read the SearchQueryRecord from
+     * @throws IOException if the stream cannot be deserialized.
+     * @throws ClassCastException if a measurement value has an unexpected type
+     */
+    public SearchQueryRecord(final StreamInput in) throws IOException, ClassCastException {
+        this.timestamp = in.readLong();
+        measurements = new HashMap<>();
+        in.readMap(MetricType::readFromStream, StreamInput::readGenericValue)
+            .forEach(((metricType, o) -> measurements.put(metricType, metricType.parseValue(o))));
+        this.attributes = in.readMap(Attribute::readFromStream, StreamInput::readGenericValue);
+    }
+
+    /**
+     * Constructor of SearchQueryRecord
+     *
+     * @param timestamp The timestamp of the query.
+     * @param measurements A map of MetricType to measurement values associated with this query
+     * @param attributes A map of Attribute to values associated with this query
+     */
+    public SearchQueryRecord(final long timestamp, final Map<MetricType, Number> measurements, final Map<Attribute, Object> attributes) {
+        if (measurements == null) {
+            throw new IllegalArgumentException("Measurements cannot be null");
+        }
+        this.measurements = measurements;
+        this.attributes = attributes;
+        this.timestamp = timestamp;
+    }
+
+    /**
+     * Returns the observation time of the metric.
+     *
+     * @return the observation time in milliseconds
+     */
+    public long getTimestamp() {
+        return timestamp;
+    }
+
+    /**
+     * Returns the measurement associated with the specified name.
+     *
+     * @param name the name of the measurement
+     * @return the measurement object, or null if not found
+     */
+    public Number getMeasurement(final MetricType name) {
+        return measurements.get(name);
+    }
+
+    /**
+     * Returns a map of all the measurements associated with this record.
+     *
+     * @return a map of measurement names to measurement objects
+     */
+    public Map<MetricType, Number> getMeasurements() {
+        return measurements;
+    }
+
+    /**
+     * Returns a map of the attributes associated with this record.
+     *
+     * @return a map of attribute keys to attribute values
+     */
+    public Map<Attribute, Object> getAttributes() {
+        return attributes;
+    }
+
+    /**
+     * Add an attribute to this record
+     *
+     * @param attribute attribute to add
+     * @param value the value associated with the attribute
+     */
+    public void addAttribute(final Attribute attribute, final Object value) {
+        attributes.put(attribute, value);
+    }
+
+    @Override
+    public XContentBuilder toXContent(final XContentBuilder builder, final ToXContent.Params params) throws IOException {
+        builder.startObject();
+        builder.field("timestamp", timestamp);
+        for (Map.Entry<Attribute, Object> entry : attributes.entrySet()) {
+            builder.field(entry.getKey().toString(), entry.getValue());
+        }
+        for (Map.Entry<MetricType, Number> entry : measurements.entrySet()) {
+            builder.field(entry.getKey().toString(), entry.getValue());
+        }
+        return builder.endObject();
+    }
+
+    /**
+     * Write a SearchQueryRecord to a StreamOutput
+     *
+     * @param out the StreamOutput to write
+     * @throws IOException IOException
+     */
+    @Override
+    public void writeTo(final StreamOutput out) throws IOException {
+        out.writeLong(timestamp);
+        out.writeMap(measurements, (stream, metricType) -> MetricType.writeTo(out, metricType), StreamOutput::writeGenericValue);
+        out.writeMap(attributes, (stream, attribute) -> Attribute.writeTo(out, attribute), StreamOutput::writeGenericValue);
+    }
+
+    /**
+     * Compare two SearchQueryRecord, based on the given MetricType
+     *
+     * @param a the first SearchQueryRecord to compare
+     * @param b the second SearchQueryRecord to compare
+     * @param metricType the MetricType to compare on
+     * @return 0 if the first SearchQueryRecord is numerically equal to the second SearchQueryRecord;
+     *         -1 if the first SearchQueryRecord is numerically less than the second SearchQueryRecord;
+     *         1 if the first SearchQueryRecord is numerically greater than the second SearchQueryRecord.
+     */
+    public static int compare(final SearchQueryRecord a, final SearchQueryRecord b, final MetricType metricType) {
+        return metricType.compare(a.getMeasurement(metricType), b.getMeasurement(metricType));
+    }
+
+    /**
+     * Check if a SearchQueryRecord is equal to another record. Timestamps and measurements are
+     * compared by value, while attributes are only compared by size.
+     *
+     * @param o the other SearchQueryRecord record
+     * @return true if two records are considered equal, false otherwise.
+     */
+    @Override
+    public boolean equals(final Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof SearchQueryRecord)) {
+            return false;
+        }
+        final SearchQueryRecord other = (SearchQueryRecord) o;
+        return timestamp == other.getTimestamp()
+            && measurements.equals(other.getMeasurements())
+            && attributes.size() == other.getAttributes().size();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(timestamp, measurements, attributes);
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java
new file mode 100644
index 0000000000000..c59ec1550f54b
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */ + +/** + * Data Models for Query Insight Records + */ +package org.opensearch.plugin.insights.rules.model; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java new file mode 100644 index 0000000000000..3787f05f65552 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Rest Handlers for Query Insights + */ +package org.opensearch.plugin.insights.rules.resthandler; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java new file mode 100644 index 0000000000000..6aa511c626ab1 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java @@ -0,0 +1,99 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.resthandler.top_queries; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.Strings; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; +import org.opensearch.rest.action.RestResponseListener; + +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_QUERIES_BASE_URI; +import static org.opensearch.rest.RestRequest.Method.GET; + +/** + * Rest action to get Top N queries by certain metric type + * + * @opensearch.api + */ +public class RestTopQueriesAction extends BaseRestHandler { + /** The metric types that are allowed in top N queries */ + static final Set<String> ALLOWED_METRICS = MetricType.allMetricTypes().stream().map(MetricType::toString).collect(Collectors.toSet()); + + /** + * Constructor for RestTopQueriesAction + */ + public RestTopQueriesAction() {} + + @Override + public List<Route> routes() { + return List.of( + new Route(GET, TOP_QUERIES_BASE_URI), + new Route(GET, String.format(Locale.ROOT, "%s/{nodeId}", TOP_QUERIES_BASE_URI)) + ); + } + + @Override + public String getName() { + return "query_insights_top_queries_action"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { + 
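+        // parse node ids and the metric type from the URL; validation happens in the static helper below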
final TopQueriesRequest topQueriesRequest = prepareRequest(request); + topQueriesRequest.timeout(request.param("timeout")); + + return channel -> client.execute(TopQueriesAction.INSTANCE, topQueriesRequest, topQueriesResponse(channel)); + } + + static TopQueriesRequest prepareRequest(final RestRequest request) { + final String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); + final String metricType = request.param("type", MetricType.LATENCY.toString()); + if (!ALLOWED_METRICS.contains(metricType)) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "request [%s] contains invalid metric type [%s]", request.path(), metricType) + ); + } + return new TopQueriesRequest(MetricType.fromString(metricType), nodesIds); + } + + @Override + protected Set<String> responseParams() { + return Settings.FORMAT_PARAMS; + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } + + private RestResponseListener<TopQueriesResponse> topQueriesResponse(final RestChannel channel) { + return new RestResponseListener<>(channel) { + @Override + public RestResponse buildResponse(final TopQueriesResponse response) throws Exception { + return new BytesRestResponse(RestStatus.OK, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)); + } + }; + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java new file mode 100644 index 0000000000000..087cf7d765f8c --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Rest Handlers for Top N Queries + */ +package org.opensearch.plugin.insights.rules.resthandler.top_queries; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java new file mode 100644 index 0000000000000..f3a1c70b9af57 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Transport Actions for Query Insights. + */ +package org.opensearch.plugin.insights.rules.transport; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java new file mode 100644 index 0000000000000..ddf614211bc41 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java @@ -0,0 +1,155 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.plugin.insights.rules.transport.top_queries;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.action.FailedNodeException;
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.action.support.nodes.TransportNodesAction;
+import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.plugin.insights.core.service.QueryInsightsService;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueries;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.threadpool.ThreadPool;
+import org.opensearch.transport.TransportRequest;
+import org.opensearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Transport action for cluster/node level top queries information.
+ *
+ * @opensearch.internal
+ */
+public class TransportTopQueriesAction extends TransportNodesAction<
+    TopQueriesRequest,
+    TopQueriesResponse,
+    TransportTopQueriesAction.NodeRequest,
+    TopQueries> {
+
+    private final QueryInsightsService queryInsightsService;
+
+    /**
+     * Create the TransportTopQueriesAction Object
+     *
+     * @param threadPool The OpenSearch thread pool to run async tasks
+     * @param clusterService The clusterService of this node
+     * @param transportService The TransportService of this node
+     * @param queryInsightsService The QueryInsightsService associated with this Transport Action
+     * @param actionFilters the action filters
+     */
+    @Inject
+    public TransportTopQueriesAction(
+        final ThreadPool threadPool,
+        final ClusterService clusterService,
+        final TransportService transportService,
+        final QueryInsightsService queryInsightsService,
+        final ActionFilters actionFilters
+    ) {
+        super(
+            TopQueriesAction.NAME,
+            threadPool,
+            clusterService,
+            transportService,
+            actionFilters,
+            TopQueriesRequest::new,
+            NodeRequest::new,
+            ThreadPool.Names.GENERIC,
+            TopQueries.class
+        );
+        this.queryInsightsService = queryInsightsService;
+    }
+
+    @Override
+    protected TopQueriesResponse newResponse(
+        final TopQueriesRequest topQueriesRequest,
+        final List<TopQueries> responses,
+        final List<FailedNodeException> failures
+    ) {
+        if (topQueriesRequest.getMetricType() == MetricType.LATENCY) {
+            return new TopQueriesResponse(
+                clusterService.getClusterName(),
+                responses,
+                failures,
+                clusterService.getClusterSettings().get(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE),
+                MetricType.LATENCY
+            );
+        } else {
+            throw new OpenSearchException(String.format(Locale.ROOT, "invalid metric type %s", topQueriesRequest.getMetricType()));
+        }
+    }
+
+    @Override
+    protected NodeRequest newNodeRequest(final TopQueriesRequest request) {
+        return new NodeRequest(request);
+    }
+
+    @Override
+    protected TopQueries newNodeResponse(final StreamInput in) throws IOException {
+        return new TopQueries(in);
+    }
+
+    @Override
+    protected TopQueries nodeOperation(final NodeRequest nodeRequest) {
+        final TopQueriesRequest request = nodeRequest.request;
+        if (request.getMetricType() == MetricType.LATENCY) {
+            return new TopQueries(
+                clusterService.localNode(),
+                queryInsightsService.getTopQueriesService(MetricType.LATENCY).getTopQueriesRecords(true)
+            );
+        } else {
+            throw new OpenSearchException(String.format(Locale.ROOT, "invalid metric type %s", request.getMetricType()));
+        }
+    }
+
+    /**
+     * Inner Node Top Queries Request
+     *
+     * @opensearch.internal
+     */
+    public static class NodeRequest extends TransportRequest {
+
+        final TopQueriesRequest request;
+
+        /**
+         * Create the NodeRequest object from StreamInput
+         *
+         * @param in the StreamInput to read the object
+         * @throws IOException if the stream cannot be deserialized.
+         */
+        public NodeRequest(StreamInput in) throws IOException {
+            super(in);
+            request = new TopQueriesRequest(in);
+        }
+
+        /**
+         * Create the NodeRequest object from a TopQueriesRequest
+         * @param request the TopQueriesRequest object
+         */
+        public NodeRequest(final TopQueriesRequest request) {
+            this.request = request;
+        }
+
+        @Override
+        public void writeTo(final StreamOutput out) throws IOException {
+            super.writeTo(out);
+            request.writeTo(out);
+        }
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java
new file mode 100644
index 0000000000000..54da0980deff8
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Transport Actions for Top N Queries.
+ */
+package org.opensearch.plugin.insights.rules.transport.top_queries;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
new file mode 100644
index 0000000000000..52cc1fbde790f
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
@@ -0,0 +1,116 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.settings;
+
+import org.opensearch.common.settings.Setting;
+import org.opensearch.common.unit.TimeValue;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Settings for Query Insights Plugin
+ *
+ * @opensearch.api
+ * @opensearch.experimental
+ */
+public class QueryInsightsSettings {
+    /**
+     * Executors settings
+     */
+    public static final String QUERY_INSIGHTS_EXECUTOR = "query_insights_executor";
+    /**
+     * Max number of threads
+     */
+    public static final int MAX_THREAD_COUNT = 5;
+    /**
+     * Max number of requests for the consumer to collect at one time
+     */
+    public static final int QUERY_RECORD_QUEUE_CAPACITY = 1000;
+    /**
+     * Time interval for record queue consumer to run
+     */
+    public static final TimeValue QUERY_RECORD_QUEUE_DRAIN_INTERVAL = new TimeValue(5, TimeUnit.SECONDS);
+    /**
+     * Maximum window size
+     */
+    public static final TimeValue MAX_WINDOW_SIZE = new TimeValue(1, TimeUnit.DAYS);
+    /**
+     * Minimal window size
+     */
+    public static final TimeValue MIN_WINDOW_SIZE = new TimeValue(1, TimeUnit.MINUTES);
+    /**
+     * Valid window sizes
+     */
+    public static final Set<TimeValue> VALID_WINDOW_SIZES_IN_MINUTES = new HashSet<>(
+        Arrays.asList(
+            new TimeValue(1, TimeUnit.MINUTES),
+            new TimeValue(5, TimeUnit.MINUTES),
+            new TimeValue(10, TimeUnit.MINUTES),
+            new TimeValue(30, TimeUnit.MINUTES)
+        )
+    );
+
+    /** Maximum N size for top N queries */
+    public static final int MAX_N_SIZE = 100;
+    /** Default window size to keep the top N queries with latency data in the query insights store */
+    public static final TimeValue DEFAULT_WINDOW_SIZE = new TimeValue(60, TimeUnit.SECONDS);
+    /** Default top N size to keep the data in the query insights store */
+    public static final int DEFAULT_TOP_N_SIZE = 3;
+    /**
+     * Query Insights base URI
+     */
+    public static final String PLUGINS_BASE_URI = "/_insights";
+
+    /**
+     * Settings for Top Queries
+     */
+    public static final String TOP_QUERIES_BASE_URI = PLUGINS_BASE_URI + "/top_queries";
+    /** Default prefix for top N queries feature */
+    public static final String TOP_N_QUERIES_SETTING_PREFIX = "search.insights.top_queries";
+    /** Default prefix for top N queries by latency feature */
+    public static final String TOP_N_LATENCY_QUERIES_PREFIX = TOP_N_QUERIES_SETTING_PREFIX + ".latency";
+    /**
+     * Boolean setting for enabling top queries by latency.
+     */
+    public static final Setting<Boolean> TOP_N_LATENCY_QUERIES_ENABLED = Setting.boolSetting(
+        TOP_N_LATENCY_QUERIES_PREFIX + ".enabled",
+        false,
+        Setting.Property.Dynamic,
+        Setting.Property.NodeScope
+    );
+
+    /**
+     * Int setting to define the top n size for top queries by latency.
+     */
+    public static final Setting<Integer> TOP_N_LATENCY_QUERIES_SIZE = Setting.intSetting(
+        TOP_N_LATENCY_QUERIES_PREFIX + ".top_n_size",
+        DEFAULT_TOP_N_SIZE,
+        Setting.Property.Dynamic,
+        Setting.Property.NodeScope
+    );
+
+    /**
+     * Time setting to define the window size in seconds for top queries by latency.
+ */ + public static final Setting<TimeValue> TOP_N_LATENCY_QUERIES_WINDOW_SIZE = Setting.positiveTimeSetting( + TOP_N_LATENCY_QUERIES_PREFIX + ".window_size", + DEFAULT_WINDOW_SIZE, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Default constructor + */ + public QueryInsightsSettings() {} +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java new file mode 100644 index 0000000000000..f3152bbf966cb --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Settings for Query Insights Plugin + */ +package org.opensearch.plugin.insights.settings; diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java new file mode 100644 index 0000000000000..273b69e483e8c --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java @@ -0,0 +1,107 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights; + +import org.opensearch.action.ActionRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.plugin.insights.core.listener.QueryInsightsListener; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction; +import org.opensearch.plugin.insights.rules.resthandler.top_queries.RestTopQueriesAction; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.rest.RestHandler; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ExecutorBuilder; +import org.opensearch.threadpool.ScalingExecutorBuilder; +import org.opensearch.threadpool.ThreadPool; +import org.junit.Before; + +import java.util.Arrays; +import java.util.List; + +import static org.mockito.Mockito.mock; + +public class QueryInsightsPluginTests extends OpenSearchTestCase { + + private QueryInsightsPlugin queryInsightsPlugin; + + private final Client client = mock(Client.class); + private ClusterService clusterService; + private final ThreadPool threadPool = mock(ThreadPool.class); + + @Before + public void setup() { + queryInsightsPlugin = new QueryInsightsPlugin(); + Settings.Builder settingsBuilder = Settings.builder(); + Settings settings = settingsBuilder.build(); + ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE); + 
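+        // the window-size setting is dynamic as well and must be registered before the ClusterService is built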
clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE); + + clusterService = new ClusterService(settings, clusterSettings, threadPool); + + } + + public void testGetSettings() { + assertEquals( + Arrays.asList( + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED, + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE, + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE + ), + queryInsightsPlugin.getSettings() + ); + } + + public void testCreateComponent() { + List<Object> components = (List<Object>) queryInsightsPlugin.createComponents( + client, + clusterService, + threadPool, + null, + null, + null, + null, + null, + null, + null, + null + ); + assertEquals(2, components.size()); + assertTrue(components.get(0) instanceof QueryInsightsService); + assertTrue(components.get(1) instanceof QueryInsightsListener); + } + + public void testGetExecutorBuilders() { + Settings.Builder settingsBuilder = Settings.builder(); + Settings settings = settingsBuilder.build(); + List<ExecutorBuilder<?>> executorBuilders = queryInsightsPlugin.getExecutorBuilders(settings); + assertEquals(1, executorBuilders.size()); + assertTrue(executorBuilders.get(0) instanceof ScalingExecutorBuilder); + } + + public void testGetRestHandlers() { + List<RestHandler> components = queryInsightsPlugin.getRestHandlers(Settings.EMPTY, null, null, null, null, null, null); + assertEquals(1, components.size()); + assertTrue(components.get(0) instanceof RestTopQueriesAction); + } + + public void testGetActions() { + List<ActionPlugin.ActionHandler<? extends ActionRequest, ? extends ActionResponse>> components = queryInsightsPlugin.getActions(); + assertEquals(1, components.size()); + assertTrue(components.get(0).getAction() instanceof TopQueriesAction); + } + +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java new file mode 100644 index 0000000000000..870ef5b9c8be9 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java @@ -0,0 +1,189 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.insights; + +import org.opensearch.action.search.SearchType; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.util.Maps; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueries; +import org.opensearch.plugin.insights.rules.model.Attribute; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugin.insights.rules.model.SearchQueryRecord; +import org.opensearch.test.VersionUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.test.OpenSearchTestCase.buildNewFakeTransportAddress; +import static org.opensearch.test.OpenSearchTestCase.random; +import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLengthBetween; +import static org.opensearch.test.OpenSearchTestCase.randomArray; +import static org.opensearch.test.OpenSearchTestCase.randomDouble; +import static org.opensearch.test.OpenSearchTestCase.randomIntBetween; +import static org.opensearch.test.OpenSearchTestCase.randomLong; +import static org.opensearch.test.OpenSearchTestCase.randomLongBetween; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +final public class QueryInsightsTestUtils { + + public QueryInsightsTestUtils() {} + + public static List<SearchQueryRecord> generateQueryInsightRecords(int count) { + return generateQueryInsightRecords(count, count, System.currentTimeMillis(), 0); + } + + /** + * Creates a List of random Query Insight Records for testing purpose + */ + public static List<SearchQueryRecord> generateQueryInsightRecords(int lower, int upper, long startTimeStamp, long interval) { + List<SearchQueryRecord> records = new ArrayList<>(); + int countOfRecords = randomIntBetween(lower, upper); + long timestamp = startTimeStamp; + for (int i = 0; i < countOfRecords; ++i) { + Map<MetricType, Number> measurements = Map.of( + MetricType.LATENCY, + randomLongBetween(1000, 10000), + MetricType.CPU, + randomDouble(), + MetricType.JVM, + randomDouble() + ); + + Map<String, Long> phaseLatencyMap = new HashMap<>(); + int countOfPhases = randomIntBetween(2, 5); + for (int j = 0; j < countOfPhases; ++j) { + phaseLatencyMap.put(randomAlphaOfLengthBetween(5, 10), randomLong()); + } + Map<Attribute, Object> attributes = new HashMap<>(); + attributes.put(Attribute.SEARCH_TYPE, SearchType.QUERY_THEN_FETCH.toString().toLowerCase(Locale.ROOT)); + attributes.put(Attribute.SOURCE, "{\"size\":20}"); + attributes.put(Attribute.TOTAL_SHARDS, randomIntBetween(1, 100)); + attributes.put(Attribute.INDICES, randomArray(1, 3, Object[]::new, () -> randomAlphaOfLengthBetween(5, 10))); + attributes.put(Attribute.PHASE_LATENCY_MAP, phaseLatencyMap); + + records.add(new SearchQueryRecord(timestamp, measurements, attributes)); + timestamp += interval; + } + return records; + } + + public static TopQueries createRandomTopQueries() { + DiscoveryNode node = new DiscoveryNode( + "node_for_top_queries_test", + buildNewFakeTransportAddress(), + emptyMap(), + emptySet(), + 
VersionUtils.randomVersion(random()) + ); + List<SearchQueryRecord> records = generateQueryInsightRecords(10); + + return new TopQueries(node, records); + } + + public static TopQueries createFixedTopQueries() { + DiscoveryNode node = new DiscoveryNode( + "node_for_top_queries_test", + buildNewFakeTransportAddress(), + emptyMap(), + emptySet(), + VersionUtils.randomVersion(random()) + ); + List<SearchQueryRecord> records = new ArrayList<>(); + records.add(createFixedSearchQueryRecord()); + + return new TopQueries(node, records); + } + + public static SearchQueryRecord createFixedSearchQueryRecord() { + long timestamp = 1706574180000L; + Map<MetricType, Number> measurements = Map.of(MetricType.LATENCY, 1L); + + Map<String, Long> phaseLatencyMap = new HashMap<>(); + Map<Attribute, Object> attributes = new HashMap<>(); + attributes.put(Attribute.SEARCH_TYPE, SearchType.QUERY_THEN_FETCH.toString().toLowerCase(Locale.ROOT)); + + return new SearchQueryRecord(timestamp, measurements, attributes); + } + + public static void compareJson(ToXContent param1, ToXContent param2) throws IOException { + if (param1 == null || param2 == null) { + assertNull(param1); + assertNull(param2); + return; + } + + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + XContentBuilder param1Builder = jsonBuilder(); + param1.toXContent(param1Builder, params); + + XContentBuilder param2Builder = jsonBuilder(); + param2.toXContent(param2Builder, params); + + assertEquals(param1Builder.toString(), param2Builder.toString()); + } + + @SuppressWarnings("unchecked") + public static boolean checkRecordsEquals(List<SearchQueryRecord> records1, List<SearchQueryRecord> records2) { + if (records1.size() != records2.size()) { + return false; + } + for (int i = 0; i < records1.size(); i++) { + if (!records1.get(i).equals(records2.get(i))) { + return false; + } + Map<Attribute, Object> attributes1 = records1.get(i).getAttributes(); + Map<Attribute, Object> attributes2 = records2.get(i).getAttributes(); + for (Map.Entry<Attribute, Object> entry : attributes1.entrySet()) { + Attribute attribute = entry.getKey(); + Object value = entry.getValue(); + if (!attributes2.containsKey(attribute)) { + return false; + } + if (value instanceof Object[] && !Arrays.deepEquals((Object[]) value, (Object[]) attributes2.get(attribute))) { + return false; + } else if (value instanceof Map + && !Maps.deepEquals((Map<Object, Object>) value, (Map<Object, Object>) attributes2.get(attribute))) { + return false; + } + } + } + return true; + } + + public static boolean checkRecordsEqualsWithoutOrder( + List<SearchQueryRecord> records1, + List<SearchQueryRecord> records2, + MetricType metricType + ) { + Set<SearchQueryRecord> set2 = new TreeSet<>((a, b) -> SearchQueryRecord.compare(a, b, metricType)); + set2.addAll(records2); + if (records1.size() != records2.size()) { + return false; + } + for (int i = 0; i < records1.size(); i++) { + if (!set2.contains(records1.get(i))) { + return false; + } + } + return true; + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java new file mode 100644 index 0000000000000..f340950017a5c --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java @@ -0,0 +1,161 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions 
made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.core.listener; + +import org.opensearch.action.search.SearchPhaseContext; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchRequestContext; +import org.opensearch.action.search.SearchType; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.core.service.TopQueriesService; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.opensearch.search.aggregations.support.ValueType; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit Tests for {@link QueryInsightsListener}. + */ +public class QueryInsightsListenerTests extends OpenSearchTestCase { + private final SearchRequestContext searchRequestContext = mock(SearchRequestContext.class); + private final SearchPhaseContext searchPhaseContext = mock(SearchPhaseContext.class); + private final SearchRequest searchRequest = mock(SearchRequest.class); + private final QueryInsightsService queryInsightsService = mock(QueryInsightsService.class); + private final TopQueriesService topQueriesService = mock(TopQueriesService.class); + private ClusterService clusterService; + + @Before + public void setup() { + Settings.Builder settingsBuilder = Settings.builder(); + Settings settings = settingsBuilder.build(); + ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE); + clusterService = new ClusterService(settings, clusterSettings, null); + when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(true); + when(queryInsightsService.getTopQueriesService(MetricType.LATENCY)).thenReturn(topQueriesService); + } + + public void testOnRequestEnd() throws InterruptedException { + Long timestamp = System.currentTimeMillis() - 100L; + SearchType searchType = SearchType.QUERY_THEN_FETCH; + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING).field("type.keyword")); + searchSourceBuilder.size(0); + + String[] indices = new String[] { "index-1", "index-2" }; + + Map<String, Long> phaseLatencyMap = new HashMap<>(); + phaseLatencyMap.put("expand", 0L); + phaseLatencyMap.put("query", 20L); + phaseLatencyMap.put("fetch", 1L); + + int numberOfShards = 
10; + + QueryInsightsListener queryInsightsListener = new QueryInsightsListener(clusterService, queryInsightsService); + + when(searchRequest.getOrCreateAbsoluteStartMillis()).thenReturn(timestamp); + when(searchRequest.searchType()).thenReturn(searchType); + when(searchRequest.source()).thenReturn(searchSourceBuilder); + when(searchRequest.indices()).thenReturn(indices); + when(searchRequestContext.phaseTookMap()).thenReturn(phaseLatencyMap); + when(searchPhaseContext.getRequest()).thenReturn(searchRequest); + when(searchPhaseContext.getNumShards()).thenReturn(numberOfShards); + + queryInsightsListener.onRequestEnd(searchPhaseContext, searchRequestContext); + + verify(queryInsightsService, times(1)).addRecord(any()); + } + + public void testConcurrentOnRequestEnd() throws InterruptedException { + Long timestamp = System.currentTimeMillis() - 100L; + SearchType searchType = SearchType.QUERY_THEN_FETCH; + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING).field("type.keyword")); + searchSourceBuilder.size(0); + + String[] indices = new String[] { "index-1", "index-2" }; + + Map<String, Long> phaseLatencyMap = new HashMap<>(); + phaseLatencyMap.put("expand", 0L); + phaseLatencyMap.put("query", 20L); + phaseLatencyMap.put("fetch", 1L); + + int numberOfShards = 10; + + final List<QueryInsightsListener> searchListenersList = new ArrayList<>(); + + when(searchRequest.getOrCreateAbsoluteStartMillis()).thenReturn(timestamp); + when(searchRequest.searchType()).thenReturn(searchType); + when(searchRequest.source()).thenReturn(searchSourceBuilder); + when(searchRequest.indices()).thenReturn(indices); + when(searchRequestContext.phaseTookMap()).thenReturn(phaseLatencyMap); + when(searchPhaseContext.getRequest()).thenReturn(searchRequest); + when(searchPhaseContext.getNumShards()).thenReturn(numberOfShards); + + int numRequests = 50; + Thread[] threads = new Thread[numRequests]; + Phaser phaser = new Phaser(numRequests + 1); + CountDownLatch countDownLatch = new CountDownLatch(numRequests); + + for (int i = 0; i < numRequests; i++) { + searchListenersList.add(new QueryInsightsListener(clusterService, queryInsightsService)); + } + + for (int i = 0; i < numRequests; i++) { + int finalI = i; + threads[i] = new Thread(() -> { + phaser.arriveAndAwaitAdvance(); + QueryInsightsListener thisListener = searchListenersList.get(finalI); + thisListener.onRequestEnd(searchPhaseContext, searchRequestContext); + countDownLatch.countDown(); + }); + threads[i].start(); + } + phaser.arriveAndAwaitAdvance(); + countDownLatch.await(); + + verify(queryInsightsService, times(numRequests)).addRecord(any()); + } + + public void testSetEnabled() { + when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(true); + QueryInsightsListener queryInsightsListener = new QueryInsightsListener(clusterService, queryInsightsService); + queryInsightsListener.setEnableTopQueries(MetricType.LATENCY, true); + assertTrue(queryInsightsListener.isEnabled()); + + when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(false); + when(queryInsightsService.isCollectionEnabled(MetricType.CPU)).thenReturn(false); + when(queryInsightsService.isCollectionEnabled(MetricType.JVM)).thenReturn(false); + queryInsightsListener.setEnableTopQueries(MetricType.LATENCY, false); + assertFalse(queryInsightsListener.isEnabled()); + } +} diff --git 
a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java
new file mode 100644
index 0000000000000..c29b48b9690d1
--- /dev/null
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java
@@ -0,0 +1,49 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.plugin.insights.QueryInsightsTestUtils;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.threadpool.ThreadPool;
+import org.junit.Before;
+
+import static org.mockito.Mockito.mock;
+
+/**
+ * Unit Tests for {@link QueryInsightsService}.
+ */
+public class QueryInsightsServiceTests extends OpenSearchTestCase {
+    private final ThreadPool threadPool = mock(ThreadPool.class);
+    private QueryInsightsService queryInsightsService;
+
+    @Before
+    public void setup() {
+        queryInsightsService = new QueryInsightsService(threadPool);
+        queryInsightsService.enableCollection(MetricType.LATENCY, true);
+        queryInsightsService.enableCollection(MetricType.CPU, true);
+        queryInsightsService.enableCollection(MetricType.JVM, true);
+    }
+
+    public void testAddRecordToLimitAndDrain() {
+        SearchQueryRecord record = QueryInsightsTestUtils.generateQueryInsightRecords(1, 1, System.currentTimeMillis(), 0).get(0);
+        for (int i = 0; i < QueryInsightsSettings.QUERY_RECORD_QUEUE_CAPACITY; i++) {
+            assertTrue(queryInsightsService.addRecord(record));
+        }
+        // exceed capacity
+        assertFalse(queryInsightsService.addRecord(record));
+        queryInsightsService.drainRecords();
+        assertEquals(
+            QueryInsightsSettings.DEFAULT_TOP_N_SIZE,
+            queryInsightsService.getTopQueriesService(MetricType.LATENCY).getTopQueriesRecords(false).size()
+        );
+    }
+}
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java
new file mode 100644
index 0000000000000..060df84a89485
--- /dev/null
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java
@@ -0,0 +1,102 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.cluster.coordination.DeterministicTaskQueue;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.plugin.insights.QueryInsightsTestUtils;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Unit Tests for {@link TopQueriesService}.
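+ * Covers ingestion, rolling windows, and validation of the top-N size and window size.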
+ */ +public class TopQueriesServiceTests extends OpenSearchTestCase { + private TopQueriesService topQueriesService; + + @Before + public void setup() { + topQueriesService = new TopQueriesService(MetricType.LATENCY); + topQueriesService.setTopNSize(Integer.MAX_VALUE); + topQueriesService.setWindowSize(new TimeValue(Long.MAX_VALUE)); + topQueriesService.setEnabled(true); + } + + public void testIngestQueryDataWithLargeWindow() { + final List<SearchQueryRecord> records = QueryInsightsTestUtils.generateQueryInsightRecords(10); + topQueriesService.consumeRecords(records); + assertTrue( + QueryInsightsTestUtils.checkRecordsEqualsWithoutOrder( + topQueriesService.getTopQueriesRecords(false), + records, + MetricType.LATENCY + ) + ); + } + + public void testRollingWindows() { + List<SearchQueryRecord> records; + // Create 5 records at Now - 10 minutes to make sure they belong to the last window + records = QueryInsightsTestUtils.generateQueryInsightRecords(5, 5, System.currentTimeMillis() - 1000 * 60 * 10, 0); + topQueriesService.setWindowSize(TimeValue.timeValueMinutes(10)); + topQueriesService.consumeRecords(records); + assertEquals(0, topQueriesService.getTopQueriesRecords(true).size()); + + // Create 10 records at now + 1 minute, to make sure they belong to the current window + records = QueryInsightsTestUtils.generateQueryInsightRecords(10, 10, System.currentTimeMillis() + 1000 * 60, 0); + topQueriesService.setWindowSize(TimeValue.timeValueMinutes(10)); + topQueriesService.consumeRecords(records); + assertEquals(10, topQueriesService.getTopQueriesRecords(true).size()); + } + + public void testSmallNSize() { + final List<SearchQueryRecord> records = QueryInsightsTestUtils.generateQueryInsightRecords(10); + topQueriesService.setTopNSize(1); + topQueriesService.consumeRecords(records); + assertEquals(1, topQueriesService.getTopQueriesRecords(false).size()); + } + + public void testValidateTopNSize() { + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.validateTopNSize(QueryInsightsSettings.MAX_N_SIZE + 1); }); + } + + public void testGetTopQueriesWhenNotEnabled() { + topQueriesService.setEnabled(false); + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.getTopQueriesRecords(false); }); + } + + public void testValidateWindowSize() { + assertThrows(IllegalArgumentException.class, () -> { + topQueriesService.validateWindowSize(new TimeValue(QueryInsightsSettings.MAX_WINDOW_SIZE.getSeconds() + 1, TimeUnit.SECONDS)); + }); + assertThrows(IllegalArgumentException.class, () -> { + topQueriesService.validateWindowSize(new TimeValue(QueryInsightsSettings.MIN_WINDOW_SIZE.getSeconds() - 1, TimeUnit.SECONDS)); + }); + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.validateWindowSize(new TimeValue(2, TimeUnit.DAYS)); }); + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.validateWindowSize(new TimeValue(7, TimeUnit.MINUTES)); }); + } + + private static void runUntilTimeoutOrFinish(DeterministicTaskQueue deterministicTaskQueue, long duration) { + final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + duration; + while (deterministicTaskQueue.getCurrentTimeMillis() < endTime + && (deterministicTaskQueue.hasRunnableTasks() || deterministicTaskQueue.hasDeferredTasks())) { + if (deterministicTaskQueue.hasDeferredTasks() && randomBoolean()) { + deterministicTaskQueue.advanceTime(); + } else if (deterministicTaskQueue.hasRunnableTasks()) { + deterministicTaskQueue.runRandomTask(); + } + } + } +} diff --git 
a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java new file mode 100644 index 0000000000000..619fd4b33a3dc --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.test.OpenSearchTestCase; + +/** + * Granular tests for the {@link TopQueriesRequest} class. + */ +public class TopQueriesRequestTests extends OpenSearchTestCase { + + /** + * Check that we can set the metric type + */ + public void testSetMetricType() throws Exception { + TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY, randomAlphaOfLength(5)); + TopQueriesRequest deserializedRequest = roundTripRequest(request); + assertEquals(request.getMetricType(), deserializedRequest.getMetricType()); + } + + /** + * Serialize and deserialize a request. + * @param request A request to serialize. + * @return The deserialized, "round-tripped" request. + */ + private static TopQueriesRequest roundTripRequest(TopQueriesRequest request) throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + request.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + return new TopQueriesRequest(in); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java new file mode 100644 index 0000000000000..eeee50d3da703 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java @@ -0,0 +1,71 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.cluster.ClusterName; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.plugin.insights.QueryInsightsTestUtils; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Granular tests for the {@link TopQueriesResponse} class. 
+ */ +public class TopQueriesResponseTests extends OpenSearchTestCase { + + /** + * Check serialization and deserialization + */ + public void testSerialize() throws Exception { + TopQueries topQueries = QueryInsightsTestUtils.createRandomTopQueries(); + ClusterName clusterName = new ClusterName("test-cluster"); + TopQueriesResponse response = new TopQueriesResponse(clusterName, List.of(topQueries), new ArrayList<>(), 10, MetricType.LATENCY); + TopQueriesResponse deserializedResponse = roundTripResponse(response); + assertEquals(response.toString(), deserializedResponse.toString()); + } + + public void testToXContent() throws IOException { + char[] expectedXcontent = + "{\"top_queries\":[{\"timestamp\":1706574180000,\"node_id\":\"node_for_top_queries_test\",\"search_type\":\"query_then_fetch\",\"latency\":1}]}" + .toCharArray(); + TopQueries topQueries = QueryInsightsTestUtils.createFixedTopQueries(); + ClusterName clusterName = new ClusterName("test-cluster"); + TopQueriesResponse response = new TopQueriesResponse(clusterName, List.of(topQueries), new ArrayList<>(), 10, MetricType.LATENCY); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.JSON); + char[] xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString().toCharArray(); + Arrays.sort(expectedXcontent); + Arrays.sort(xContent); + + assertEquals(Arrays.hashCode(expectedXcontent), Arrays.hashCode(xContent)); + } + + /** + * Serialize and deserialize a TopQueriesResponse. + * @param response A response to serialize. + * @return The deserialized, "round-tripped" response. + */ + private static TopQueriesResponse roundTripResponse(TopQueriesResponse response) throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + response.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + return new TopQueriesResponse(in); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java new file mode 100644 index 0000000000000..7db08b53ad1df --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.insights.QueryInsightsTestUtils; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; + +/** + * Tests for {@link TopQueries}. 
+ */ +public class TopQueriesTests extends OpenSearchTestCase { + + public void testTopQueries() throws IOException { + TopQueries topQueries = QueryInsightsTestUtils.createRandomTopQueries(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + topQueries.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + TopQueries readTopQueries = new TopQueries(in); + assertTrue( + QueryInsightsTestUtils.checkRecordsEquals(topQueries.getTopQueriesRecord(), readTopQueries.getTopQueriesRecord()) + ); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java new file mode 100644 index 0000000000000..793d5878e2300 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java @@ -0,0 +1,71 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.model; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.insights.QueryInsightsTestUtils; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +/** + * Granular tests for the {@link SearchQueryRecord} class. + */ +public class SearchQueryRecordTests extends OpenSearchTestCase { + + /** + * Check that the serialization, deserialization and equals functions work as expected + */ + public void testSerializationAndEquals() throws Exception { + List<SearchQueryRecord> records = QueryInsightsTestUtils.generateQueryInsightRecords(10); + List<SearchQueryRecord> copiedRecords = new ArrayList<>(); + for (SearchQueryRecord record : records) { + copiedRecords.add(roundTripRecord(record)); + } + assertTrue(QueryInsightsTestUtils.checkRecordsEquals(records, copiedRecords)); + + } + + public void testAllMetricTypes() { + Set<MetricType> allMetrics = MetricType.allMetricTypes(); + Set<MetricType> expected = new HashSet<>(Arrays.asList(MetricType.LATENCY, MetricType.CPU, MetricType.JVM)); + assertEquals(expected, allMetrics); + } + + public void testCompare() { + SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord(); + SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord(); + assertEquals(0, SearchQueryRecord.compare(record1, record2, MetricType.LATENCY)); + } + + public void testEqual() { + SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord(); + SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord(); + assertEquals(record1, record2); + } + + /** + * Serialize and deserialize a SearchQueryRecord. + * @param record A SearchQueryRecord to serialize. + * @return The deserialized, "round-tripped" record.
+ */ + private static SearchQueryRecord roundTripRecord(SearchQueryRecord record) throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + record.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + return new SearchQueryRecord(in); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java new file mode 100644 index 0000000000000..ac19fa2a7348f --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.resthandler.top_queries; + +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest; +import org.opensearch.rest.RestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.rest.FakeRestRequest; + +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.opensearch.plugin.insights.rules.resthandler.top_queries.RestTopQueriesAction.ALLOWED_METRICS; + +public class RestTopQueriesActionTests extends OpenSearchTestCase { + + public void testEmptyNodeIdsValidType() { + Map<String, String> params = new HashMap<>(); + params.put("type", randomFrom(ALLOWED_METRICS)); + RestRequest restRequest = buildRestRequest(params); + TopQueriesRequest actual = RestTopQueriesAction.prepareRequest(restRequest); + assertEquals(0, actual.nodesIds().length); + } + + public void testNodeIdsValid() { + Map<String, String> params = new HashMap<>(); + params.put("type", randomFrom(ALLOWED_METRICS)); + String[] nodes = randomArray(1, 10, String[]::new, () -> randomAlphaOfLengthBetween(5, 10)); + params.put("nodeId", String.join(",", nodes)); + + RestRequest restRequest = buildRestRequest(params); + TopQueriesRequest actual = RestTopQueriesAction.prepareRequest(restRequest); + assertArrayEquals(nodes, actual.nodesIds()); + } + + public void testInValidType() { + Map<String, String> params = new HashMap<>(); + params.put("type", randomAlphaOfLengthBetween(5, 10).toUpperCase(Locale.ROOT)); + + RestRequest restRequest = buildRestRequest(params); + Exception exception = assertThrows(IllegalArgumentException.class, () -> { RestTopQueriesAction.prepareRequest(restRequest); }); + assertEquals( + String.format(Locale.ROOT, "request [/_insights/top_queries] contains invalid metric type [%s]", params.get("type")), + exception.getMessage() + ); + } + + public void testGetRoutes() { + RestTopQueriesAction action = new RestTopQueriesAction(); + List<RestHandler.Route> routes = action.routes(); + assertEquals(2, routes.size()); + assertEquals("query_insights_top_queries_action", action.getName()); + } + + private FakeRestRequest buildRestRequest(Map<String, String> params) { + return new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath("/_insights/top_queries") + .withParams(params) + .build(); + } +} diff --git 
a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java new file mode 100644 index 0000000000000..a5f36b6e8cce0 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java @@ -0,0 +1,84 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.transport.top_queries; + +import org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; +import org.junit.Before; + +import java.util.List; + +import static org.mockito.Mockito.mock; + +public class TransportTopQueriesActionTests extends OpenSearchTestCase { + + private final ThreadPool threadPool = mock(ThreadPool.class); + + private final Settings.Builder settingsBuilder = Settings.builder(); + private final Settings settings = settingsBuilder.build(); + private final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + private final ClusterService clusterService = new ClusterService(settings, clusterSettings, threadPool); + private final TransportService transportService = mock(TransportService.class); + private final QueryInsightsService topQueriesByLatencyService = mock(QueryInsightsService.class); + private final ActionFilters actionFilters = mock(ActionFilters.class); + private final TransportTopQueriesAction transportTopQueriesAction = new TransportTopQueriesAction( + threadPool, + clusterService, + transportService, + topQueriesByLatencyService, + actionFilters + ); + private final DummyParentAction dummyParentAction = new DummyParentAction( + threadPool, + clusterService, + transportService, + topQueriesByLatencyService, + actionFilters + ); + + class DummyParentAction extends TransportTopQueriesAction { + public DummyParentAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + QueryInsightsService topQueriesByLatencyService, + ActionFilters actionFilters + ) { + super(threadPool, clusterService, transportService, topQueriesByLatencyService, actionFilters); + } + + public TopQueriesResponse createNewResponse() { + TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY); + return newResponse(request, List.of(), List.of()); + } + } + + @Before + public void setup() { + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE); + 
clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE); + } + + public void testNewResponse() { + TopQueriesResponse response = dummyParentAction.createNewResponse(); + assertNotNull(response); + } + +} diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 0232fc58f9357..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..005cc2388bd89 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +9d4b3315bb625ec2d73fa569fb6bce4589243d5e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 deleted file mode 100644 index 3b6cd3524d978..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5b126ceba61275f38297cacd5ea0cd6d3addee04 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..4fc3f5c43edb0 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +129f9bd6aa0cf28bfd7d45a8b6a598eed9c67702 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9d01e814971f2..0000000000000 --- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50a2d899a8f8a68daed1a9b6d7750184310cc45f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..3dcfa7f26db06 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +bffc88b7c56d7b553453e4244247a1b0ba1fdc8a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 
b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 987b524aedc98..0000000000000 --- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..0617c6999c586 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +3970474ed55aa1e3e8de5a5602c342c6b8155371 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1 deleted file mode 100644 index 5eaf96739ed72..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -faea23e582978a34f6a932b81e86206ec2314990 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..c30a99a2338b4 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1 @@ -0,0 +1 @@ +3221d405ad55a573cf29875a8244a4217cf07185 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1 deleted file mode 100644 index 091125169c696..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5af7bc746050d080891a5446cca2c96a0c51d03 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..ab3171cd02b73 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1 @@ -0,0 +1 @@ +c79756fa2dfc28ac81fc9d23a14b17c656c3e560 \ No newline at end of file diff --git a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 986720ec431fe..1ba16422c9214 100644 --- 
a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -39,6 +39,8 @@ import com.azure.storage.common.implementation.Constants; import com.azure.storage.common.policy.RequestRetryOptions; import com.azure.storage.common.policy.RetryPolicyType; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.MockSecureSettings; @@ -188,6 +190,7 @@ protected String requestUniqueId(final HttpExchange exchange) { @SuppressForbidden(reason = "this test uses a HttpServer to emulate an Azure endpoint") private static class AzureHTTPStatsCollectorHandler extends HttpStatsCollectorHandler { + private static final Logger testLogger = LogManager.getLogger(AzureHTTPStatsCollectorHandler.class); private static final Pattern listPattern = Pattern.compile("GET /[a-zA-Z0-9]+\\??.+"); private static final Pattern getPattern = Pattern.compile("GET /[^?/]+/[^?/]+\\??.*"); @@ -197,6 +200,7 @@ private AzureHTTPStatsCollectorHandler(HttpHandler delegate) { @Override protected void maybeTrack(String request, Headers headers) { + testLogger.info(request, headers); if (getPattern.matcher(request).matches()) { trackRequest("GetBlob"); } else if (Regex.simpleMatch("HEAD /*/*", request)) { diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java index b60701ba5e533..74edd4f3eb23c 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java @@ -389,6 +389,7 @@ private static class NioThreadFactory implements ThreadFactory { private final AtomicInteger threadNumber = new AtomicInteger(1); private final String namePrefix; + @SuppressWarnings("removal") NioThreadFactory() { SecurityManager s = System.getSecurityManager(); group = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup(); diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java index 0fbe9797f726f..a206c3b883870 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java @@ -49,6 +49,7 @@ * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in * {@link AccessController#doPrivileged(PrivilegedAction)} blocks. 
*/ +@SuppressWarnings("removal") public final class SocketAccess { private SocketAccess() {} diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 1bef5146f1db9..b28f97677b0df 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -54,7 +54,7 @@ versions << [ dependencies { api 'com.google.api:api-common:1.8.1' api 'com.google.api:gax:2.35.0' - api 'com.google.api:gax-httpjson:0.103.1' + api 'com.google.api:gax-httpjson:2.42.0' api 'com.google.apis:google-api-services-storage:v1-rev20230617-2.0.0' @@ -66,7 +66,7 @@ dependencies { api "com.google.auth:google-auth-library-credentials:${versions.google_auth}" api "com.google.auth:google-auth-library-oauth2-http:${versions.google_auth}" - api 'com.google.cloud:google-cloud-core:2.5.10' + api 'com.google.cloud:google-cloud-core:2.30.0' api 'com.google.cloud:google-cloud-core-http:2.23.0' api 'com.google.cloud:google-cloud-storage:1.113.1' @@ -78,7 +78,7 @@ dependencies { api 'com.google.http-client:google-http-client:1.43.3' api 'com.google.http-client:google-http-client-appengine:1.43.3' api 'com.google.http-client:google-http-client-gson:1.43.3' - api 'com.google.http-client:google-http-client-jackson2:1.43.3' + api 'com.google.http-client:google-http-client-jackson2:1.44.1' api 'com.google.oauth-client:google-oauth-client:1.34.1' @@ -206,7 +206,10 @@ thirdPartyAudit { // commons-logging provided dependencies 'javax.jms.Message', 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener' + 'javax.servlet.ServletContextListener', + // Bump for gax 2.42.0 + 'com.google.api.gax.rpc.EndpointContext', + 'com.google.api.gax.rpc.RequestMutator' ) } diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 deleted file mode 100644 index 11315004e233d..0000000000000 --- a/plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -041d99172fda933bc879bdfd8de9420c5c34107e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 new file mode 100644 index 0000000000000..672506572ed4d --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 @@ -0,0 +1 @@ +4db06bc31c2fb34b0490362e8666c20fdc1fb3f2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 new file mode 100644 index 0000000000000..10f8f90df108f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 @@ -0,0 +1 @@ +b48ea27cbdccd5f225d8a35ea28e2cd01c25918b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 deleted file mode 100644 index 34c3dc6805500..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d979bfe28551eb78cddae9282833ede147a9331 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1 deleted file mode 100644 index 8380b9fb770b5..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-689da86469d19a01c726c8c24477b95c8a834bbe \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..4472ffbbebe1c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1 @@ -0,0 +1 @@ +3f1947de0fd9eb250af16abe6103c11e68d11635 \ No newline at end of file diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java new file mode 100644 index 0000000000000..5002ab9a2e704 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java @@ -0,0 +1,33 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.repositories.gcs; + +import com.google.auth.oauth2.GoogleCredentials; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; + +/** + * This class facilitates fetching Application Default Credentials; + * see <a href="https://cloud.google.com/docs/authentication/application-default-credentials">How Application Default Credentials works</a> + */ +public class GoogleApplicationDefaultCredentials { + private static final Logger logger = LogManager.getLogger(GoogleApplicationDefaultCredentials.class); + + public GoogleCredentials get() { + GoogleCredentials credentials = null; + try { + credentials = SocketAccess.doPrivilegedIOException(GoogleCredentials::getApplicationDefault); + } catch (IOException e) { + logger.error("Failed to retrieve \"Application Default Credentials\"", e); + } + return credentials; + } +} diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java index e15b37f209c5f..620f8e98d5f20 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java @@ -106,6 +106,7 @@ class GoogleCloudStorageRetryingInputStream extends InputStream { currentStream = openStream(); } + @SuppressWarnings("removal") @SuppressForbidden(reason = "need access to storage client") private static com.google.api.services.storage.Storage getStorage(Storage client) { return AccessController.doPrivileged((PrivilegedAction<com.google.api.services.storage.Storage>) () -> { diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java index c9ebb3acaf3e5..83a4146c99b99 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java @@ -36,6 +36,7 @@ import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpTransport; import
com.google.api.client.http.javanet.NetHttpTransport; +import com.google.auth.oauth2.GoogleCredentials; import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.ServiceOptions; import com.google.cloud.http.HttpTransportOptions; @@ -70,6 +71,16 @@ public class GoogleCloudStorageService { */ private volatile Map<String, Storage> clientCache = emptyMap(); + final private GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials; + + public GoogleCloudStorageService() { + this.googleApplicationDefaultCredentials = new GoogleApplicationDefaultCredentials(); + } + + public GoogleCloudStorageService(GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials) { + this.googleApplicationDefaultCredentials = googleApplicationDefaultCredentials; + } + /** * Refreshes the client settings and clears the client cache. Subsequent calls to * {@code GoogleCloudStorageService#client} will return new clients constructed @@ -213,10 +224,11 @@ StorageOptions createStorageOptions( storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } if (clientSettings.getCredential() == null) { - logger.warn( - "\"Application Default Credentials\" are not supported out of the box." - + " Additional file system permissions have to be granted to the plugin." - ); + logger.info("\"Application Default Credentials\" will be in use"); + final GoogleCredentials credentials = googleApplicationDefaultCredentials.get(); + if (credentials != null) { + storageOptionsBuilder.setCredentials(credentials); + } } else { ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); // override token server URI diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java index 35127d6ea4060..f8c451749480b 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java @@ -48,6 +48,7 @@ * needs {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access * in {@link AccessController#doPrivileged(PrivilegedAction)} blocks. 
*/ +@SuppressWarnings("removal") final class SocketAccess { private SocketAccess() {} diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java index a531555debefb..58e412684ed5a 100644 --- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -33,8 +33,10 @@ package org.opensearch.repositories.gcs; import com.google.auth.Credentials; +import com.google.auth.oauth2.GoogleCredentials; import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; @@ -42,30 +44,38 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; +import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; +import java.io.IOException; +import java.net.Proxy; +import java.net.URI; +import java.net.URISyntaxException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.util.Base64; import java.util.Locale; import java.util.UUID; +import org.mockito.Mockito; + import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class GoogleCloudStorageServiceTests extends OpenSearchTestCase { + final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String endpoint = randomFrom("http://", "https://") + + randomFrom("www.opensearch.org", "www.googleapis.com", "localhost/api", "google.com/oauth") + + ":" + + randomIntBetween(1, 65535); + final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + public void testClientInitializer() throws Exception { final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); - final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); - final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - final String endpoint = randomFrom("http://", "https://") - + randomFrom("www.opensearch.org", "www.googleapis.com", "localhost/api", "google.com/oauth") - + ":" - + randomIntBetween(1, 65535); - final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); final Settings settings = Settings.builder() .put( GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), @@ -82,31 +92,35 @@ public void testClientInitializer() throws Exception { .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint) 
.put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); - final GoogleCloudStorageService service = new GoogleCloudStorageService(); + GoogleCredentials mockGoogleCredentials = Mockito.mock(GoogleCredentials.class); + GoogleApplicationDefaultCredentials mockDefaultCredentials = Mockito.mock(GoogleApplicationDefaultCredentials.class); + Mockito.when(mockDefaultCredentials.get()).thenReturn(mockGoogleCredentials); + + final GoogleCloudStorageService service = new GoogleCloudStorageService(mockDefaultCredentials); service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings)); GoogleCloudStorageOperationsStats statsCollector = new GoogleCloudStorageOperationsStats("bucket"); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> service.client("another_client", "repo", statsCollector) ); - assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); + MatcherAssert.assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); assertSettingDeprecationsAndWarnings( new Setting<?>[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) } ); final Storage storage = service.client(clientName, "repo", statsCollector); - assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); - assertThat(storage.getOptions().getHost(), Matchers.is(endpoint)); - assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); - assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); - assertThat( + MatcherAssert.assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); + MatcherAssert.assertThat(storage.getOptions().getHost(), Matchers.is(endpoint)); + MatcherAssert.assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); + MatcherAssert.assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); + MatcherAssert.assertThat( ((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), Matchers.is((int) connectTimeValue.millis()) ); - assertThat( + MatcherAssert.assertThat( ((HttpTransportOptions) storage.getOptions().getTransportOptions()).getReadTimeout(), Matchers.is((int) readTimeValue.millis()) ); - assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); + MatcherAssert.assertThat(storage.getOptions().getCredentials(), Matchers.instanceOf(Credentials.class)); } public void testReinitClientSettings() throws Exception { @@ -122,33 +136,33 @@ public void testReinitClientSettings() throws Exception { final GoogleCloudStorageService storageService = plugin.storageService; GoogleCloudStorageOperationsStats statsCollector = new GoogleCloudStorageOperationsStats("bucket"); final Storage client11 = storageService.client("gcs1", "repo1", statsCollector); - assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + MatcherAssert.assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); final Storage client12 = storageService.client("gcs2", "repo2", statsCollector); - assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + MatcherAssert.assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); // client 3 is missing final IllegalArgumentException e1 = expectThrows( 
IllegalArgumentException.class, () -> storageService.client("gcs3", "repo3", statsCollector) ); - assertThat(e1.getMessage(), containsString("Unknown client name [gcs3].")); + MatcherAssert.assertThat(e1.getMessage(), containsString("Unknown client name [gcs3].")); // update client settings plugin.reload(settings2); // old client 1 not changed - assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + MatcherAssert.assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); // new client 1 is changed final Storage client21 = storageService.client("gcs1", "repo1", statsCollector); - assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21")); + MatcherAssert.assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21")); // old client 2 not changed - assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + MatcherAssert.assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); // new client2 is gone final IllegalArgumentException e2 = expectThrows( IllegalArgumentException.class, () -> storageService.client("gcs2", "repo2", statsCollector) ); - assertThat(e2.getMessage(), containsString("Unknown client name [gcs2].")); + MatcherAssert.assertThat(e2.getMessage(), containsString("Unknown client name [gcs2].")); // client 3 emerged final Storage client23 = storageService.client("gcs3", "repo3", statsCollector); - assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23")); + MatcherAssert.assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23")); } } @@ -193,4 +207,72 @@ public void testToTimeout() { assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue()); } + + /** + * The following method tests the Google Application Default Credentials instead of + * using a service account file. + * JUnit mocking was chosen because GoogleCredentials.getApplicationDefault is a static method + * and to avoid setting environment variables, which would later rely on GCE.
* @throws Exception + */ + public void testApplicationDefaultCredential() throws Exception { + GoogleCloudStorageClientSettings settings = getGCSClientSettingsWithoutCredentials(); + GoogleCredentials mockGoogleCredentials = Mockito.mock(GoogleCredentials.class); + HttpTransportOptions mockHttpTransportOptions = Mockito.mock(HttpTransportOptions.class); + GoogleApplicationDefaultCredentials mockDefaultCredentials = Mockito.mock(GoogleApplicationDefaultCredentials.class); + Mockito.when(mockDefaultCredentials.get()).thenReturn(mockGoogleCredentials); + + GoogleCloudStorageService service = new GoogleCloudStorageService(mockDefaultCredentials); + StorageOptions storageOptions = service.createStorageOptions(settings, mockHttpTransportOptions); + assertNotNull(storageOptions); + assertEquals(storageOptions.getCredentials().toString(), mockGoogleCredentials.toString()); + } + + /** + * The application default credentials lookup throws an exception when there are + * no environment variables provided or Google Compute Engine is not running + * @throws Exception + */ + public void testApplicationDefaultCredentialsWhenNoSettingProvided() throws Exception { + GoogleCloudStorageClientSettings settings = getGCSClientSettingsWithoutCredentials(); + HttpTransportOptions mockHttpTransportOptions = Mockito.mock(HttpTransportOptions.class); + GoogleCloudStorageService service = new GoogleCloudStorageService(); + StorageOptions storageOptions = service.createStorageOptions(settings, mockHttpTransportOptions); + + Exception exception = assertThrows(IOException.class, GoogleCredentials::getApplicationDefault); + assertNotNull(storageOptions); + assertNull(storageOptions.getCredentials()); + MatcherAssert.assertThat(exception.getMessage(), containsString("The Application Default Credentials are not available")); + } + + /** + * The application default credentials lookup throws an IOException when it is + * used without GoogleCloudStorageService + */ + public void testDefaultCredentialsThrowsExceptionWithoutGCStorageService() { + GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials = new GoogleApplicationDefaultCredentials(); + GoogleCredentials credentials = googleApplicationDefaultCredentials.get(); + assertNull(credentials); + Exception exception = assertThrows(IOException.class, GoogleCredentials::getApplicationDefault); + MatcherAssert.assertThat(exception.getMessage(), containsString("The Application Default Credentials are not available")); + } + + /** + * This is a helper method to provide GCS client settings without credentials + * @return GoogleCloudStorageClientSettings + * @throws URISyntaxException + */ + private GoogleCloudStorageClientSettings getGCSClientSettingsWithoutCredentials() throws URISyntaxException { + return new GoogleCloudStorageClientSettings( + null, + endpoint, + projectIdName, + connectTimeValue, + readTimeValue, + applicationName, + new URI(""), + new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null) + ); + } + } diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9110503f67304..0000000000000 --- a/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d75246285e5fac6f6dad47e387ed4f46f36e521d \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 new file mode 100644 index
0000000000000..f9affd7887093 --- /dev/null +++ b/plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +6ec5bd2be65d5529e58e9e482b747c1135b3736b \ No newline at end of file diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java index 119d060374be2..af49cd3c579e6 100644 --- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java @@ -52,6 +52,7 @@ import java.util.Collections; import java.util.Map; +@SuppressWarnings("removal") public final class HdfsPlugin extends Plugin implements RepositoryPlugin { // initialize some problematic classes with elevated privileges diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java index f0ffec5713c1d..4b38e62b2525a 100644 --- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java @@ -254,6 +254,7 @@ private static String getHostName() { } } + @SuppressWarnings("removal") @Override protected HdfsBlobStore createBlobStore() { // initialize our blobstore using elevated privileges. diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java index 07d1d29eecfc4..5a27eb937ff9c 100644 --- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java +++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java @@ -57,6 +57,7 @@ * Keeps track of the current user for a given repository, as well as which * permissions to grant the blob store restricted execution methods. 
*/ +@SuppressWarnings("removal") class HdfsSecurityContext { private static final Permission[] SIMPLE_AUTH_PERMISSIONS; diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java index d0b63f17e3887..89ba8d51cf7f7 100644 --- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java +++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java @@ -62,6 +62,7 @@ /** * Integration test that runs against an HA-Enabled HDFS instance */ +@SuppressWarnings("removal") public class HaHdfsFailoverTestSuiteIT extends OpenSearchRestTestCase { public void testHAFailoverWithRepository() throws Exception { diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java index 3a6eb0e205ccb..5f7454df4ecfc 100644 --- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java +++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -64,6 +64,7 @@ @ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) public class HdfsBlobStoreContainerTests extends OpenSearchTestCase { + @SuppressWarnings("removal") private FileContext createTestContext() { FileContext fileContext; try { diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git 
a/plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 
b/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 deleted file mode 100644 index 522d85a3bf12e..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -700fdbabab44709b0eccffe8f91c4226a5787356 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..58b668b7e80a9 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c058d5c712e00e8560e519970b3d27747778b8f2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java index 3a55fcb0bdbcd..25f361b40636e 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java @@ -514,7 +514,7 @@ private static List<ListObjectsV2Response> executeListing( for (ListObjectsV2Response listObjectsV2Response : listObjectsIterable) { results.add(listObjectsV2Response); totalObjects += listObjectsV2Response.contents().size(); - if (limit != -1 && totalObjects > limit) { + if (limit != -1 && totalObjects >= limit) { break; } }
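The change from > to >= in S3BlobContainer above fixes an off-by-one in paged listing: when the limit lands exactly on a page boundary, the old check issued one more ListObjectsV2 call whose results were then discarded. A self-contained, hypothetical walk-through of that boundary case (page size 5, limit 5; the numbers are illustrative, not taken from the code):

class ListingLimitSketch {
    public static void main(String[] args) {
        int limit = 5;
        int totalObjects = 0;
        int pagesFetched = 0;
        for (int page = 0; page < 3; page++) { // stands in for listObjectsIterable
            pagesFetched++;
            totalObjects += 5; // one page of five objects
            if (limit != -1 && totalObjects >= limit) {
                break; // new check: stops after the first page
            }
            // with the old check (totalObjects > limit), a second, unnecessary
            // page would be fetched before the caller trims the results to 'limit'
        }
        System.out.println("pages fetched: " + pagesFetched); // 1 with '>=', 2 with '>'
    }
}

diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java index 4fda0ee95a3ec..e44f408e6dd12 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java +++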
b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java @@ -201,6 +201,12 @@ final class S3ClientSettings { key -> Setting.intSetting(key, 500, Property.NodeScope) ); + static final Setting.AffixSetting<Integer> MAX_SYNC_CONNECTIONS_SETTING = Setting.affixKeySetting( + PREFIX, + "max_sync_connections", + key -> Setting.intSetting(key, 500, Property.NodeScope) + ); + /** Connection acquisition timeout for new connections to S3. */ static final Setting.AffixSetting<TimeValue> CONNECTION_ACQUISITION_TIMEOUT = Setting.affixKeySetting( PREFIX, @@ -284,10 +290,13 @@ final class S3ClientSettings { /** The connection TTL for the s3 client */ final int connectionTTLMillis; - /** The max number of connections for the s3 client */ + /** The max number of connections for the s3 async client */ final int maxConnections; - /** The connnection acquisition timeout for the s3 async client */ + /** The max number of connections for the s3 sync client */ + final int maxSyncConnections; + + /** The connection acquisition timeout for the s3 sync and async client */ final int connectionAcquisitionTimeoutMillis; /** The number of retries to use for the s3 client. */ @@ -318,6 +327,7 @@ private S3ClientSettings( int connectionTimeoutMillis, int connectionTTLMillis, int maxConnections, + int maxSyncConnections, int connectionAcquisitionTimeoutMillis, int maxRetries, boolean throttleRetries, @@ -336,6 +346,7 @@ private S3ClientSettings( this.connectionTimeoutMillis = connectionTimeoutMillis; this.connectionTTLMillis = connectionTTLMillis; this.maxConnections = maxConnections; + this.maxSyncConnections = maxSyncConnections; this.connectionAcquisitionTimeoutMillis = connectionAcquisitionTimeoutMillis; this.maxRetries = maxRetries; this.throttleRetries = throttleRetries; @@ -386,6 +397,9 @@ S3ClientSettings refine(Settings repositorySettings) { ).millis() ); final int newMaxConnections = Math.toIntExact(getRepoSettingOrDefault(MAX_CONNECTIONS_SETTING, normalizedSettings, maxConnections)); + final int newMaxSyncConnections = Math.toIntExact( + getRepoSettingOrDefault(MAX_SYNC_CONNECTIONS_SETTING, normalizedSettings, maxSyncConnections) + ); final int newMaxRetries = getRepoSettingOrDefault(MAX_RETRIES_SETTING, normalizedSettings, maxRetries); final boolean newThrottleRetries = getRepoSettingOrDefault(USE_THROTTLE_RETRIES_SETTING, normalizedSettings, throttleRetries); final boolean newPathStyleAccess = getRepoSettingOrDefault(USE_PATH_STYLE_ACCESS, normalizedSettings, pathStyleAccess); @@ -433,6 +447,7 @@ S3ClientSettings refine(Settings repositorySettings) { newConnectionTimeoutMillis, newConnectionTTLMillis, newMaxConnections, + newMaxSyncConnections, newConnectionAcquisitionTimeoutMillis, newMaxRetries, newThrottleRetries, @@ -563,6 +578,7 @@ static S3ClientSettings getClientSettings(final Settings settings, final String Math.toIntExact(getConfigValue(settings, clientName, CONNECTION_TIMEOUT_SETTING).millis()), Math.toIntExact(getConfigValue(settings, clientName, CONNECTION_TTL_SETTING).millis()), Math.toIntExact(getConfigValue(settings, clientName, MAX_CONNECTIONS_SETTING)), + Math.toIntExact(getConfigValue(settings, clientName, MAX_SYNC_CONNECTIONS_SETTING)), Math.toIntExact(getConfigValue(settings, clientName, CONNECTION_ACQUISITION_TIMEOUT).millis()), getConfigValue(settings, clientName, MAX_RETRIES_SETTING), getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING),
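With the hunk above, the sync and async S3 clients get independent connection pools. A hedged sketch of how the new key surfaces through node settings; it assumes the plugin's usual s3.client.{name}. affix prefix and package-private access to S3ClientSettings (for example from a test in the same package), and the client name "default" and the "max_connections" key are illustrative assumptions, not taken from this hunk:

// Hypothetical node settings exercising the new sync-connection limit.
Settings nodeSettings = Settings.builder()
    .put("s3.client.default.max_connections", 500)      // async client pool
    .put("s3.client.default.max_sync_connections", 100) // sync client pool (new)
    .build();
// getClientSettings is the factory extended in the hunk above.
S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(nodeSettings, "default");
assert clientSettings.maxConnections == 500;
assert clientSettings.maxSyncConnections == 100;

diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java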
b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java index 24387fb98a425..fe81da31432f4 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java @@ -279,6 +279,8 @@ protected PasswordAuthentication getPasswordAuthentication() { } clientBuilder.socketTimeout(Duration.ofMillis(clientSettings.readTimeoutMillis)); + clientBuilder.maxConnections(clientSettings.maxSyncConnections); + clientBuilder.connectionAcquisitionTimeout(Duration.ofMillis(clientSettings.connectionAcquisitionTimeoutMillis)); return clientBuilder; } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java index 4888764dbc720..f88aa46e61806 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java @@ -46,6 +46,7 @@ * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in * {@link AccessController#doPrivileged(PrivilegedAction)} blocks. */ +@SuppressWarnings("removal") public final class SocketAccess { private SocketAccess() {} diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java index 8e1926d40302f..f84d953baae8e 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java @@ -75,6 +75,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +@SuppressWarnings("removal") @SuppressForbidden(reason = "test requires to set a System property to allow insecure settings when running in IDE") public class RepositoryCredentialsTests extends OpenSearchSingleNodeTestCase implements ConfigPathSupport { diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java index 58ad290a31e85..2b45e9cfe2d4b 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java @@ -916,6 +916,15 @@ public void testListBlobsByPrefixInLexicographicOrderWithLimitLessThanPageSize() testListBlobsByPrefixInLexicographicOrder(2, 1, BlobContainer.BlobNameSortOrder.LEXICOGRAPHIC); } + /** + * Test the boundary value at page size to ensure + * unnecessary calls are not made to S3 by fetching the next page. 
+ * @throws IOException + */ + public void testListBlobsByPrefixInLexicographicOrderWithLimitEqualToPageSize() throws IOException { + testListBlobsByPrefixInLexicographicOrder(5, 1, BlobContainer.BlobNameSortOrder.LEXICOGRAPHIC); + } + public void testListBlobsByPrefixInLexicographicOrderWithLimitGreaterThanPageSize() throws IOException { testListBlobsByPrefixInLexicographicOrder(8, 2, BlobContainer.BlobNameSortOrder.LEXICOGRAPHIC); } diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java index f27c8387b6e45..b47749553aeba 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java @@ -74,6 +74,8 @@ public void testThereIsADefaultClientByDefault() { assertThat(defaultSettings.connectionTimeoutMillis, is(10 * 1000)); assertThat(defaultSettings.connectionTTLMillis, is(5 * 1000)); assertThat(defaultSettings.maxConnections, is(500)); + assertThat(defaultSettings.maxSyncConnections, is(500)); + assertThat(defaultSettings.connectionAcquisitionTimeoutMillis, is(15 * 60 * 1000)); assertThat(defaultSettings.maxRetries, is(3)); assertThat(defaultSettings.throttleRetries, is(true)); } diff --git a/plugins/telemetry-otel/build.gradle b/plugins/telemetry-otel/build.gradle index 9be83e30c3183..735cbd92b691a 100644 --- a/plugins/telemetry-otel/build.gradle +++ b/plugins/telemetry-otel/build.gradle @@ -16,7 +16,7 @@ apply plugin: 'opensearch.internal-cluster-test' opensearchplugin { description 'Opentelemetry based telemetry implementation.' classname 'org.opensearch.telemetry.OTelTelemetryPlugin' - hasClientJar = true + hasClientJar = false } dependencies { diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 deleted file mode 100644 index 2c038aad4b934..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5c081d8f877225732efe13908f350029c811709 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..19f734ca17b79 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 @@ -0,0 +1 @@ +b4aea155f6d6b1032eba85378564431cfd86f562 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 deleted file mode 100644 index 3243f524432eb..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5f8bb68084ea5709a27e935907b1bb49d0bd049 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..4c06d28cba199 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 @@ -0,0 +1 @@ +3fcc87f3d810ce49d865ee54b40831559c5e129b \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 
b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 deleted file mode 100644 index 1d7da47286ae0..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3643061da474061ffa7f2036a58a7a0d40212276 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..91a5c0f715d2b --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +19c9a3f52851a1333b648ed83c82d16eb4c64afd \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 deleted file mode 100644 index 3fab0e47adcbe..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ab56c7223112fac13a66e3f667c5fc666f4a3707 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..6c05600ae3b08 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 @@ -0,0 +1 @@ +b3e74d5b8cf5e60d9965042fa284085bbe081ce3 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 deleted file mode 100644 index f93cf7a63bfad..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5752d171cd08ac84f9273258a315bc5f97e1187e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..f54e6f6893050 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 @@ -0,0 +1 @@ +af68f90f0410b7b3a1900d3e0a15ad51b10ffd5b \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 deleted file mode 100644 index 2fc33b62aee54..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6b41cd66a385d513b58b6617f20b701435b64abd \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..49d40b36ba85b --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +4acab18052267e280d1f9de22c591a5c88bed3a6 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 deleted file mode 100644 index 99f758b047aa2..0000000000000 --- 
a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9346006cead763247a786b5cabf3e1ae3c88eadb \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..a01de2aa84c43 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 @@ -0,0 +1 @@ +9f07e1764389e076a36fb7d9e5769e29f3dab950 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 deleted file mode 100644 index 705a342a684c4..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fab56e187e3fb3c70c18223184d53a76500114ab \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 new file mode 100644 index 0000000000000..a5fc8c2059104 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 @@ -0,0 +1 @@ +9201e6a43a0a89515626f7516c7d1b2c349f76df \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 deleted file mode 100644 index 31818695cc774..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -504de8cc7dc68e84c8c7c2757522d934e9c50d35 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..cd746f0756e46 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 @@ -0,0 +1 @@ +ab49eb621d6d01f0ad2f016989d0352ef18ea9a2 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 deleted file mode 100644 index 3cf3080a98bd9..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -454c7a6afab864de9f0c166246f28f16aaa824c1 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..740737dc13efc --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +01fcd8bad38d7b8987f6fc93bd7e933240eb727e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 deleted file mode 100644 index 41b0dca07556e..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b054760243906af0a327a8f5bd99adc2826ccd88 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 
b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..e6ff3dbafda22 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 @@ -0,0 +1 @@ +abad9abc80dfe6118a60413afa161696bbf8dd43 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 deleted file mode 100644 index 2f71fd5cc780a..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bff24f085193e105d4e23e3db27bf81ccb3d830e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..36ec960c4f7be --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 @@ -0,0 +1 @@ +d88407ae475e5f4e859a81e4f61e362e939f7bc2 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 deleted file mode 100644 index f0060b8a0f78f..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d80ad3210fa890a856a1d04379d134ab44a09501 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..293b82f206c99 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 @@ -0,0 +1 @@ +121a75c2ba9ed8b80f5ff131c2411a5d460f38d0 \ No newline at end of file diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java index bcdcb657c4f42..e77e69d121036 100644 --- a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java +++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java @@ -15,6 +15,7 @@ import org.opensearch.telemetry.OTelTelemetrySettings; import org.opensearch.telemetry.TelemetrySettings; import org.opensearch.telemetry.metrics.noop.NoopCounter; +import org.opensearch.telemetry.metrics.noop.NoopHistogram; import org.opensearch.telemetry.metrics.noop.NoopMetricsRegistry; import org.opensearch.test.OpenSearchIntegTestCase; @@ -53,10 +54,13 @@ public void testSanityChecksWhenMetricsDisabled() throws Exception { Counter counter = metricsRegistry.createCounter("test-counter", "test", "1"); counter.add(1.0); + Histogram histogram = metricsRegistry.createHistogram("test-histogram", "test", "1"); + Thread.sleep(2000); assertTrue(metricsRegistry instanceof NoopMetricsRegistry); assertTrue(counter instanceof NoopCounter); + assertTrue(histogram instanceof NoopHistogram); } } diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java index 
ed341595d327d..1b8f694709a9c 100644 --- a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java +++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java @@ -23,6 +23,7 @@ import java.util.stream.Collectors; import io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, minNumDataNodes = 1) public class TelemetryMetricsEnabledSanityIT extends OpenSearchIntegTestCase { @@ -92,6 +93,31 @@ public void testUpDownCounter() throws Exception { assertEquals(-1.0, value, 0.0); } + public void testHistogram() throws Exception { + MetricsRegistry metricsRegistry = internalCluster().getInstance(MetricsRegistry.class); + InMemorySingletonMetricsExporter.INSTANCE.reset(); + + Histogram histogram = metricsRegistry.createHistogram("test-histogram", "test", "ms"); + histogram.record(2.0); + histogram.record(1.0); + histogram.record(3.0); + // Sleep for about 2s to wait for metrics to be published. + Thread.sleep(2000); + + InMemorySingletonMetricsExporter exporter = InMemorySingletonMetricsExporter.INSTANCE; + ImmutableExponentialHistogramPointData histogramPointData = ((ImmutableExponentialHistogramPointData) ((ArrayList) exporter + .getFinishedMetricItems() + .stream() + .filter(a -> a.getName().contains("test-histogram")) + .collect(Collectors.toList()) + .get(0) + .getExponentialHistogramData() + .getPoints()).get(0)); + assertEquals(6.0, histogramPointData.getSum(), 0.0); + assertEquals(3.0, histogramPointData.getMax(), 0.0); + assertEquals(1.0, histogramPointData.getMin(), 0.0); + } + @After public void reset() { InMemorySingletonMetricsExporter.INSTANCE.reset(); diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetryPlugin.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetryPlugin.java index 297ae8873636f..000fd09d43c18 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetryPlugin.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetryPlugin.java @@ -53,7 +53,9 @@ public List<Setting<?>> getSettings() { OTelTelemetrySettings.TRACER_EXPORTER_DELAY_SETTING, OTelTelemetrySettings.TRACER_EXPORTER_MAX_QUEUE_SIZE_SETTING, OTelTelemetrySettings.OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING, - OTelTelemetrySettings.OTEL_METRICS_EXPORTER_CLASS_SETTING + OTelTelemetrySettings.OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS, + OTelTelemetrySettings.OTEL_METRICS_EXPORTER_CLASS_SETTING, + OTelTelemetrySettings.TRACER_SAMPLER_ACTION_PROBABILITY ); } diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java index 8e23f724b4570..95ce6918fcb70 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java @@ -13,15 +13,21 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.telemetry.metrics.exporter.OTelMetricsExporterFactory; import org.opensearch.telemetry.tracing.exporter.OTelSpanExporterFactory; +import org.opensearch.telemetry.tracing.sampler.OTelSamplerFactory; +import
org.opensearch.telemetry.tracing.sampler.ProbabilisticSampler; +import org.opensearch.telemetry.tracing.sampler.ProbabilisticTransportActionSampler; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; +import java.util.Arrays; +import java.util.List; import io.opentelemetry.exporter.logging.LoggingMetricExporter; import io.opentelemetry.exporter.logging.LoggingSpanExporter; import io.opentelemetry.sdk.metrics.export.MetricExporter; import io.opentelemetry.sdk.trace.export.SpanExporter; +import io.opentelemetry.sdk.trace.samplers.Sampler; /** * OTel specific telemetry settings. @@ -66,7 +72,7 @@ private OTelTelemetrySettings() {} /** * Span Exporter type setting. */ - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "removal" }) public static final Setting<Class<SpanExporter>> OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING = new Setting<>( "telemetry.otel.tracer.span.exporter.class", LoggingSpanExporter.class.getName(), @@ -90,7 +96,7 @@ private OTelTelemetrySettings() {} /** * Metrics Exporter type setting. */ - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "removal" }) public static final Setting<Class<MetricExporter>> OTEL_METRICS_EXPORTER_CLASS_SETTING = new Setting<>( "telemetry.otel.metrics.exporter.class", LoggingMetricExporter.class.getName(), @@ -110,4 +116,40 @@ private OTelTelemetrySettings() {} Setting.Property.NodeScope, Setting.Property.Final ); + + /** + * Sampler classes setting; the samplers are chained in the listed order. + */ + @SuppressWarnings("unchecked") + public static final Setting<List<Class<Sampler>>> OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS = Setting.listSetting( + "telemetry.otel.tracer.span.sampler.classes", + Arrays.asList(ProbabilisticTransportActionSampler.class.getName(), ProbabilisticSampler.class.getName()), + sampler -> { + // Check we ourselves are not being called by unprivileged code. + SpecialPermission.check(); + try { + return AccessController.doPrivileged((PrivilegedExceptionAction<Class<Sampler>>) () -> { + final ClassLoader loader = OTelSamplerFactory.class.getClassLoader(); + return (Class<Sampler>) loader.loadClass(sampler); + }); + } catch (PrivilegedActionException ex) { + throw new IllegalStateException("Unable to load sampler class: " + sampler, ex.getCause()); + } + }, + Setting.Property.NodeScope, + Setting.Property.Final + ); + + /** + * Probability for the transport action based sampler. + */ + public static final Setting<Double> TRACER_SAMPLER_ACTION_PROBABILITY = Setting.doubleSetting( + "telemetry.tracer.action.sampler.probability", + 0.001d, + 0.000d, + 1.00d, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + }
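Taken together, the two settings above select and tune the sampler chain: the class list is instantiated back-to-front by OTelSamplerFactory so that each sampler receives the next one as its fallback, and the probability feeds ProbabilisticTransportActionSampler. A hedged sketch of a deployment overriding both; only the keys and defaults come from the hunk above, the values are illustrative:

// Hypothetical override of the sampler chain and the action-sampling probability.
Settings settings = Settings.builder()
    // the default classes, listed explicitly for clarity
    .putList(
        "telemetry.otel.tracer.span.sampler.classes",
        ProbabilisticTransportActionSampler.class.getName(),
        ProbabilisticSampler.class.getName()
    )
    // raise the action-sampling probability from its 0.001 default
    .put("telemetry.tracer.action.sampler.probability", 0.01d)
    .build();

diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java new file mode 100644 index 0000000000000..73bb0d8adff62 --- /dev/null +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java @@ -0,0 +1,40 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.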
+ */ + +package org.opensearch.telemetry.metrics; + +import org.opensearch.telemetry.OTelAttributesConverter; +import org.opensearch.telemetry.metrics.tags.Tags; + +import io.opentelemetry.api.metrics.DoubleHistogram; + +/** + * OTel-aware implementation of {@link Histogram}. + */ +class OTelHistogram implements Histogram { + + private final DoubleHistogram otelDoubleHistogram; + + /** + * Constructor + * @param otelDoubleHistogram delegate histogram. + */ + public OTelHistogram(DoubleHistogram otelDoubleHistogram) { + this.otelDoubleHistogram = otelDoubleHistogram; + } + + @Override + public void record(double value) { + otelDoubleHistogram.record(value); + } + + @Override + public void record(double value, Tags tags) { + otelDoubleHistogram.record(value, OTelAttributesConverter.convert(tags)); + } +} diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java index 6160e5106c041..82ae2cdd198b2 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java @@ -17,6 +17,7 @@ import java.security.PrivilegedAction; import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.DoubleUpDownCounter; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; @@ -42,6 +43,7 @@ public OTelMetricsTelemetry(RefCountedReleasable<OpenTelemetrySdk> openTelemetry this.otelMeter = meterProvider.get(OTelTelemetryPlugin.INSTRUMENTATION_SCOPE_NAME); } + @SuppressWarnings("removal") @Override public Counter createCounter(String name, String description, String unit) { DoubleCounter doubleCounter = AccessController.doPrivileged( @@ -54,6 +56,7 @@ public Counter createCounter(String name, String description, String unit) { return new OTelCounter(doubleCounter); } + @SuppressWarnings("removal") @Override public Counter createUpDownCounter(String name, String description, String unit) { DoubleUpDownCounter doubleUpDownCounter = AccessController.doPrivileged( @@ -66,6 +69,23 @@ public Counter createUpDownCounter(String name, String description, String unit) { return new OTelUpDownCounter(doubleUpDownCounter); } + /** + * Creates the OTel Histogram. In {@link org.opensearch.telemetry.tracing.OTelResourceProvider} + * the bucketing/aggregation strategy can be configured through a view. The default strategy configured + * is {@link io.opentelemetry.sdk.metrics.internal.view.Base2ExponentialHistogramAggregation}. + * @param name name of the histogram. + * @param description any description about the metric. + * @param unit unit of the metric.
+ * @return histogram + */ + @Override + public Histogram createHistogram(String name, String description, String unit) { + DoubleHistogram doubleHistogram = AccessController.doPrivileged( + (PrivilegedAction<DoubleHistogram>) () -> otelMeter.histogramBuilder(name).setUnit(unit).setDescription(description).build() + ); + return new OTelHistogram(doubleHistogram); + } + @Override public void close() throws IOException { meterProvider.close(); diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java index ef5a31e4003ca..9c548044484fd 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java @@ -51,6 +51,7 @@ public static MetricExporter create(Settings settings) { return metricExporter; } + @SuppressWarnings("removal") private static MetricExporter instantiateExporter(Class<MetricExporter> exporterProviderClass) { try { // Check we ourselves are not being called by unprivileged code. diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java index 14a19f122c17b..475fc09d04bff 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java @@ -12,7 +12,7 @@ import org.opensearch.telemetry.TelemetrySettings; import org.opensearch.telemetry.metrics.exporter.OTelMetricsExporterFactory; import org.opensearch.telemetry.tracing.exporter.OTelSpanExporterFactory; -import org.opensearch.telemetry.tracing.sampler.ProbabilisticSampler; +import org.opensearch.telemetry.tracing.sampler.OTelSamplerFactory; import org.opensearch.telemetry.tracing.sampler.RequestSampler; import java.security.AccessController; @@ -23,8 +23,12 @@ import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.metrics.InstrumentSelector; +import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.View; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.metrics.internal.view.Base2ExponentialHistogramAggregation; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; @@ -40,6 +44,7 @@ * This class encapsulates all OpenTelemetry related resources */ public final class OTelResourceProvider { + private OTelResourceProvider() {} /** @@ -48,13 +53,14 @@ private OTelResourceProvider() {} * @param settings cluster settings * @return OpenTelemetrySdk instance */ + @SuppressWarnings("removal") public static OpenTelemetrySdk get(TelemetrySettings telemetrySettings, Settings settings) { return AccessController.doPrivileged( (PrivilegedAction<OpenTelemetrySdk>) () -> get( settings, OTelSpanExporterFactory.create(settings), ContextPropagators.create(W3CTraceContextPropagator.getInstance()), - Sampler.parentBased(new 
RequestSampler(new ProbabilisticSampler(telemetrySettings))) + Sampler.parentBased(new RequestSampler(OTelSamplerFactory.create(telemetrySettings, settings))) ) ); } @@ -91,6 +97,10 @@ private static SdkMeterProvider createSdkMetricProvider(Settings settings, Resou .setInterval(TelemetrySettings.METRICS_PUBLISH_INTERVAL_SETTING.get(settings).getSeconds(), TimeUnit.SECONDS) .build() ) + .registerView( + InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(), + View.builder().setAggregation(Base2ExponentialHistogramAggregation.getDefault()).build() + ) .build(); } diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java index da7ce5c47d9ca..e9d7e78882c7d 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java @@ -51,6 +51,7 @@ public static SpanExporter create(Settings settings) { return spanExporter; } + @SuppressWarnings("removal") private static SpanExporter instantiateSpanExporter(Class<SpanExporter> spanExporterProviderClass) { try { // Check we ourselves are not being called by unprivileged code. diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/OTelSamplerFactory.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/OTelSamplerFactory.java new file mode 100644 index 0000000000000..b9d5c07a40cd8 --- /dev/null +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/OTelSamplerFactory.java @@ -0,0 +1,96 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.telemetry.tracing.sampler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.SpecialPermission; +import org.opensearch.common.settings.Settings; +import org.opensearch.telemetry.OTelTelemetrySettings; +import org.opensearch.telemetry.TelemetrySettings; + +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; +import java.util.List; +import java.util.ListIterator; + +import io.opentelemetry.sdk.trace.samplers.Sampler; + +/** + * Factory class to create {@link Sampler} instances. + */ +public class OTelSamplerFactory { + + /** + * Logger instance for logging messages related to the OTelSamplerFactory. + */ + private static final Logger logger = LogManager.getLogger(OTelSamplerFactory.class); + + /** + * Base constructor. + */ + private OTelSamplerFactory() { + + } + + /** + * Creates the chain of {@link Sampler} instances based on the OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS value. + * + * @param telemetrySettings TelemetrySettings. + * @param settings the settings + * @return the root sampler of the chain. + */ + public static Sampler create(TelemetrySettings telemetrySettings, Settings settings) { + List<Class<Sampler>> samplersNameList = OTelTelemetrySettings.OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS.get(settings); + ListIterator<Class<Sampler>> li = samplersNameList.listIterator(samplersNameList.size()); + + Sampler fallbackSampler = null; + + // Iterate the samplers list in reverse order to build the chain of samplers + while (li.hasPrevious()) { + Class<Sampler> samplerName = li.previous(); + fallbackSampler = instantiateSampler(samplerName, telemetrySettings, settings, fallbackSampler); + } + + return fallbackSampler; + } + + private static Sampler instantiateSampler( + Class<Sampler> samplerClassName, + TelemetrySettings telemetrySettings, + Settings settings, + Sampler fallbackSampler + ) { + try { + // Check we ourselves are not being called by unprivileged code. + SpecialPermission.check(); + + return AccessController.doPrivileged((PrivilegedExceptionAction<Sampler>) () -> { + try { + // Define the method type which receives TelemetrySettings, Settings and Sampler as arguments + MethodType methodType = MethodType.methodType(Sampler.class, TelemetrySettings.class, Settings.class, Sampler.class); + + return (Sampler) MethodHandles.publicLookup() + .findStatic(samplerClassName, "create", methodType) + .invokeExact(telemetrySettings, settings, fallbackSampler); + } catch (Throwable e) { + if (e.getCause() instanceof NoSuchMethodException) { + throw new IllegalStateException("No create method exists in [" + samplerClassName + "]", e.getCause()); + } else { + throw new IllegalStateException("Sampler instantiation failed for class [" + samplerClassName + "]", e.getCause()); + } + } + }); + } catch (Exception e) { + throw new IllegalStateException("Sampler instantiation failed for class [" + samplerClassName + "]", e.getCause()); + } + } +}
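To make the reverse iteration concrete, here is a hedged, hand-built equivalent of the chain that create(...) assembles for the default class list; it assumes telemetrySettings and settings instances already exist in scope:

// The last class in the list is constructed first and becomes the fallback
// of the entry before it.
Sampler tail = ProbabilisticSampler.create(telemetrySettings, settings, null);
Sampler root = ProbabilisticTransportActionSampler.create(telemetrySettings, settings, tail);
// 'root' is what OTelSamplerFactory.create returns; OTelResourceProvider then
// wraps it as Sampler.parentBased(new RequestSampler(root)).

diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSampler.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSampler.java index 774070aa39df6..d7fe92b1f3495 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSampler.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSampler.java @@ -8,6 +8,7 @@ package org.opensearch.telemetry.tracing.sampler; +import org.opensearch.common.settings.Settings; import org.opensearch.telemetry.TelemetrySettings; import java.util.List; @@ -18,14 +19,18 @@ import io.opentelemetry.context.Context; import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.samplers.Sampler; +import io.opentelemetry.sdk.trace.samplers.SamplingDecision; import io.opentelemetry.sdk.trace.samplers.SamplingResult; /** - * ProbabilisticSampler implements a head-based sampling strategy based on provided settings. + * ProbabilisticSampler implements a probability sampling strategy based on the configured sampling ratio. */ public class ProbabilisticSampler implements Sampler { private Sampler defaultSampler; private final TelemetrySettings telemetrySettings; + private final Settings settings; + private final Sampler fallbackSampler; + private double samplingRatio; /** @@ -33,21 +38,24 @@ public class ProbabilisticSampler { * * @param telemetrySettings Telemetry settings.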
*/ - public ProbabilisticSampler(TelemetrySettings telemetrySettings) { + private ProbabilisticSampler(TelemetrySettings telemetrySettings, Settings settings, Sampler fallbackSampler) { this.telemetrySettings = Objects.requireNonNull(telemetrySettings); + this.settings = Objects.requireNonNull(settings); this.samplingRatio = telemetrySettings.getSamplingProbability(); this.defaultSampler = Sampler.traceIdRatioBased(samplingRatio); + this.fallbackSampler = fallbackSampler; } - Sampler getSampler() { - double newSamplingRatio = telemetrySettings.getSamplingProbability(); - if (isSamplingRatioChanged(newSamplingRatio)) { - synchronized (this) { - this.samplingRatio = newSamplingRatio; - defaultSampler = Sampler.traceIdRatioBased(samplingRatio); - } - } - return defaultSampler; + /** + * Create probabilistic sampler. + * + * @param telemetrySettings the telemetry settings + * @param settings the settings + * @param fallbackSampler the fallback sampler + * @return the probabilistic sampler + */ + public static Sampler create(TelemetrySettings telemetrySettings, Settings settings, Sampler fallbackSampler) { + return new ProbabilisticSampler(telemetrySettings, settings, fallbackSampler); } private boolean isSamplingRatioChanged(double newSamplingRatio) { @@ -67,7 +75,19 @@ public SamplingResult shouldSample( Attributes attributes, List<LinkData> parentLinks ) { - return getSampler().shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + double newSamplingRatio = telemetrySettings.getSamplingProbability(); + if (isSamplingRatioChanged(newSamplingRatio)) { + synchronized (this) { + this.samplingRatio = newSamplingRatio; + defaultSampler = Sampler.traceIdRatioBased(samplingRatio); + } + } + final SamplingResult result = defaultSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + if (result.getDecision() != SamplingDecision.DROP && fallbackSampler != null) { + return fallbackSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + } else { + return result; + } } @Override diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticTransportActionSampler.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticTransportActionSampler.java new file mode 100644 index 0000000000000..93a8edaaaa760 --- /dev/null +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticTransportActionSampler.java @@ -0,0 +1,99 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.telemetry.tracing.sampler; + +import org.opensearch.common.settings.Settings; +import org.opensearch.telemetry.OTelTelemetrySettings; +import org.opensearch.telemetry.TelemetrySettings; + +import java.util.List; +import java.util.Objects; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.trace.data.LinkData; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import io.opentelemetry.sdk.trace.samplers.SamplingDecision; +import io.opentelemetry.sdk.trace.samplers.SamplingResult; + +import static org.opensearch.telemetry.tracing.AttributeNames.TRANSPORT_ACTION; + +/** + * ProbabilisticTransportActionSampler samples requests that carry a transport action, based on the configured probability + */ +public class ProbabilisticTransportActionSampler implements Sampler { + + private final Sampler fallbackSampler; + private Sampler actionSampler; + private final TelemetrySettings telemetrySettings; + private final Settings settings; + private double actionSamplingRatio; + + /** + * Creates the ProbabilisticTransportActionSampler sampler + * @param telemetrySettings the telemetry settings + * @param settings the settings + * @param fallbackSampler the fallback sampler + */ + private ProbabilisticTransportActionSampler(TelemetrySettings telemetrySettings, Settings settings, Sampler fallbackSampler) { + this.telemetrySettings = Objects.requireNonNull(telemetrySettings); + this.settings = Objects.requireNonNull(settings); + this.actionSamplingRatio = OTelTelemetrySettings.TRACER_SAMPLER_ACTION_PROBABILITY.get(settings); + this.actionSampler = Sampler.traceIdRatioBased(actionSamplingRatio); + this.fallbackSampler = fallbackSampler; + } + + /** + * Create probabilistic transport action sampler.
+ * + * @param telemetrySettings the telemetry settings + * @param settings the settings + * @param fallbackSampler the fallback sampler + * @return the probabilistic transport action sampler + */ + public static Sampler create(TelemetrySettings telemetrySettings, Settings settings, Sampler fallbackSampler) { + return new ProbabilisticTransportActionSampler(telemetrySettings, settings, fallbackSampler); + } + + @Override + public SamplingResult shouldSample( + Context parentContext, + String traceId, + String name, + SpanKind spanKind, + Attributes attributes, + List<LinkData> parentLinks + ) { + final String action = attributes.get(AttributeKey.stringKey(TRANSPORT_ACTION)); + if (action != null) { + final SamplingResult result = actionSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + if (result.getDecision() != SamplingDecision.DROP && fallbackSampler != null) { + return fallbackSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + } + return result; + } + if (fallbackSampler != null) return fallbackSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + + return SamplingResult.drop(); + } + + double getSamplingRatio() { + return actionSamplingRatio; + } + + @Override + public String getDescription() { + return "Transport Action Sampler"; + } + + @Override + public String toString() { + return getDescription(); + } +}
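Before the RequestSampler diff below, a hedged summary of its decision rules as a short sketch; the behavior is read from the shouldSample implementation that follows, and the attribute value strings are illustrative:

// Decision rules implemented by RequestSampler.shouldSample:
//   trace attribute == "true"   -> SamplingResult.recordAndSample()
//   trace attribute == "false"  -> SamplingResult.drop()
//   no trace attribute          -> fallbackSampler.shouldSample(...)
//   no attribute, no fallback   -> SamplingResult.recordAndSample()
// Example: a client forcing a trace on one request, assuming TRACE is the
// attribute key imported from AttributeNames.
Attributes forced = Attributes.of(AttributeKey.stringKey(TRACE), "true");

diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/RequestSampler.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/RequestSampler.java index 9ea681370a3ec..87c2849173aff 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/RequestSampler.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/sampler/RequestSampler.java @@ -18,21 +18,20 @@ import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingResult; +import static org.opensearch.telemetry.tracing.AttributeNames.TRACE; + /** - * HeadBased sampler + * RequestSampler, a head-based sampler that honors an explicit trace attribute on the request */ public class RequestSampler implements Sampler { - private final Sampler defaultSampler; - - // TODO: Pick value of TRACE from PR #9415. - private static final String TRACE = "trace"; + private final Sampler fallbackSampler; /** - * Creates Head based sampler - * @param defaultSampler defaultSampler + * Creates the request sampler which delegates to the chain of applicable samplers + * @param fallbackSampler the fallback sampler */ - public RequestSampler(Sampler defaultSampler) { - this.defaultSampler = defaultSampler; + public RequestSampler(Sampler fallbackSampler) { + this.fallbackSampler = fallbackSampler; } @Override @@ -44,15 +43,15 @@ public SamplingResult shouldSample( Attributes attributes, List<LinkData> parentLinks ) { - final String trace = attributes.get(AttributeKey.stringKey(TRACE)); if (trace != null) { return (Boolean.parseBoolean(trace) == true) ?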
SamplingResult.recordAndSample() : SamplingResult.drop(); - } else { - return defaultSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); } - + if (fallbackSampler != null) { + return fallbackSampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); + } + return SamplingResult.recordAndSample(); } @Override diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/OTelTelemetryPluginTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/OTelTelemetryPluginTests.java index 2fcf89947e537..4a1301588dad2 100644 --- a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/OTelTelemetryPluginTests.java +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/OTelTelemetryPluginTests.java @@ -30,9 +30,11 @@ import static org.opensearch.telemetry.OTelTelemetryPlugin.OTEL_TRACER_NAME; import static org.opensearch.telemetry.OTelTelemetrySettings.OTEL_METRICS_EXPORTER_CLASS_SETTING; import static org.opensearch.telemetry.OTelTelemetrySettings.OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING; +import static org.opensearch.telemetry.OTelTelemetrySettings.OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS; import static org.opensearch.telemetry.OTelTelemetrySettings.TRACER_EXPORTER_BATCH_SIZE_SETTING; import static org.opensearch.telemetry.OTelTelemetrySettings.TRACER_EXPORTER_DELAY_SETTING; import static org.opensearch.telemetry.OTelTelemetrySettings.TRACER_EXPORTER_MAX_QUEUE_SIZE_SETTING; +import static org.opensearch.telemetry.OTelTelemetrySettings.TRACER_SAMPLER_ACTION_PROBABILITY; import static org.opensearch.telemetry.TelemetrySettings.TRACER_ENABLED_SETTING; import static org.opensearch.telemetry.TelemetrySettings.TRACER_SAMPLER_PROBABILITY; @@ -69,7 +71,9 @@ public void testGetTelemetry() { TRACER_EXPORTER_DELAY_SETTING, TRACER_EXPORTER_MAX_QUEUE_SIZE_SETTING, OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING, - OTEL_METRICS_EXPORTER_CLASS_SETTING + OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS, + OTEL_METRICS_EXPORTER_CLASS_SETTING, + TRACER_SAMPLER_ACTION_PROBABILITY ), oTelTelemetryPlugin.getSettings() ); diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java index 9de575b69774a..4b39e3d0d607d 100644 --- a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java @@ -17,12 +17,15 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.metrics.DoubleCounter; import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; import io.opentelemetry.api.metrics.DoubleUpDownCounter; import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; import io.opentelemetry.api.metrics.LongCounterBuilder; import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; +import org.mockito.Mockito; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -118,4 +121,32 @@ public void testUpDownCounter() { counter.add(-2.0, tags); verify(mockOTelUpDownDoubleCounter).add((-2.0), OTelAttributesConverter.convert(tags)); } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + public 
void testHistogram() { + String histogramName = "test-histogram"; + String description = "test"; + String unit = "1"; + Meter mockMeter = mock(Meter.class); + OpenTelemetry mockOpenTelemetry = mock(OpenTelemetry.class); + DoubleHistogram mockOTelDoubleHistogram = mock(DoubleHistogram.class); + DoubleHistogramBuilder mockOTelDoubleHistogramBuilder = mock(DoubleHistogramBuilder.class); + MeterProvider meterProvider = mock(MeterProvider.class); + when(meterProvider.get(OTelTelemetryPlugin.INSTRUMENTATION_SCOPE_NAME)).thenReturn(mockMeter); + MetricsTelemetry metricsTelemetry = new OTelMetricsTelemetry( + new RefCountedReleasable("telemetry", mockOpenTelemetry, () -> {}), + meterProvider + ); + when(mockMeter.histogramBuilder(Mockito.contains(histogramName))).thenReturn(mockOTelDoubleHistogramBuilder); + when(mockOTelDoubleHistogramBuilder.setDescription(description)).thenReturn(mockOTelDoubleHistogramBuilder); + when(mockOTelDoubleHistogramBuilder.setUnit(unit)).thenReturn(mockOTelDoubleHistogramBuilder); + when(mockOTelDoubleHistogramBuilder.build()).thenReturn(mockOTelDoubleHistogram); + + Histogram histogram = metricsTelemetry.createHistogram(histogramName, description, unit); + histogram.record(1.0); + verify(mockOTelDoubleHistogram).record(1.0); + Tags tags = Tags.create().addTag("test", "test"); + histogram.record(2.0, tags); + verify(mockOTelDoubleHistogram).record(2.0, OTelAttributesConverter.convert(tags)); + } } diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/OTelSamplerFactoryTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/OTelSamplerFactoryTests.java new file mode 100644 index 0000000000000..39ccf299dfdc4 --- /dev/null +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/OTelSamplerFactoryTests.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.telemetry.tracing.sampler; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.telemetry.OTelTelemetrySettings; +import org.opensearch.telemetry.TelemetrySettings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Set; + +import io.opentelemetry.sdk.trace.samplers.Sampler; + +import static org.opensearch.telemetry.TelemetrySettings.TRACER_ENABLED_SETTING; +import static org.opensearch.telemetry.TelemetrySettings.TRACER_SAMPLER_PROBABILITY; + +public class OTelSamplerFactoryTests extends OpenSearchTestCase { + + public void testDefaultCreate() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + TelemetrySettings telemetrySettings = new TelemetrySettings(Settings.EMPTY, clusterSettings); + Sampler sampler = OTelSamplerFactory.create(telemetrySettings, Settings.EMPTY); + assertEquals(ProbabilisticTransportActionSampler.class, sampler.getClass()); + } + + public void testCreateWithSingleSampler() { + Settings settings = Settings.builder() + .put(OTelTelemetrySettings.OTEL_TRACER_SPAN_SAMPLER_CLASS_SETTINGS.getKey(), ProbabilisticSampler.class.getName()) + .build(); + + ClusterSettings clusterSettings = new ClusterSettings(settings, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + TelemetrySettings telemetrySettings = new TelemetrySettings(settings, clusterSettings); + Sampler sampler = OTelSamplerFactory.create(telemetrySettings, settings); + assertTrue(sampler instanceof ProbabilisticSampler); + } +} diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSamplerTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSamplerTests.java index 639dc341ef0db..a094cd0119f5e 100644 --- a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSamplerTests.java +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticSamplerTests.java @@ -15,18 +15,21 @@ import java.util.Set; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.context.Context; import io.opentelemetry.sdk.trace.samplers.Sampler; import static org.opensearch.telemetry.OTelTelemetrySettings.TRACER_EXPORTER_DELAY_SETTING; import static org.opensearch.telemetry.TelemetrySettings.TRACER_ENABLED_SETTING; import static org.opensearch.telemetry.TelemetrySettings.TRACER_SAMPLER_PROBABILITY; +import static org.mockito.Mockito.mock; public class ProbabilisticSamplerTests extends OpenSearchTestCase { // When ProbabilisticSampler is created with OTelTelemetrySettings as null public void testProbabilisticSamplerWithNullSettings() { // Verify that creation throws NullPointerException when given null settings - assertThrows(NullPointerException.class, () -> { new ProbabilisticSampler(null); }); + assertThrows(NullPointerException.class, () -> { ProbabilisticSampler.create(null, null, null); }); } public void testDefaultGetSampler() { @@ -37,10 +40,9 @@ public void testDefaultGetSampler() { ); // Probabilistic Sampler - ProbabilisticSampler probabilisticSampler = new ProbabilisticSampler(telemetrySettings); + Sampler probabilisticSampler = ProbabilisticSampler.create(telemetrySettings, Settings.EMPTY, null); - assertNotNull(probabilisticSampler.getSampler()); - assertEquals(0.01, probabilisticSampler.getSamplingRatio(), 0.0d); +
assertEquals(0.01, ((ProbabilisticSampler) probabilisticSampler).getSamplingRatio(), 0.0d); } public void testGetSamplerWithUpdatedSamplingRatio() { @@ -51,14 +53,16 @@ public void testGetSamplerWithUpdatedSamplingRatio() { ); // Probabilistic Sampler - ProbabilisticSampler probabilisticSampler = new ProbabilisticSampler(telemetrySettings); - assertEquals(0.01d, probabilisticSampler.getSamplingRatio(), 0.0d); + Sampler probabilisticSampler = ProbabilisticSampler.create(telemetrySettings, Settings.EMPTY, null); + + assertEquals(0.01d, ((ProbabilisticSampler) probabilisticSampler).getSamplingRatio(), 0.0d); telemetrySettings.setSamplingProbability(0.02); + // Need to call shouldSample() to update the value of samplingRatio + probabilisticSampler.shouldSample(mock(Context.class), "00000000000000000000000000000000", "", SpanKind.INTERNAL, null, null); + // Need to call getSampler() to update the value of tracerHeadSamplerSamplingRatio - Sampler updatedProbabilisticSampler = probabilisticSampler.getSampler(); - assertEquals(0.02, probabilisticSampler.getSamplingRatio(), 0.0d); + assertEquals(0.02, ((ProbabilisticSampler) probabilisticSampler).getSamplingRatio(), 0.0d); } - } diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticTransportActionSamplerTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticTransportActionSamplerTests.java new file mode 100644 index 0000000000000..261b0252fef60 --- /dev/null +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/ProbabilisticTransportActionSamplerTests.java @@ -0,0 +1,52 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.telemetry.tracing.sampler; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.telemetry.TelemetrySettings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Collections; +import java.util.Set; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import io.opentelemetry.sdk.trace.samplers.SamplingResult; + +import static org.opensearch.telemetry.TelemetrySettings.TRACER_ENABLED_SETTING; +import static org.opensearch.telemetry.TelemetrySettings.TRACER_SAMPLER_PROBABILITY; +import static org.opensearch.telemetry.tracing.AttributeNames.TRANSPORT_ACTION; +import static org.mockito.Mockito.mock; + +public class ProbabilisticTransportActionSamplerTests extends OpenSearchTestCase { + + public void testGetSamplerWithActionSamplingRatio() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + + TelemetrySettings telemetrySettings = new TelemetrySettings(Settings.EMPTY, clusterSettings); + + // ProbabilisticTransportActionSampler + Sampler probabilisticTransportActionSampler = ProbabilisticTransportActionSampler.create(telemetrySettings, Settings.EMPTY, null); + + SamplingResult result = probabilisticTransportActionSampler.shouldSample( + mock(Context.class), + "00000000000000000000000000000000", + "spanName", + SpanKind.INTERNAL, + Attributes.builder().put(TRANSPORT_ACTION, "dummy_action").build(), + Collections.emptyList() + ); + // Verify that ProbabilisticTransportActionSampler returned SamplingResult.recordAndSample() as all actions will be sampled + assertEquals(SamplingResult.recordAndSample(), result); + assertEquals(0.001, ((ProbabilisticTransportActionSampler) probabilisticTransportActionSampler).getSamplingRatio(), 0.000d); + } +} diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/RequestSamplerTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/RequestSamplerTests.java index facf04623ec46..da234ca13dc9d 100644 --- a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/RequestSamplerTests.java +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/tracing/sampler/RequestSamplerTests.java @@ -8,9 +8,14 @@ package org.opensearch.telemetry.tracing.sampler; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.telemetry.TelemetrySettings; import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; import java.util.Collections; +import java.util.Set; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; @@ -19,29 +24,29 @@ import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingResult; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; +import static org.opensearch.telemetry.TelemetrySettings.TRACER_ENABLED_SETTING; +import static org.opensearch.telemetry.TelemetrySettings.TRACER_SAMPLER_PROBABILITY; +import static org.opensearch.telemetry.tracing.AttributeNames.TRANSPORT_ACTION; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import 
static org.mockito.Mockito.when; public class RequestSamplerTests extends OpenSearchTestCase { + private ClusterSettings clusterSettings; + private TelemetrySettings telemetrySettings; + private RequestSampler requestSampler; + private Context parentContext; + + @Before + public void init() { + clusterSettings = new ClusterSettings(Settings.EMPTY, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + telemetrySettings = new TelemetrySettings(Settings.EMPTY, clusterSettings); + Sampler fallbackSampler = OTelSamplerFactory.create(telemetrySettings, Settings.EMPTY); + requestSampler = new RequestSampler(fallbackSampler); + parentContext = mock(Context.class); + } public void testShouldSampleWithTraceAttributeAsTrue() { - - // Create a mock default sampler - Sampler defaultSampler = mock(Sampler.class); - when(defaultSampler.shouldSample(any(), anyString(), anyString(), any(), any(), any())).thenReturn(SamplingResult.drop()); - - // Create an instance of HeadSampler with the mock default sampler - RequestSampler requestSampler = new RequestSampler(defaultSampler); - - // Create a mock Context and Attributes - Context parentContext = mock(Context.class); Attributes attributes = Attributes.of(AttributeKey.stringKey("trace"), "true"); - // Call shouldSample on HeadSampler SamplingResult result = requestSampler.shouldSample( parentContext, "traceId", @@ -50,43 +55,85 @@ public void testShouldSampleWithTraceAttributeAsTrue() { attributes, Collections.emptyList() ); - assertEquals(SamplingResult.recordAndSample(), result); + } + + public void testShouldSampleWithTraceAttributeAsFalse() { + Attributes attributes = Attributes.of(AttributeKey.stringKey("trace"), "false"); - // Verify that the default sampler's shouldSample method was not called - verify(defaultSampler, never()).shouldSample(any(), anyString(), anyString(), any(), any(), any()); + SamplingResult result = requestSampler.shouldSample( + parentContext, + "traceId", + "spanName", + SpanKind.INTERNAL, + attributes, + Collections.emptyList() + ); + assertEquals(SamplingResult.drop(), result); } - public void testShouldSampleWithoutTraceAttribute() { + public void testShouldSampleForProbabilisticSampler() { + clusterSettings.applySettings( + Settings.builder() + .put("telemetry.tracer.sampler.probability", "1.0") + .put("telemetry.otel.tracer.span.sampler.classes", "org.opensearch.telemetry.tracing.sampler.ProbabilisticSampler") + .build() + ); + + Attributes attributes = Attributes.builder().build(); + + SamplingResult result = requestSampler.shouldSample( + parentContext, + "00000000000000000000000000000000", + "spanName", + SpanKind.INTERNAL, + attributes, + Collections.emptyList() + ); - // Create a mock default sampler - Sampler defaultSampler = mock(Sampler.class); - when(defaultSampler.shouldSample(any(), anyString(), anyString(), any(), any(), any())).thenReturn( - SamplingResult.recordAndSample() + // Verify that request is sampled + assertEquals(SamplingResult.recordAndSample(), result); + + clusterSettings.applySettings(Settings.builder().put("telemetry.tracer.sampler.probability", "0.0").build()); + result = requestSampler.shouldSample( + parentContext, + "00000000000000000000000000000000", + "spanName", + SpanKind.INTERNAL, + attributes, + Collections.emptyList() ); + assertEquals(SamplingResult.drop(), result); - // Create an instance of HeadSampler with the mock default sampler - RequestSampler requestSampler = new RequestSampler(defaultSampler); + } - // Create a mock Context and Attributes + public void 
testShouldSampleForProbabilisticTransportActionSampler() { + clusterSettings.applySettings( + Settings.builder() + .put( + "telemetry.otel.tracer.span.sampler.classes", + "org.opensearch.telemetry.tracing.sampler.ProbabilisticTransportActionSampler" + ) + .build() + ); + clusterSettings.applySettings(Settings.builder().put("telemetry.tracer.action.sampler.probability", "1.0").build()); + + // Create a mock Context and Attributes with dummy action Context parentContext = mock(Context.class); - Attributes attributes = Attributes.empty(); + Attributes attributes = Attributes.builder().put(TRANSPORT_ACTION, "dummy_action").build(); - // Call shouldSample on HeadSampler + // Calling shouldSample to update samplingRatio SamplingResult result = requestSampler.shouldSample( parentContext, - "traceId", + "00000000000000000000000000000000", "spanName", SpanKind.INTERNAL, attributes, Collections.emptyList() ); - // Verify that HeadSampler returned SamplingResult.recordAndSample() + // Verify that request is sampled assertEquals(SamplingResult.recordAndSample(), result); - - // Verify that the default sampler's shouldSample method was called - verify(defaultSampler).shouldSample(any(), anyString(), anyString(), any(), any(), any()); } } diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git 
a/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 
b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 0232fc58f9357..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..005cc2388bd89 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +9d4b3315bb625ec2d73fa569fb6bce4589243d5e \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ 
b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 987b524aedc98..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..0617c6999c586 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +3970474ed55aa1e3e8de5a5602c342c6b8155371 \ No newline at end of file diff --git 
a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1 deleted file mode 100644 index 5eaf96739ed72..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -faea23e582978a34f6a932b81e86206ec2314990 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..c30a99a2338b4 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1 @@ -0,0 +1 @@ +3221d405ad55a573cf29875a8244a4217cf07185 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1 deleted file mode 100644 index 091125169c696..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5af7bc746050d080891a5446cca2c96a0c51d03 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.15.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..ab3171cd02b73 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.15.jar.sha1 @@ -0,0 +1 @@ +c79756fa2dfc28ac81fc9d23a14b17c656c3e560 \ No newline at end of file diff --git a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java 
b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java index db532f9a1c503..4ea23e415c994 100644 --- a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java +++ b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java @@ -50,6 +50,7 @@ * Unit tests for OpenSearchPolicy: these cannot run with security manager, * we don't allow messing with the policy */ +@SuppressWarnings("removal") public class OpenSearchPolicyUnitTests extends OpenSearchTestCase { /** * Test policy with null codesource. diff --git a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java index 56d6c72705a78..99c9ee7e96d01 100644 --- a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java +++ b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java @@ -41,6 +41,7 @@ public class SystemCallFilterTests extends OpenSearchTestCase { /** command to try to run in tests */ static final String EXECUTABLE = Constants.WINDOWS ? "calc" : "ls"; + @SuppressWarnings("removal") @Override public void setUp() throws Exception { super.setUp(); diff --git a/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java b/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java index 430df1f899708..04eae95f6fe12 100644 --- a/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder; /** Tests plugin manager security check */ +@SuppressWarnings("removal") public class PluginSecurityTests extends OpenSearchTestCase { /** Test that we can parse the set of permissions correctly for a simple policy */ diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/PluginInfoIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/PluginInfoIT.java new file mode 100644 index 0000000000000..d4e7017aab8c2 --- /dev/null +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/PluginInfoIT.java @@ -0,0 +1,27 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.upgrades; + +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.test.rest.yaml.ObjectPath; + +import java.util.Map; + +public class PluginInfoIT extends AbstractFullClusterRestartTestCase { + public void testPluginInfoSerialization() throws Exception { + // Ensure all nodes are able to come up, validate with GET _nodes. + Response response = client().performRequest(new Request("GET", "_nodes")); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + final Map<String, Object> nodeMap = objectPath.evaluate("nodes"); + // Any issue in PluginInfo serialization logic will result in connection failures + // and hence a reduced number of nodes.
+ assertEquals(2, nodeMap.keySet().size()); + } } diff --git a/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java b/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java index 73c546b80d431..c2f799d7d48d2 100644 --- a/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java +++ b/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java @@ -62,6 +62,7 @@ public void testSuccessfulStartupWithCustomConfig() throws Exception { }); } + @SuppressWarnings("removal") private List<String> readAllLines(Path logFile) { return AccessController.doPrivileged((PrivilegedAction<List<String>>) () -> { try { diff --git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/PluginInfoIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/PluginInfoIT.java new file mode 100644 index 0000000000000..47e454a7549cb --- /dev/null +++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/PluginInfoIT.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.backwards; + +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.opensearch.test.rest.yaml.ObjectPath; + +import java.util.Map; + +public class PluginInfoIT extends OpenSearchRestTestCase { + public void testPluginInfoSerialization() throws Exception { + // Ensure all nodes are able to come up, validate with GET _nodes. + Response response = client().performRequest(new Request("GET", "_nodes")); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + final Map<String, Object> nodeMap = objectPath.evaluate("nodes"); + assertEquals(4, nodeMap.keySet().size()); + } +} diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 3dff452be855f..777377f04e8b9 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -62,6 +62,7 @@ for (Version bwcVersion : BuildParams.bwcVersions.wireCompatible) { setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'http.content_type.required', 'true' + systemProperty 'opensearch.experimental.optimize_doc_id_lookup.fuzzy_set.enabled', 'true' } } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index f963f8d221bb5..8e8734b5d62b3 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -40,10 +40,10 @@ import org.opensearch.common.Booleans; import org.opensearch.common.io.Streams; import org.opensearch.common.settings.Settings; +import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.EngineConfig; import org.opensearch.indices.replication.common.ReplicationType; -import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.rest.yaml.ObjectPath; import java.io.IOException; @@ -262,7 +262,6 @@ public void testIndexing() throws Exception { * @throws Exception if index creation fails * @throws UnsupportedOperationException if
cluster type is unknown */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/7679") public void testIndexingWithSegRep() throws Exception { if (UPGRADE_FROM_VERSION.before(Version.V_2_4_0)) { logger.info("--> Skip test for version {} where segment replication feature is not available", UPGRADE_FROM_VERSION); @@ -345,6 +344,88 @@ public void testIndexingWithSegRep() throws Exception { } } + public void testIndexingWithFuzzyFilterPostings() throws Exception { + if (UPGRADE_FROM_VERSION.onOrBefore(Version.V_2_11_1)) { + logger.info("--> Skip test for version {} where fuzzy filter postings format feature is not available", UPGRADE_FROM_VERSION); + return; + } + final String indexName = "test-index-fuzzy-set"; + final int shardCount = 3; + final int replicaCount = 1; + logger.info("--> Case {}", CLUSTER_TYPE); + printClusterNodes(); + logger.info("--> _cat/shards before test execution \n{}", EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/shards?v")).getEntity())); + switch (CLUSTER_TYPE) { + case OLD: + Settings.Builder settings = Settings.builder() + .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), shardCount) + .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), replicaCount) + .put( + EngineConfig.INDEX_CODEC_SETTING.getKey(), + randomFrom(new ArrayList<>(CODECS) { + { + add(CodecService.LUCENE_DEFAULT_CODEC); + } + }) + ) + .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"); + createIndex(indexName, settings.build()); + waitForClusterHealthWithNoShardMigration(indexName, "green"); + bulk(indexName, "_OLD", 5); + break; + case MIXED: + waitForClusterHealthWithNoShardMigration(indexName, "yellow"); + break; + case UPGRADED: + Settings.Builder settingsBuilder = Settings.builder() + .put(IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING.getKey(), true); + updateIndexSettings(indexName, settingsBuilder); + waitForClusterHealthWithNoShardMigration(indexName, "green"); + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + + int expectedCount; + switch (CLUSTER_TYPE) { + case OLD: + expectedCount = 5; + break; + case MIXED: + if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) { + expectedCount = 5; + } else { + expectedCount = 10; + } + break; + case UPGRADED: + expectedCount = 15; + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + + assertCount(indexName, expectedCount); + + if (CLUSTER_TYPE != ClusterType.OLD) { + bulk(indexName, "_" + CLUSTER_TYPE, 5); + logger.info("--> Index one doc (to be deleted next) and verify doc count"); + Request toBeDeleted = new Request("PUT", "/" + indexName + "/_doc/to_be_deleted"); + toBeDeleted.addParameter("refresh", "true"); + toBeDeleted.setJsonEntity("{\"f1\": \"delete-me\"}"); + client().performRequest(toBeDeleted); + assertCount(indexName, expectedCount + 6); + + logger.info("--> Delete previously added doc and verify doc count"); + Request delete = new Request("DELETE", "/" + indexName + "/_doc/to_be_deleted"); + delete.addParameter("refresh", "true"); + client().performRequest(delete); + assertCount(indexName, expectedCount + 5); + + //forceMergeAndVerify(indexName, shardCount * (1 + replicaCount)); + } + } + public void testAutoIdWithOpTypeCreate() throws IOException { final String indexName = "auto_id_and_op_type_create_index"; StringBuilder b = new StringBuilder(); diff --git 
a/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java b/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java index d14c834405f32..8a6e5d62112c8 100644 --- a/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java +++ b/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java @@ -61,6 +61,7 @@ protected Matcher<String> nodeNameMatcher() { return equalTo(HOSTNAME); } + @SuppressWarnings("removal") @Override protected BufferedReader openReader(Path logFile) { return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> { diff --git a/release-notes/opensearch.release-notes-2.12.0.md b/release-notes/opensearch.release-notes-2.12.0.md new file mode 100644 index 0000000000000..1c9bef1f03fc1 --- /dev/null +++ b/release-notes/opensearch.release-notes-2.12.0.md @@ -0,0 +1,175 @@ +## 2024-02-09 Version 2.12.0 Release Notes + +## [2.12.0] +### Added +- [Admission control] Add Resource usage collector service and resource usage tracker ([#9890](https://github.com/opensearch-project/OpenSearch/pull/9890)) +- [Admission control] Add enhancements to FS stats to include read/write time, queue size and IO time ([#10541](https://github.com/opensearch-project/OpenSearch/pull/10541)) +- [Remote cluster state] Change file names for remote cluster state ([#10557](https://github.com/opensearch-project/OpenSearch/pull/10557)) +- [Search Pipelines] Add request-scoped state shared between processors (and three new processors) ([#9405](https://github.com/opensearch-project/OpenSearch/pull/9405)) +- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) +- [Remote Store] Add repository stats for remote store ([#10567](https://github.com/opensearch-project/OpenSearch/pull/10567)) +- [Remote cluster state] Upload global metadata in cluster state to remote store ([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404)) +- [Remote cluster state] Download functionality of global metadata from remote store ([#10535](https://github.com/opensearch-project/OpenSearch/pull/10535)) +- [Remote cluster state] Restore global metadata from remote store when local state is lost after quorum loss ([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404)) +- [Remote cluster state] Make index and global metadata upload timeout dynamic cluster settings ([#10814](https://github.com/opensearch-project/OpenSearch/pull/10814)) +- Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) +- Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) +- Support excluding fields in the `remove` ingest processor ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967), [#11983](https://github.com/opensearch-project/OpenSearch/pull/11983)) +- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275)) +- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753)) +- [Remote cluster state] Restore cluster state version during remote state auto restore
([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) +- Update the indexRandom function to create more segments for concurrent search tests ([#10247](https://github.com/opensearch-project/OpenSearch/pull/10247)) +- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248)) +- Introduce ConcurrentQueryProfiler to profile query using concurrent segment search path and support concurrency during rewrite and create weight ([#10352](https://github.com/opensearch-project/OpenSearch/pull/10352)) +- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679)) +- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618)) +- [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672)) +- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210)) +- Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024)) +- Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650)) +- Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040)) +- Allowing pipeline processors to access index mapping info by passing ingest service ref as part of the processor factory parameters ([#10307](https://github.com/opensearch-project/OpenSearch/pull/10307)) +- Add experimental SIMD implementation of B-tree to round down dates ([#11194](https://github.com/opensearch-project/OpenSearch/issues/11194)) +- Make number of segment metadata files in remote segment store configurable ([#11329](https://github.com/opensearch-project/OpenSearch/pull/11329)) +- Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317)) +- Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069)) +- [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175)) +- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378)) +- Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) +- Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations ([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) +- Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) +- Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870)) +- Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- Bump OpenTelemetry
from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) +- Add remove_by_pattern ingest processor ([#11920](https://github.com/opensearch-project/OpenSearch/pull/11920)) +- Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563)) +- Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([#11876](https://github.com/opensearch-project/OpenSearch/pull/11876)) +- New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465)) +- Add community_id ingest processor ([#12121](https://github.com/opensearch-project/OpenSearch/pull/12121)) +- Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) +- Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074)) +- Enable Fuzzy codec for doc id fields using a bloom filter ([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022)) +- [Metrics Framework] Adds support for Histogram metric ([#12062](https://github.com/opensearch-project/OpenSearch/pull/12062)) +- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626)) + +### Dependencies +- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) +- Bump Lucene from 9.7.0 to 9.8.0 ([#10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) +- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796)) +- Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) +- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.8.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630), [#12167](https://github.com/opensearch-project/OpenSearch/pull/12167)) +- Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506)) +- Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508)) +- Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639)) +- Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297)) +- Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635)) +- Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637),
[#11632](https://github.com/opensearch-project/OpenSearch/pull/11632)) +- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.1 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270), [#11695](https://github.com/opensearch-project/OpenSearch/pull/11695)) +- Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504)) +- Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171)) +- Bump `actions/github-script` from 6 to 7.0.1 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271), [#12166](https://github.com/opensearch-project/OpenSearch/pull/12166)) +- Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273)) +- Bump `netty` from 4.1.100.Final to 4.1.106.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775), [#12034](https://github.com/opensearch-project/OpenSearch/pull/12034)) +- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692)) +- Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861)) +- Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344)) +- Bump `reactor-netty-core` from 1.1.12 to 1.1.15 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350)), ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042)) +- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.2 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629), [#12056](https://github.com/opensearch-project/OpenSearch/pull/12056)) +- Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447)) +- Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450)) +- Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445)) +- Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448)) +- Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521)) +- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539)) +- Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555)) +- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556)) +- Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557)) +- Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671)) +- Bump
`commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996)) +- Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) +- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) +- Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) +- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) +- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) +- Bump `Lucene` from 9.8.0 to 9.9.2 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)), ([#12063](https://github.com/opensearch-project/OpenSearch/pull/12063)) +- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963)) +- Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.42.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794), [#12165](https://github.com/opensearch-project/OpenSearch/pull/12165)) +- Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960)) +- Bump `com.diffplug.spotless` from 6.23.2 to 6.25.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962), [#12055](https://github.com/opensearch-project/OpenSearch/pull/12055)) +- Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961)) +- Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042)) +- Bump `org.apache.shiro:shiro-core` from 1.11.0 to 1.13.0 ([#12200](https://github.com/opensearch-project/OpenSearch/pull/12200)) +- Bump `com.google.http-client:google-http-client-jackson2` from 1.43.3 to 1.44.1 ([#12059](https://github.com/opensearch-project/OpenSearch/pull/12059)) +- Bump `peter-evans/create-issue-from-file` from 4 to 5 ([#12057](https://github.com/opensearch-project/OpenSearch/pull/12057)) +- Bump `org.gradle.test-retry` from 1.5.4 to 1.5.8 ([#12168](https://github.com/opensearch-project/OpenSearch/pull/12168)) +- Bump `org.apache.kerby:kerb-admin` from 1.0.1 to 2.0.3 ([#12194](https://github.com/opensearch-project/OpenSearch/pull/12194)) + +### Changed +- Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) +- Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036)) +- Add the means to extract the contextual properties from HttpChannel, TcpChannel and TransportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562)), ([#11751](https://github.com/opensearch-project/OpenSearch/pull/11751)) +- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107)) +- Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread
([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598)) +- [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524)) +- [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642)) +- Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395)) +- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558)) +- Add instrumentation for indexing in transport bulk action and transport shard bulk action. ([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273)) +- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895), [#11607](https://github.com/opensearch-project/OpenSearch/pull/11607)) +- Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057)) +- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023)) +- Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083)) +- Apply the fast filter optimization to composite aggregation of date histogram source ([#11505](https://github.com/opensearch-project/OpenSearch/pull/11505)) +- Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087)) +- Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528)) +- Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209)) +- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312)) +- Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308)) +- Interpret byte array as primitive using VarHandles, as sketched below ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362)) +- Allow composite aggregation to run under a parent filter aggregation ([#11499](https://github.com/opensearch-project/OpenSearch/pull/11499)) +- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) +- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) +- Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) +- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582)) +- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732)) +- Added support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526)) +- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings 
([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890)) +- Extract cluster management for integration tests out of OpenSearchIntegTestCase into a JUnit test rule ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)), ([#12000](https://github.com/opensearch-project/OpenSearch/pull/12000)) +- Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968)) +- Updates IpField to be searchable when only `doc_values` are enabled ([#11508](https://github.com/opensearch-project/OpenSearch/pull/11508)) +- [Query Insights] Add a Query Insights framework that currently supports retrieving the most time-consuming queries within the last configured time window ([#11903](https://github.com/opensearch-project/OpenSearch/pull/11903)) +- [Query Insights] Implement Top N Queries feature to collect and gather information about high latency queries in a window ([#11904](https://github.com/opensearch-project/OpenSearch/pull/11904)) +- Add override support for sampling based on action ([#9621](https://github.com/opensearch-project/OpenSearch/issues/9621)) +- Added custom sampler support based on transport action in request ([#9621](https://github.com/opensearch-project/OpenSearch/issues/9621)) + +### Removed +- Remove deprecated classes for Rounding ([#10956](https://github.com/opensearch-project/OpenSearch/issues/10956)) + +### Fixed +- Fix failure in dissect ingest processor parsing empty brackets ([#9225](https://github.com/opensearch-project/OpenSearch/pull/9255)) +- Fix `class_cast_exception` when passing int to `_version` and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101)) +- Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([#10370](https://github.com/opensearch-project/OpenSearch/pull/10370)) +- Fix some test methods in SimulatePipelineRequestParsingTests that never ran, and fix a test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496)) +- Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737)) +- Delegate CachingWeightWrapper#count to the internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543)) +- Fix per-request latency not tracking the last search phase ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934)) +- Fix SuggestSearch.testSkipDuplicates by forcing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068)) +- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873)) +- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249)) +- Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316)) +- Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369)) +- Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) +- Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425)) 
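As context for the VarHandles entry in the Changed section above ([#11362]): the JDK lets you read a primitive directly out of a `byte[]` through `MethodHandles.byteArrayViewVarHandle`, instead of assembling the value byte by byte with shifts and ORs. The following is a minimal sketch of that JDK API only, not the code from the PR; the class name and the big-endian byte-order choice are illustrative assumptions.

```java
import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;
import java.nio.ByteOrder;

// Hypothetical illustration class, not taken from the OpenSearch codebase.
public final class VarHandleBytesSketch {

    // A VarHandle that views a byte[] as a sequence of big-endian longs.
    private static final VarHandle LONG_VIEW =
        MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.BIG_ENDIAN);

    // Single bounds-checked read the JIT can optimize, replacing eight
    // byte loads plus shift-and-or assembly.
    static long readLong(byte[] buf, int offset) {
        return (long) LONG_VIEW.get(buf, offset);
    }

    public static void main(String[] args) {
        byte[] buf = new byte[8];
        buf[7] = 42;                           // big-endian encoding of 42L
        System.out.println(readLong(buf, 0));  // prints 42
    }
}
```
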
+- Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) +- Fix noop_update_total metric in indexing stats not being updated by the bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485), [#11917](https://github.com/opensearch-project/OpenSearch/pull/11917)) +- Fix stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) +- Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) +- Fix automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) +- Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711)) +- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490)) +- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815)) +- Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953)) +- Prevent setting remote_snapshot store type on index creation ([#11867](https://github.com/opensearch-project/OpenSearch/pull/11867)) +- [BUG] Fix remote shards balancer when filtering throttled nodes ([#11724](https://github.com/opensearch-project/OpenSearch/pull/11724)) +- Add advance(int) for numeric values in order to allow point based optimization to kick in ([#12089](https://github.com/opensearch-project/OpenSearch/pull/12089)) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index e0fbeeb83ffc4..e78d49a67a98a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -229,6 +229,11 @@ "search_pipeline": { "type": "string", "description": "The search pipeline to use to execute this request" + }, + "include_named_queries_score":{ + "type": "boolean", + "description":"Indicates whether hit.matched_queries should be rendered as a map that includes the name of the matched query associated with its score (true) or as an array containing the names of the matched queries (false)", + "default":false } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml index b572ed9e62ea9..29fbf55417961 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml @@ -1,7 +1,7 @@ "Help": - skip: version: " - 2.11.99" - reason: deleted docs added in 2.12.0 + reason: deleted docs and concurrent search are added in 2.12.0 features: node_selector - do: cat.shards: @@ -66,6 +66,10 @@ search.query_current .+ \n search.query_time .+ \n search.query_total .+ \n + search.concurrent_query_current .+ \n + search.concurrent_query_time .+ \n + search.concurrent_query_total .+ \n + search.concurrent_avg_slice_count .+ \n search.scroll_current .+ \n search.scroll_time .+ \n search.scroll_total .+ \n diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml index 00ec838489f63..39c8040993f2a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml @@ -11,6 +11,33 @@ / #node_name name active queue rejected ^ (\S+ \s+ \S+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ + - do: + cat.thread_pool: + thread_pool_patterns: search,search_throttled,generic + h: name,total_wait_time,twt + v: true + + - match: + $body: | + /^ name \s+ total_wait_time \s+ twt \n + (generic \s+ -1 \s+ -1 \n + search \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n + search_throttled \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n)+ $/ + +--- +"Test cat thread_pool total_wait_time output with concurrent search thread_pool": + - skip: + version: " - 2.11.99" + reason: index_search thread_pool was introduced in V_2.12.0 + + - do: + cat.thread_pool: {} + + - match: + $body: | + / #node_name name active queue rejected + ^ (\S+ \s+ \S+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ + - do: cat.thread_pool: thread_pool_patterns: search,search_throttled,index_searcher,generic @@ -21,6 +48,7 @@ $body: | /^ name \s+ total_wait_time \s+ twt \n (generic \s+ -1 \s+ -1 \n + index_searcher \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n search \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n search_throttled \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n)+ $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml index dd8c2a2deb721..2192873623715 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml @@ -23,8 +23,8 @@ setup: --- "Test noop_update_total metric can be updated by both update API and bulk API": - skip: - version: " - 2.99.99" #TODO: change to 2.11.99 after the PR is backported to 2.x branch - reason: "fixed in 3.0" + version: " - 2.11.99" + reason: "fixed in 2.12.0" - do: update: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 09278690f5d05..2808be8cd7045 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -509,6 +509,134 @@ setup: - match: { aggregations.1.2.buckets.1.key.nested: 1000 } - match: { aggregations.1.2.buckets.1.doc_count: 1 } +--- +"Composite aggregation with filtered nested parent": + - skip: + version: " - 2.99.99" + reason: fixed in 3.0.0 + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + 1: + nested: + path: nested + aggs: + 2: + filter: + range: + nested.nested_long: + gt: 0 + lt: 100 + aggs: + 3: + composite: + sources: [ + "nested": { + "terms": { + "field": "nested.nested_long" + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.1.2.3.buckets: 2 } + - match: { aggregations.1.2.3.buckets.0.key.nested: 10 } + - match: { aggregations.1.2.3.buckets.0.doc_count: 2 } + - match: { aggregations.1.2.3.buckets.1.key.nested: 20 } + - match: { aggregations.1.2.3.buckets.1.doc_count: 2 } + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + 1: + nested: + path: nested + aggs: + 2: + filter: + range: + 
nested.nested_long: + gt: 0 + lt: 100 + aggs: + 3: + composite: + after: { "nested": 10 } + sources: [ + "nested": { + "terms": { + "field": "nested.nested_long" + } + } + ] + - match: {hits.total: 6} + - length: { aggregations.1.2.3.buckets: 1 } + - match: { aggregations.1.2.3.buckets.0.key.nested: 20 } + - match: { aggregations.1.2.3.buckets.0.doc_count: 2 } + +--- +"Composite aggregation with filtered reverse nested parent": + - skip: + version: " - 2.99.99" + reason: fixed in 3.0.0 + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + 1: + nested: + path: nested + aggs: + 2: + filter: + range: + nested.nested_long: + gt: 0 + lt: 20 + aggs: + 3: + reverse_nested: {} + aggs: + 4: + composite: + sources: [ + { + "long": { + "terms": { + "field": "long" + } + } + }, + { + "kw": { + "terms": { + "field": "keyword" + } + } + } + ] + - match: {hits.total: 6} + - length: { aggregations.1.2.3.4.buckets: 4 } + - match: { aggregations.1.2.3.4.buckets.0.key.long: 0 } + - match: { aggregations.1.2.3.4.buckets.0.key.kw: "bar" } + - match: { aggregations.1.2.3.4.buckets.0.doc_count: 1 } + - match: { aggregations.1.2.3.4.buckets.1.key.long: 10 } + - match: { aggregations.1.2.3.4.buckets.1.key.kw: "foo" } + - match: { aggregations.1.2.3.4.buckets.1.doc_count: 1 } + - match: { aggregations.1.2.3.4.buckets.2.key.long: 20 } + - match: { aggregations.1.2.3.4.buckets.2.key.kw: "foo" } + - match: { aggregations.1.2.3.4.buckets.2.doc_count: 1 } + - match: { aggregations.1.2.3.4.buckets.3.key.long: 100 } + - match: { aggregations.1.2.3.4.buckets.3.key.kw: "bar" } + - match: { aggregations.1.2.3.4.buckets.3.doc_count: 1 } + --- "Composite aggregation with unmapped field": - skip: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml index c7b00d5fbbef2..d5ece1719dc48 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml @@ -1,8 +1,3 @@ -setup: - - skip: - features: [ "headers" ] - version: " - 2.11.99" - reason: "searching with only doc_values was added in 2.12.0" --- "search on fields with both index and doc_values enabled": - do: @@ -47,6 +42,10 @@ setup: type: unsigned_long index: true doc_values: true + ip_field: + type: ip + index: true + doc_values: true - do: bulk: @@ -54,11 +53,11 @@ setup: refresh: true body: - '{"index": {"_index": "test-iodvq", "_id": "1" }}' - - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }' + - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800, "ip_field": "192.168.0.1" }' - '{ "index": { "_index": "test-iodvq", "_id": "2" }}' - - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }' + - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801, "ip_field": "192.168.0.2" }' - '{ "index": { "_index": "test-iodvq", 
"_id": "3" } }' - - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }' + - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802, "ip_field": "192.168.0.3" }' - do: search: @@ -162,7 +161,6 @@ setup: - match: { hits.total: 1 } - - do: search: rest_total_hits_as_int: true @@ -174,6 +172,16 @@ setup: - match: { hits.total: 1 } + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + ip_field: "192.168.0.1" + + - match: {hits.total: 1} - do: search: @@ -186,7 +194,6 @@ setup: - match: { hits.total: 2 } - - do: search: rest_total_hits_as_int: true @@ -264,6 +271,17 @@ setup: - match: { hits.total: 2 } + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + ip_field: ["192.168.0.1", "192.168.0.2"] + + - match: { hits.total: 2 } + - do: search: rest_total_hits_as_int: true @@ -384,6 +402,19 @@ setup: - match: { hits.total: 2 } + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: + ip_field: + gte: "192.168.0.1" + lte: "192.168.0.2" + + - match: { hits.total: 2 } + --- "search on fields with only index enabled": - do: @@ -428,6 +459,10 @@ setup: type: unsigned_long index: true doc_values: false + ip_field: + type: ip + index: true + doc_values: false - do: bulk: @@ -435,11 +470,11 @@ setup: refresh: true body: - '{"index": {"_index": "test-index", "_id": "1" }}' - - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }' + - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800, "ip_field": "192.168.0.1" }' - '{ "index": { "_index": "test-index", "_id": "2" }}' - - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }' + - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801, "ip_field": "192.168.0.2" }' - '{ "index": { "_index": "test-index", "_id": "3" } }' - - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }' + - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802, "ip_field": "192.168.0.3" }' - do: search: @@ -465,7 +500,6 @@ setup: - match: { hits.total: 2 } - - do: search: rest_total_hits_as_int: true @@ -555,6 +589,16 @@ setup: - match: { hits.total: 1 } + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + ip_field: "192.168.0.1" + + - match: {hits.total: 1} - do: search: @@ -567,7 +611,6 @@ setup: - match: { hits.total: 2 } - - do: search: rest_total_hits_as_int: true @@ -645,6 +688,17 @@ setup: - match: { hits.total: 2 } + - do: + search: + 
rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + ip_field: ["192.168.0.1", "192.168.0.2"] + + - match: { hits.total: 2 } + - do: search: rest_total_hits_as_int: true @@ -765,8 +819,24 @@ setup: - match: { hits.total: 2 } + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: + ip_field: + gte: "192.168.0.1" + lte: "192.168.0.2" + + - match: { hits.total: 2 } --- "search on fields with only doc_values enabled": + - skip: + features: [ "headers" ] + version: " - 2.99.99" + reason: "searching with only doc_values was added in 3.0.0" - do: indices.create: index: test-doc-values @@ -809,6 +879,10 @@ setup: type: unsigned_long index: false doc_values: true + ip_field: + type: ip + index: false + doc_values: true - do: bulk: @@ -816,11 +890,11 @@ setup: refresh: true body: - '{"index": {"_index": "test-doc-values", "_id": "1" }}' - - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }' + - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800, "ip_field": "192.168.0.1" }' - '{ "index": { "_index": "test-doc-values", "_id": "2" }}' - - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }' + - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801, "ip_field": "192.168.0.2" }' - '{ "index": { "_index": "test-doc-values", "_id": "3" } }' - - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }' + - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802, "ip_field": "192.168.0.3" }' - do: search: @@ -846,7 +920,6 @@ setup: - match: { hits.total: 2 } - - do: search: rest_total_hits_as_int: true @@ -924,7 +997,6 @@ setup: - match: { hits.total: 1 } - - do: search: rest_total_hits_as_int: true @@ -936,6 +1008,16 @@ setup: - match: { hits.total: 1 } + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + ip_field: "192.168.0.3" + + - match: { hits.total: 1 } - do: search: @@ -948,7 +1030,6 @@ setup: - match: { hits.total: 2 } - - do: search: rest_total_hits_as_int: true @@ -1145,3 +1226,16 @@ setup: } - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: + ip_field: + gte: "192.168.0.1" + lte: "192.168.0.2" + + - match: { hits.total: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml new file mode 100644 index 0000000000000..450e9c0a0a7bb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml @@ -0,0 +1,103 @@ +setup: + - skip: + version: " - 2.99.99" + reason: "implemented for versions 3.0.0 and above" + +--- +"matched queries": 
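# The two cases below exercise the `_name` / matched_queries behavior surfaced
# by the new `include_named_queries_score` parameter added to search.json
# earlier in this diff. By default, hits.hits.N.matched_queries is an array of
# matched query names; with include_named_queries_score=true it is rendered as
# a map from each matched query name to the score that query contributed
# (match_field_2 carries boost: 10, hence the expected score of 10 below).
# The bulk bodies index into `test_1` while the setup creates `test`,
# apparently relying on index auto-creation.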
+ - do: + indices.create: + index: test + + - do: + bulk: + refresh: true + body: + - '{ "index" : { "_index" : "test_1", "_id" : "1" } }' + - '{"field" : 1 }' + - '{ "index" : { "_index" : "test_1", "_id" : "2" } }' + - '{"field" : [1, 2] }' + + - do: + search: + index: test_1 + body: + query: + bool: { + should: [ + { + match: { + field: { + query: 1, + _name: match_field_1 + } + } + }, + { + match: { + field: { + query: 2, + _name: match_field_2, + boost: 10 + } + } + } + ] + } + + - match: {hits.total.value: 2} + - length: {hits.hits.0.matched_queries: 2} + - match: {hits.hits.0.matched_queries: [ "match_field_1", "match_field_2" ]} + - length: {hits.hits.1.matched_queries: 1} + - match: {hits.hits.1.matched_queries: [ "match_field_1" ]} + +--- + +"matched queries with scores": + - do: + indices.create: + index: test + + - do: + bulk: + refresh: true + body: + - '{ "index" : { "_index" : "test_1", "_id" : "1" } }' + - '{"field" : 1 }' + - '{ "index" : { "_index" : "test_1", "_id" : "2" } }' + - '{"field" : [1, 2] }' + + - do: + search: + include_named_queries_score: true + index: test_1 + body: + query: + bool: { + should: [ + { + match: { + field: { + query: 1, + _name: match_field_1 + } + } + }, + { + match: { + field: { + query: 2, + _name: match_field_2, + boost: 10 + } + } + } + ] + } + + - match: { hits.total.value: 2 } + - length: { hits.hits.0.matched_queries: 2 } + - match: { hits.hits.0.matched_queries.match_field_1: 1 } + - match: { hits.hits.0.matched_queries.match_field_2: 10 } + - length: { hits.hits.1.matched_queries: 1 } + - match: { hits.hits.1.matched_queries.match_field_1: 1 } diff --git a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 deleted file mode 100644 index c9e6120da7497..0000000000000 --- a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24c8401b530308f9568eb7b408c2029c63f564c6 \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-9.9.2.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..6eefe1b002fde --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.9.2.jar.sha1 @@ -0,0 +1 @@ +2a4712ee9d87e0f9942a81195dbd5223dbeaa541 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 deleted file mode 100644 index 69ecf6aa68200..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -11c46007366bb037be7d271ab0a5849b1d544662 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.9.2.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..377acc22a1d6b --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.9.2.jar.sha1 @@ -0,0 +1 @@ +b53707366cf2891bb301f85a44c5108fc2d5d1b5 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.9.1.jar.sha1 b/server/licenses/lucene-core-9.9.1.jar.sha1 deleted file mode 100644 index ae596196d9e6a..0000000000000 --- a/server/licenses/lucene-core-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -55249fa9a0ed321adcf8283c6f3b649a6812b0a9 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.9.2.jar.sha1 b/server/licenses/lucene-core-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..2d03d69369b9f --- /dev/null +++ b/server/licenses/lucene-core-9.9.2.jar.sha1 @@ -0,0 +1 @@ 
+7699f80220fc80b08413902560904623b88beb9f \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.9.1.jar.sha1 b/server/licenses/lucene-grouping-9.9.1.jar.sha1 deleted file mode 100644 index e7df056400661..0000000000000 --- a/server/licenses/lucene-grouping-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2f2785e17c5c823cc8f41a7ddb4647aaca8ee773 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.9.2.jar.sha1 b/server/licenses/lucene-grouping-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..1e321d751bff4 --- /dev/null +++ b/server/licenses/lucene-grouping-9.9.2.jar.sha1 @@ -0,0 +1 @@ +72b6c47fd80933d259859d38325e3e020c8e017b \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.9.1.jar.sha1 b/server/licenses/lucene-highlighter-9.9.1.jar.sha1 deleted file mode 100644 index 828c7294aa586..0000000000000 --- a/server/licenses/lucene-highlighter-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -30928513461bf79a5cb057e84da7d34a1e53227d \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.9.2.jar.sha1 b/server/licenses/lucene-highlighter-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..d5c2e2a5504f2 --- /dev/null +++ b/server/licenses/lucene-highlighter-9.9.2.jar.sha1 @@ -0,0 +1 @@ +0968d74d6794af65798819304f10ceb947080332 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.9.1.jar.sha1 b/server/licenses/lucene-join-9.9.1.jar.sha1 deleted file mode 100644 index 34b44ca8c6ad5..0000000000000 --- a/server/licenses/lucene-join-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b9c8cc99632280148f92b4c0a64111c482d5d0ac \ No newline at end of file diff --git a/server/licenses/lucene-join-9.9.2.jar.sha1 b/server/licenses/lucene-join-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..60da109fb5c83 --- /dev/null +++ b/server/licenses/lucene-join-9.9.2.jar.sha1 @@ -0,0 +1 @@ +902e4b65686e29c1489a7cdf43ed2ce3b7fe8ace \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.9.1.jar.sha1 b/server/licenses/lucene-memory-9.9.1.jar.sha1 deleted file mode 100644 index b75fba4c331e9..0000000000000 --- a/server/licenses/lucene-memory-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -49f820b1b321860fa42a4f7583e8ed8f77b9c1c2 \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.9.2.jar.sha1 b/server/licenses/lucene-memory-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..bebb36d7ed684 --- /dev/null +++ b/server/licenses/lucene-memory-9.9.2.jar.sha1 @@ -0,0 +1 @@ +08175beb4135c23e6918c822381cd51cd2e118a8 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.9.1.jar.sha1 b/server/licenses/lucene-misc-9.9.1.jar.sha1 deleted file mode 100644 index f1e1e056004e9..0000000000000 --- a/server/licenses/lucene-misc-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -db7c30217602dfcda394a4d0f0a9e68140d385a6 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.9.2.jar.sha1 b/server/licenses/lucene-misc-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..15f572ec715bf --- /dev/null +++ b/server/licenses/lucene-misc-9.9.2.jar.sha1 @@ -0,0 +1 @@ +39b015aa603db42b55f0833193bdfc75d38842ba \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.9.1.jar.sha1 b/server/licenses/lucene-queries-9.9.1.jar.sha1 deleted file mode 100644 index 888b9b4a05ec8..0000000000000 --- a/server/licenses/lucene-queries-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d157547bd24edc8e9d9d59c273107dc3ac5fde5e \ No newline at end of file diff --git 
a/server/licenses/lucene-queries-9.9.2.jar.sha1 b/server/licenses/lucene-queries-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..9a94e40b69e85 --- /dev/null +++ b/server/licenses/lucene-queries-9.9.2.jar.sha1 @@ -0,0 +1 @@ +671252594e15280818f56d4b369bc48c0ab00dac \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.9.1.jar.sha1 b/server/licenses/lucene-queryparser-9.9.1.jar.sha1 deleted file mode 100644 index 1ce8a069a0f4e..0000000000000 --- a/server/licenses/lucene-queryparser-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -12d844fe224f6f97c510ac20d68903ed7f626f6c \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.9.2.jar.sha1 b/server/licenses/lucene-queryparser-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..f23292999ae6a --- /dev/null +++ b/server/licenses/lucene-queryparser-9.9.2.jar.sha1 @@ -0,0 +1 @@ +7cf8dea63ab511ea9a322916198c3abd0402c8b2 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.9.1.jar.sha1 b/server/licenses/lucene-sandbox-9.9.1.jar.sha1 deleted file mode 100644 index 14fd86dadc404..0000000000000 --- a/server/licenses/lucene-sandbox-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -272e588fd3d8c0a401b28a1ac715f27044bf62ec \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.9.2.jar.sha1 b/server/licenses/lucene-sandbox-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..a30f00d72a689 --- /dev/null +++ b/server/licenses/lucene-sandbox-9.9.2.jar.sha1 @@ -0,0 +1 @@ +4e2bdc6a5b65dd15b78477cf2c49cf7b707ba757 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 deleted file mode 100644 index 0efd5a7595bfe..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e066432e7ab02b2a4914f989bcd8c44adbf340ad \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.9.2.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..dafeee401659a --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.9.2.jar.sha1 @@ -0,0 +1 @@ +2693d4d4d3b9a0bf5b632ccbb01132eade1c4e14 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 deleted file mode 100644 index 7f06466e4c721..0000000000000 --- a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fa54c9b962778e28ebc0efb9f75297781350361a \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.9.2.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..47a1c05187098 --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.9.2.jar.sha1 @@ -0,0 +1 @@ +61866a44eafce499ebbc20bcb75c8b8a60a5449b \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.9.1.jar.sha1 b/server/licenses/lucene-suggest-9.9.1.jar.sha1 deleted file mode 100644 index 06732480d1b6c..0000000000000 --- a/server/licenses/lucene-suggest-9.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9554de5b22ae7483b344b94a9a956960b7a5d49c \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.9.2.jar.sha1 b/server/licenses/lucene-suggest-9.9.2.jar.sha1 new file mode 100644 index 0000000000000..115983c932983 --- /dev/null +++ b/server/licenses/lucene-suggest-9.9.2.jar.sha1 @@ -0,0 +1 @@ +352ad5296f48a482e2b65bb7d5d6ba2977bb035e \ No newline at end of file diff --git 
a/server/licenses/reactor-core-3.5.11.jar.sha1 b/server/licenses/reactor-core-3.5.11.jar.sha1 deleted file mode 100644 index e5ffdbc8a7840..0000000000000 --- a/server/licenses/reactor-core-3.5.11.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -db2299757f562261eb775d13658e86ff06f91e8a \ No newline at end of file diff --git a/server/licenses/reactor-core-3.5.14.jar.sha1 b/server/licenses/reactor-core-3.5.14.jar.sha1 new file mode 100644 index 0000000000000..3b58e7a68bade --- /dev/null +++ b/server/licenses/reactor-core-3.5.14.jar.sha1 @@ -0,0 +1 @@ +6e0c97c2e78273a00fd4ed38016b19ff3c6de59e \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java index 5605e4872887a..c81d491719e4b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java @@ -449,6 +449,7 @@ public void onFailure(Exception e) { } } + @SuppressWarnings("removal") private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception { final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); try { diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java index c4dcedcc722cf..bdb36b62ada21 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java @@ -52,7 +52,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.set.Sets; @@ -69,7 +68,7 @@ import org.opensearch.tasks.TaskInfo; import org.opensearch.tasks.TaskManager; import org.opensearch.test.InternalTestCluster; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportResponseHandler; @@ -99,7 +98,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -public class CancellableTasksIT extends ParameterizedOpenSearchIntegTestCase { +public class CancellableTasksIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static int idGenerator = 0; static final Map<TestRequest, CountDownLatch> beforeSendLatches = ConcurrentCollections.newConcurrentMap(); @@ -107,8 +106,8 @@ public class CancellableTasksIT extends ParameterizedOpenSearchIntegTestCase { static final Map<TestRequest, CountDownLatch> beforeExecuteLatches = ConcurrentCollections.newConcurrentMap(); static final Map<TestRequest, CountDownLatch> completedLatches = ConcurrentCollections.newConcurrentMap(); - public CancellableTasksIT(Settings dynamicSettings) { - super(dynamicSettings); + public CancellableTasksIT(Settings staticSettings) { + super(staticSettings); } 
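This constructor change recurs across the test files below: `ParameterizedOpenSearchIntegTestCase` plus a `featureFlagSettings()` override is replaced by `ParameterizedStaticSettingsOpenSearchIntegTestCase`, which receives the parameterized settings as static node settings. A minimal sketch of the migrated pattern follows; the test class name is hypothetical, while the base class, annotation, and setting are the ones used in this diff.

```java
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.opensearch.common.settings.Settings;
import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

import java.util.Arrays;
import java.util.Collection;

import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;

// Hypothetical example class; the pattern mirrors CancellableTasksIT above.
public class ExampleConcurrentSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

    // The parameterized settings arrive through the constructor as static node
    // settings, replacing the removed featureFlagSettings() override.
    public ExampleConcurrentSearchIT(Settings staticSettings) {
        super(staticSettings);
    }

    // Runs every test method once with concurrent segment search disabled and
    // once with it enabled.
    @ParametersFactory
    public static Collection<Object[]> parameters() {
        return Arrays.asList(
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
        );
    }
}
```

The net effect, as far as this diff shows, is that concurrent segment search is toggled through the regular `SearchService` cluster setting rather than the `FeatureFlags.CONCURRENT_SEGMENT_SEARCH` flag, so both variants run without a feature-flag build.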
@ParametersFactory @@ -119,11 +118,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Before public void resetTestStates() { idGenerator = 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java index e6fd9139d45f2..8b3c40c43e2d2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java @@ -15,9 +15,9 @@ import org.opensearch.common.settings.FeatureFlagSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.tasks.resourcetracker.ThreadResourceInfo; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchService; import org.opensearch.tasks.TaskInfo; import org.hamcrest.MatcherAssert; @@ -44,6 +44,7 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(super.nodeSettings(nodeOrdinal)) .put("thread_pool.index_searcher.size", INDEX_SEARCHER_THREADS) .put("thread_pool.index_searcher.queue_size", INDEX_SEARCHER_THREADS) + .put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true) .build(); } @@ -66,7 +67,6 @@ protected Settings featureFlagSettings() { for (Setting builtInFlag : FeatureFlagSettings.BUILT_IN_FEATURE_FLAGS) { featureSettings.put(builtInFlag.getKey(), builtInFlag.getDefaultRaw(Settings.EMPTY)); } - featureSettings.put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true); return featureSettings.build(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java index a081110e6c5a1..f50e8fd0a38cf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java @@ -50,6 +50,8 @@ import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase; import org.opensearch.test.VersionUtils; +import java.util.concurrent.ExecutionException; + import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; @@ -130,4 +132,61 @@ public void testCreateCloneIndex() { } + public void testCreateCloneIndexFailure() throws ExecutionException, InterruptedException { + Version version = VersionUtils.randomIndexCompatibleVersion(random()); + int numPrimaryShards = 1; + prepareCreate("source").setSettings( + Settings.builder().put(indexSettings()).put("number_of_shards", numPrimaryShards).put("index.version.created", version) + ).get(); + final int docs = 2; + for (int i = 0; i < docs; i++) { + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", MediaTypeRegistry.JSON).get(); + } + internalCluster().ensureAtLeastNumDataNodes(2); + // ensure all shards are allocated otherwise the ensure green below 
might not succeed since we require the merge node + // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due + // to the require._name below. + ensureGreen(); + // relocate all shards to one node such that we can merge it. + client().admin().indices().prepareUpdateSettings("source").setSettings(Settings.builder().put("index.blocks.write", true)).get(); + ensureGreen(); + + // disable rebalancing to be able to capture the right stats. balancing can move the target primary + // making it hard to pin point the source shards. + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none")) + .get(); + try { + setFailRate(REPOSITORY_NAME, 100); + + client().admin() + .indices() + .prepareResizeIndex("source", "target") + .setResizeType(ResizeType.CLONE) + .setWaitForActiveShards(0) + .setSettings(Settings.builder().put("index.number_of_replicas", 0).putNull("index.blocks.write").build()) + .get(); + + Thread.sleep(2000); + ensureYellow("target"); + + } catch (ExecutionException | InterruptedException e) { + throw new RuntimeException(e); + } finally { + setFailRate(REPOSITORY_NAME, 0); + ensureGreen(); + // clean up + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), (String) null) + ) + .get(); + } + + } + } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java index 7bd1467933e00..280f574b1baf9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java @@ -74,8 +74,8 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { - public GetTermVectorsIT(Settings dynamicSettings) { - super(dynamicSettings); + public GetTermVectorsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java index 7c6c47c682281..3fc3235701f17 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java @@ -52,8 +52,8 @@ import static org.hamcrest.Matchers.nullValue; public class MultiTermVectorsIT extends AbstractTermVectorsTestCase { - public MultiTermVectorsIT(Settings dynamicSettings) { - super(dynamicSettings); + public MultiTermVectorsIT(Settings staticSettings) { + super(staticSettings); } public void testDuelESLucene() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java new file mode 100644 index 0000000000000..2b6a5b4ee6867 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java @@ -0,0 +1,77 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the 
Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gateway; + +import org.opensearch.action.admin.cluster.state.ClusterStateRequest; +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.index.shard.ShardPath; +import org.opensearch.indices.store.ShardAttributes; + +import java.io.IOException; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.opensearch.test.OpenSearchIntegTestCase.client; +import static org.opensearch.test.OpenSearchIntegTestCase.internalCluster; +import static org.opensearch.test.OpenSearchIntegTestCase.resolveIndex; + +public class GatewayRecoveryTestUtils { + + public static DiscoveryNode[] getDiscoveryNodes() throws ExecutionException, InterruptedException { + final ClusterStateRequest clusterStateRequest = new ClusterStateRequest(); + clusterStateRequest.local(false); + clusterStateRequest.clear().nodes(true).routingTable(true).indices("*"); + ClusterStateResponse clusterStateResponse = client().admin().cluster().state(clusterStateRequest).get(); + final List<DiscoveryNode> nodes = new LinkedList<>(clusterStateResponse.getState().nodes().getDataNodes().values()); + DiscoveryNode[] disNodesArr = new DiscoveryNode[nodes.size()]; + nodes.toArray(disNodesArr); + return disNodesArr; + } + + public static Map<ShardId, ShardAttributes> prepareRequestMap(String[] indices, int primaryShardCount) { + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = new HashMap<>(); + for (String indexName : indices) { + final Index index = resolveIndex(indexName); + final String customDataPath = IndexMetadata.INDEX_DATA_PATH_SETTING.get( + client().admin().indices().prepareGetSettings(indexName).get().getIndexToSettings().get(indexName) + ); + for (int shardIdNum = 0; shardIdNum < primaryShardCount; shardIdNum++) { + final ShardId shardId = new ShardId(index, shardIdNum); + shardIdShardAttributesMap.put(shardId, new ShardAttributes(shardId, customDataPath)); + } + } + return shardIdShardAttributesMap; + } + + public static void corruptShard(String nodeName, ShardId shardId) throws IOException, InterruptedException { + for (Path path : internalCluster().getInstance(NodeEnvironment.class, nodeName).availableShardPaths(shardId)) { + final Path indexPath = path.resolve(ShardPath.INDEX_FOLDER_NAME); + if (Files.exists(indexPath)) { // multi data path might only have one path in use + try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) { + for (Path item : stream) { + if (item.getFileName().toString().startsWith("segments_")) { + Files.delete(item); + } + } + } + } + } + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 229cd7bffad2f..9da1336642a64 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -36,6 +36,8 @@ import 
org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.opensearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.opensearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsGroup; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.opensearch.action.admin.indices.recovery.RecoveryResponse; import org.opensearch.action.admin.indices.stats.IndexStats; import org.opensearch.action.admin.indices.stats.ShardStats; @@ -60,6 +62,7 @@ import org.opensearch.indices.IndicesService; import org.opensearch.indices.recovery.RecoveryState; import org.opensearch.indices.replication.common.ReplicationLuceneIndex; +import org.opensearch.indices.store.ShardAttributes; import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.InternalTestCluster.RestartCallback; @@ -85,6 +88,9 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.gateway.GatewayRecoveryTestUtils.corruptShard; +import static org.opensearch.gateway.GatewayRecoveryTestUtils.getDiscoveryNodes; +import static org.opensearch.gateway.GatewayRecoveryTestUtils.prepareRequestMap; import static org.opensearch.gateway.GatewayService.RECOVER_AFTER_NODES_SETTING; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.index.query.QueryBuilders.termQuery; @@ -734,4 +740,97 @@ public void testMessyElectionsStillMakeClusterGoGreen() throws Exception { internalCluster().fullRestart(); ensureGreen("test"); } + + public void testSingleShardFetchUsingBatchAction() { + String indexName = "test"; + int numOfShards = 1; + prepareIndex(indexName, numOfShards); + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = prepareRequestMap(new String[] { indexName }, numOfShards); + + ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName).get(); + + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch response; + response = ActionTestUtils.executeBlocking( + internalCluster().getInstance(TransportNodesListGatewayStartedShardsBatch.class), + new TransportNodesListGatewayStartedShardsBatch.Request(searchShardsResponse.getNodes(), shardIdShardAttributesMap) + ); + final Index index = resolveIndex(indexName); + final ShardId shardId = new ShardId(index, 0); + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards = response.getNodesMap() + .get(searchShardsResponse.getNodes()[0].getId()) + .getNodeGatewayStartedShardsBatch() + .get(shardId); + assertNodeGatewayStartedShardsHappyCase(nodeGatewayStartedShards); + } + + public void testShardFetchMultiNodeMultiIndexesUsingBatchAction() { + // start node + internalCluster().startNode(); + String indexName1 = "test1"; + String indexName2 = "test2"; + int numShards = internalCluster().numDataNodes(); + // assign one primary shard each to the data nodes + prepareIndex(indexName1, numShards); + prepareIndex(indexName2, numShards); + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = prepareRequestMap(new String[] { indexName1, indexName2 }, numShards); + ClusterSearchShardsResponse 
searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName1, indexName2).get(); + assertEquals(internalCluster().numDataNodes(), searchShardsResponse.getNodes().length); + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch response; + response = ActionTestUtils.executeBlocking( + internalCluster().getInstance(TransportNodesListGatewayStartedShardsBatch.class), + new TransportNodesListGatewayStartedShardsBatch.Request(searchShardsResponse.getNodes(), shardIdShardAttributesMap) + ); + for (ClusterSearchShardsGroup clusterSearchShardsGroup : searchShardsResponse.getGroups()) { + ShardId shardId = clusterSearchShardsGroup.getShardId(); + assertEquals(1, clusterSearchShardsGroup.getShards().length); + String nodeId = clusterSearchShardsGroup.getShards()[0].currentNodeId(); + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards = response.getNodesMap() + .get(nodeId) + .getNodeGatewayStartedShardsBatch() + .get(shardId); + assertNodeGatewayStartedShardsHappyCase(nodeGatewayStartedShards); + } + } + + public void testShardFetchCorruptedShardsUsingBatchAction() throws Exception { + String indexName = "test"; + int numOfShards = 1; + prepareIndex(indexName, numOfShards); + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = prepareRequestMap(new String[] { indexName }, numOfShards); + ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName).get(); + final Index index = resolveIndex(indexName); + final ShardId shardId = new ShardId(index, 0); + corruptShard(searchShardsResponse.getNodes()[0].getName(), shardId); + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch response; + internalCluster().restartNode(searchShardsResponse.getNodes()[0].getName()); + response = ActionTestUtils.executeBlocking( + internalCluster().getInstance(TransportNodesListGatewayStartedShardsBatch.class), + new TransportNodesListGatewayStartedShardsBatch.Request(getDiscoveryNodes(), shardIdShardAttributesMap) + ); + DiscoveryNode[] discoveryNodes = getDiscoveryNodes(); + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards = response.getNodesMap() + .get(discoveryNodes[0].getId()) + .getNodeGatewayStartedShardsBatch() + .get(shardId); + assertNotNull(nodeGatewayStartedShards.storeException()); + assertNotNull(nodeGatewayStartedShards.allocationId()); + assertTrue(nodeGatewayStartedShards.primary()); + } + + private void assertNodeGatewayStartedShardsHappyCase( + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards + ) { + assertNull(nodeGatewayStartedShards.storeException()); + assertNotNull(nodeGatewayStartedShards.allocationId()); + assertTrue(nodeGatewayStartedShards.primary()); + } + + private void prepareIndex(String indexName, int numberOfPrimaryShards) { + createIndex( + indexName, + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfPrimaryShards).put(SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + index(indexName, "type", "1", Collections.emptyMap()); + flush(indexName); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java index bb6e356db188f..369c9f9b1a653 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java @@ -39,9 +39,8 @@ import 
org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -51,11 +50,11 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.hamcrest.Matchers.containsString; -public class IndexSortIT extends ParameterizedOpenSearchIntegTestCase { +public class IndexSortIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final XContentBuilder TEST_MAPPING = createTestMapping(); - public IndexSortIT(Settings dynamicSettings) { - super(dynamicSettings); + public IndexSortIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private static XContentBuilder createTestMapping() { try { return jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java index 2d28578dbebcc..df423edeca9c1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java @@ -38,10 +38,9 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.MatchPhraseQueryBuilder; import org.opensearch.index.search.MatchQuery.ZeroTermsQuery; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.Before; import java.util.ArrayList; @@ -55,12 +54,12 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; -public class MatchPhraseQueryIT extends ParameterizedOpenSearchIntegTestCase { +public class MatchPhraseQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String INDEX = "test"; - public MatchPhraseQueryIT(Settings dynamicSettings) { - super(dynamicSettings); + public MatchPhraseQueryIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Before public void setUp() throws Exception { super.setUp(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java index 6332b1b97426f..a1ff2da249d69 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java @@ -44,13 +44,12 @@ import org.opensearch.cluster.routing.ShardIterator; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.search.stats.SearchStats; import org.opensearch.search.suggest.SuggestBuilder; import org.opensearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.opensearch.search.suggest.term.TermSuggestionBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -68,10 +67,10 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2) -public class SuggestStatsIT extends ParameterizedOpenSearchIntegTestCase { +public class SuggestStatsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SuggestStatsIT(Settings dynamicSettings) { - super(dynamicSettings); + public SuggestStatsIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -82,11 +81,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfReplicas() { return 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 51dba07a8f9f8..95f6a7a03266c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -42,14 +42,13 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.cache.request.RequestCacheStats; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.time.ZoneId; @@ -69,7 +68,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -public class IndicesRequestCacheIT extends ParameterizedOpenSearchIntegTestCase { +public class IndicesRequestCacheIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public IndicesRequestCacheIT(Settings settings) { super(settings); } @@ -82,11 +81,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return 
Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - // One of the primary purposes of the query cache is to cache aggs results public void testCacheAggs() throws Exception { Client client = client(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 0c5780210901d..73e888eea362c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -50,7 +50,6 @@ import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.breaker.NoopCircuitBreaker; @@ -63,7 +62,7 @@ import org.opensearch.search.SearchService; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.After; import org.junit.Before; @@ -94,9 +93,9 @@ * Integration tests for InternalCircuitBreakerService */ @ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1) -public class CircuitBreakerServiceIT extends ParameterizedOpenSearchIntegTestCase { - public CircuitBreakerServiceIT(Settings dynamicSettings) { - super(dynamicSettings); +public class CircuitBreakerServiceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + public CircuitBreakerServiceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -107,11 +106,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index c049c8ed2d4a6..9decd17d95eab 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -66,19 +66,16 @@ public void testPrimaryRelocationWhileIndexing() throws Exception { ensureGreen("test"); AtomicInteger numAutoGenDocs = new AtomicInteger(); final AtomicBoolean finished = new AtomicBoolean(false); - Thread indexingThread = new Thread() { - @Override - public void run() { - while (finished.get() == false && numAutoGenDocs.get() < 10_000) { - IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); - assertEquals(DocWriteResponse.Result.DELETED, 
deleteResponse.getResult()); - client().prepareIndex("test").setSource("auto", true).get(); - numAutoGenDocs.incrementAndGet(); - } + Thread indexingThread = new Thread(() -> { + while (finished.get() == false && numAutoGenDocs.get() < 10_000) { + IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); + DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); + client().prepareIndex("test").setSource("auto", true).get(); + numAutoGenDocs.incrementAndGet(); } - }; + }); indexingThread.start(); ClusterState initialState = client().admin().cluster().prepareState().get().getState(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index e4f1f8717f899..72e680e22ed75 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -523,12 +523,12 @@ public void testRerouteRecovery() throws Exception { logger.info("--> waiting for recovery to start both on source and target"); final Index index = resolveIndex(INDEX_NAME); - assertBusy(() -> { + assertBusyWithFixedSleepTime(() -> { IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeA); assertThat(indicesService.indexServiceSafe(index).getShard(0).recoveryStats().currentAsSource(), equalTo(1)); indicesService = internalCluster().getInstance(IndicesService.class, nodeB); assertThat(indicesService.indexServiceSafe(index).getShard(0).recoveryStats().currentAsTarget(), equalTo(1)); - }); + }, TimeValue.timeValueSeconds(10), TimeValue.timeValueMillis(500)); logger.info("--> request recoveries"); RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java index f485d4e402b41..30edea6551067 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java @@ -22,6 +22,7 @@ import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.junit.annotations.TestLogging; import java.util.ArrayList; import java.util.List; @@ -128,6 +129,7 @@ public void testPerIndexPrimaryAllocation() throws Exception { * ensures the primary shard distribution is balanced. 
* */ + @TestLogging(reason = "Enable debug logs from the cluster and indices replication packages", value = "org.opensearch.cluster:DEBUG,org.opensearch.indices.replication:DEBUG") public void testSingleIndexShardAllocation() throws Exception { internalCluster().startClusterManagerOnlyNode(); final int maxReplicaCount = 1; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java index 641f714d33414..796f09cb9528f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java @@ -19,7 +19,6 @@ import org.opensearch.common.lease.Releasable; import org.opensearch.common.settings.Settings; import org.opensearch.core.index.Index; -import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexService; import org.opensearch.index.SegmentReplicationShardStats; @@ -175,17 +174,6 @@ private IndexShard getIndexShard(ClusterState state, ShardRouting routing, Strin return getIndexShard(state.nodes().get(routing.currentNodeId()).getName(), routing.shardId(), indexName); } - /** - * Fetch IndexShard by shardId, multiple shards per node allowed. - */ - protected IndexShard getIndexShard(String node, ShardId shardId, String indexName) { - final Index index = resolveIndex(indexName); - IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); - IndexService indexService = indicesService.indexServiceSafe(index); - final Optional<Integer> id = indexService.shardIds().stream().filter(sid -> sid == shardId.id()).findFirst(); - return indexService.getShard(id.get()); - } - /** * Fetch IndexShard, assumes only a single shard per node. */ diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 5511bc7945d65..4a848e92800cb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -594,6 +594,67 @@ public void testCancellation() throws Exception { assertDocCounts(docCount, primaryNode); } + public void testCancellationDuringGetCheckpointInfo() throws Exception { + cancelDuringReplicaAction(SegmentReplicationSourceService.Actions.GET_CHECKPOINT_INFO); + } + + public void testCancellationDuringGetSegments() throws Exception { + cancelDuringReplicaAction(SegmentReplicationSourceService.Actions.GET_SEGMENT_FILES); + } + + private void cancelDuringReplicaAction(String actionToBlock) throws Exception { + // This test stubs the transport calls specific to node-to-node replication.
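+ // Flow: block the primary's transport handler for the given replication action on a latch, index a doc to start a replication + // round, then drop the replica count to zero and assert that the in-flight replication target and the replica's store release all references.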
+ assumeFalse( + "Skipping the test as it's not compatible with segment replication using remote store.", + segmentReplicationWithRemoteEnabled() + ); + final String primaryNode = internalCluster().startDataOnlyNode(); + createIndex(INDEX_NAME, Settings.builder().put(indexSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build()); + ensureYellow(INDEX_NAME); + + final String replicaNode = internalCluster().startDataOnlyNode(); + ensureGreen(INDEX_NAME); + final SegmentReplicationTargetService targetService = internalCluster().getInstance( + SegmentReplicationTargetService.class, + replicaNode + ); + final IndexShard replicaShard = getIndexShard(replicaNode, INDEX_NAME); + CountDownLatch startCancellationLatch = new CountDownLatch(1); + CountDownLatch latch = new CountDownLatch(1); + + MockTransportService primaryTransportService = (MockTransportService) internalCluster().getInstance( + TransportService.class, + primaryNode + ); + primaryTransportService.addRequestHandlingBehavior(actionToBlock, (handler, request, channel, task) -> { + logger.info("Blocking replication action {}", actionToBlock); + try { + startCancellationLatch.countDown(); + latch.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + // index a doc and trigger replication + client().prepareIndex(INDEX_NAME).setId("1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + + // remove the replica and ensure it is cleaned up. + startCancellationLatch.await(); + SegmentReplicationTarget target = targetService.get(replicaShard.shardId()); + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + ); + assertEquals("Replication not closed: " + target.getId(), 0, target.refCount()); + assertEquals("Store has a positive refCount", 0, replicaShard.store().refCount()); + // stop the replica; this will run additional checks on shutdown to ensure the replica and its store are closed properly + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode)); + latch.countDown(); + } + public void testStartReplicaAfterPrimaryIndexesDocs() throws Exception { + final String primaryNode = internalCluster().startDataOnlyNode(); + createIndex(INDEX_NAME, Settings.builder().put(indexSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java index dbe0b43441f54..a7be63bc61bc2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java @@ -219,6 +219,7 @@ public void testPrimaryRelocationWithSegRepFailure() throws Exception { * This test verifies primary recovery behavior with continuous ingestion * */ + @TestLogging(reason = "Enable trace logs from the replication and recovery packages", value = "org.opensearch.indices.recovery:TRACE,org.opensearch.indices.replication:TRACE") public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { final String primary = internalCluster().startNode(); createIndex(1); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java
b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java index fb06a97bd51c2..69411b2ff640a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java @@ -87,7 +87,8 @@ public void testCreateShrinkIndexThrowsExceptionWhenReplicasBehind() throws Exce .get() ); assertEquals( - " For index [test] replica shards haven't caught up with primary, please retry after sometime.", + "Replication still in progress for index [test]. Please wait for replication to complete and retry. " + + "Use the _cat/segment_replication/test api to check if the index is up to date (e.g. bytes_behind == 0).", exception.getMessage() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java index 766471fdc0756..89aef6f0be1a6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java @@ -268,12 +268,12 @@ public void testMultipleIndices() throws Exception { internalCluster().startClusterManagerOnlyNode(); final String index_2 = "tst-index-2"; List<String> nodes = new ArrayList<>(); - final String primaryNode = internalCluster().startNode(); + final String primaryNode = internalCluster().startDataOnlyNode(); nodes.add(primaryNode); createIndex(INDEX_NAME, index_2); ensureYellowAndNoInitializingShards(INDEX_NAME, index_2); - nodes.add(internalCluster().startNode()); + nodes.add(internalCluster().startDataOnlyNode()); ensureGreen(INDEX_NAME, index_2); final long numDocs = scaledRandomIntBetween(50, 100); @@ -284,6 +284,7 @@ public void testMultipleIndices() throws Exception { refresh(INDEX_NAME, index_2); waitForSearchableDocs(INDEX_NAME, numDocs, nodes); waitForSearchableDocs(index_2, numDocs, nodes); + ensureSearchable(INDEX_NAME, index_2); final IndexShard index_1_primary = getIndexShard(primaryNode, INDEX_NAME); final IndexShard index_2_primary = getIndexShard(primaryNode, index_2); @@ -291,37 +292,39 @@ public void testMultipleIndices() throws Exception { assertTrue(index_1_primary.routingEntry().primary()); assertTrue(index_2_primary.routingEntry().primary()); - // test both indices are returned in the response. - SegmentReplicationStatsResponse segmentReplicationStatsResponse = client().admin() - .indices() - .prepareSegmentReplicationStats() - .execute() - .actionGet(); + assertBusy(() -> { + // test both indices are returned in the response. 
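+ // assertBusy is used because replica stats are reported asynchronously after each replication round completes.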
+ SegmentReplicationStatsResponse segmentReplicationStatsResponse = dataNodeClient().admin() + .indices() + .prepareSegmentReplicationStats() + .execute() + .actionGet(); - Map<String, List<SegmentReplicationPerGroupStats>> replicationStats = segmentReplicationStatsResponse.getReplicationStats(); - assertEquals(2, replicationStats.size()); - List<SegmentReplicationPerGroupStats> replicationPerGroupStats = replicationStats.get(INDEX_NAME); - assertEquals(1, replicationPerGroupStats.size()); - SegmentReplicationPerGroupStats perGroupStats = replicationPerGroupStats.get(0); - assertEquals(perGroupStats.getShardId(), index_1_primary.shardId()); - Set<SegmentReplicationShardStats> replicaStats = perGroupStats.getReplicaStats(); - assertEquals(1, replicaStats.size()); - for (SegmentReplicationShardStats replica : replicaStats) { - assertNotNull(replica.getCurrentReplicationState()); - } + Map<String, List<SegmentReplicationPerGroupStats>> replicationStats = segmentReplicationStatsResponse.getReplicationStats(); + assertEquals(2, replicationStats.size()); + List<SegmentReplicationPerGroupStats> replicationPerGroupStats = replicationStats.get(INDEX_NAME); + assertEquals(1, replicationPerGroupStats.size()); + SegmentReplicationPerGroupStats perGroupStats = replicationPerGroupStats.get(0); + assertEquals(perGroupStats.getShardId(), index_1_primary.shardId()); + Set<SegmentReplicationShardStats> replicaStats = perGroupStats.getReplicaStats(); + assertEquals(1, replicaStats.size()); + for (SegmentReplicationShardStats replica : replicaStats) { + assertNotNull(replica.getCurrentReplicationState()); + } - replicationPerGroupStats = replicationStats.get(index_2); - assertEquals(1, replicationPerGroupStats.size()); - perGroupStats = replicationPerGroupStats.get(0); - assertEquals(perGroupStats.getShardId(), index_2_primary.shardId()); - replicaStats = perGroupStats.getReplicaStats(); - assertEquals(1, replicaStats.size()); - for (SegmentReplicationShardStats replica : replicaStats) { - assertNotNull(replica.getCurrentReplicationState()); - } + replicationPerGroupStats = replicationStats.get(index_2); + assertEquals(1, replicationPerGroupStats.size()); + perGroupStats = replicationPerGroupStats.get(0); + assertEquals(perGroupStats.getShardId(), index_2_primary.shardId()); + replicaStats = perGroupStats.getReplicaStats(); + assertEquals(1, replicaStats.size()); + for (SegmentReplicationShardStats replica : replicaStats) { + assertNotNull(replica.getCurrentReplicationState()); + } + }, 30, TimeUnit.SECONDS); // test only single index queried. 
- segmentReplicationStatsResponse = client().admin() + SegmentReplicationStatsResponse segmentReplicationStatsResponse = dataNodeClient().admin() .indices() .prepareSegmentReplicationStats() .setIndices(index_2) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java index 547f9e7a8d380..87e5df8c48981 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java @@ -287,7 +287,7 @@ public void testCloseWhileDeletingIndices() throws Exception { throw new AssertionError(e); } try { - assertAcked(client().admin().indices().prepareDelete(indexToDelete)); + assertAcked(client().admin().indices().prepareDelete(indexToDelete).setTimeout("60s")); } catch (final Exception e) { assertException(e, indexToDelete); } @@ -301,7 +301,7 @@ public void testCloseWhileDeletingIndices() throws Exception { throw new AssertionError(e); } try { - client().admin().indices().prepareClose(indexToClose).get(); + client().admin().indices().prepareClose(indexToClose).setTimeout("60s").get(); } catch (final Exception e) { assertException(e, indexToClose); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index 9c96d4861d426..1d5da9370cce3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -57,7 +57,6 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamOutput; @@ -85,7 +84,7 @@ import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -108,6 +107,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful; @@ -122,7 +122,7 @@ @ClusterScope(scope = Scope.SUITE, numDataNodes = 2, numClientNodes = 0) @SuppressCodecs("*") // requires custom completion format -public class IndexStatsIT extends ParameterizedOpenSearchIntegTestCase { +public class IndexStatsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public IndexStatsIT(Settings settings) { super(settings); } @@ -131,15 +131,11 @@ public IndexStatsIT(Settings settings) { public static Collection<Object[]> parameters() { return Arrays.asList( new Object[] { 
Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, - new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }, + new Object[] { Settings.builder().put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT).build() } ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(InternalSettingsPlugin.class); @@ -181,7 +177,7 @@ public void testFieldDataStats() throws InterruptedException { ensureGreen(); client().prepareIndex("test").setId("1").setSource("field", "value1", "field2", "value1").execute().actionGet(); client().prepareIndex("test").setId("2").setSource("field", "value2", "field2", "value2").execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); + refreshAndWaitForReplication(); indexRandomForConcurrentSearch("test"); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); @@ -305,7 +301,7 @@ public void testClearAllCaches() throws Exception { client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); client().prepareIndex("test").setId("1").setSource("field", "value1").execute().actionGet(); client().prepareIndex("test").setId("2").setSource("field", "value2").execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); + refreshAndWaitForReplication(); indexRandomForConcurrentSearch("test"); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); @@ -673,7 +669,7 @@ public void testSimpleStats() throws Exception { client().prepareIndex("test1").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); client().prepareIndex("test1").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); client().prepareIndex("test2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); - refresh(); + refreshAndWaitForReplication(); NumShards test1 = getNumShards("test1"); long test1ExpectedWrites = 2 * test1.dataCopies; @@ -688,7 +684,13 @@ public void testSimpleStats() throws Exception { assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexFailedCount(), equalTo(0L)); assertThat(stats.getPrimaries().getIndexing().getTotal().isThrottled(), equalTo(false)); assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0L)); - assertThat(stats.getTotal().getIndexing().getTotal().getIndexCount(), equalTo(totalExpectedWrites)); + + // This assert should not be done on segrep-enabled indices because we are asserting the indexing/write operation count on + // all primary and replica shards. With segrep, indexing/write operations don't happen on replica shards, so we ignore + // this assert check for segrep-enabled indices.
+ if (isSegmentReplicationEnabledForIndex("test1") == false && isSegmentReplicationEnabledForIndex("test2") == false) { + assertThat(stats.getTotal().getIndexing().getTotal().getIndexCount(), equalTo(totalExpectedWrites)); + } assertThat(stats.getTotal().getStore(), notNullValue()); assertThat(stats.getTotal().getMerge(), notNullValue()); assertThat(stats.getTotal().getFlush(), notNullValue()); @@ -831,6 +833,7 @@ public void testMergeStats() { client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet(); stats = client().admin().indices().prepareStats().setMerge(true).execute().actionGet(); + refreshAndWaitForReplication(); assertThat(stats.getTotal().getMerge(), notNullValue()); assertThat(stats.getTotal().getMerge().getTotal(), greaterThan(0L)); } @@ -857,7 +860,7 @@ public void testSegmentsStats() { client().admin().indices().prepareFlush().get(); client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet(); - client().admin().indices().prepareRefresh().get(); + refreshAndWaitForReplication(); stats = client().admin().indices().prepareStats().setSegments(true).get(); assertThat(stats.getTotal().getSegments(), notNullValue()); @@ -875,7 +878,7 @@ public void testAllFlags() throws Exception { client().prepareIndex("test_index").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); client().prepareIndex("test_index_2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); + refreshAndWaitForReplication(); IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); Flag[] values = CommonStatsFlags.Flag.values(); for (Flag flag : values) { @@ -1459,6 +1462,7 @@ public void testZeroRemoteStoreStatsOnNonRemoteStoreIndex() { .get() .status() ); + refreshAndWaitForReplication(); ShardStats shard = client().admin().indices().prepareStats(indexName).setSegments(true).setTranslog(true).get().getShards()[0]; RemoteSegmentStats remoteSegmentStatsFromIndexStats = shard.getStats().getSegments().getRemoteSegmentStats(); assertZeroRemoteSegmentStats(remoteSegmentStatsFromIndexStats); diff --git a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java index f77ae80a55276..2f0d4959d217b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java @@ -42,11 +42,10 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.search.fetch.subphase.FetchSourceContext; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -63,10 +62,10 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class SimpleMgetIT extends ParameterizedOpenSearchIntegTestCase { +public class SimpleMgetIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SimpleMgetIT(Settings dynamicSettings) { - super(dynamicSettings); + public SimpleMgetIT(Settings staticSettings) { 
+ super(staticSettings); } @ParametersFactory @@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException { createIndex("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/plugins/PluginsServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/plugins/PluginsServiceIT.java new file mode 100644 index 0000000000000..3cc10b0c0b858 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/plugins/PluginsServiceIT.java @@ -0,0 +1,115 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugins; + +import org.opensearch.Version; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.VersionUtils; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.hamcrest.Matchers.containsString; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) +public class PluginsServiceIT extends OpenSearchIntegTestCase { + + public void testNodeBootstrapWithCompatiblePlugin() throws IOException { + // Prepare the plugins directory and then start a node + Path baseDir = createTempDir(); + Path pluginDir = baseDir.resolve("plugins/dummy-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "dummy desc", + "name", + "dummyPlugin", + "version", + "1.0", + "opensearch.version", + Version.CURRENT.toString(), + "java.version", + System.getProperty("java.specification.version"), + "classname", + "test.DummyPlugin" + ); + try (InputStream jar = PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { + Files.copy(jar, pluginDir.resolve("dummy-plugin.jar")); + } + internalCluster().startNode(Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), baseDir)); + for (PluginsService pluginsService : internalCluster().getDataNodeInstances(PluginsService.class)) { + // Ensure plugins service was able to load the plugin + assertEquals(1, pluginsService.info().getPluginInfos().stream().filter(info -> info.getName().equals("dummyPlugin")).count()); + } + } + + public void testNodeBootstrapWithRangeCompatiblePlugin() throws IOException { + // Prepare the plugins directory and then start a node + Path baseDir = createTempDir(); + Path pluginDir = baseDir.resolve("plugins/dummy-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "dummy desc", + "name", + "dummyPlugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"~" + Version.CURRENT + "\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "test.DummyPlugin" + ); + try (InputStream jar = PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { + Files.copy(jar, pluginDir.resolve("dummy-plugin.jar")); + } + internalCluster().startNode(Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), baseDir)); + for (PluginsService pluginsService : internalCluster().getDataNodeInstances(PluginsService.class)) { + // 
Ensure plugins service was able to load the plugin + assertEquals(1, pluginsService.info().getPluginInfos().stream().filter(info -> info.getName().equals("dummyPlugin")).count()); + } + } + + public void testNodeBootstrapWithIncompatiblePlugin() throws IOException { + // Prepare the plugins directory with an incompatible plugin and attempt to start a node + Path baseDir = createTempDir(); + Path pluginDir = baseDir.resolve("plugins/dummy-plugin"); + String incompatibleRange = "~" + + VersionUtils.getVersion(Version.CURRENT.major, Version.CURRENT.minor, (byte) (Version.CURRENT.revision + 1)); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "dummy desc", + "name", + "dummyPlugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"" + incompatibleRange + "\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "test.DummyPlugin" + ); + try (InputStream jar = PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { + Files.copy(jar, pluginDir.resolve("dummy-plugin.jar")); + } + IllegalArgumentException e = assertThrows( + IllegalArgumentException.class, + () -> internalCluster().startNode(Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), baseDir)) + ); + assertThat(e.getMessage(), containsString("Plugin [dummyPlugin] was built for OpenSearch version ")); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java index 30d5af58df545..eb293aeb6d490 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java @@ -32,6 +32,8 @@ package org.opensearch.recovery; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.admin.indices.refresh.RefreshResponse; @@ -52,10 +54,11 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.DocsStats; import org.opensearch.index.translog.Translog; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.plugins.Plugin; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.BackgroundIndexer; -import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -69,12 +72,26 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; +import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoTimeout; -public class RecoveryWhileUnderLoadIT extends OpenSearchIntegTestCase { +public class RecoveryWhileUnderLoadIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + + public RecoveryWhileUnderLoadIT(Settings settings) { + super(settings); + } + + 
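// Each test in this suite runs once per cluster replication type (document and segment replication). + 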
@ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT).build() }, + new Object[] { Settings.builder().put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT).build() } + ); + } + private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class); public static final class RetentionLeaseSyncIntervalSettingPlugin extends Plugin { @@ -150,7 +167,7 @@ public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> indexing threads stopped"); logger.info("--> refreshing the index"); - refreshAndAssert(); + assertAfterRefreshAndWaitForReplication(); logger.info("--> verifying indexed content"); iterateAssertCount(numberOfShards, 10, indexer.getIds()); } @@ -211,7 +228,7 @@ public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() thr logger.info("--> indexing threads stopped"); logger.info("--> refreshing the index"); - refreshAndAssert(); + assertAfterRefreshAndWaitForReplication(); logger.info("--> verifying indexed content"); iterateAssertCount(numberOfShards, 10, indexer.getIds()); } @@ -325,7 +342,7 @@ public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception ); logger.info("--> refreshing the index"); - refreshAndAssert(); + assertAfterRefreshAndWaitForReplication(); logger.info("--> verifying indexed content"); iterateAssertCount(numberOfShards, 10, indexer.getIds()); } @@ -375,7 +392,7 @@ public void testRecoverWhileRelocating() throws Exception { ensureGreen(TimeValue.timeValueMinutes(5)); logger.info("--> refreshing the index"); - refreshAndAssert(); + assertAfterRefreshAndWaitForReplication(); logger.info("--> verifying indexed content"); iterateAssertCount(numShards, 10, indexer.getIds()); } @@ -474,10 +491,11 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat ); } - private void refreshAndAssert() throws Exception { + private void assertAfterRefreshAndWaitForReplication() throws Exception { assertBusy(() -> { RefreshResponse actionGet = client().admin().indices().prepareRefresh().get(); assertAllSuccessful(actionGet); }, 5, TimeUnit.MINUTES); + waitForReplication(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java index c957f1b338bfe..6de61cf203c60 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java @@ -157,10 +157,4 @@ public void testDisconnectsDuringRecovery() { public void testReplicaRecovery() { } - - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/9580") - public void testRerouteRecovery() { - - } - } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index d23e634bb3368..e43ff9a412784 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -9,6 +9,8 @@ package org.opensearch.remotestore; import 
org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.opensearch.action.admin.indices.get.GetIndexRequest; import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.action.bulk.BulkItemResponse; @@ -37,7 +39,7 @@ import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.blobstore.BlobStoreRepository; -import org.opensearch.repositories.fs.FsRepository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.OpenSearchIntegTestCase; import org.junit.After; @@ -60,6 +62,7 @@ import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; +import static org.opensearch.repositories.fs.ReloadableFsRepository.REPOSITORIES_FAILRATE_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase { @@ -84,6 +87,10 @@ public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase { ); protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlush, String index) { + return indexData(numberOfIterations, invokeFlush, false, index); + } + + protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlush, boolean emptyTranslog, String index) { long totalOperations = 0; long refreshedOrFlushedOperations = 0; long maxSeqNo = -1; @@ -96,6 +103,11 @@ protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlus } else { refresh(index); } + + // skip indexing on the last iteration as we don't want to leave any data in the remote translog + if (emptyTranslog && i == numberOfIterations - 1) { + continue; + } maxSeqNoRefreshedOrFlushed = maxSeqNo; indexingStats.put(MAX_SEQ_NO_REFRESHED_OR_FLUSHED + "-shard-" + shardId, maxSeqNoRefreshedOrFlushed); refreshedOrFlushedOperations = totalOperations; @@ -137,6 +149,18 @@ protected Settings nodeSettings(int nodeOrdinal) { } } + protected void setFailRate(String repoName, int value) throws ExecutionException, InterruptedException { + GetRepositoriesRequest gr = new GetRepositoriesRequest(new String[] { repoName }); + GetRepositoriesResponse res = client().admin().cluster().getRepositories(gr).get(); + RepositoryMetadata rmd = res.repositories().get(0); + Settings.Builder settings = Settings.builder() + .put("location", rmd.settings().get("location")) + .put(REPOSITORIES_FAILRATE_SETTING.getKey(), value); + assertAcked( + client().admin().cluster().preparePutRepository(repoName).setType(ReloadableFsRepository.TYPE).setSettings(settings).get() + ); + } + public Settings indexSettings() { return defaultIndexSettings(); } @@ -215,10 +239,10 @@ public static Settings buildRemoteStoreNodeAttributes( return buildRemoteStoreNodeAttributes( segmentRepoName, segmentRepoPath, - FsRepository.TYPE, + ReloadableFsRepository.TYPE, translogRepoName, translogRepoPath, - FsRepository.TYPE, + ReloadableFsRepository.TYPE, withRateLimiterAttributes ); } diff --git 
a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java index c61e2ec6e4f6c..3f90732f1f13d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java @@ -310,10 +310,16 @@ private void validateCurrentMetadata() throws Exception { internalCluster().getClusterManagerName() ); assertBusy(() -> { - ClusterMetadataManifest manifest = remoteClusterStateService.getLatestClusterMetadataManifest( - getClusterState().getClusterName().value(), - getClusterState().metadata().clusterUUID() - ).get(); + ClusterMetadataManifest manifest; + try { + manifest = remoteClusterStateService.getLatestClusterMetadataManifest( + getClusterState().getClusterName().value(), + getClusterState().metadata().clusterUUID() + ).get(); + } catch (IllegalStateException e) { + // Rethrowing as an AssertionError lets assertBusy retry the validation when it fails due to a race condition. + throw new AssertionError("Error while validating latest cluster metadata", e); + } ClusterState clusterState = getClusterState(); Metadata currentMetadata = clusterState.metadata(); assertEquals(currentMetadata.indices().size(), manifest.getIndices().size()); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java index 7626e3dba6424..94acf2b1dbb27 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java @@ -17,8 +17,14 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.index.Index; +import org.opensearch.index.IndexService; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadata; +import org.opensearch.indices.IndicesService; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; @@ -44,7 +50,7 @@ public class RemoteStoreRestoreIT extends BaseRemoteStoreRestoreIT { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithNoDataPostCommit() throws Exception { - testRestoreFlow(1, true, randomIntBetween(1, 5)); + testRestoreFlow(1, true, true, randomIntBetween(1, 5)); } /** @@ -52,7 +58,7 @@ public void testRemoteTranslogRestoreWithNoDataPostCommit() throws Exception { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithNoDataPostRefresh() throws Exception { - testRestoreFlow(1, false, randomIntBetween(1, 5)); + testRestoreFlow(1, false, true, randomIntBetween(1, 5)); } /** @@ -61,7 +67,7 @@ public void testRemoteTranslogRestoreWithNoDataPostRefresh() throws Exception { * @throws IOException IO Exception. 
*/ public void testRemoteTranslogRestoreWithRefreshedData() throws Exception { - testRestoreFlow(randomIntBetween(2, 5), false, randomIntBetween(1, 5)); + testRestoreFlow(randomIntBetween(2, 5), false, false, randomIntBetween(1, 5)); } /** @@ -70,7 +76,7 @@ public void testRemoteTranslogRestoreWithRefreshedData() throws Exception { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithCommittedData() throws Exception { - testRestoreFlow(randomIntBetween(2, 5), true, randomIntBetween(1, 5)); + testRestoreFlow(randomIntBetween(2, 5), true, false, randomIntBetween(1, 5)); } /** @@ -78,7 +84,7 @@ public void testRemoteTranslogRestoreWithCommittedData() throws Exception { * @throws IOException IO Exception. */ public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(1, true, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(1, true, true, randomIntBetween(1, 5)); } /** @@ -86,7 +92,7 @@ public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws Except * @throws IOException IO Exception. */ public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(1, false, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(1, false, true, randomIntBetween(1, 5)); } /** @@ -95,7 +101,7 @@ public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws Excep * @throws IOException IO Exception. */ public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), false, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), false, false, randomIntBetween(1, 5)); } /** @@ -104,7 +110,7 @@ public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws Exception * @throws IOException IO Exception. */ public void testRTSRestoreWithCommittedDataPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), true, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), true, false, randomIntBetween(1, 5)); } private void restoreAndVerify(int shardCount, int replicaCount, Map<String, Long> indexStats) throws Exception { @@ -122,9 +128,9 @@ private void restoreAndVerify(int shardCount, int replicaCount, Map<String, Long * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked. * @throws IOException IO Exception. 
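+ * @param emptyTranslog If true, skip indexing after the final flush/refresh so the remote translog is left empty.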
*/ - private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int shardCount) throws Exception { + private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, boolean emptyTranslog, int shardCount) throws Exception { prepareCluster(1, 3, INDEX_NAME, 0, shardCount); - Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME); + Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, emptyTranslog, INDEX_NAME); assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0).get(), indexStats.get(REFRESHED_OR_FLUSHED_OPERATIONS)); @@ -135,15 +141,64 @@ private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int sh restoreAndVerify(shardCount, 0, indexStats); } + public void testMultipleWriters() throws Exception { + prepareCluster(1, 2, INDEX_NAME, 1, 1); + Map<String, Long> indexStats = indexData(randomIntBetween(2, 5), true, true, INDEX_NAME); + assertEquals(2, getNumShards(INDEX_NAME).totalNumShards); + + // ensure replica has latest checkpoint + flushAndRefresh(INDEX_NAME); + flushAndRefresh(INDEX_NAME); + + Index indexObj = clusterService().state().metadata().indices().get(INDEX_NAME).getIndex(); + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, primaryNodeName(INDEX_NAME)); + IndexService indexService = indicesService.indexService(indexObj); + IndexShard indexShard = indexService.getShard(0); + RemoteSegmentMetadata remoteSegmentMetadataBeforeFailover = indexShard.getRemoteDirectory().readLatestMetadataFile(); + + // ensure all segments synced to replica + assertBusy( + () -> assertHitCount( + client(primaryNodeName(INDEX_NAME)).prepareSearch(INDEX_NAME).setSize(0).get(), + indexStats.get(TOTAL_OPERATIONS) + ), + 30, + TimeUnit.SECONDS + ); + assertBusy( + () -> assertHitCount( + client(replicaNodeName(INDEX_NAME)).prepareSearch(INDEX_NAME).setSize(0).get(), + indexStats.get(TOTAL_OPERATIONS) + ), + 30, + TimeUnit.SECONDS + ); + + String newPrimaryNodeName = replicaNodeName(INDEX_NAME); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME))); + ensureYellow(INDEX_NAME); + + indicesService = internalCluster().getInstance(IndicesService.class, newPrimaryNodeName); + indexService = indicesService.indexService(indexObj); + indexShard = indexService.getShard(0); + IndexShard finalIndexShard = indexShard; + assertBusy(() -> assertTrue(finalIndexShard.isStartedPrimary() && finalIndexShard.isPrimaryMode())); + assertEquals( + finalIndexShard.getLatestSegmentInfosAndCheckpoint().v2().getPrimaryTerm(), + remoteSegmentMetadataBeforeFailover.getPrimaryTerm() + 1 + ); + } + /** * Helper function to test restoring an index having replicas from remote store when all the nodes housing the primary/replica drop. * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data. * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked. * @throws IOException IO Exception. 
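+ * @param emptyTranslog If true, no documents are indexed after the final flush/refresh, leaving the remote translog empty.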
*/ - private void testRestoreFlowBothPrimaryReplicasDown(int numberOfIterations, boolean invokeFlush, int shardCount) throws Exception { + private void testRestoreFlowBothPrimaryReplicasDown(int numberOfIterations, boolean invokeFlush, boolean emptyTranslog, int shardCount) + throws Exception { prepareCluster(1, 2, INDEX_NAME, 1, shardCount); - Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME); + Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, emptyTranslog, INDEX_NAME); assertEquals(shardCount * 2, getNumShards(INDEX_NAME).totalNumShards); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(INDEX_NAME))); @@ -391,7 +446,7 @@ public void testRTSRestoreWithCommittedDataExcludeIndicesPatterns() throws Excep * @throws IOException IO Exception. */ public void testRTSRestoreDataOnlyInTranslog() throws Exception { - testRestoreFlow(0, true, randomIntBetween(1, 5)); + testRestoreFlow(0, true, false, randomIntBetween(1, 5)); } public void testRateLimitedRemoteDownloads() throws Exception { @@ -425,7 +480,14 @@ public void testRateLimitedRemoteDownloads() throws Exception { settingsMap.entrySet().forEach(entry -> settings.put(entry.getKey(), entry.getValue())); settings.put("location", segmentRepoPath).put("max_remote_download_bytes_per_sec", 4, ByteSizeUnit.KB); - assertAcked(client().admin().cluster().preparePutRepository(REPOSITORY_NAME).setType("fs").setSettings(settings).get()); + assertAcked( + client().admin() + .cluster() + .preparePutRepository(REPOSITORY_NAME) + .setType(ReloadableFsRepository.TYPE) + .setSettings(settings) + .get() + ); for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { Repository segmentRepo = repositoriesService.repository(REPOSITORY_NAME); @@ -454,7 +516,14 @@ public void testRateLimitedRemoteDownloads() throws Exception { // revert repo metadata to pass asserts on repo metadata vs. 
node attrs during teardown // https://github.com/opensearch-project/OpenSearch/pull/9569#discussion_r1345668700 settings.remove("max_remote_download_bytes_per_sec"); - assertAcked(client().admin().cluster().preparePutRepository(REPOSITORY_NAME).setType("fs").setSettings(settings).get()); + assertAcked( + client().admin() + .cluster() + .preparePutRepository(REPOSITORY_NAME) + .setType(ReloadableFsRepository.TYPE) + .setSettings(settings) + .get() + ); for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { Repository segmentRepo = repositoriesService.repository(REPOSITORY_NAME); assertNull(segmentRepo.getMetadata().settings().get("max_remote_download_bytes_per_sec")); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java index 8372135fc55c4..3d8d001b17ddf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java @@ -23,8 +23,6 @@ import org.opensearch.indices.replication.SegmentReplicationState; import org.opensearch.indices.replication.SegmentReplicationTarget; import org.opensearch.indices.replication.SegmentReplicationTargetService; -import org.opensearch.indices.replication.common.ReplicationCollection; -import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.disruption.SlowClusterStateProcessing; @@ -33,6 +31,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + /** * This class runs tests with remote store + segRep while blocking file downloads */ @@ -59,22 +59,18 @@ public void testCancelReplicationWhileSyncingSegments() throws Exception { indexSingleDoc(); refresh(INDEX_NAME); waitForBlock(replicaNode, REPOSITORY_NAME, TimeValue.timeValueSeconds(10)); - final SegmentReplicationState state = targetService.getOngoingEventSegmentReplicationState(indexShard.shardId()); - assertEquals(SegmentReplicationState.Stage.GET_FILES, state.getStage()); - ReplicationCollection.ReplicationRef<SegmentReplicationTarget> segmentReplicationTargetReplicationRef = targetService.get( - state.getReplicationId() - ); - final SegmentReplicationTarget segmentReplicationTarget = segmentReplicationTargetReplicationRef.get(); - // close the target ref here otherwise it will hold a refcount - segmentReplicationTargetReplicationRef.close(); + SegmentReplicationTarget segmentReplicationTarget = targetService.get(indexShard.shardId()); assertNotNull(segmentReplicationTarget); + assertEquals(SegmentReplicationState.Stage.GET_FILES, segmentReplicationTarget.state().getStage()); assertTrue(segmentReplicationTarget.refCount() > 0); - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNode)); - assertBusy(() -> { - assertTrue(indexShard.routingEntry().primary()); - assertNull(targetService.getOngoingEventSegmentReplicationState(indexShard.shardId())); - assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); - }); + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + ); 
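+ // With the replica count dropped to 0, the replica shard is removed and the in-flight segment replication is cancelled, so the target should be cleaned up and its refcount released (asserted below).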
+ assertNull(targetService.getOngoingEventSegmentReplicationState(indexShard.shardId())); + assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); unblockNode(REPOSITORY_NAME, replicaNode); cleanupRepo(); } @@ -85,7 +81,6 @@ public void testCancelReplicationWhileFetchingMetadata() throws Exception { final Set<String> dataNodeNames = internalCluster().getDataNodeNames(); final String replicaNode = getNode(dataNodeNames, false); - final String primaryNode = getNode(dataNodeNames, true); SegmentReplicationTargetService targetService = internalCluster().getInstance(SegmentReplicationTargetService.class, replicaNode); ensureGreen(INDEX_NAME); @@ -94,22 +89,18 @@ public void testCancelReplicationWhileFetchingMetadata() throws Exception { indexSingleDoc(); refresh(INDEX_NAME); waitForBlock(replicaNode, REPOSITORY_NAME, TimeValue.timeValueSeconds(10)); - final SegmentReplicationState state = targetService.getOngoingEventSegmentReplicationState(indexShard.shardId()); - assertEquals(SegmentReplicationState.Stage.GET_CHECKPOINT_INFO, state.getStage()); - ReplicationCollection.ReplicationRef<SegmentReplicationTarget> segmentReplicationTargetReplicationRef = targetService.get( - state.getReplicationId() - ); - final SegmentReplicationTarget segmentReplicationTarget = segmentReplicationTargetReplicationRef.get(); - // close the target ref here otherwise it will hold a refcount - segmentReplicationTargetReplicationRef.close(); + SegmentReplicationTarget segmentReplicationTarget = targetService.get(indexShard.shardId()); assertNotNull(segmentReplicationTarget); + assertEquals(SegmentReplicationState.Stage.GET_CHECKPOINT_INFO, segmentReplicationTarget.state().getStage()); assertTrue(segmentReplicationTarget.refCount() > 0); - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNode)); - assertBusy(() -> { - assertTrue(indexShard.routingEntry().primary()); - assertNull(targetService.getOngoingEventSegmentReplicationState(indexShard.shardId())); - assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); - }); + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + ); + assertNull(targetService.get(indexShard.shardId())); + assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); unblockNode(REPOSITORY_NAME, replicaNode); cleanupRepo(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java index 2fbaf4ea5a4d3..42c257eb79eff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java @@ -12,7 +12,6 @@ import org.opensearch.OpenSearchException; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.MockEngineFactoryPlugin; @@ -21,7 +20,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.search.MockSearchService; import org.opensearch.test.MockHttpTransport; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.TestGeoShapeFieldMapperPlugin; import 
org.opensearch.test.store.MockFSIndexStore; import org.opensearch.test.transport.MockTransportService; @@ -38,7 +37,7 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.apache.logging.log4j.core.util.Throwables.getRootCause; -public class ScriptCacheIT extends ParameterizedOpenSearchIntegTestCase { +public class ScriptCacheIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public ScriptCacheIT(Settings settings) { super(settings); } @@ -51,11 +50,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java index 18b4625761c51..5a19e2b841c08 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.core.tasks.TaskCancelledException; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -62,7 +61,7 @@ import org.opensearch.search.lookup.LeafFieldsLookup; import org.opensearch.tasks.TaskInfo; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.transport.TransportException; import org.junit.After; @@ -91,7 +90,7 @@ import static org.hamcrest.Matchers.notNullValue; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class SearchCancellationIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchCancellationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private TimeValue requestCancellationTimeout = TimeValue.timeValueSeconds(1); private TimeValue clusterCancellationTimeout = TimeValue.timeValueMillis(1500); @@ -109,11 +108,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(ScriptedBlockPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java index 52cc797ddd8da..ef7da395d2151 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java @@ -38,13 +38,12 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -59,7 +58,7 @@ import static org.opensearch.search.SearchTimeoutIT.ScriptedTimeoutPlugin.SCRIPT_NAME; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class SearchTimeoutIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchTimeoutIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SearchTimeoutIT(Settings settings) { super(settings); } @@ -72,11 +71,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(ScriptedTimeoutPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java index 24c72a66da6d0..b7f71b00d802f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java @@ -38,9 +38,8 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -52,10 +51,10 @@ import static org.hamcrest.Matchers.equalTo; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class SearchWithRejectionsIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchWithRejectionsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchWithRejectionsIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchWithRejectionsIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java index a61102b9db144..7ed3526cabe3f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java @@ -38,9 +38,8 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -53,7 +52,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; @ClusterScope(scope = SUITE) -public class StressSearchServiceReaperIT extends ParameterizedOpenSearchIntegTestCase { +public class StressSearchServiceReaperIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public StressSearchServiceReaperIT(Settings settings) { super(settings); } @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { // very frequent checks diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java index 257786c1e9ce5..6059abce53c8b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregationBuilder; @@ -49,7 +48,7 @@ import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -62,7 +61,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class AggregationsIntegrationIT extends ParameterizedOpenSearchIntegTestCase { +public class AggregationsIntegrationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static int numDocs; @@ -71,8 +70,8 @@ public class AggregationsIntegrationIT extends ParameterizedOpenSearchIntegTestC + LARGE_STRING.length() + "] used in the request has exceeded the allowed maximum"; - public AggregationsIntegrationIT(Settings dynamicSettings) { - super(dynamicSettings); + public AggregationsIntegrationIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -83,11 +82,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("index").setMapping("f", "type=keyword").get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java index 3d3cf1943dfe3..1826dd69cd804 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java @@ -37,12 +37,11 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.bucket.missing.Missing; import org.opensearch.search.aggregations.bucket.terms.Terms; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import 
org.hamcrest.Matchers; import java.util.Arrays; @@ -61,10 +60,10 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.core.IsNull.notNullValue; -public class CombiIT extends ParameterizedOpenSearchIntegTestCase { +public class CombiIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public CombiIT(Settings dynamicSettings) { - super(dynamicSettings); + public CombiIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * Making sure that if there are multiple aggregations, working on the same field, yet require different * value source type, they can all still work. It used to fail as we used to cache the ValueSource by the diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java index 2ffdf5fb32778..302ec3116d187 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.RangeQueryBuilder; @@ -56,7 +55,7 @@ import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.opensearch.search.aggregations.metrics.Sum; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.After; import org.junit.Before; @@ -94,10 +93,10 @@ * Additional tests that aim at testing more complex aggregation trees on larger random datasets, so that things like * the growth of dynamic arrays is tested. */ -public class EquivalenceIT extends ParameterizedOpenSearchIntegTestCase { +public class EquivalenceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public EquivalenceIT(Settings dynamicSettings) { - super(dynamicSettings); + public EquivalenceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -108,11 +107,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java index 1bc0cb36f5fe3..b650855083eed 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java @@ -37,11 +37,10 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.InternalBucketMetricValue; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -57,10 +56,10 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -public class MetadataIT extends ParameterizedOpenSearchIntegTestCase { +public class MetadataIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MetadataIT(Settings dynamicSettings) { - super(dynamicSettings); + public MetadataIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testMetadataSetOnAggregationResult() throws Exception { assertAcked(client().admin().indices().prepareCreate("idx").setMapping("name", "type=keyword").get()); IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)]; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java index e6325987d330f..bdd16c7e74dc0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.bucket.terms.Terms; @@ -47,7 +46,7 @@ import org.opensearch.search.aggregations.metrics.Percentiles; import org.opensearch.search.aggregations.metrics.Stats; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -65,10 +64,10 @@ import static org.hamcrest.Matchers.closeTo; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class MissingValueIT extends 
ParameterizedOpenSearchIntegTestCase { +public class MissingValueIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MissingValueIT(Settings dynamicSettings) { - super(dynamicSettings); + public MissingValueIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -79,11 +78,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int maximumNumberOfShards() { return 2; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java index cd0922606ec99..557ec9a37978d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.index.query.BoolQueryBuilder; @@ -50,7 +49,7 @@ import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.metrics.Avg; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -75,7 +74,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class AdjacencyMatrixIT extends ParameterizedOpenSearchIntegTestCase { +public class AdjacencyMatrixIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { static int numDocs, numSingleTag1Docs, numSingleTag2Docs, numTag1Docs, numTag2Docs, numMultiTagDocs; static final int MAX_NUM_FILTERS = 3; @@ -92,11 +91,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java index 7ab1a44ce220c..9a1efb3336212 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java @@ -36,11 +36,10 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import 
org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -53,7 +52,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class BooleanTermsIT extends ParameterizedOpenSearchIntegTestCase { +public class BooleanTermsIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "b_value"; private static final String MULTI_VALUED_FIELD_NAME = "b_values"; @@ -72,11 +71,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index ee94e574228df..6a15490cbfe63 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; import org.opensearch.common.time.DateMathParser; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -59,7 +58,7 @@ import org.opensearch.search.aggregations.metrics.Avg; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import org.junit.After; @@ -98,7 +97,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class DateHistogramIT extends ParameterizedOpenSearchIntegTestCase { +public class DateHistogramIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static Map<ZonedDateTime, Map<String, Object>> expectedMultiSortBuckets; @@ -106,8 +105,8 @@ private ZonedDateTime date(int month, int day) { return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC); } - public DateHistogramIT(Settings dynamicSettings) { - super(dynamicSettings); + public DateHistogramIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -118,11 +117,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private ZonedDateTime date(String date) { return DateFormatters.from(DateFieldMapper.getDefaultDateTimeFormatter().parse(date)); } @@ -183,9 +177,9 @@ public void setupSuiteScopeCluster() throws Exception { indexDoc(2, 15, 3), // date: Feb 15, dates: Feb 15, Mar 16 indexDoc(3, 2, 4), // date: Mar 2, dates: Mar 2, Apr 3 indexDoc(3, 15, 5), // date: Mar 15, dates: Mar 15, Apr 16 - indexDoc(3, 23, 6) + indexDoc(3, 23, 6) // date: Mar 23, dates: 
Mar 23, Apr 24 ) - ); // date: Mar 23, dates: Mar 23, Apr 24 + ); indexRandom(true, builders); ensureSearchable(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java index d44071e1ef9c5..eea896e01afe1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -38,12 +38,11 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.After; import org.junit.Before; @@ -69,13 +68,13 @@ */ @OpenSearchIntegTestCase.SuiteScopeTestCase @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class DateHistogramOffsetIT extends ParameterizedOpenSearchIntegTestCase { +public class DateHistogramOffsetIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss"; private static final DateFormatter FORMATTER = DateFormatter.forPattern(DATE_FORMAT); - public DateHistogramOffsetIT(Settings dynamicSettings) { - super(dynamicSettings); + public DateHistogramOffsetIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -86,11 +85,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private ZonedDateTime date(String date) { return DateFormatters.from(DateFieldMapper.getDefaultDateTimeFormatter().parse(date)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java index ae4243019ffb1..f00b601a54b80 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -49,7 +48,7 @@ import org.opensearch.search.aggregations.bucket.range.Range.Bucket; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.time.ZoneId; @@ -81,10 +80,10 @@ import 
static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class DateRangeIT extends ParameterizedOpenSearchIntegTestCase { +public class DateRangeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public DateRangeIT(Settings dynamicSettings) { - super(dynamicSettings); + public DateRangeIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { return client().prepareIndex("idx") .setSource( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 1d5f7f93e7410..b62e5f0f7f3b0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.action.support.WriteRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; @@ -48,7 +47,7 @@ import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.opensearch.search.aggregations.metrics.Max; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -71,12 +70,12 @@ * Tests the Sampler aggregation */ @OpenSearchIntegTestCase.SuiteScopeTestCase -public class DiversifiedSamplerIT extends ParameterizedOpenSearchIntegTestCase { +public class DiversifiedSamplerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public static final int NUM_SHARDS = 2; - public DiversifiedSamplerIT(Settings dynamicSettings) { - super(dynamicSettings); + public DiversifiedSamplerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -87,11 +86,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java index 88bb41923e53f..ccb4af8386472 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java @@ -88,8 +88,8 @@ @OpenSearchIntegTestCase.SuiteScopeTestCase public class DoubleTermsIT extends AbstractTermsTestCase { - public DoubleTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public DoubleTermsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java index 7aa98803403e0..2863711d49580 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -46,7 +45,7 @@ import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.metrics.Avg; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -68,12 +67,12 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class FilterIT extends ParameterizedOpenSearchIntegTestCase { +public class FilterIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static int numDocs, numTag1Docs; - public FilterIT(Settings dynamicSettings) { - super(dynamicSettings); + public FilterIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -84,11 +83,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterRewriteIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterRewriteIT.java new file mode 100644 index 0000000000000..35e024603f0f2 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterRewriteIT.java @@ -0,0 +1,107 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.aggregations.bucket; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.index.IndexRequestBuilder; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.time.DateFormatter; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.opensearch.search.aggregations.bucket.histogram.Histogram; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.opensearch.search.aggregations.AggregationBuilders.dateHistogram; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class FilterRewriteIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + + // simulate segment level match all + private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); + private static final Map<String, Long> expected = new HashMap<>(); + + public FilterRewriteIT(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected void setupSuiteScopeCluster() throws Exception { + assertAcked(client().admin().indices().prepareCreate("idx").get()); + + final int segmentCount = randomIntBetween(2, 10); + final Set<Long> longTerms = new HashSet<>(); + + final Map<String, Integer> dateTerms = new HashMap<>(); + for (int i = 0; i < segmentCount; i++) { + final List<IndexRequestBuilder> indexRequests = new ArrayList<>(); + + long longTerm; + do { + longTerm = randomInt(segmentCount * 2); + } while (!longTerms.add(longTerm)); + ZonedDateTime time = ZonedDateTime.of(2024, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC); + String dateTerm = DateFormatter.forPattern("yyyy-MM-dd").format(time); + + final int frequency = randomBoolean() ?
1 : randomIntBetween(2, 20); + for (int j = 0; j < frequency; j++) { + indexRequests.add( + client().prepareIndex("idx") + .setSource(jsonBuilder().startObject().field("date", dateTerm).field("match", true).endObject()) + ); + } + expected.put(dateTerm + "T00:00:00.000Z", (long) frequency); + + indexRandom(true, false, indexRequests); + } + + ensureSearchable(); + } + + public void testMinDocCountOnDateHistogram() throws Exception { + final SearchResponse allResponse = client().prepareSearch("idx") + .setSize(0) + .setQuery(QUERY) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0)) + .get(); + + final Histogram allHisto = allResponse.getAggregations().get("histo"); + Map<String, Long> results = new HashMap<>(); + allHisto.getBuckets().forEach(bucket -> results.put(bucket.getKeyAsString(), bucket.getDocCount())); + + for (Map.Entry<String, Long> entry : expected.entrySet()) { + assertEquals(entry.getValue(), results.get(entry.getKey())); + } + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java index b6cf515df78ba..e64877a1d4030 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -48,7 +47,7 @@ import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.metrics.Avg; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -72,7 +71,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class FiltersIT extends ParameterizedOpenSearchIntegTestCase { +public class FiltersIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { static int numDocs, numTag1Docs, numTag2Docs, numOtherDocs; @@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java index 025bebf8b254d..ed0bd3aad5bab 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.DistanceUnit; -import 
org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.InternalAggregation; @@ -50,7 +49,7 @@ import org.opensearch.search.aggregations.bucket.range.Range.Bucket; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; import org.hamcrest.Matchers; @@ -76,10 +75,10 @@ import static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class GeoDistanceIT extends ParameterizedOpenSearchIntegTestCase { +public class GeoDistanceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public GeoDistanceIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoDistanceIT(Settings staticSettings) { + super(staticSettings); } @Override @@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception { XContentBuilder source = jsonBuilder().startObject().field("city", name); source.startArray("location"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java index be31a3afadad0..a4aea6096a6e4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java @@ -37,13 +37,12 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.bucket.global.Global; import org.opensearch.search.aggregations.metrics.Stats; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -61,12 +60,12 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class GlobalIT extends ParameterizedOpenSearchIntegTestCase { +public class GlobalIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static int numDocs; - public GlobalIT(Settings dynamicSettings) { - super(dynamicSettings); + public GlobalIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java index 75f57d1cc4c0e..4abd068d6fe37 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -56,7 +55,7 @@ import org.opensearch.search.aggregations.metrics.Stats; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.io.IOException; @@ -91,7 +90,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class HistogramIT extends ParameterizedOpenSearchIntegTestCase { +public class HistogramIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; private static final String MULTI_VALUED_FIELD_NAME = "l_values"; @@ -102,8 +101,8 @@ public class HistogramIT extends ParameterizedOpenSearchIntegTestCase { static long[] valueCounts, valuesCounts; static Map<Long, Map<String, Object>> expectedMultiSortBuckets; - public HistogramIT(Settings dynamicSettings) { - super(dynamicSettings); + public HistogramIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -114,11 +113,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java index 14a3685bd183e..44789ea63f536 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; import org.opensearch.script.Script; @@ -45,7 +44,7 @@ import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.bucket.range.Range; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -60,10 +59,10 @@ import static org.hamcrest.Matchers.instanceOf; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class IpRangeIT extends ParameterizedOpenSearchIntegTestCase { +public class IpRangeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public IpRangeIT(Settings dynamicSettings) { - super(dynamicSettings); + public IpRangeIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -74,11 +73,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public static class DummyScriptPlugin extends MockScriptPlugin { @Override public Map<String, Function<Map<String, Object>, Object>> pluginScripts() { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java index c712c97af5c71..4d2da4fa1d14b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java @@ -51,8 +51,8 @@ public class IpTermsIT extends AbstractTermsTestCase { - public IpTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public IpTermsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java index 345cbdae8ef07..49031bfd3fc1d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java @@ -86,8 +86,8 @@ @OpenSearchIntegTestCase.SuiteScopeTestCase public class LongTermsIT extends AbstractTermsTestCase { - public LongTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public LongTermsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java index 90dafc0d57887..781d2acc5e2be 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java @@ -82,8 +82,8 @@ public class MinDocCountIT extends AbstractTermsTestCase { private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); private static int cardinality; - public MinDocCountIT(Settings dynamicSettings) { - super(dynamicSettings); + public MinDocCountIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java index ea5a59d89309f..09133f720f9f7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java @@ -34,8 +34,8 @@ @OpenSearchIntegTestCase.SuiteScopeTestCase public class MultiTermsIT extends BaseStringTermsTestCase { - public MultiTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public MultiTermsIT(Settings staticSettings) { + super(staticSettings); } // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java index 6289cd5e36151..3eb813dcb91ef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.Comparators; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -51,7 +50,7 @@ import org.opensearch.search.aggregations.support.ValuesSource; import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -67,7 +66,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class NaNSortingIT extends ParameterizedOpenSearchIntegTestCase { +public class NaNSortingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private enum SubAggregation { AVG("avg") { @@ -139,8 +138,8 @@ public String sortKey() { public abstract double getValue(Aggregation aggregation); } - public NaNSortingIT(Settings dynamicSettings) { - super(dynamicSettings); + public NaNSortingIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -151,11 +150,6 @@ public static Collection<Object[]> parameters() 
{ ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(client().admin().indices().prepareCreate("idx").setMapping("string_value", "type=keyword").get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java index 7af2ac218800d..288d4d2c4e525 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -57,7 +56,7 @@ import org.opensearch.search.aggregations.metrics.Stats; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -92,14 +91,14 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class NestedIT extends ParameterizedOpenSearchIntegTestCase { +public class NestedIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static int numParents; private static int[] numChildren; private static SubAggCollectionMode aggCollectionMode; - public NestedIT(Settings dynamicSettings) { - super(dynamicSettings); + public NestedIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -110,11 +109,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java index 5812b7796c33e..50cee4e9ecd92 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; @@ -51,7 +50,7 @@ import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import 
org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -79,15 +78,15 @@ import static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class RangeIT extends ParameterizedOpenSearchIntegTestCase { +public class RangeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; private static final String MULTI_VALUED_FIELD_NAME = "l_values"; static int numDocs; - public RangeIT(Settings dynamicSettings) { - super(dynamicSettings); + public RangeIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -98,11 +97,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java index 2716db6b7e745..3bf9233d3441d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.BucketOrder; @@ -47,7 +46,7 @@ import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.ValueCount; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -76,10 +75,10 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ReverseNestedIT extends ParameterizedOpenSearchIntegTestCase { +public class ReverseNestedIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ReverseNestedIT(Settings dynamicSettings) { - super(dynamicSettings); + public ReverseNestedIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -90,11 +89,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java index c7b03d21cb6bb..3decab92acbff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.action.support.WriteRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.sampler.Sampler; @@ -48,7 +47,7 @@ import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; import org.opensearch.search.aggregations.metrics.Max; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -71,7 +70,7 @@ * Tests the Sampler aggregation */ @OpenSearchIntegTestCase.SuiteScopeTestCase -public class SamplerIT extends ParameterizedOpenSearchIntegTestCase { +public class SamplerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public static final int NUM_SHARDS = 2; @@ -79,8 +78,8 @@ public String randomExecutionHint() { return randomBoolean() ? null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); } - public SamplerIT(Settings dynamicSettings) { - super(dynamicSettings); + public SamplerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java index 66d761c56634e..4cab6deb08bb5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.geometry.utils.Geohash; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -49,7 +48,7 @@ import org.opensearch.search.aggregations.bucket.range.Range; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -77,10 +76,10 @@ * we can make sure that the reduce is properly propagated by checking that empty buckets were created. 
*/ @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ShardReduceIT extends ParameterizedOpenSearchIntegTestCase { +public class ShardReduceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ShardReduceIT(Settings dynamicSettings) { - super(dynamicSettings); + public ShardReduceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private IndexRequestBuilder indexDoc(String date, int value) throws Exception { return client().prepareIndex("idx") .setSource( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java index 7c7cc12888307..66cce21bcf86f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -47,8 +47,8 @@ public class ShardSizeTermsIT extends ShardSizeTestCase { - public ShardSizeTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public ShardSizeTermsIT(Settings staticSettings) { + super(staticSettings); } public void testNoShardSizeString() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index e914b87754865..f2e9265fa5cf9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; @@ -65,7 +64,7 @@ import org.opensearch.search.aggregations.bucket.terms.heuristic.ScriptHeuristic; import org.opensearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; import java.io.IOException; @@ -95,14 +94,14 @@ import static org.hamcrest.Matchers.is; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class SignificantTermsSignificanceScoreIT extends ParameterizedOpenSearchIntegTestCase { +public class SignificantTermsSignificanceScoreIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static final String INDEX_NAME = "testidx"; static final String TEXT_FIELD = "text"; static final String CLASS_FIELD = "class"; - public SignificantTermsSignificanceScoreIT(Settings dynamicSettings) { - super(dynamicSettings); + public 
SignificantTermsSignificanceScoreIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -113,11 +112,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(TestScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index 343cea4b94c87..add6b71cb1753 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -38,14 +38,13 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -67,7 +66,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class TermsDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase { +public class TermsDocCountErrorIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String STRING_FIELD_NAME = "s_value"; private static final String LONG_FIELD_NAME = "l_value"; @@ -79,8 +78,8 @@ public static String randomExecutionHint() { private static int numRoutingValues; - public TermsDocCountErrorIT(Settings dynamicSettings) { - super(dynamicSettings); + public TermsDocCountErrorIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(client().admin().indices().prepareCreate("idx").setMapping(STRING_FIELD_NAME, "type=keyword").get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java index 5ad913e8c7086..422af15d2881d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java @@ -14,27 +14,27 @@ import org.opensearch.action.search.SearchResponse; import 
org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.IndexSettings.MINIMUM_REFRESH_INTERVAL; import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.OpenSearchIntegTestCase.Scope.TEST; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false) -public class TermsFixedDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase { +public class TermsFixedDocCountErrorIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String STRING_FIELD_NAME = "s_value"; - public TermsFixedDocCountErrorIT(Settings dynamicSettings) { - super(dynamicSettings); + public TermsFixedDocCountErrorIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -45,11 +45,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleAggErrorMultiShard() throws Exception { // size = 1, shard_size = 2 // Shard_1 [A, A, A, A, B, B, C, C, D, D] -> Buckets {"A" : 4, "B" : 2} @@ -71,7 +66,10 @@ public void testSimpleAggErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -89,7 +87,10 @@ public void testSimpleAggErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -127,7 +128,10 @@ public void testSimpleAggErrorSingleShard() throws Exception { assertAcked( prepareCreate("idx_shard_error").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + 
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -170,7 +174,10 @@ public void testSliceLevelDocCountErrorSingleShard() throws Exception { assertAcked( prepareCreate("idx_slice_error").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -248,7 +255,10 @@ public void testSliceLevelDocCountErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -288,7 +298,10 @@ public void testSliceLevelDocCountErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 3851b16551795..1cc250c00dba9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.BucketOrder; @@ -44,7 +43,7 @@ import org.opensearch.search.aggregations.bucket.terms.SignificantTerms; import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory; import org.opensearch.search.aggregations.bucket.terms.Terms; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -61,12 +60,12 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; 
import static org.hamcrest.Matchers.equalTo; -public class TermsShardMinDocCountIT extends ParameterizedOpenSearchIntegTestCase { +public class TermsShardMinDocCountIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String index = "someindex"; - public TermsShardMinDocCountIT(Settings dynamicSettings) { - super(dynamicSettings); + public TermsShardMinDocCountIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private static String randomExecutionHint() { return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java index 20caa4fd076fe..79aa4a648310a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java @@ -38,8 +38,8 @@ public class BaseStringTermsTestCase extends AbstractTermsTestCase { protected static final String MULTI_VALUED_FIELD_NAME = "s_values"; protected static Map<String, Map<String, Object>> expectedMultiSortBuckets; - public BaseStringTermsTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public BaseStringTermsTestCase(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java index 8c727d280ec52..edf9cd432dda2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -79,8 +79,8 @@ @OpenSearchIntegTestCase.SuiteScopeTestCase public class StringTermsIT extends BaseStringTermsTestCase { - public StringTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public StringTermsIT(Settings staticSettings) { + super(staticSettings); } // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java index 9ebec21367164..db4ee3571d141 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -48,7 +47,7 @@ import 
org.opensearch.search.aggregations.bucket.global.Global; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -71,10 +70,10 @@ import static org.hamcrest.Matchers.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class CardinalityIT extends ParameterizedOpenSearchIntegTestCase { +public class CardinalityIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public CardinalityIT(Settings dynamicSettings) { - super(dynamicSettings); + public CardinalityIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index 94756f3fe9f99..8122304ba992c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -38,12 +38,11 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.search.aggregations.Aggregator; import org.opensearch.search.aggregations.BucketOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -54,10 +53,10 @@ import static org.opensearch.search.aggregations.AggregationBuilders.cardinality; import static org.opensearch.search.aggregations.AggregationBuilders.terms; -public class CardinalityWithRequestBreakerIT extends ParameterizedOpenSearchIntegTestCase { +public class CardinalityWithRequestBreakerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public CardinalityWithRequestBreakerIT(Settings dynamicSettings) { - super(dynamicSettings); + public CardinalityWithRequestBreakerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -68,11 +67,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * Test that searches using cardinality aggregations returns all request breaker memory. 
*/ diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java index 3d804b9aa626e..4a2c100690de4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -70,8 +70,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { - public ExtendedStatsIT(Settings dynamicSettings) { - super(dynamicSettings); + public ExtendedStatsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java index 78100d1778ecf..ed87fa6d8f5f6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java @@ -55,8 +55,8 @@ public class GeoCentroidIT extends AbstractGeoTestCase { private static final String aggName = "geoCentroid"; - public GeoCentroidIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoCentroidIT(Settings staticSettings) { + super(staticSettings); } public void testEmptyAggregation() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 7ca5130388eea..ae67f0b1c0b66 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -72,8 +72,8 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { - public HDRPercentileRanksIT(Settings dynamicSettings) { - super(dynamicSettings); + public HDRPercentileRanksIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java index ec913b3e130f5..ff1cab85c18e6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -75,8 +75,8 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { - public HDRPercentilesIT(Settings dynamicSettings) { - super(dynamicSettings); + public HDRPercentilesIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index b8447d682abae..0edba475a6401 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -91,8 +91,8 @@ public class 
MedianAbsoluteDeviationIT extends AbstractNumericTestCase { private static double singleValueExactMAD; private static double multiValueExactMAD; - public MedianAbsoluteDeviationIT(Settings dynamicSettings) { - super(dynamicSettings); + public MedianAbsoluteDeviationIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java index ced2358ac3f78..1725aa7847d72 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -56,7 +55,7 @@ import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.Before; import java.io.IOException; @@ -93,12 +92,12 @@ @ClusterScope(scope = Scope.SUITE) @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ScriptedMetricIT extends ParameterizedOpenSearchIntegTestCase { +public class ScriptedMetricIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static long numDocs; - public ScriptedMetricIT(Settings dynamicSettings) { - super(dynamicSettings); + public ScriptedMetricIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -109,11 +108,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java index f957a74eeb9d0..3708e1e6ab21b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java @@ -66,8 +66,8 @@ import static org.hamcrest.Matchers.sameInstance; public class StatsIT extends AbstractNumericTestCase { - public StatsIT(Settings dynamicSettings) { - super(dynamicSettings); + public StatsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java index 382d656448114..b2aa3438b2306 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java @@ -68,8 +68,8 @@ public class SumIT extends AbstractNumericTestCase { - public SumIT(Settings dynamicSettings) { - super(dynamicSettings); + public SumIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 941d3a888db29..4225c027c4d96 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -72,8 +72,8 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { - public TDigestPercentileRanksIT(Settings dynamicSettings) { - super(dynamicSettings); + public TDigestPercentileRanksIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java index 6457cf9307fa1..974e90fab16e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -74,8 +74,8 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { - public TDigestPercentilesIT(Settings dynamicSettings) { - super(dynamicSettings); + public TDigestPercentilesIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index 10e51079cf389..5d84452998e40 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -42,7 +42,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.common.document.DocumentField; import 
org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.index.query.MatchAllQueryBuilder; @@ -70,7 +69,7 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -111,13 +110,13 @@ import static org.hamcrest.Matchers.sameInstance; @OpenSearchIntegTestCase.SuiteScopeTestCase() -public class TopHitsIT extends ParameterizedOpenSearchIntegTestCase { +public class TopHitsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String TERMS_AGGS_FIELD = "terms"; private static final String SORT_FIELD = "sort"; - public TopHitsIT(Settings dynamicSettings) { - super(dynamicSettings); + public TopHitsIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -128,11 +127,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java index 833d1ce3bb4c3..4610281c4b8a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -45,7 +44,7 @@ import org.opensearch.search.aggregations.bucket.global.Global; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -73,10 +72,10 @@ import static org.hamcrest.Matchers.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ValueCountIT extends ParameterizedOpenSearchIntegTestCase { +public class ValueCountIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ValueCountIT(Settings dynamicSettings) { - super(dynamicSettings); + public ValueCountIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -87,11 +86,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java index bec9203384026..48fd06bac285b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -46,7 +45,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -66,7 +65,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class AvgBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class AvgBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; @@ -77,8 +76,8 @@ public class AvgBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public AvgBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public AvgBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java index 4c3129eb89e3b..1b22cf2018d96 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -51,7 +50,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -76,7 +75,7 @@
 import static org.hamcrest.Matchers.nullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class BucketScriptIT extends ParameterizedOpenSearchIntegTestCase {
+public class BucketScriptIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String FIELD_1_NAME = "field1";
     private static final String FIELD_2_NAME = "field2";
@@ -90,8 +89,8 @@ public class BucketScriptIT extends ParameterizedOpenSearchIntegTestCase {
     private static int maxNumber;
     private static long date;

-    public BucketScriptIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public BucketScriptIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -102,11 +101,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java
index a7b28add7373a..7dca1d0d79b1e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -50,7 +49,7 @@
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -77,7 +76,7 @@
 import static org.hamcrest.Matchers.nullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class BucketSelectorIT extends ParameterizedOpenSearchIntegTestCase {
+public class BucketSelectorIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String FIELD_1_NAME = "field1";
     private static final String FIELD_2_NAME = "field2";
@@ -89,8 +88,8 @@ public class BucketSelectorIT extends ParameterizedOpenSearchIntegTestCase {
     private static int minNumber;
     private static int maxNumber;

-    public BucketSelectorIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public BucketSelectorIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -101,11 +100,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java
index 2e4fd7a412118..ffb607866935b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
@@ -48,7 +47,7 @@
 import org.opensearch.search.sort.FieldSortBuilder;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.io.IOException;
 import java.time.ZonedDateTime;
@@ -75,7 +74,7 @@
 import static org.hamcrest.Matchers.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class BucketSortIT extends ParameterizedOpenSearchIntegTestCase {
+public class BucketSortIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String INDEX = "bucket-sort-it-data-index";
     private static final String INDEX_WITH_GAPS = "bucket-sort-it-data-index-with-gaps";
@@ -85,8 +84,8 @@ public class BucketSortIT extends ParameterizedOpenSearchIntegTestCase {
     private static final String VALUE_1_FIELD = "value_1";
     private static final String VALUE_2_FIELD = "value_2";

-    public BucketSortIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public BucketSortIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         createIndex(INDEX, INDEX_WITH_GAPS);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java
index b05ff7b4329cd..8c89c1232ebb3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
 import org.opensearch.common.time.DateFormatters;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.InternalAggregation;
 import org.opensearch.search.aggregations.InternalMultiBucketAggregation;
 import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@@ -48,7 +47,7 @@
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.support.AggregationPath;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import org.hamcrest.Matcher;
 import org.junit.After;
@@ -76,15 +75,15 @@
 import static org.hamcrest.core.IsNull.nullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class DateDerivativeIT extends ParameterizedOpenSearchIntegTestCase {
+public class DateDerivativeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     // some index names used during these tests
     private static final String IDX_DST_START = "idx_dst_start";
     private static final String IDX_DST_END = "idx_dst_end";
     private static final String IDX_DST_KATHMANDU = "idx_dst_kathmandu";

-    public DateDerivativeIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DateDerivativeIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private ZonedDateTime date(int month, int day) {
         return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java
index 41bbffc13658b..f8def40ec003a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.InternalAggregation;
@@ -51,7 +50,7 @@
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.search.aggregations.support.AggregationPath;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;

 import java.io.IOException;
@@ -78,7 +77,7 @@
 import static org.hamcrest.core.IsNull.nullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class DerivativeIT extends ParameterizedOpenSearchIntegTestCase {
+public class DerivativeIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {

     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
@@ -112,11 +111,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         createIndex("idx");
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
index 299827e2413d4..1bd04cc13268f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -49,7 +48,7 @@
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -69,7 +68,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class ExtendedStatsBucketIT extends ParameterizedOpenSearchIntegTestCase {
+public class ExtendedStatsBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
@@ -80,8 +79,8 @@ public class ExtendedStatsBucketIT extends ParameterizedOpenSearchIntegTestCase
     static int numValueBuckets;
     static long[] valueCounts;

-    public ExtendedStatsBucketIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public ExtendedStatsBucketIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -92,11 +91,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java
index dc3b690c7f78f..ea6fcbd6a1560 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.support.WriteRequest;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
@@ -58,7 +57,7 @@
 import org.opensearch.search.aggregations.metrics.SumAggregationBuilder;
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -80,7 +79,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class MaxBucketIT extends ParameterizedOpenSearchIntegTestCase {
+public class MaxBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
@@ -91,8 +90,8 @@ public class MaxBucketIT extends ParameterizedOpenSearchIntegTestCase {
     static int numValueBuckets;
     static long[] valueCounts;

-    public MaxBucketIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MaxBucketIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -103,11 +102,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java
index 189bfd9b5b80a..44d12436382f6 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -46,7 +45,7 @@
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -66,7 +65,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class MinBucketIT extends ParameterizedOpenSearchIntegTestCase {
+public class MinBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
@@ -77,8 +76,8 @@ public class MinBucketIT extends ParameterizedOpenSearchIntegTestCase {
     static int numValueBuckets;
     static long[] valueCounts;

-    public MinBucketIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MinBucketIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java
index 8ad3107ac33ac..d35b80b7918fe 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java
@@ -43,14 +43,13 @@
 import org.opensearch.client.Client;
 import org.opensearch.common.collect.EvictingQueue;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
 import org.opensearch.search.aggregations.metrics.Avg;
 import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;

 import java.util.ArrayList;
@@ -77,7 +76,7 @@
 import static org.hamcrest.core.IsNull.nullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class MovAvgIT extends ParameterizedOpenSearchIntegTestCase {
+public class MovAvgIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String INTERVAL_FIELD = "l_value";
     private static final String VALUE_FIELD = "v_value";
     private static final String VALUE_FIELD2 = "v_value2";
@@ -133,8 +132,8 @@ public String toString() {
         }
     }

-    public MovAvgIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MovAvgIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -145,11 +144,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         prepareCreate("idx").setMapping(
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java
index 580497715ed6d..29cb334bfcd00 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.terms.IncludeExclude;
@@ -47,7 +46,7 @@
 import org.opensearch.search.aggregations.metrics.Percentile;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -69,7 +68,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class PercentilesBucketIT extends ParameterizedOpenSearchIntegTestCase {
+public class PercentilesBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
     private static final double[] PERCENTS = { 0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0 };
@@ -80,8 +79,8 @@ public class PercentilesBucketIT extends ParameterizedOpenSearchIntegTestCase {
     static int numValueBuckets;
     static long[] valueCounts;

-    public PercentilesBucketIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public PercentilesBucketIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -92,11 +91,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java
index b4da63802bc50..507bff51f0e39 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java
@@ -38,12 +38,11 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.collect.EvictingQueue;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
 import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;

 import java.util.ArrayList;
@@ -69,7 +68,7 @@
 import static org.hamcrest.core.IsNull.nullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class SerialDiffIT extends ParameterizedOpenSearchIntegTestCase {
+public class SerialDiffIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String INTERVAL_FIELD = "l_value";
     private static final String VALUE_FIELD = "v_value";
@@ -98,8 +97,8 @@ public String toString() {
         }
     }

-    public SerialDiffIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SerialDiffIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -110,11 +109,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private ValuesSourceAggregationBuilder<? extends ValuesSourceAggregationBuilder<?>> randomMetric(String name, String field) {
         int rand = randomIntBetween(0, 3);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java
index 21fdd5e761e77..fbaf799871c8a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -46,7 +45,7 @@
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -66,7 +65,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class StatsBucketIT extends ParameterizedOpenSearchIntegTestCase {
+public class StatsBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";

     static int numDocs;
@@ -76,8 +75,8 @@ public class StatsBucketIT extends ParameterizedOpenSearchIntegTestCase {
     static int numValueBuckets;
    static long[] valueCounts;

-    public StatsBucketIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public StatsBucketIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java
index d4bd8f21b2a99..a5967124ff921 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -46,7 +45,7 @@
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -66,7 +65,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;

 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class SumBucketIT extends ParameterizedOpenSearchIntegTestCase {
+public class SumBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
@@ -77,8 +76,8 @@ public class SumBucketIT extends ParameterizedOpenSearchIntegTestCase {
     static int numValueBuckets;
     static long[] valueCounts;

-    public SumBucketIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SumBucketIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java
index 28ada82a1c56b..fb84134120e00 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java
@@ -21,7 +20,6 @@
 import org.opensearch.common.inject.Inject;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.action.ActionListener;
 import org.opensearch.core.action.ActionResponse;
 import org.opensearch.core.common.io.stream.StreamInput;
@@ -37,7 +36,7 @@
 import org.opensearch.tasks.CancellableTask;
 import org.opensearch.tasks.Task;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.TransportService;
 import org.hamcrest.MatcherAssert;
@@ -61,13 +60,13 @@
 import static org.hamcrest.Matchers.instanceOf;

 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public class SearchBackpressureIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchBackpressureIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     private static final TimeValue TIMEOUT = new TimeValue(10, TimeUnit.SECONDS);
     private static final int MOVING_AVERAGE_WINDOW_SIZE = 10;

-    public SearchBackpressureIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchBackpressureIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java
index bd623ccdf2731..ad1ce0582cfb3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java
@@ -42,11 +42,10 @@
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.cluster.routing.ShardRoutingState;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.search.SearchService;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.After;

 import java.util.Arrays;
@@ -61,10 +60,10 @@
 import static org.hamcrest.Matchers.lessThan;

 @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2)
-public class SearchRedStateIndexIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchRedStateIndexIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public SearchRedStateIndexIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchRedStateIndexIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testAllowPartialsWithRedState() throws Exception {
         final int numShards = cluster().numDataNodes() + 2;
         buildRedIndex(numShards);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java
index a5989b693d332..681f7081fa2dc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java
@@ -39,9 +39,8 @@
 import org.opensearch.client.Client;
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.Arrays;
 import java.util.Collection;
@@ -54,10 +53,10 @@
 * This test basically verifies that search with a single shard active (cause we indexed to it) and other
 * shards possibly not active at all (cause they haven't allocated) will still work.
 */
-public class SearchWhileCreatingIndexIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchWhileCreatingIndexIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public SearchWhileCreatingIndexIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchWhileCreatingIndexIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -68,11 +67,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testIndexCausesIndexCreation() throws Exception {
         searchWhileCreatingIndex(false, 1); // 1 replica in our default...
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java
index 6d2ec845afa98..f7b8b0df7dca7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java
@@ -40,10 +40,9 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.Priority;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.SearchHits;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -60,10 +59,10 @@
 import static org.hamcrest.Matchers.equalTo;

 @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2)
-public class SearchWhileRelocatingIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchWhileRelocatingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public SearchWhileRelocatingIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchWhileRelocatingIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -74,11 +73,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSearchAndRelocateConcurrentlyRandomReplicas() throws Exception {
         testSearchAndRelocateConcurrently(randomIntBetween(0, 1));
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
index aa82b9d21c7fb..614ec2ebd634a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
@@ -49,14 +49,13 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.Settings.Builder;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.index.MockEngineFactoryPlugin;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.engine.MockEngineSupport;
 import org.opensearch.test.engine.ThrowingLeafReaderWrapper;
@@ -71,10 +70,10 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;

-public class SearchWithRandomExceptionsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchWithRandomExceptionsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public SearchWithRandomExceptionsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchWithRandomExceptionsIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
index 446a0bce58d66..b45b334fc1d1c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
@@ -45,13 +45,12 @@
 import org.opensearch.client.Requests;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.store.MockFSDirectoryFactory;
 import org.opensearch.test.store.MockFSIndexStore;
@@ -64,10 +63,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;

-public class SearchWithRandomIOExceptionsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchWithRandomIOExceptionsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public SearchWithRandomIOExceptionsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchWithRandomIOExceptionsIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(MockFSIndexStore.TestPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
index cbe52abf5279b..0e337822ba0e7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
@@ -44,11 +44,10 @@
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.Priority;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.MatchQueryBuilder;
 import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.io.IOException;
 import java.util.Arrays;
@@ -64,10 +63,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;

-public class TransportSearchFailuresIT extends ParameterizedOpenSearchIntegTestCase {
+public class TransportSearchFailuresIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public TransportSearchFailuresIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public TransportSearchFailuresIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected int maximumNumberOfReplicas() {
         return 1;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
index edceb0cbc0d24..a82b6f12755ca 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.client.Requests;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.MatchQueryBuilder;
 import org.opensearch.index.query.QueryBuilders;
@@ -54,7 +53,7 @@
 import org.opensearch.search.aggregations.bucket.global.Global;
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.io.IOException;
 import java.util.Arrays;
@@ -80,10 +79,10 @@
 import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.Matchers.startsWith;

-public class TransportTwoNodesSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class TransportTwoNodesSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public TransportTwoNodesSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public TransportTwoNodesSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -94,11 +93,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected int numberOfReplicas() {
         return 0;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
index 87f2153eb800f..13b4abb58b4df 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
@@ -42,7 +42,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.document.DocumentField;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.ParsingException;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
@@ -54,7 +53,7 @@
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -74,10 +73,10 @@
 import static org.hamcrest.CoreMatchers.equalTo;

 @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 2)
-public class FetchSubPhasePluginIT extends ParameterizedOpenSearchIntegTestCase {
+public class FetchSubPhasePluginIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public FetchSubPhasePluginIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FetchSubPhasePluginIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singletonList(FetchTermVectorsPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
index 1a730c01e4890..b743c00bf4549 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.query.BoolQueryBuilder;
@@ -58,7 +57,7 @@
 import org.opensearch.search.sort.FieldSortBuilder;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -88,10 +87,10 @@
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

-public class InnerHitsIT extends ParameterizedOpenSearchIntegTestCase {
+public class InnerHitsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public InnerHitsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public InnerHitsIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -102,11 +101,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
index 83cedb8c20e1d..a1adc6f99b92a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
@@ -36,7 +36,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.XContentHelper;
@@ -45,7 +44,7 @@
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.index.query.TermQueryBuilder;
 import org.opensearch.search.SearchHit;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.Arrays;
 import java.util.Collection;
@@ -62,12 +61,14 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasItemInArray;
+import static org.hamcrest.Matchers.hasKey;

-public class MatchedQueriesIT extends ParameterizedOpenSearchIntegTestCase {
+public class MatchedQueriesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public MatchedQueriesIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MatchedQueriesIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -78,11 +79,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSimpleMatchedQueryFromFilteredQuery() throws Exception {
         createIndex("test");
         ensureGreen();
@@ -101,15 +97,18 @@
                     .should(rangeQuery("number").gte(2).queryName("test2"))
             )
         )
+            .setIncludeNamedQueriesScore(true)
             .get();
         assertHitCount(searchResponse, 3L);
         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("3") || hit.getId().equals("2")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("test2"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2"));
+                assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f));
             } else if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("test1"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1"));
+                assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -119,15 +118,18 @@
             .setQuery(
                 boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2"))
             )
+            .setIncludeNamedQueriesScore(true)
             .get();
         assertHitCount(searchResponse, 3L);
         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1") || hit.getId().equals("2")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("test1"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1"));
+                assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f));
             } else if (hit.getId().equals("3")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("test2"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2"));
+                assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -153,12 +155,15 @@
         assertHitCount(searchResponse, 3L);
         for (SearchHit hit : searchResponse.getHits()) {
            if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(2));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("name"));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("title"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
+                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("title"));
+                assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f));
             } else if (hit.getId().equals("2") || hit.getId().equals("3")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("name"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
+                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -174,12 +179,15 @@
         assertHitCount(searchResponse, 3L);
         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(2));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("name"));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("title"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
+                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("title"));
+                assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f));
             } else if (hit.getId().equals("2") || hit.getId().equals("3")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("name"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
+                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -203,9 +211,11 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex
         assertHitCount(searchResponse, 3L);
         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(2));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("name"));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("title"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("name"));
+                assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("title"));
+                assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -237,13 +247,15 @@ public void testRegExpQuerySupportsName() throws InterruptedException {

         SearchResponse searchResponse = client().prepareSearch()
             .setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex"))
+            .setIncludeNamedQueriesScore(true)
             .get();
         assertHitCount(searchResponse, 1L);

         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("regex"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("regex"));
+                assertThat(hit.getMatchedQueryScore("regex"), equalTo(1f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -258,15 +270,17 @@
         refresh();
         indexRandomForConcurrentSearch("test1");

-        SearchResponse searchResponse = client().prepareSearch()
+        var query = client().prepareSearch()
             .setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix"))
-            .get();
+            .setIncludeNamedQueriesScore(true);
+        var searchResponse = query.get();
         assertHitCount(searchResponse, 1L);

         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("prefix"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("prefix"));
+                assertThat(hit.getMatchedQueryScore("prefix"), equalTo(1f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -288,8 +302,9 @@

         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("fuzzy"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("fuzzy"));
+                assertThat(hit.getMatchedQueryScore("fuzzy"), greaterThan(0f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -306,13 +321,15 @@

         SearchResponse searchResponse = client().prepareSearch()
             .setQuery(QueryBuilders.wildcardQuery("title", "titl*").queryName("wildcard"))
+            .setIncludeNamedQueriesScore(true)
             .get();
         assertHitCount(searchResponse, 1L);

         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("wildcard"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("wildcard"));
+                assertThat(hit.getMatchedQueryScore("wildcard"), equalTo(1f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -334,8 +351,9 @@

         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("span"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("span"));
+                assertThat(hit.getMatchedQueryScore("span"), greaterThan(0f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -369,11 +387,13 @@
         assertHitCount(searchResponse, 2L);
         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.getId().equals("1")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("dolor"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("dolor"));
+                assertThat(hit.getMatchedQueryScore("dolor"), greaterThan(0f));
             } else if (hit.getId().equals("2")) {
-                assertThat(hit.getMatchedQueries().length, equalTo(1));
-                assertThat(hit.getMatchedQueries(), hasItemInArray("elit"));
+                assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+                assertThat(hit.getMatchedQueriesAndScores(), hasKey("elit"));
+                assertThat(hit.getMatchedQueryScore("elit"), greaterThan(0f));
             } else {
                 fail("Unexpected document returned with id " + hit.getId());
             }
@@ -397,7 +417,10 @@
         for (QueryBuilder query : queries) {
             SearchResponse searchResponse = client().prepareSearch().setQuery(query).get();
             assertHitCount(searchResponse, 1L);
-            assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("abc"));
+            SearchHit hit = searchResponse.getHits().getAt(0);
+            assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1));
+            assertThat(hit.getMatchedQueriesAndScores(), hasKey("abc"));
+            assertThat(hit.getMatchedQueryScore("abc"), greaterThan(0f));
         }
     }
 }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
index fe17c3e22d43c..66cbf36137551 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
@@ -35,12 +35,11 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;

 import java.io.IOException;
@@ -57,10 +56,10 @@
 * Integration test for highlighters registered by a plugin.
 */
 @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
-public class CustomHighlighterSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class CustomHighlighterSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public CustomHighlighterSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public CustomHighlighterSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CustomHighlighterPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 2afa911223074..5bfc556bb629e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -49,7 +49,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.Settings.Builder;
 import org.opensearch.common.time.DateFormatter;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -75,7 +74,7 @@
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
 import org.opensearch.test.MockKeywordPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matcher;
 import org.hamcrest.Matchers;
@@ -128,13 +127,13 @@
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.startsWith;

-public class HighlighterSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class HighlighterSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     // TODO as we move analyzers out of the core we need to move some of these into HighlighterWithAnalyzersTests
     private static final String[] ALL_TYPES = new String[] { "plain", "fvh", "unified" };

-    public HighlighterSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public HighlighterSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -145,11 +144,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockAnalysisPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
index f5d1b8234558e..4d398f8ca09cc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
@@ -38,13 +38,12 @@
 import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.MapperPlugin;
 import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;

 import java.util.ArrayList;
@@ -59,10 +58,10 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;

-public class FieldCapabilitiesIT extends ParameterizedOpenSearchIntegTestCase {
+public class FieldCapabilitiesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public FieldCapabilitiesIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FieldCapabilitiesIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -73,11 +72,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Before
     public void setUp() throws Exception {
         super.setUp();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
index ed8fe74504f92..906d45ef84b3f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
@@ -43,7 +43,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
 import org.opensearch.common.time.DateUtils;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.support.XContentMapValues;
 import org.opensearch.core.common.bytes.BytesArray;
@@ -63,7 +62,7 @@
 import org.opensearch.search.lookup.FieldLookup;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
@@ -104,10 +103,10 @@
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

-public class SearchFieldsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchFieldsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public SearchFieldsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchFieldsIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -118,11 +117,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
index 3a6624c2ad2e6..0380b3c7ddb89 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
@@ -46,7 +46,6 @@
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilder;
 import org.opensearch.index.query.QueryBuilders;
@@ -56,7 +55,7 @@
 import org.opensearch.search.MultiValueMode;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchHits;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;

 import java.time.ZoneOffset;
@@ -91,10 +90,10 @@
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;

-public class DecayFunctionScoreIT extends ParameterizedOpenSearchIntegTestCase {
+public class DecayFunctionScoreIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public DecayFunctionScoreIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DecayFunctionScoreIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
@@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected boolean forbidPrivateIndexSettings() {
         return false;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
index 62d0d89c644a5..0573dcfc4863d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
@@ -43,7 +43,6 @@
 import org.opensearch.common.lucene.search.function.CombineFunction;
 import org.opensearch.common.lucene.search.function.Functions;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.plugins.ScriptPlugin;
@@
-58,7 +57,7 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; @@ -83,10 +82,10 @@ import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) -public class ExplainableScriptIT extends ParameterizedOpenSearchIntegTestCase { +public class ExplainableScriptIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ExplainableScriptIT(Settings dynamicSettings) { - super(dynamicSettings); + public ExplainableScriptIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public static class ExplainableScriptPlugin extends Plugin implements ScriptPlugin { @Override public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java index d53f55b98bd23..6956833cf6d62 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -38,9 +38,8 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.lucene.search.function.FieldValueFactorFunction; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.SearchHit; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -61,10 +60,10 @@ /** * Tests for the {@code field_value_factor} function in a function_score query. 
*/ -public class FunctionScoreFieldValueIT extends ParameterizedOpenSearchIntegTestCase { +public class FunctionScoreFieldValueIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public FunctionScoreFieldValueIT(Settings dynamicSettings) { - super(dynamicSettings); + public FunctionScoreFieldValueIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testFieldValueFactor() throws IOException, InterruptedException { assertAcked( prepareCreate("test").setMapping( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java index 3b80d437e95c0..4f267f0059291 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java @@ -39,7 +39,6 @@ import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; @@ -50,7 +49,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -78,13 +77,13 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FunctionScoreIT extends ParameterizedOpenSearchIntegTestCase { +public class FunctionScoreIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static final String TYPE = "type"; static final String INDEX = "index"; - public FunctionScoreIT(Settings dynamicSettings) { - super(dynamicSettings); + public FunctionScoreIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java index a91f53dae04d2..593f844305743 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java @@ -40,7 +40,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.index.query.functionscore.DecayFunction; @@ -52,7 +51,7 @@ import org.opensearch.search.SearchHits; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; @@ -71,10 +70,10 @@ import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) -public class FunctionScorePluginIT extends ParameterizedOpenSearchIntegTestCase { +public class FunctionScorePluginIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public FunctionScorePluginIT(Settings dynamicSettings) { - super(dynamicSettings); + public FunctionScorePluginIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(CustomDistanceScorePlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java index bda6284d9535a..5121d5023fd95 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java @@ -43,7 +43,6 @@ import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.Settings.Builder; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.Operator; @@ -55,7 +54,7 @@ import org.opensearch.search.rescore.QueryRescoreMode; import org.opensearch.search.rescore.QueryRescorerBuilder; import org.opensearch.search.sort.SortBuilders; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -84,6 +83,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSecondHit; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertThirdHit; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasMatchedQueries; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasScore; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -91,10 +91,10 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; -public class QueryRescorerIT extends ParameterizedOpenSearchIntegTestCase { +public class QueryRescorerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public QueryRescorerIT(Settings dynamicSettings) { - super(dynamicSettings); + public QueryRescorerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -105,11 +105,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testEnforceWindowSize() throws InterruptedException { createIndex("test"); // this @@ -600,7 +595,7 @@ public void testExplain() throws Exception { SearchResponse searchResponse = client().prepareSearch() .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR).queryName("hello-world")) .setRescorer(innerRescoreQuery, 5) .setExplain(true) .get(); @@ -608,7 +603,10 @@ public void testExplain() throws Exception { assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); assertThirdHit(searchResponse, hasId("3")); - + final String[] matchedQueries = { "hello-world" }; + assertFirstHit(searchResponse, hasMatchedQueries(matchedQueries)); + assertSecondHit(searchResponse, hasMatchedQueries(matchedQueries)); + assertThirdHit(searchResponse, hasMatchedQueries(matchedQueries)); for (int j = 0; j < 3; 
j++) { assertThat(searchResponse.getHits().getAt(j).getExplanation().getDescription(), equalTo(descriptionModes[innerMode])); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java index 69e30fc879dd8..f1205ba0f1e93 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java @@ -36,7 +36,6 @@ import org.apache.lucene.util.ArrayUtil; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; @@ -47,7 +46,7 @@ import org.opensearch.script.Script; import org.opensearch.script.ScriptType; import org.opensearch.search.SearchHit; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.CoreMatchers; import java.util.Arrays; @@ -76,10 +75,10 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.nullValue; -public class RandomScoreFunctionIT extends ParameterizedOpenSearchIntegTestCase { +public class RandomScoreFunctionIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public RandomScoreFunctionIT(Settings dynamicSettings) { - super(dynamicSettings); + public RandomScoreFunctionIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -90,11 +89,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index ba519be04edff..701ff0a94baf2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -56,7 +56,6 @@ import org.opensearch.common.geo.builders.PointBuilder; import org.opensearch.common.geo.builders.PolygonBuilder; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.io.Streams; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.bytes.BytesReference; @@ -64,7 +63,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; import org.junit.BeforeClass; @@ -99,10 +98,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; -public class GeoFilterIT extends ParameterizedOpenSearchIntegTestCase { +public class GeoFilterIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public GeoFilterIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoFilterIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -113,11 +112,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected boolean forbidPrivateIndexSettings() { return false; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java index 85cb087585d31..2010a288427b3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java @@ -39,10 +39,9 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.SearchHit; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; import java.util.ArrayList; @@ -60,10 +59,10 @@ import static org.hamcrest.Matchers.equalTo; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class GeoPolygonIT extends ParameterizedOpenSearchIntegTestCase { +public class GeoPolygonIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public GeoPolygonIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoPolygonIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -74,11 +73,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return 
Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected boolean forbidPrivateIndexSettings() { return false; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index 1f9b6ae434f75..6dbffa019382d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -41,14 +41,13 @@ import org.opensearch.common.geo.builders.PointBuilder; import org.opensearch.common.geo.builders.ShapeBuilder; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.IndexService; import org.opensearch.index.mapper.GeoShapeFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.indices.IndicesService; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -61,10 +60,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class GeoShapeIntegrationIT extends ParameterizedOpenSearchIntegTestCase { +public class GeoShapeIntegrationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public GeoShapeIntegrationIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoShapeIntegrationIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index d21d6036c9673..e9115cf7dfbce 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -41,7 +41,6 @@ import org.opensearch.cluster.routing.IndexShardRoutingTable; import org.opensearch.common.geo.builders.ShapeBuilder; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; @@ -50,7 +49,7 @@ import org.opensearch.index.mapper.LegacyGeoShapeFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.indices.IndicesService; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -63,10 +62,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class LegacyGeoShapeIntegrationIT 
extends ParameterizedOpenSearchIntegTestCase { +public class LegacyGeoShapeIntegrationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public LegacyGeoShapeIntegrationIT(Settings dynamicSettings) { - super(dynamicSettings); + public LegacyGeoShapeIntegrationIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * Test that orientation parameter correctly persists across cluster restart */ diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index dc7c4e687c2fa..36fc5de0a5cf7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -41,7 +41,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.MoreLikeThisQueryBuilder; @@ -50,7 +49,7 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -79,10 +78,10 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; -public class MoreLikeThisIT extends ParameterizedOpenSearchIntegTestCase { +public class MoreLikeThisIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MoreLikeThisIT(Settings dynamicSettings) { - super(dynamicSettings); + public MoreLikeThisIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -93,11 +92,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java index b35208941d2a2..9f49b7a27cda4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java @@ -37,10 +37,9 @@ import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.query.QueryBuilders; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -52,10 +51,10 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId; import static org.hamcrest.Matchers.equalTo; -public class MultiSearchIT extends ParameterizedOpenSearchIntegTestCase { +public class MultiSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MultiSearchIT(Settings dynamicSettings) { - super(dynamicSettings); + public MultiSearchIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleMultiSearch() throws InterruptedException { createIndex("test"); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java index 71f82d7c0b412..a6554271a0bc5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -31,11 +30,6 @@ */ public class SimpleNestedExplainIT extends OpenSearchIntegTestCase { - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /* * Tests the explain output for multiple docs. 
Concurrent search with multiple slices is tested * here as call to indexRandomForMultipleSlices is made and compared with explain output for diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index 8eeffcbecb377..19e38da1aed05 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -47,7 +47,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -56,7 +55,7 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortMode; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -77,10 +76,10 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; -public class SimpleNestedIT extends ParameterizedOpenSearchIntegTestCase { +public class SimpleNestedIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SimpleNestedIT(Settings dynamicSettings) { - super(dynamicSettings); + public SimpleNestedIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleNested() throws Exception { assertAcked(prepareCreate("test").setMapping("nested1", "type=nested")); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java index a3432bfe7e3e4..8bea5ef97fbba 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java @@ -32,12 +32,11 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.ActionListener; import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; import org.junit.After; @@ -67,7 +66,7 @@ * Multi node integration tests for PIT creation and search operation with PIT ID. 
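Each of the suites in this batch follows the same mechanical migration, sketched here with a hypothetical class name: extend ParameterizedStaticSettingsOpenSearchIntegTestCase, hand the parameterized settings to the superclass (which applies them as static node settings), and delete the featureFlagSettings() override that used to force FeatureFlags.CONCURRENT_SEGMENT_SEARCH on, since the concurrent-search toggle now arrives through @ParametersFactory:

    import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

    import org.opensearch.common.settings.Settings;
    import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

    import java.util.Arrays;
    import java.util.Collection;

    import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;

    public class ExampleSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

        public ExampleSearchIT(Settings staticSettings) {
            super(staticSettings);
        }

        @ParametersFactory
        public static Collection<Object[]> parameters() {
            // Run the whole suite once with concurrent segment search off, once with it on.
            return Arrays.asList(
                new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
                new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
            );
        }
    }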
*/ @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) -public class PitMultiNodeIT extends ParameterizedOpenSearchIntegTestCase { +public class PitMultiNodeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public PitMultiNodeIT(Settings settings) { super(settings); } @@ -80,11 +79,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Before public void setupIndex() throws ExecutionException, InterruptedException { createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java index 6e40c08ed08a1..bc9eeb528b031 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java @@ -44,13 +44,12 @@ import org.opensearch.cluster.routing.OperationRouting; import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.node.Node; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -70,10 +69,10 @@ import static org.hamcrest.Matchers.not; @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2) -public class SearchPreferenceIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchPreferenceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchPreferenceIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchPreferenceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -84,11 +83,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java index 82dd6225fda4e..2f608a0cbe06f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import 
org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.InternalAggregation; @@ -50,7 +49,7 @@ import org.opensearch.search.profile.query.CollectorResult; import org.opensearch.search.profile.query.QueryProfileShardResult; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.core.IsNull; import java.util.ArrayList; @@ -83,7 +82,7 @@ import static org.hamcrest.Matchers.sameInstance; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class AggregationProfilerIT extends ParameterizedOpenSearchIntegTestCase { +public class AggregationProfilerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String BUILD_LEAF_COLLECTOR = AggregationTimingType.BUILD_LEAF_COLLECTOR.toString(); private static final String COLLECT = AggregationTimingType.COLLECT.toString(); @@ -166,8 +165,8 @@ public class AggregationProfilerIT extends ParameterizedOpenSearchIntegTestCase private static final String REASON_SEARCH_TOP_HITS = "search_top_hits"; private static final String REASON_AGGREGATION = "aggregation"; - public AggregationProfilerIT(Settings dynamicSettings) { - super(dynamicSettings); + public AggregationProfilerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -178,11 +177,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfShards() { return 1; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index ef73438114079..412a94aaf1b3e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -42,14 +42,13 @@ import org.opensearch.action.search.SearchType; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.profile.ProfileResult; import org.opensearch.search.profile.ProfileShardResult; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -68,7 +67,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; -public class QueryProfilerIT extends ParameterizedOpenSearchIntegTestCase { +public class QueryProfilerIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { private final boolean concurrentSearchEnabled; private static final String MAX_PREFIX = "max_"; private static final String MIN_PREFIX = "min_"; @@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, 
"true").build(); - } - /** * This test simply checks to make sure nothing crashes. Test indexes 100-150 documents, * constructs 20-100 random queries and tries to profile them diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java index e3253ea583ac2..b95542382e5fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java @@ -38,13 +38,12 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -62,10 +61,10 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -public class ExistsIT extends ParameterizedOpenSearchIntegTestCase { +public class ExistsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ExistsIT(Settings dynamicSettings) { - super(dynamicSettings); + public ExistsIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -76,11 +75,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - // TODO: move this to a unit test somewhere... 
public void testEmptyIndex() throws Exception { createIndex("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java index 457114bac33b8..392f8b036b7a2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.Fuzziness; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; @@ -54,7 +53,7 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.MockKeywordPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.Before; import java.io.IOException; @@ -92,10 +91,10 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; -public class MultiMatchQueryIT extends ParameterizedOpenSearchIntegTestCase { +public class MultiMatchQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MultiMatchQueryIT(Settings dynamicSettings) { - super(dynamicSettings); + public MultiMatchQueryIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -106,11 +105,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(MockKeywordPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index 1ca5859f23bca..c43a9c23661ea 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; @@ -47,7 +46,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.SearchModule; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.Before; import org.junit.BeforeClass; @@ -70,12 +69,12 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class QueryStringIT extends ParameterizedOpenSearchIntegTestCase { +public class QueryStringIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static int CLUSTER_MAX_CLAUSE_COUNT; - public QueryStringIT(Settings dynamicSettings) { - super(dynamicSettings); + public QueryStringIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -86,11 +85,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @BeforeClass public static void createRandomClusterSetting() { CLUSTER_MAX_CLAUSE_COUNT = randomIntBetween(50, 100); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java index 55029712a061c..136ddce152f63 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.RangeQueryBuilder; @@ -46,7 +45,7 @@ import org.opensearch.script.MockScriptPlugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -67,10 +66,10 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertThirdHit; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasScore; -public class ScriptScoreQueryIT extends ParameterizedOpenSearchIntegTestCase { +public class ScriptScoreQueryIT extends 
ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ScriptScoreQueryIT(Settings dynamicSettings) { - super(dynamicSettings); + public ScriptScoreQueryIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -81,11 +80,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index 03312c6e1e2f7..a58db51780826 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.unit.Fuzziness; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -80,7 +79,7 @@ import org.opensearch.search.SearchHits; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.junit.annotations.TestIssueLogging; import java.io.IOException; @@ -147,10 +146,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class SearchQueryIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchQueryIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchQueryIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -161,11 +160,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class, MockAnalysisPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index d8902238005da..31678d3f018a1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -43,7 +43,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -60,7 +59,7 @@ import org.opensearch.search.SearchHits; import org.opensearch.search.SearchModule; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.BeforeClass; import java.io.IOException; @@ -95,12 +94,12 @@ /** * Tests for the {@code simple_query_string} query */ -public class SimpleQueryStringIT extends ParameterizedOpenSearchIntegTestCase { +public class SimpleQueryStringIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static int CLUSTER_MAX_CLAUSE_COUNT; - public SimpleQueryStringIT(Settings dynamicSettings) { - super(dynamicSettings); + public SimpleQueryStringIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -111,11 +110,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @BeforeClass public static void createRandomClusterSetting() { CLUSTER_MAX_CLAUSE_COUNT = randomIntBetween(60, 100); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java index ae00904f237a5..7dbc61a3ced39 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexModule; @@ -50,7 +49,7 @@ import org.opensearch.search.sort.SortOrder; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.Arrays; @@ -71,7 +70,7 @@ import static org.hamcrest.Matchers.equalTo; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class ScriptQuerySearchIT 
extends ParameterizedOpenSearchIntegTestCase { +public class ScriptQuerySearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public ScriptQuerySearchIT(Settings settings) { super(settings); } @@ -84,11 +83,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(CustomScriptPlugin.class, InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java index c7a6d18f881c6..55b3cfeef7419 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java @@ -40,14 +40,13 @@ import org.opensearch.action.search.SearchType; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.sort.SortBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -60,7 +59,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; -public class DuelScrollIT extends ParameterizedOpenSearchIntegTestCase { +public class DuelScrollIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public DuelScrollIT(Settings settings) { super(settings); } @@ -73,11 +72,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testDuelQueryThenFetch() throws Exception { TestContext context = create(SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java index b82048ffc924e..35b5a7949b20b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java @@ -45,7 +45,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.common.bytes.BytesReference; @@ -60,7 +59,7 @@ import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.InternalTestCluster; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import 
org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; import org.junit.After; @@ -92,7 +91,7 @@ /** * Tests for scrolling. */ -public class SearchScrollIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchScrollIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SearchScrollIT(Settings settings) { super(settings); } @@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @After public void cleanup() throws Exception { assertAcked( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java index 27002b844da1d..38f65c8c2d0da 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -39,9 +39,8 @@ import org.opensearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -58,7 +57,7 @@ import static org.hamcrest.Matchers.lessThan; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 2, numClientNodes = 0) -public class SearchScrollWithFailingNodesIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchScrollWithFailingNodesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SearchScrollWithFailingNodesIT(Settings settings) { super(settings); } @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfShards() { return 2; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index b99f66850e9e3..13c510ff21338 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -47,12 +47,11 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -69,7 +68,7 @@ 
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class SearchAfterIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchAfterIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String INDEX_NAME = "test"; private static final int NUM_DOCS = 100; @@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testsShouldFail() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").setMapping("field1", "type=long", "field2", "type=keyword").get()); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java index 7aae41d939cac..0606bc1dbbdb6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest.RefreshPolicy; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.rest.RestStatus; @@ -52,7 +51,7 @@ import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.rescore.QueryRescorerBuilder; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -77,7 +76,7 @@ import static org.apache.lucene.search.TotalHits.Relation.EQUAL_TO; import static org.apache.lucene.search.TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; -public class SimpleSearchIT extends ParameterizedOpenSearchIntegTestCase { +public class SimpleSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SimpleSearchIT(Settings settings) { super(settings); @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSearchNullIndex() { expectThrows( NullPointerException.class, diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index 27a56f9d14f08..ea73f9ee1a2be 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -45,7 +45,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.Scroll; @@ -53,7 +52,7 @@ import org.opensearch.search.SearchHit; import 
org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.sort.SortBuilders; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -70,9 +69,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; -public class SearchSliceIT extends ParameterizedOpenSearchIntegTestCase { - public SearchSliceIT(Settings dynamicSettings) { - super(dynamicSettings); +public class SearchSliceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + public SearchSliceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -83,11 +82,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private void setupIndex(int numDocs, int numberOfShards) throws IOException, ExecutionException, InterruptedException { String mapping = XContentFactory.jsonBuilder() .startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index 81e948640ee94..e40928f15e8a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ -47,7 +47,6 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.Numbers; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -63,7 +62,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.io.IOException; @@ -109,7 +108,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; -public class FieldSortIT extends ParameterizedOpenSearchIntegTestCase { +public class FieldSortIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { public FieldSortIT(Settings dynamicSettings) { super(dynamicSettings); } @@ -122,11 +121,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public static class CustomScriptPlugin extends MockScriptPlugin { @Override protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { @@ -203,7 +197,8 @@ public void testIssue8226() throws InterruptedException { public void testIssue6614() throws ExecutionException, InterruptedException { List<IndexRequestBuilder> builders = new ArrayList<>(); boolean strictTimeBasedIndices = randomBoolean(); - final int numIndices = randomIntBetween(2, 25); // at most 25 days in the month + // consider only 15 days of the month to avoid hitting open file limit + final int numIndices = randomIntBetween(2, 15); int docs = 0; for (int i = 0; i < 
numIndices; i++) { final String indexId = strictTimeBasedIndices ? "idx_" + i : "idx"; @@ -2389,4 +2384,185 @@ public void testLongSortOptimizationCorrectResults() throws InterruptedException } } + public void testSimpleSortsPoints() throws Exception { + final int docs = 100; + + Random random = random(); + assertAcked( + prepareCreate("test").setMapping( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("str_value") + .field("type", "keyword") + .endObject() + .startObject("boolean_value") + .field("type", "boolean") + .endObject() + .startObject("byte_value") + .field("type", "byte") + .endObject() + .startObject("short_value") + .field("type", "short") + .endObject() + .startObject("integer_value") + .field("type", "integer") + .endObject() + .startObject("long_value") + .field("type", "long") + .endObject() + .startObject("unsigned_long_value") + .field("type", "unsigned_long") + .endObject() + .startObject("float_value") + .field("type", "float") + .endObject() + .startObject("half_float_value") + .field("type", "half_float") + .endObject() + .startObject("double_value") + .field("type", "double") + .endObject() + .endObject() + .endObject() + ) + ); + ensureGreen(); + BigInteger UNSIGNED_LONG_BASE = Numbers.MAX_UNSIGNED_LONG_VALUE.subtract(BigInteger.valueOf(10000 * docs)); + List<IndexRequestBuilder> builders = new ArrayList<>(); + for (int i = 0; i < docs / 2; i++) { + IndexRequestBuilder builder = client().prepareIndex("test") + .setId(Integer.toString(i)) + .setSource( + jsonBuilder().startObject() + .field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })) + .field("boolean_value", true) + .field("byte_value", i) + .field("short_value", i) + .field("integer_value", i) + .field("long_value", i) + .field("unsigned_long_value", UNSIGNED_LONG_BASE.add(BigInteger.valueOf(10000 * i))) + .field("float_value", 32 * i) + .field("half_float_value", 16 * i) + .field("double_value", 64 * i) + .endObject() + ); + builders.add(builder); + } + + // We keep half of the docs with numeric values and other half without + for (int i = docs / 2; i < docs; i++) { + IndexRequestBuilder builder = client().prepareIndex("test") + .setId(Integer.toString(i)) + .setSource( + jsonBuilder().startObject().field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })).endObject() + ); + builders.add(builder); + } + + int j = 0; + Collections.shuffle(builders, random); + for (IndexRequestBuilder builder : builders) { + builder.get(); + if ((++j % 25) == 0) { + refresh(); + } + + } + refresh(); + indexRandomForConcurrentSearch("test"); + + final int size = 2; + // HALF_FLOAT + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("half_float_value", SortOrder.ASC) + .get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("half_float_value", SortOrder.DESC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + 
assertThat(searchResponse.toString(), not(containsString("error"))); + + // FLOAT + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.ASC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + + // DOUBLE + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.ASC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.DESC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + + // UNSIGNED_LONG + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("unsigned_long_value", SortOrder.ASC) + .get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("unsigned_long_value", SortOrder.DESC) + .get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java index 766ac6139b24b..492ffce3321e4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java @@ -39,13 +39,12 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.geo.GeoDistance; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import 
org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.geometry.utils.Geohash; import org.opensearch.index.query.QueryBuilders; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; import java.io.IOException; @@ -66,10 +65,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -public class GeoDistanceIT extends ParameterizedOpenSearchIntegTestCase { +public class GeoDistanceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public GeoDistanceIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoDistanceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -80,11 +79,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected boolean forbidPrivateIndexSettings() { return false; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java index 1b8bd9694483d..b6f53936d5939 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java @@ -41,11 +41,10 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.DistanceUnit; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.GeoValidationMethod; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; import java.io.IOException; @@ -65,7 +64,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSortValues; import static org.hamcrest.Matchers.closeTo; -public class GeoDistanceSortBuilderIT extends ParameterizedOpenSearchIntegTestCase { +public class GeoDistanceSortBuilderIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public GeoDistanceSortBuilderIT(Settings settings) { super(settings); } @@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private static final String LOCATION_FIELD = "location"; @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index ddfbc3cce2be6..cb8b508c4496b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -40,7 +40,6 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.GeoUtils; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; 
import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -49,7 +48,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -75,12 +74,12 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class SimpleSortIT extends ParameterizedOpenSearchIntegTestCase { +public class SimpleSortIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String DOUBLE_APOSTROPHE = "\u0027\u0027"; - public SimpleSortIT(Settings dynamicSettings) { - super(dynamicSettings); + public SimpleSortIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(CustomScriptPlugin.class, InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java index 76e68781c72ba..ec891045cb510 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java @@ -12,14 +12,13 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.plugins.Plugin; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.plugin.CustomSortBuilder; import org.opensearch.search.sort.plugin.CustomSortPlugin; import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -27,7 +26,7 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.hamcrest.Matchers.equalTo; -public class SortFromPluginIT extends ParameterizedOpenSearchIntegTestCase { +public class SortFromPluginIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SortFromPluginIT(Settings settings) { super(settings); } @@ -40,11 +39,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(CustomSortPlugin.class, InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java index a9c4bf841d9a1..4c1e47ef8da99 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java @@ -38,14 +38,13 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.InnerHitBuilder; import org.opensearch.index.query.NestedQueryBuilder; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.SearchException; import org.opensearch.search.SearchHits; import org.opensearch.search.fetch.subphase.FetchSourceContext; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -57,10 +56,10 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class MetadataFetchingIT extends ParameterizedOpenSearchIntegTestCase { +public class MetadataFetchingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public MetadataFetchingIT(Settings dynamicSettings) { - super(dynamicSettings); + public MetadataFetchingIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimple() throws InterruptedException { assertAcked(prepareCreate("test")); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java index 805e82dc9850b..294657cedcc5c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java @@ -36,8 +36,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -47,10 +46,10 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsEqual.equalTo; -public class SourceFetchingIT extends ParameterizedOpenSearchIntegTestCase { +public class SourceFetchingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SourceFetchingIT(Settings dynamicSettings) { - super(dynamicSettings); + public SourceFetchingIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -61,11 +60,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return 
Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSourceDefaultBehavior() throws InterruptedException { createIndex("test"); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java index f770bd9864850..f8d2955440bc4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java @@ -16,7 +16,6 @@ import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; import org.opensearch.indices.IndicesQueryCache; @@ -61,6 +60,7 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1ms") .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true) .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, SEGMENT_SLICE_COUNT) + .put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true) .build(); } @@ -74,11 +74,6 @@ public Settings indexSettings() { .build(); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testConcurrentQueryCount() throws Exception { String INDEX_1 = "test-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); String INDEX_2 = "test-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java index 8fb3c57dd7680..99cb3a4e8ca20 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java @@ -45,7 +45,6 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.search.stats.SearchStats.Stats; import org.opensearch.plugins.Plugin; @@ -54,7 +53,7 @@ import org.opensearch.script.ScriptType; import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -81,10 +80,10 @@ import static org.hamcrest.Matchers.nullValue; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, minNumDataNodes = 2) -public class SearchStatsIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchStatsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchStatsIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchStatsIT(Settings staticSettings) { + super(staticSettings); } 
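The migration above repeats one pattern across these search IT suites: the per-class featureFlagSettings() override that force-enabled FeatureFlags.CONCURRENT_SEGMENT_SEARCH is deleted, the base class switches from ParameterizedOpenSearchIntegTestCase to ParameterizedStaticSettingsOpenSearchIntegTestCase, and concurrent segment search is instead driven by the cluster setting supplied through the parameters factory (FieldSortIT is the one exception here, moving to the ParameterizedDynamicSettingsOpenSearchIntegTestCase variant). A representative skeleton of a migrated class follows; the class name is hypothetical, and the parameters() body mirrors the two-entry pattern these files share rather than being copied from any one of them:

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.opensearch.common.settings.Settings;
import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

import java.util.Arrays;
import java.util.Collection;

import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;

public class ExampleSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

    public ExampleSearchIT(Settings staticSettings) {
        super(staticSettings); // each parameter set produces one full run of the suite
    }

    @ParametersFactory
    public static Collection<Object[]> parameters() {
        // Run everything twice: concurrent segment search disabled, then enabled.
        return Arrays.asList(
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
        );
    }

    // Test methods are unchanged; no featureFlagSettings() override is needed now
    // that concurrent segment search is no longer gated behind a feature flag.
}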
@ParametersFactory @@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java index b342e6d35f0b4..c72e128a88045 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java @@ -48,7 +48,6 @@ import org.opensearch.common.FieldMemoryStats; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.Fuzziness; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperParsingException; @@ -65,7 +64,7 @@ import org.opensearch.search.suggest.completion.context.ContextMapping; import org.opensearch.search.suggest.completion.context.GeoContextMapping; import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -100,7 +99,7 @@ import static org.hamcrest.Matchers.notNullValue; @SuppressCodecs("*") // requires custom completion format -public class CompletionSuggestSearchIT extends ParameterizedOpenSearchIntegTestCase { +public class CompletionSuggestSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public CompletionSuggestSearchIT(Settings settings) { super(settings); } @@ -113,11 +112,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java index bac3e7fb61683..67523e9fd424a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.Fuzziness; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -53,7 +52,7 @@ import org.opensearch.search.suggest.completion.context.ContextMapping; import 
org.opensearch.search.suggest.completion.context.GeoContextMapping; import org.opensearch.search.suggest.completion.context.GeoQueryContext; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -73,7 +72,7 @@ import static org.hamcrest.core.IsEqual.equalTo; @SuppressCodecs("*") // requires custom completion format -public class ContextCompletionSuggestSearchIT extends ParameterizedOpenSearchIntegTestCase { +public class ContextCompletionSuggestSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public ContextCompletionSuggestSearchIT(Settings settings) { super(settings); } @@ -86,11 +85,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java index bc6e49008bf6b..e0afdbc816f5c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java @@ -41,7 +41,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; @@ -57,7 +56,7 @@ import org.opensearch.search.suggest.phrase.StupidBackoff; import org.opensearch.search.suggest.term.TermSuggestionBuilder; import org.opensearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; @@ -96,7 +95,7 @@ * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that * request, modify again, request again, etc. This makes it very obvious what changes between requests. 
*/ -public class SuggestSearchIT extends ParameterizedOpenSearchIntegTestCase { +public class SuggestSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SuggestSearchIT(Settings settings) { super(settings); } @@ -109,11 +108,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - // see #3196 public void testSuggestAcrossMultipleIndices() throws IOException { assertAcked(prepareCreate("test").setMapping("text", "type=text")); diff --git a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java index 8c9bff9833462..b89541c647580 100644 --- a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java @@ -36,8 +36,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -49,7 +48,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class SimilarityIT extends ParameterizedOpenSearchIntegTestCase { +public class SimilarityIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public SimilarityIT(Settings settings) { super(settings); } @@ -62,11 +61,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testCustomBM25Similarity() throws Exception { try { client().admin().indices().prepareDelete("test").execute().actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java index 9a92ddc81852a..c89fef20aafb1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java @@ -7,6 +7,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.node.stats.NodeStats; import org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -25,15 +26,21 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.block.ClusterBlockException; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.routing.GroupShardsIterator; import org.opensearch.cluster.routing.ShardIterator; import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.opensearch.common.Priority; import org.opensearch.common.io.PathUtils; import 
org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.shard.ShardPath; import org.opensearch.index.store.remote.file.CleanerDaemonThreadLeakFilter; import org.opensearch.index.store.remote.filecache.FileCacheStats; import org.opensearch.monitor.fs.FsInfo; @@ -47,6 +54,8 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -54,6 +63,7 @@ import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS; import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -234,6 +244,62 @@ public void testSearchableSnapshotAllocationForLocalAndRemoteShardsOnSameNode() assertDocCount(indexName, 100L); } + public void testSearchableSnapshotAllocationFilterSettings() throws Exception { + final int numShardsIndex = randomIntBetween(3, 6); + final String indexName = "test-idx"; + final String restoredIndexName = indexName + "-copy"; + final String repoName = "test-repo"; + final String snapshotName = "test-snap"; + final Client client = client(); + + internalCluster().ensureAtLeastNumSearchAndDataNodes(numShardsIndex); + createIndexWithDocsAndEnsureGreen(numShardsIndex, 1, 100, indexName); + createRepositoryWithSettings(null, repoName); + takeSnapshot(client, snapshotName, repoName, indexName); + + restoreSnapshotAndEnsureGreen(client, snapshotName, repoName); + assertRemoteSnapshotIndexSettings(client, restoredIndexName); + final Set<String> searchNodes = StreamSupport.stream(clusterService().state().getNodes().spliterator(), false) + .filter(DiscoveryNode::isSearchNode) + .map(DiscoveryNode::getId) + .collect(Collectors.toSet()); + + for (int i = searchNodes.size(); i > 2; --i) { + String pickedNode = randomFrom(searchNodes); + searchNodes.remove(pickedNode); + assertIndexAssignedToNodeOrNot(restoredIndexName, pickedNode, true); + assertTrue( + client.admin() + .indices() + .prepareUpdateSettings(restoredIndexName) + .setSettings(Settings.builder().put("index.routing.allocation.exclude._id", pickedNode)) + .execute() + .actionGet() + .isAcknowledged() + ); + ClusterHealthResponse clusterHealthResponse = client.admin() + .cluster() + .prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(new TimeValue(5, TimeUnit.MINUTES)) + .execute() + .actionGet(); + assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); + assertIndexAssignedToNodeOrNot(restoredIndexName, pickedNode, false); + assertIndexAssignedToNodeOrNot(indexName, pickedNode, true); + } + } + + private void assertIndexAssignedToNodeOrNot(String index, String node, boolean assigned) { + final ClusterState state = clusterService().state(); + if (assigned) { + assertTrue(state.getRoutingTable().allShards(index).stream().anyMatch(shard -> shard.currentNodeId().equals(node))); + } else { + 
assertTrue(state.getRoutingTable().allShards(index).stream().noneMatch(shard -> shard.currentNodeId().equals(node))); + } + } + /** * Tests the functionality of remote shard allocation to * ensure it can handle node drops for failover scenarios and the cluster gets back to a healthy state when @@ -341,11 +407,16 @@ public void testDeleteSearchableSnapshotBackingIndex() throws Exception { } private void createIndexWithDocsAndEnsureGreen(int numReplicasIndex, int numOfDocs, String indexName) throws InterruptedException { + createIndexWithDocsAndEnsureGreen(1, numReplicasIndex, numOfDocs, indexName); + } + + private void createIndexWithDocsAndEnsureGreen(int numShardsIndex, int numReplicasIndex, int numOfDocs, String indexName) + throws InterruptedException { createIndex( indexName, Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, Integer.toString(numReplicasIndex)) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "1") + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicasIndex) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShardsIndex) .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()) .build() ); @@ -722,6 +793,47 @@ public void testDefaultShardPreference() throws Exception { } } + public void testRestoreSearchableSnapshotWithIndexStoreTypeThrowsException() throws Exception { + final String snapshotName = "test-snap"; + final String repoName = "test-repo"; + final String indexName1 = "test-idx-1"; + final int numReplicasIndex1 = randomIntBetween(1, 4); + final Client client = client(); + + internalCluster().ensureAtLeastNumDataNodes(numReplicasIndex1 + 1); + createIndexWithDocsAndEnsureGreen(numReplicasIndex1, 100, indexName1); + + createRepositoryWithSettings(null, repoName); + takeSnapshot(client, snapshotName, repoName, indexName1); + deleteIndicesAndEnsureGreen(client, indexName1); + + internalCluster().ensureAtLeastNumSearchNodes(numReplicasIndex1 + 1); + + // set "index.store.type" to "remote_snapshot" in index settings of restore API and assert appropriate exception with error message + // is thrown. + final SnapshotRestoreException error = expectThrows( + SnapshotRestoreException.class, + () -> client.admin() + .cluster() + .prepareRestoreSnapshot(repoName, snapshotName) + .setRenamePattern("(.+)") + .setRenameReplacement("$1-copy") + .setIndexSettings( + Settings.builder() + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT) + ) + .setWaitForCompletion(true) + .execute() + .actionGet() + ); + assertThat( + error.getMessage(), + containsString( + "cannot restore remote snapshot with index settings \"index.store.type\" set to \"remote_snapshot\". Instead use \"storage_type\": \"remote_snapshot\" as argument to restore." + ) + ); + } + /** * Asserts the cache folder count to match the number of shards and the number of indices within the cache folder * as provided. 
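The new testSearchableSnapshotAllocationFilterSettings above relies on an exclude-then-wait idiom, and it presumably only works because this same change adds "index.routing.allocation" to ALLOWLIST_REMOTE_SNAPSHOT_SETTINGS_PREFIXES in TransportUpdateSettingsAction (see that hunk further down); without the allowlist entry, allocation-filter updates on a remote-snapshot-backed index would be rejected. A condensed sketch of the idiom, written as a hypothetical helper whose imports match the ones this diff already adds to SearchableSnapshotIT:

    // Exclude a node from hosting the index's shards, then block until relocation settles.
    private void excludeNodeAndAwaitRelocation(Client client, String index, String nodeId) {
        client.admin()
            .indices()
            .prepareUpdateSettings(index)
            .setSettings(Settings.builder().put("index.routing.allocation.exclude._id", nodeId))
            .execute()
            .actionGet();
        ClusterHealthResponse health = client.admin()
            .cluster()
            .prepareHealth()
            .setWaitForEvents(Priority.LANGUID)
            .setWaitForNoRelocatingShards(true)
            .setTimeout(TimeValue.timeValueMinutes(5))
            .execute()
            .actionGet();
        assertFalse("relocation did not finish in time", health.isTimedOut());
    }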
@@ -750,4 +862,75 @@ private void assertCacheDirectoryReplicaAndIndexCount(int numCacheFolderCount, i // Verifies if all the shards (primary and replica) have been deleted assertEquals(numCacheFolderCount, searchNodeFileCachePaths.size()); } + + public void testRelocateSearchableSnapshotIndex() throws Exception { + final String snapshotName = "test-snap"; + final String repoName = "test-repo"; + final String indexName = "test-idx-1"; + final String restoredIndexName = indexName + "-copy"; + final Client client = client(); + + internalCluster().ensureAtLeastNumDataNodes(1); + createIndexWithDocsAndEnsureGreen(0, 100, indexName); + + createRepositoryWithSettings(null, repoName); + takeSnapshot(client, snapshotName, repoName, indexName); + deleteIndicesAndEnsureGreen(client, indexName); + + String searchNode1 = internalCluster().startSearchOnlyNodes(1).get(0); + internalCluster().validateClusterFormed(); + restoreSnapshotAndEnsureGreen(client, snapshotName, repoName); + assertRemoteSnapshotIndexSettings(client, restoredIndexName); + + String searchNode2 = internalCluster().startSearchOnlyNodes(1).get(0); + internalCluster().validateClusterFormed(); + + final Index index = resolveIndex(restoredIndexName); + assertSearchableSnapshotIndexDirectoryExistence(searchNode1, index, true); + assertSearchableSnapshotIndexDirectoryExistence(searchNode2, index, false); + + // relocate the shard from node1 to node2 + client.admin() + .cluster() + .prepareReroute() + .add(new MoveAllocationCommand(restoredIndexName, 0, searchNode1, searchNode2)) + .execute() + .actionGet(); + ClusterHealthResponse clusterHealthResponse = client.admin() + .cluster() + .prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(new TimeValue(5, TimeUnit.MINUTES)) + .execute() + .actionGet(); + assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); + assertDocCount(restoredIndexName, 100L); + + assertSearchableSnapshotIndexDirectoryExistence(searchNode1, index, false); + assertSearchableSnapshotIndexDirectoryExistence(searchNode2, index, true); + deleteIndicesAndEnsureGreen(client, restoredIndexName); + assertSearchableSnapshotIndexDirectoryExistence(searchNode2, index, false); + } + + private void assertSearchableSnapshotIndexDirectoryExistence(String nodeName, Index index, boolean exists) throws Exception { + final Node node = internalCluster().getInstance(Node.class, nodeName); + final ShardId shardId = new ShardId(index, 0); + final ShardPath shardPath = ShardPath.loadFileCachePath(node.getNodeEnvironment(), shardId); + + assertBusy(() -> { + assertTrue( + "shard state path should " + (exists ? "exist" : "not exist"), + Files.exists(shardPath.getShardStatePath()) == exists + ); + assertTrue("shard cache path should " + (exists ? "exist" : "not exist"), Files.exists(shardPath.getDataPath()) == exists); + }, 30, TimeUnit.SECONDS); + + final Path indexDataPath = node.getNodeEnvironment().fileCacheNodePath().fileCachePath.resolve(index.getUUID()); + final Path indexPath = node.getNodeEnvironment().fileCacheNodePath().indicesPath.resolve(index.getUUID()); + assertBusy(() -> { + assertTrue("index path should " + (exists ? "exist" : "not exist"), Files.exists(indexDataPath) == exists); + assertTrue("index cache path should " + (exists ? 
"exist" : "not exist"), Files.exists(indexPath) == exists); + }, 30, TimeUnit.SECONDS); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java index f50fc691fb232..28b84655a2cc7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java @@ -12,7 +12,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.repositories.RepositoryException; -import org.opensearch.repositories.fs.FsRepository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.OpenSearchIntegTestCase; import org.junit.Before; @@ -53,7 +53,7 @@ public void testRestrictedSettingsCantBeUpdated() { assertEquals( e.getMessage(), "[system-repo-name] trying to modify an unmodifiable attribute type of system " - + "repository from current value [fs] to new value [mock]" + + "repository from current value [reloadable-fs] to new value [mock]" ); } @@ -65,7 +65,12 @@ public void testSystemRepositoryNonRestrictedSettingsCanBeUpdated() { final Settings.Builder repoSettings = Settings.builder().put("location", absolutePath).put("chunk_size", new ByteSizeValue(20)); assertAcked( - client.admin().cluster().preparePutRepository(systemRepoName).setType(FsRepository.TYPE).setSettings(repoSettings).get() + client.admin() + .cluster() + .preparePutRepository(systemRepoName) + .setType(ReloadableFsRepository.TYPE) + .setSettings(repoSettings) + .get() ); } } diff --git a/server/src/main/java/org/opensearch/SpecialPermission.java b/server/src/main/java/org/opensearch/SpecialPermission.java index 8a694d4543f32..8348f0844acc6 100644 --- a/server/src/main/java/org/opensearch/SpecialPermission.java +++ b/server/src/main/java/org/opensearch/SpecialPermission.java @@ -98,6 +98,7 @@ public SpecialPermission(String name, String actions) { /** * Check that the current stack has {@link SpecialPermission} access according to the {@link SecurityManager}. 
*/ + @SuppressWarnings("removal") public static void check() { SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java index 8293a5bb27612..8562a7eb37709 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java @@ -229,8 +229,7 @@ public NodeStats(StreamInput in) throws IOException { } else { repositoriesStats = null; } - // TODO: change to V_2_12_0 on main after backport to 2.x - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_12_0)) { admissionControlStats = in.readOptionalWriteable(AdmissionControlStats::new); } else { admissionControlStats = null; @@ -504,8 +503,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_2_12_0)) { out.writeOptionalWriteable(repositoriesStats); } - // TODO: change to V_2_12_0 on main after backport to 2.x - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_12_0)) { out.writeOptionalWriteable(admissionControlStats); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index 14c985f1d3427..9265c6ae60678 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -82,7 +82,7 @@ public class TransportUpdateSettingsAction extends TransportClusterManagerNodeAc "index.number_of_replicas" ); - private final static String[] ALLOWLIST_REMOTE_SNAPSHOT_SETTINGS_PREFIXES = { "index.search.slowlog" }; + private final static String[] ALLOWLIST_REMOTE_SNAPSHOT_SETTINGS_PREFIXES = { "index.search.slowlog", "index.routing.allocation" }; private final MetadataUpdateSettingsService updateSettingsService; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java index 23cd8efdcaf59..ca4c16935c2b9 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java @@ -168,9 +168,11 @@ protected void clusterManagerOperation( .getSegments() .getReplicationStats().maxBytesBehind != 0) { throw new IllegalStateException( - " For index [" + "Replication still in progress for index [" + sourceIndex - + "] replica shards haven't caught up with primary, please retry after sometime." + + "]. Please wait for replication to complete and retry. Use the _cat/segment_replication/" + + sourceIndex + + " api to check if the index is up to date (e.g. bytes_behind == 0)." 
); } diff --git a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java index 5b41c2a13b596..3c27d3ce59e4c 100644 --- a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java @@ -432,16 +432,18 @@ public final void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPha } private void onPhaseEnd(SearchRequestContext searchRequestContext) { - if (getCurrentPhase() != null) { + if (getCurrentPhase() != null && SearchPhaseName.isValidName(getName())) { long tookInNanos = System.nanoTime() - getCurrentPhase().getStartTimeInNanos(); searchRequestContext.updatePhaseTookMap(getCurrentPhase().getName(), TimeUnit.NANOSECONDS.toMillis(tookInNanos)); + this.searchRequestContext.getSearchRequestOperationsListener().onPhaseEnd(this, searchRequestContext); } - this.searchRequestContext.getSearchRequestOperationsListener().onPhaseEnd(this, searchRequestContext); } - private void onPhaseStart(SearchPhase phase) { + void onPhaseStart(SearchPhase phase) { setCurrentPhase(phase); - this.searchRequestContext.getSearchRequestOperationsListener().onPhaseStart(this); + if (SearchPhaseName.isValidName(phase.getName())) { + this.searchRequestContext.getSearchRequestOperationsListener().onPhaseStart(this); + } } private void onRequestEnd(SearchRequestContext searchRequestContext) { @@ -714,7 +716,9 @@ public void sendSearchResponse(InternalSearchResponse internalSearchResponse, At @Override public final void onPhaseFailure(SearchPhase phase, String msg, Throwable cause) { - this.searchRequestContext.getSearchRequestOperationsListener().onPhaseFailure(this); + if (SearchPhaseName.isValidName(phase.getName())) { + this.searchRequestContext.getSearchRequestOperationsListener().onPhaseFailure(this); + } raisePhaseFailure(new SearchPhaseExecutionException(phase.getName(), msg, cause, buildShardFailures())); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java index 8cf92934c8a52..c6f3d4c70632d 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java +++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java @@ -10,6 +10,9 @@ import org.opensearch.common.annotation.PublicApi; +import java.util.HashSet; +import java.util.Set; + /** * Enum for different Search Phases in OpenSearch * @@ -25,6 +28,12 @@ public enum SearchPhaseName { CAN_MATCH("can_match"); private final String name; + private static final Set<String> PHASE_NAMES = new HashSet<>(); + static { + for (SearchPhaseName phaseName : SearchPhaseName.values()) { + PHASE_NAMES.add(phaseName.name); + } + } SearchPhaseName(final String name) { this.name = name; @@ -33,4 +42,8 @@ public enum SearchPhaseName { public String getName() { return name; } + + public static boolean isValidName(String phaseName) { + return PHASE_NAMES.contains(phaseName); + } } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequest.java b/server/src/main/java/org/opensearch/action/search/SearchRequest.java index 96cea17ff4972..f738c182c06da 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequest.java @@ -359,7 +359,7 @@ boolean isFinalReduce() { * request. 
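The `isValidName` guard introduced above matters because not every `SearchPhase` maps to a `SearchPhaseName`: `TransportSearchAction` (later in this diff) wraps the whole async action in a synthetic phase named "none". A small sketch, assuming code living in the same package as these package-private classes:

```java
SearchPhase wrapper = new SearchPhase("none") {
    @Override
    public void run() {
        // start the wrapped async action
    }
};
// The listener plumbing skips the synthetic wrapper...
assert SearchPhaseName.isValidName(wrapper.getName()) == false;
// ...but still fires for real phases backed by the enum.
assert SearchPhaseName.isValidName(SearchPhaseName.QUERY.getName());
```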
When created through {@link #subSearchRequest(SearchRequest, String[], String, long, boolean)}, this method returns * the provided current time, otherwise it will return {@link System#currentTimeMillis()}. */ - long getOrCreateAbsoluteStartMillis() { + public long getOrCreateAbsoluteStartMillis() { return absoluteStartMillis == DEFAULT_ABSOLUTE_START_MILLIS ? System.currentTimeMillis() : absoluteStartMillis; } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java index e949c5e0bea29..9dac827e7d518 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java @@ -406,6 +406,15 @@ public SearchRequestBuilder setTrackScores(boolean trackScores) { return this; } + /** + * Applies when fetching scores with named queries, and controls if scores will be tracked as well. + * Defaults to {@code false}. + */ + public SearchRequestBuilder setIncludeNamedQueriesScore(boolean includeNamedQueriesScore) { + sourceBuilder().includeNamedQueriesScores(includeNamedQueriesScore); + return this; + } + /** * Indicates if the total hit count for the query should be tracked. Requests will count total hit count accurately * up to 10,000 by default, see {@link #setTrackTotalHitsUpTo(int)} to change this value or set to true/false to always/never diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java index eceac7204b196..383d9b5e82fe2 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java @@ -22,7 +22,7 @@ * @opensearch.internal */ @InternalApi -class SearchRequestContext { +public class SearchRequestContext { private final SearchRequestOperationsListener searchRequestOperationsListener; private long absoluteStartNanos; private final Map<String, Long> phaseTookMap; @@ -47,7 +47,7 @@ void updatePhaseTookMap(String phaseName, Long tookTime) { this.phaseTookMap.put(phaseName, tookTime); } - Map<String, Long> phaseTookMap() { + public Map<String, Long> phaseTookMap() { return phaseTookMap; } @@ -70,7 +70,7 @@ void setAbsoluteStartNanos(long absoluteStartNanos) { /** * Request start time in nanos */ - long getAbsoluteStartNanos() { + public long getAbsoluteStartNanos() { return absoluteStartNanos; } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java index 2a09cc084f79f..2acb35af667f0 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java @@ -31,21 +31,21 @@ protected SearchRequestOperationsListener(final boolean enabled) { this.enabled = enabled; } - abstract void onPhaseStart(SearchPhaseContext context); + protected abstract void onPhaseStart(SearchPhaseContext context); - abstract void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext); + protected abstract void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext); - abstract void onPhaseFailure(SearchPhaseContext context); + protected abstract void onPhaseFailure(SearchPhaseContext context); - 
void onRequestStart(SearchRequestContext searchRequestContext) {} + protected void onRequestStart(SearchRequestContext searchRequestContext) {} - void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + protected void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} - boolean isEnabled(SearchRequest searchRequest) { + protected boolean isEnabled(SearchRequest searchRequest) { return isEnabled(); } - boolean isEnabled() { + protected boolean isEnabled() { return enabled; } @@ -69,7 +69,7 @@ static final class CompositeListener extends SearchRequestOperationsListener { } @Override - void onPhaseStart(SearchPhaseContext context) { + protected void onPhaseStart(SearchPhaseContext context) { for (SearchRequestOperationsListener listener : listeners) { try { listener.onPhaseStart(context); @@ -80,7 +80,7 @@ void onPhaseStart(SearchPhaseContext context) { } @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { + protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { for (SearchRequestOperationsListener listener : listeners) { try { listener.onPhaseEnd(context, searchRequestContext); @@ -91,7 +91,7 @@ void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestCo } @Override - void onPhaseFailure(SearchPhaseContext context) { + protected void onPhaseFailure(SearchPhaseContext context) { for (SearchRequestOperationsListener listener : listeners) { try { listener.onPhaseFailure(context); @@ -102,7 +102,7 @@ void onPhaseFailure(SearchPhaseContext context) { } @Override - void onRequestStart(SearchRequestContext searchRequestContext) { + protected void onRequestStart(SearchRequestContext searchRequestContext) { for (SearchRequestOperationsListener listener : listeners) { try { listener.onRequestStart(searchRequestContext); diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java index 7f25f9026f215..74e04d976cb1c 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java @@ -134,19 +134,19 @@ public SearchRequestSlowLog(ClusterService clusterService) { } @Override - void onPhaseStart(SearchPhaseContext context) {} + protected void onPhaseStart(SearchPhaseContext context) {} @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} @Override - void onPhaseFailure(SearchPhaseContext context) {} + protected void onPhaseFailure(SearchPhaseContext context) {} @Override - void onRequestStart(SearchRequestContext searchRequestContext) {} + protected void onRequestStart(SearchRequestContext searchRequestContext) {} @Override - void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { + protected void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { long tookInNanos = System.nanoTime() - searchRequestContext.getAbsoluteStartNanos(); if (warnThreshold >= 0 && tookInNanos > warnThreshold && level.isLevelEnabledFor(SlowLogLevel.WARN)) { diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java index 
88d599a0dcdaa..ac32b08afb7f6 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java @@ -58,12 +58,12 @@ public long getPhaseMetric(SearchPhaseName searchPhaseName) { } @Override - void onPhaseStart(SearchPhaseContext context) { + protected void onPhaseStart(SearchPhaseContext context) { phaseStatsMap.get(context.getCurrentPhase().getSearchPhaseName()).current.inc(); } @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { + protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { StatsHolder phaseStats = phaseStatsMap.get(context.getCurrentPhase().getSearchPhaseName()); phaseStats.current.dec(); phaseStats.total.inc(); @@ -71,7 +71,7 @@ void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestCo } @Override - void onPhaseFailure(SearchPhaseContext context) { + protected void onPhaseFailure(SearchPhaseContext context) { phaseStatsMap.get(context.getCurrentPhase().getSearchPhaseName()).current.dec(); } diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java index 842c10b700d24..79e599ec9387b 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java @@ -1238,7 +1238,7 @@ private AbstractSearchAsyncAction<? extends SearchPhaseResult> searchAsyncAction clusters, searchRequestContext ); - return new SearchPhase(action.getName()) { + return new SearchPhase("none") { @Override public void run() { action.start(); diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java index e43c42446de2c..485dd43a5999c 100644 --- a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java @@ -718,6 +718,7 @@ public final BootstrapCheckResult check(BootstrapContext context) { return BootstrapCheckResult.success(); } + @SuppressWarnings("removal") boolean isAllPermissionGranted() { final SecurityManager sm = System.getSecurityManager(); assert sm != null; diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java index 0aa965ce46096..52dd5d710eedc 100644 --- a/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java +++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java @@ -128,6 +128,7 @@ public Object remove(Object key) { /** * Returns a read-only view of all system properties */ + @SuppressWarnings("removal") public static Dictionary<Object, Object> getSystemProperties() { SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java b/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java index 4d36efff0e192..8eb4f841b9671 100644 --- a/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java +++ b/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java @@ -83,6 +83,7 @@ class OpenSearch extends EnvironmentAwareCommand { /** * Main entry point for starting opensearch */ + @SuppressWarnings("removal") public static void main(final String[] args) throws Exception { 
overrideDnsCachePolicyProperties(); /* diff --git a/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java b/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java index 14435db64274c..4571eb35ca93c 100644 --- a/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java +++ b/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java @@ -53,6 +53,7 @@ * * @opensearch.internal **/ +@SuppressWarnings("removal") final class OpenSearchPolicy extends Policy { /** template policy file, the one used in tests */ diff --git a/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java b/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java index 2b28260097ce1..5f9a01436b4cb 100644 --- a/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java +++ b/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java @@ -98,6 +98,7 @@ void onNonFatalUncaught(final String threadName, final Throwable t) { Terminal.DEFAULT.flush(); } + @SuppressWarnings("removal") void halt(int status) { AccessController.doPrivileged(new PrivilegedHaltAction(status)); } diff --git a/server/src/main/java/org/opensearch/bootstrap/Security.java b/server/src/main/java/org/opensearch/bootstrap/Security.java index 749c146de4f16..a48bbd61016e3 100644 --- a/server/src/main/java/org/opensearch/bootstrap/Security.java +++ b/server/src/main/java/org/opensearch/bootstrap/Security.java @@ -119,6 +119,7 @@ * * @opensearch.internal */ +@SuppressWarnings("removal") final class Security { /** no instantiation */ private Security() {} diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index 3384393d8feaf..4dde5d0ea013f 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -39,6 +39,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.ResourceAlreadyExistsException; import org.opensearch.Version; +import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.opensearch.action.admin.indices.shrink.ResizeType; @@ -135,6 +136,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; import static org.opensearch.cluster.metadata.Metadata.DEFAULT_REPLICA_COUNT_SETTING; +import static org.opensearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.isRemoteStoreAttributePresent; @@ -816,6 +818,16 @@ static Settings aggregateIndexSettings( final Settings.Builder requestSettings = Settings.builder().put(request.settings()); final Settings.Builder indexSettingsBuilder = Settings.builder(); + + // Store type of `remote_snapshot` is intended to be system-managed for searchable snapshot indexes so a special case is needed here + // to prevent a user specifying this value when creating an index + String storeTypeSetting = 
request.settings().get(INDEX_STORE_TYPE_SETTING.getKey()); + if (storeTypeSetting != null && storeTypeSetting.equals(RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT.toString())) { + throw new IllegalArgumentException( + "cannot create index with index setting \"index.store.type\" set to \"remote_snapshot\". Store type can be set to \"remote_snapshot\" only when restoring a remote snapshot by using \"storage_type\": \"remote_snapshot\"" + ); + } + if (sourceMetadata == null) { final Settings.Builder additionalIndexSettings = Settings.builder(); final Settings templateAndRequestSettings = Settings.builder().put(combinedTemplateSettings).put(request.settings()).build(); diff --git a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java index a406552f854da..938a603c459c9 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java @@ -729,23 +729,6 @@ assert node(failedShard.currentNodeId()).getByShardId(failedShard.shardId()) == + " was matched but wasn't removed"; } - public void swapPrimaryWithReplica( - Logger logger, - ShardRouting primaryShard, - ShardRouting replicaShard, - RoutingChangesObserver changes - ) { - assert primaryShard.primary() : "Invalid primary shard provided"; - assert !replicaShard.primary() : "Invalid Replica shard provided"; - - ShardRouting newPrimary = primaryShard.moveActivePrimaryToReplica(); - ShardRouting newReplica = replicaShard.moveActiveReplicaToPrimary(); - updateAssigned(primaryShard, newPrimary); - updateAssigned(replicaShard, newReplica); - logger.info("Swap relocation performed for shard [{}]", newPrimary.shortSummary()); - changes.replicaPromoted(newPrimary); - } - private void unassignPrimaryAndPromoteActiveReplicaIfExists( ShardRouting failedShard, UnassignedInfo unassignedInfo, diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java index 8a14ce3f1a288..a05938c176678 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java @@ -43,6 +43,8 @@ public final class RemoteShardsBalancer extends ShardsBalancer { private final Logger logger; private final RoutingAllocation allocation; private final RoutingNodes routingNodes; + // indicates if there are any nodes being throttled for allocating any unassigned shards + private boolean anyNodesThrottled = false; public RemoteShardsBalancer(Logger logger, RoutingAllocation allocation) { this.logger = logger; @@ -84,24 +86,39 @@ void moveShards() { Queue<RoutingNode> excludedNodes = new ArrayDeque<>(); classifyNodesForShardMovement(eligibleNodes, excludedNodes); - if (excludedNodes.isEmpty()) { - logger.debug("No excluded nodes found. 
Returning..."); - return; - } - - while (!eligibleNodes.isEmpty() && !excludedNodes.isEmpty()) { - RoutingNode sourceNode = excludedNodes.poll(); - for (ShardRouting ineligibleShard : sourceNode) { - if (ineligibleShard.started() == false) { + // move shards that cannot remain on eligible nodes + final List<ShardRouting> forceMoveShards = new ArrayList<>(); + eligibleNodes.forEach(sourceNode -> { + for (final ShardRouting shardRouting : sourceNode) { + if (ineligibleForMove(shardRouting)) { continue; } - if (!RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(ineligibleShard, allocation))) { + if (allocation.deciders().canRemain(shardRouting, sourceNode, allocation) == Decision.NO) { + forceMoveShards.add(shardRouting); + } + } + }); + for (final ShardRouting shard : forceMoveShards) { + if (eligibleNodes.isEmpty()) { + logger.trace("there are no eligible nodes available, return"); + return; + } + + tryShardMovementToEligibleNode(eligibleNodes, shard); + } + + // move shards that are currently assigned on excluded nodes + while (eligibleNodes.isEmpty() == false && excludedNodes.isEmpty() == false) { + RoutingNode sourceNode = excludedNodes.poll(); + for (final ShardRouting ineligibleShard : sourceNode) { + if (ineligibleForMove(ineligibleShard)) { continue; } if (eligibleNodes.isEmpty()) { - break; + logger.trace("there are no eligible nodes available, return"); + return; } tryShardMovementToEligibleNode(eligibleNodes, ineligibleShard); @@ -109,6 +126,10 @@ void moveShards() { } } + private boolean ineligibleForMove(ShardRouting shard) { + return shard.started() == false || RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) == false; + } + /** * Classifies the nodes into eligible and excluded depending on whether node is able or unable for shard assignment * @param eligibleNodes contains the list of classified nodes eligible to accept shards @@ -145,10 +166,23 @@ private void classifyNodesForShardMovement(Queue<RoutingNode> eligibleNodes, Que * @param shard the ineligible shard to be moved */ private void tryShardMovementToEligibleNode(Queue<RoutingNode> eligibleNodes, ShardRouting shard) { - Set<String> nodesCheckedForShard = new HashSet<>(); - while (!eligibleNodes.isEmpty()) { - RoutingNode targetNode = eligibleNodes.poll(); - Decision currentShardDecision = allocation.deciders().canAllocate(shard, targetNode, allocation); + final Set<String> nodesCheckedForShard = new HashSet<>(); + int numNodesToCheck = eligibleNodes.size(); + while (eligibleNodes.isEmpty() == false) { + assert numNodesToCheck > 0; + final RoutingNode targetNode = eligibleNodes.poll(); + --numNodesToCheck; + // skip the node that the target shard is currently allocated on + if (targetNode.nodeId().equals(shard.currentNodeId())) { + assert nodesCheckedForShard.add(targetNode.nodeId()); + eligibleNodes.offer(targetNode); + if (numNodesToCheck == 0) { + return; + } + continue; + } + + final Decision currentShardDecision = allocation.deciders().canAllocate(shard, targetNode, allocation); if (currentShardDecision.type() == Decision.Type.YES) { if (logger.isDebugEnabled()) { @@ -166,7 +200,7 @@ private void tryShardMovementToEligibleNode(Queue<RoutingNode> eligibleNodes, Sh allocation.changes() ); eligibleNodes.offer(targetNode); - break; + return; } else { if (logger.isTraceEnabled()) { logger.trace( @@ -177,18 +211,19 @@ private void tryShardMovementToEligibleNode(Queue<RoutingNode> eligibleNodes, Sh ); } - Decision nodeLevelDecision = 
allocation.deciders().canAllocateAnyShardToNode(targetNode, allocation); + final Decision nodeLevelDecision = allocation.deciders().canAllocateAnyShardToNode(targetNode, allocation); if (nodeLevelDecision.type() == Decision.Type.YES) { logger.debug("Node: [{}] can still accept shards. Adding it back to the queue.", targetNode.nodeId()); eligibleNodes.offer(targetNode); - nodesCheckedForShard.add(targetNode.nodeId()); + assert nodesCheckedForShard.add(targetNode.nodeId()); } else { logger.debug("Node: [{}] cannot accept any more shards. Removing it from queue.", targetNode.nodeId()); } - // Break out if all nodes in the queue have been checked for this shard - if (eligibleNodes.stream().allMatch(rn -> nodesCheckedForShard.contains(rn.nodeId()))) { - break; + // Break out if all eligible nodes have been examined + if (numNodesToCheck == 0) { + assert eligibleNodes.stream().allMatch(rn -> nodesCheckedForShard.contains(rn.nodeId())); + return; } } } @@ -225,7 +260,7 @@ void balance() { } } - while (!sourceNodes.isEmpty() && !targetNodes.isEmpty()) { + while (sourceNodes.isEmpty() == false && targetNodes.isEmpty() == false) { RoutingNode sourceNode = sourceNodes.poll(); tryRebalanceNode(sourceNode, targetNodes, avgPrimaryPerNode, nodePrimaryShardCount); } @@ -275,11 +310,11 @@ public Map<String, UnassignedIndexShards> groupUnassignedShardsByIndex() { HashMap<String, UnassignedIndexShards> unassignedShardMap = new HashMap<>(); for (ShardRouting shard : routingNodes.unassigned().drain()) { String index = shard.getIndexName(); - if (!RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation))) { + if (RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) == false) { routingNodes.unassigned().add(shard); continue; } - if (!unassignedShardMap.containsKey(index)) { + if (unassignedShardMap.containsKey(index) == false) { unassignedShardMap.put(index, new UnassignedIndexShards()); } unassignedShardMap.get(index).addShard(shard); @@ -296,13 +331,15 @@ private void unassignIgnoredRemoteShards(RoutingAllocation routingAllocation) { RoutingNodes.UnassignedShards unassignedShards = routingAllocation.routingNodes().unassigned(); for (ShardRouting shard : unassignedShards.drainIgnored()) { RoutingPool pool = RoutingPool.getShardPool(shard, routingAllocation); - if (pool == RoutingPool.REMOTE_CAPABLE && shard.unassigned() && (shard.primary() || !shard.unassignedInfo().isDelayed())) { + if (pool == RoutingPool.REMOTE_CAPABLE + && shard.unassigned() + && (shard.primary() || shard.unassignedInfo().isDelayed() == false)) { ShardRouting unassignedShard = shard; // Shard when moved to an unassigned state updates the recovery source to be ExistingStoreRecoverySource // Remote shards do not have an existing store to recover from and can be recovered from an empty source // to re-fetch any shard blocks from the repository. 
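The rewritten loops above replace the "visited set plus stream scan" termination check with a simple countdown from the initial queue size. A self-contained sketch of the idiom; the helper names are hypothetical, and the PR additionally re-offers rejected nodes that can still accept other shards:

```java
import java.util.Queue;
import java.util.function.Predicate;

final class RoundRobinScan {
    /** Visit each queued element at most once, re-offering accepted ones. */
    static <T> T findFirst(Queue<T> queue, Predicate<T> accepts) {
        int numToCheck = queue.size(); // bound the scan up front
        while (!queue.isEmpty() && numToCheck-- > 0) {
            T node = queue.poll();
            if (accepts.test(node)) {
                queue.offer(node); // keep it available for later shards
                return node;
            }
        }
        return null; // every candidate was checked exactly once
    }
}
```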
if (shard.primary()) { - if (!RecoverySource.Type.SNAPSHOT.equals(shard.recoverySource().getType())) { + if (RecoverySource.Type.SNAPSHOT.equals(shard.recoverySource().getType()) == false) { unassignedShard = shard.updateUnassigned(shard.unassignedInfo(), RecoverySource.EmptyStoreRecoverySource.INSTANCE); } } @@ -323,12 +360,16 @@ private void allocateUnassignedReplicas(Queue<RoutingNode> nodeQueue, Map<String } private void ignoreRemainingShards(Map<String, UnassignedIndexShards> unassignedShardMap) { + // If any nodes are throttled during allocation, mark all remaining unassigned shards as THROTTLED + final UnassignedInfo.AllocationStatus status = anyNodesThrottled + ? UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED + : UnassignedInfo.AllocationStatus.DECIDERS_NO; for (UnassignedIndexShards indexShards : unassignedShardMap.values()) { for (ShardRouting shard : indexShards.getPrimaries()) { - routingNodes.unassigned().ignoreShard(shard, UnassignedInfo.AllocationStatus.DECIDERS_NO, allocation.changes()); + routingNodes.unassigned().ignoreShard(shard, status, allocation.changes()); } for (ShardRouting shard : indexShards.getReplicas()) { - routingNodes.unassigned().ignoreShard(shard, UnassignedInfo.AllocationStatus.DECIDERS_NO, allocation.changes()); + routingNodes.unassigned().ignoreShard(shard, status, allocation.changes()); } } } @@ -353,7 +394,7 @@ private void allocateUnassignedShards( } logger.debug("Allocating shards for index: [{}]", index); - while (!shardsToAllocate.isEmpty() && !nodeQueue.isEmpty()) { + while (shardsToAllocate.isEmpty() == false && nodeQueue.isEmpty() == false) { ShardRouting shard = shardsToAllocate.poll(); if (shard.assignedToNode()) { if (logger.isDebugEnabled()) { @@ -389,11 +430,11 @@ private void allocateUnassignedShards( private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouting shard) { boolean allocated = false; boolean throttled = false; - Set<String> nodesCheckedForShard = new HashSet<>(); - while (!nodeQueue.isEmpty()) { + int numNodesToCheck = nodeQueue.size(); + while (nodeQueue.isEmpty() == false) { RoutingNode node = nodeQueue.poll(); + --numNodesToCheck; Decision allocateDecision = allocation.deciders().canAllocate(shard, node, allocation); - nodesCheckedForShard.add(node.nodeId()); if (allocateDecision.type() == Decision.Type.YES) { if (logger.isTraceEnabled()) { logger.trace("Assigned shard [{}] to [{}]", shardShortSummary(shard), node.nodeId()); @@ -432,6 +473,10 @@ private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouti } nodeQueue.offer(node); } else { + if (nodeLevelDecision.type() == Decision.Type.THROTTLE) { + anyNodesThrottled = true; + } + if (logger.isTraceEnabled()) { logger.trace( "Cannot allocate any shard to node: [{}]. Removing from queue. Node level decisions: [{}],[{}]", @@ -443,14 +488,14 @@ private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouti } // Break out if all nodes in the queue have been checked for this shard - if (nodeQueue.stream().allMatch(rn -> nodesCheckedForShard.contains(rn.nodeId()))) { + if (numNodesToCheck == 0) { break; } } } - if (!allocated) { - UnassignedInfo.AllocationStatus status = throttled + if (allocated == false) { + UnassignedInfo.AllocationStatus status = (throttled || anyNodesThrottled) ? 
UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED : UnassignedInfo.AllocationStatus.DECIDERS_NO; routingNodes.unassigned().ignoreShard(shard, status, allocation.changes()); @@ -470,14 +515,16 @@ private void tryRebalanceNode( // Try to relocate the valid shards on the sourceNode, one at a time; // until either sourceNode is balanced OR no more active primary shard available OR all the target nodes are exhausted - while (shardsToBalance > 0 && shardIterator.hasNext() && !targetNodes.isEmpty()) { + while (shardsToBalance > 0 && shardIterator.hasNext() && targetNodes.isEmpty() == false) { // Find an active primary shard to relocate ShardRouting shard = shardIterator.next(); - if (!shard.started() || !shard.primary() || !RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation))) { + if (shard.started() == false + || shard.primary() == false + || RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) == false) { continue; } - while (!targetNodes.isEmpty()) { + while (targetNodes.isEmpty() == false) { // Find a valid target node that can accommodate the current shard relocation RoutingNode targetNode = targetNodes.poll(); if (primaryCount.get(targetNode.nodeId()) >= avgPrimary) { @@ -485,6 +532,10 @@ private void tryRebalanceNode( continue; } + if (targetNode.getByShardId(shard.shardId()) != null) { + continue; + } + // Try relocate the shard on the target node Decision rebalanceDecision = tryRelocateShard(shard, targetNode); @@ -522,21 +573,10 @@ private void tryRebalanceNode( } /** - * For every primary shard for which this method is invoked, - * swap is attempted with the destination node in case replica shard is present. - * In case replica is not present, relocation of the shard id performed. + * For every primary shard for which this method is invoked, relocation of the shard is performed. */ private Decision tryRelocateShard(ShardRouting shard, RoutingNode destinationNode) { - // Check if there is already a replica for the shard on the destination node. - // Then we can directly swap the replica with the primary shards. - // Invariant: We only allow swap relocation on remote shards. 
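With the swap path deleted below, rebalancing only ever relocates; the replacement contract is visible in two fragments taken directly from this hunk:

```java
// In tryRebalanceNode: never pick a target that already holds any copy
// of the shard (primary or replica).
if (targetNode.getByShardId(shard.shardId()) != null) {
    continue;
}

// In tryRelocateShard: that pre-filter upgrades the old runtime replica
// check into an invariant.
assert destinationNode.getByShardId(shard.shardId()) == null;
```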
- ShardRouting replicaShard = destinationNode.getByShardId(shard.shardId()); - if (replicaShard != null) { - assert !replicaShard.primary() : "Primary Shard found while expected Replica during shard rebalance"; - return executeSwapShard(shard, replicaShard, allocation); - } - - // Since no replica present on the destinationNode; try relocating the shard to the destination node + assert destinationNode.getByShardId(shard.shardId()) == null; Decision allocationDecision = allocation.deciders().canAllocate(shard, destinationNode, allocation); Decision rebalanceDecision = allocation.deciders().canRebalance(shard, allocation); logger.trace( @@ -566,15 +606,6 @@ private Decision tryRelocateShard(ShardRouting shard, RoutingNode destinationNod return Decision.NO; } - private Decision executeSwapShard(ShardRouting primaryShard, ShardRouting replicaShard, RoutingAllocation allocation) { - if (!replicaShard.started()) { - return new Decision.Single(Decision.Type.NO); - } - - allocation.routingNodes().swapPrimaryWithReplica(logger, primaryShard, replicaShard, allocation.changes()); - return new Decision.Single(Decision.Type.YES); - } - private void failUnattemptedShards() { RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); while (unassignedIterator.hasNext()) { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java index 90350c0a21a42..5aff09d715622 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.opensearch.Version; @@ -302,6 +303,11 @@ public DocIdSetIterator iterator() { return subQueryScorer.iterator(); } + @Override + public TwoPhaseIterator twoPhaseIterator() { + return subQueryScorer.twoPhaseIterator(); + } + @Override public float getMaxScore(int upTo) { return Float.MAX_VALUE; // TODO: what would be a good upper bound? diff --git a/server/src/main/java/org/opensearch/common/path/PathTrie.java b/server/src/main/java/org/opensearch/common/path/PathTrie.java index 7cb7b46acfafe..0b516fa037c48 100644 --- a/server/src/main/java/org/opensearch/common/path/PathTrie.java +++ b/server/src/main/java/org/opensearch/common/path/PathTrie.java @@ -37,6 +37,7 @@ import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; +import java.util.Stack; import java.util.function.BiFunction; import java.util.function.Supplier; @@ -405,4 +406,45 @@ public T next() { } }; } + + public Iterator<T> retrieveAll() { + Stack<TrieNode> stack = new Stack<>(); + stack.add(root); + + return new Iterator<T>() { + @Override + public boolean hasNext() { + while (!stack.empty()) { + TrieNode node = stack.peek(); + + if (node.value != null) { + return true; + } + + advance(); + } + + return false; + } + + @Override + public T next() { + while (!stack.empty()) { + TrieNode node = advance(); + + if (node.value != null) { + return node.value; + } + } + + throw new NoSuchElementException("called next() without validating hasNext()! 
no more nodes available"); + } + + private TrieNode advance() { + TrieNode node = stack.pop(); + stack.addAll(node.children.values()); + return node; + } + }; + } } diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 277286ae1ff1b..0c97d62c44a5e 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -293,6 +293,7 @@ public void apply(Settings value, Settings current, Settings previous) { RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_FILE_CHUNKS_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_OPERATIONS_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_REMOTE_STORE_STREAMS_SETTING, + RecoverySettings.INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT, RecoverySettings.CLUSTER_REMOTE_INDEX_SEGMENT_METADATA_RETENTION_MAX_COUNT_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_REPLICAS_RECOVERIES_SETTING, @@ -703,7 +704,11 @@ public void apply(Settings value, Settings current, Settings previous) { CpuBasedAdmissionControllerSettings.CPU_BASED_ADMISSION_CONTROLLER_TRANSPORT_LAYER_MODE, CpuBasedAdmissionControllerSettings.INDEXING_CPU_USAGE_LIMIT, CpuBasedAdmissionControllerSettings.SEARCH_CPU_USAGE_LIMIT, - IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING + IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING, + + // Concurrent segment search settings + SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING, + SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING ) ) ); @@ -716,11 +721,6 @@ public void apply(Settings value, Settings current, Settings previous) { * setting should be moved to {@link #BUILT_IN_CLUSTER_SETTINGS}. 
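A hypothetical usage of the new `PathTrie#retrieveAll` from the previous file, enumerating every stored value without knowing the paths. The `RestUtils.REST_DECODER` decoder is an assumption about the usual way these tries are constructed; iteration order is unspecified, since the traversal is stack-based:

```java
import java.util.Iterator;

import org.opensearch.common.path.PathTrie;
import org.opensearch.rest.RestUtils;

PathTrie<String> trie = new PathTrie<>(RestUtils.REST_DECODER);
trie.insert("/_cat/indices", "cat-indices");
trie.insert("/_cat/shards", "cat-shards");

Iterator<String> it = trie.retrieveAll();
while (it.hasNext()) {
    System.out.println(it.next()); // prints both values, order unspecified
}
```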
*/ public static final Map<List<String>, List<Setting>> FEATURE_FLAGGED_CLUSTER_SETTINGS = Map.of( - List.of(FeatureFlags.CONCURRENT_SEGMENT_SEARCH), - List.of( - SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING, - SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING - ), List.of(FeatureFlags.TELEMETRY), List.of( TelemetrySettings.TRACER_ENABLED_SETTING, diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java index e19f8e8370d5b..e6d7ba0c60772 100644 --- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java @@ -32,9 +32,9 @@ protected FeatureFlagSettings( public static final Set<Setting<?>> BUILT_IN_FEATURE_FLAGS = Set.of( FeatureFlags.EXTENSIONS_SETTING, FeatureFlags.IDENTITY_SETTING, - FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING, FeatureFlags.TELEMETRY_SETTING, FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, - FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING + FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING, + FeatureFlags.DOC_ID_FUZZY_SET_SETTING ); } diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java index b34a2aaffe408..49bb3abf1decd 100644 --- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java @@ -41,7 +41,6 @@ import org.opensearch.common.annotation.PublicApi; import org.opensearch.common.logging.Loggers; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; import org.opensearch.index.IndexSortConfig; @@ -150,6 +149,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING, IndexSettings.MAX_ANALYZED_OFFSET_SETTING, IndexSettings.MAX_TERMS_COUNT_SETTING, + IndexSettings.MAX_NESTED_QUERY_DEPTH_SETTING, IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING, IndexSettings.DEFAULT_FIELD_SETTING, IndexSettings.QUERY_STRING_LENIENT_SETTING, @@ -230,6 +230,12 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING, IndexMetadata.INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING, + IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING, + IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING, + + // Settings for concurrent segment search + IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING, + // validate that built-in similarities don't get redefined Setting.groupSetting("index.similarity.", (s) -> { Map<String, Settings> groups = s.getAsGroups(); @@ -252,10 +258,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { * is ready for production release, the feature flag can be removed, and the * setting should be moved to {@link #BUILT_IN_INDEX_SETTINGS}. 
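Because `CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING` now registers unconditionally in the hunk above, it can be flipped like any other dynamic cluster setting. A hedged usage sketch given a `Client client`; the key string follows the published 2.x documentation and is my assumption, not read from this diff:

```java
import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.opensearch.common.settings.Settings;

ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
request.persistentSettings(
    Settings.builder().put("search.concurrent_segment_search.enabled", true) // assumed key
);
client.admin().cluster().updateSettings(request).actionGet();
```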
*/ - public static final Map<String, List<Setting>> FEATURE_FLAGGED_INDEX_SETTINGS = Map.of( - FeatureFlags.CONCURRENT_SEGMENT_SEARCH, - List.of(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING) - ); + public static final Map<String, List<Setting>> FEATURE_FLAGGED_INDEX_SETTINGS = Map.of(); public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, BUILT_IN_INDEX_SETTINGS); diff --git a/server/src/main/java/org/opensearch/common/time/DateFormatters.java b/server/src/main/java/org/opensearch/common/time/DateFormatters.java index e74ab687b903b..527dce7677dd8 100644 --- a/server/src/main/java/org/opensearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/opensearch/common/time/DateFormatters.java @@ -1299,6 +1299,41 @@ public class DateFormatters { .withResolverStyle(ResolverStyle.STRICT) ); + /** + * Returns an RFC 3339 compatible date time formatter and parser. RFC 3339 is a popular profile of ISO 8601. + * This is not fully compatible with the existing spec; it is more lenient and closely follows the W3C note on date and time formats. + */ + + public static final DateFormatter RFC3339_LENIENT_DATE_FORMATTER = new JavaDateFormatter( + "rfc3339_lenient", + new OpenSearchDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + new RFC3339CompatibleDateTimeFormatter( + new DateTimeFormatterBuilder().append(DATE_FORMATTER) + .optionalStart() + .appendLiteral('T') + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .optionalStart() + .appendLiteral(',') + .appendFraction(NANO_OF_SECOND, 1, 9, false) + .optionalEnd() + .optionalStart() + .appendOffsetId() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT) + ) + ); + private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder().append(HOUR_MINUTE_FORMATTER) .appendLiteral(":") .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) @@ -2152,6 +2187,8 @@ static DateFormatter forPattern(String input) { return STRICT_YEAR_MONTH; } else if (FormatNames.STRICT_YEAR_MONTH_DAY.matches(input)) { return STRICT_YEAR_MONTH_DAY; + } else if (FormatNames.RFC3339_LENIENT.matches(input)) { + return RFC3339_LENIENT_DATE_FORMATTER; } else { try { return new JavaDateFormatter( diff --git a/server/src/main/java/org/opensearch/common/time/FormatNames.java b/server/src/main/java/org/opensearch/common/time/FormatNames.java index ba0a8fcf4a17a..ec5e825fc933e 100644 --- a/server/src/main/java/org/opensearch/common/time/FormatNames.java +++ b/server/src/main/java/org/opensearch/common/time/FormatNames.java @@ -44,6 +44,7 @@ */ public enum FormatNames { ISO8601(null, "iso8601"), + RFC3339_LENIENT(null, "rfc3339_lenient"), BASIC_DATE("basicDate", "basic_date"), BASIC_DATE_TIME("basicDateTime", "basic_date_time"), BASIC_DATE_TIME_NO_MILLIS("basicDateTimeNoMillis", "basic_date_time_no_millis"), diff --git a/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java index f711b14aeb928..033ea280e6172 100644 --- a/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java @@ -36,6 +36,7 @@ import 
org.opensearch.core.common.Strings; import java.text.ParsePosition; +import java.time.DateTimeException; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; @@ -52,7 +53,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.stream.Collectors; @@ -70,11 +70,11 @@ class JavaDateFormatter implements DateFormatter { private final String format; private final String printFormat; - private final DateTimeFormatter printer; - private final List<DateTimeFormatter> parsers; + private final OpenSearchDateTimePrinter printer; + private final List<OpenSearchDateTimeFormatter> parsers; private final JavaDateFormatter roundupParser; private final Boolean canCacheLastParsedFormatter; - private volatile DateTimeFormatter lastParsedformatter = null; + private volatile OpenSearchDateTimeFormatter lastParsedformatter = null; /** * A round up formatter @@ -83,11 +83,11 @@ class JavaDateFormatter implements DateFormatter { */ static class RoundUpFormatter extends JavaDateFormatter { - RoundUpFormatter(String format, List<DateTimeFormatter> roundUpParsers) { + RoundUpFormatter(String format, List<OpenSearchDateTimeFormatter> roundUpParsers) { super(format, firstFrom(roundUpParsers), null, roundUpParsers); } - private static DateTimeFormatter firstFrom(List<DateTimeFormatter> roundUpParsers) { + private static OpenSearchDateTimeFormatter firstFrom(List<OpenSearchDateTimeFormatter> roundUpParsers) { return roundUpParsers.get(0); } @@ -101,14 +101,18 @@ JavaDateFormatter getRoundupParser() { JavaDateFormatter( String format, String printFormat, - DateTimeFormatter printer, + OpenSearchDateTimePrinter printer, Boolean canCacheLastParsedFormatter, - DateTimeFormatter... parsers + OpenSearchDateTimeFormatter... parsers ) { this(format, printFormat, printer, ROUND_UP_BASE_FIELDS, canCacheLastParsedFormatter, parsers); } JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) { + this(format, format, wrapFormatter(printer), false, wrapAllFormatters(parsers)); + } + + JavaDateFormatter(String format, OpenSearchDateTimePrinter printer, OpenSearchDateTimeFormatter... parsers) { this(format, format, printer, false, parsers); } @@ -127,19 +131,19 @@ JavaDateFormatter getRoundupParser() { JavaDateFormatter( String format, String printFormat, - DateTimeFormatter printer, + OpenSearchDateTimePrinter printer, BiConsumer<DateTimeFormatterBuilder, DateTimeFormatter> roundupParserConsumer, Boolean canCacheLastParsedFormatter, - DateTimeFormatter... parsers + OpenSearchDateTimeFormatter... 
parsers ) { if (printer == null) { throw new IllegalArgumentException("printer may not be null"); } - long distinctZones = Arrays.stream(parsers).map(DateTimeFormatter::getZone).distinct().count(); + long distinctZones = Arrays.stream(parsers).map(OpenSearchDateTimeFormatter::getZone).distinct().count(); if (distinctZones > 1) { throw new IllegalArgumentException("formatters must have the same time zone"); } - long distinctLocales = Arrays.stream(parsers).map(DateTimeFormatter::getLocale).distinct().count(); + long distinctLocales = Arrays.stream(parsers).map(OpenSearchDateTimeFormatter::getLocale).distinct().count(); if (distinctLocales > 1) { throw new IllegalArgumentException("formatters must have the same locale"); } @@ -149,12 +153,12 @@ JavaDateFormatter getRoundupParser() { this.canCacheLastParsedFormatter = canCacheLastParsedFormatter; if (parsers.length == 0) { - this.parsers = Collections.singletonList(printer); + this.parsers = Collections.singletonList((OpenSearchDateTimeFormatter) printer); } else { this.parsers = Arrays.asList(parsers); } List<DateTimeFormatter> roundUp = createRoundUpParser(format, roundupParserConsumer); - this.roundupParser = new RoundUpFormatter(format, roundUp); + this.roundupParser = new RoundUpFormatter(format, wrapAllFormatters(roundUp)); } JavaDateFormatter( @@ -163,7 +167,7 @@ JavaDateFormatter getRoundupParser() { BiConsumer<DateTimeFormatterBuilder, DateTimeFormatter> roundupParserConsumer, DateTimeFormatter... parsers ) { - this(format, format, printer, roundupParserConsumer, false, parsers); + this(format, format, wrapFormatter(printer), roundupParserConsumer, false, wrapAllFormatters(parsers)); } /** @@ -181,7 +185,8 @@ private List<DateTimeFormatter> createRoundUpParser( ) { if (format.contains("||") == false) { List<DateTimeFormatter> roundUpParsers = new ArrayList<>(); - for (DateTimeFormatter parser : this.parsers) { + for (OpenSearchDateTimeFormatter customparser : this.parsers) { + DateTimeFormatter parser = customparser.getFormatter(); DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); builder.append(parser); roundupParserConsumer.accept(builder, parser); @@ -201,12 +206,12 @@ public static DateFormatter combined( assert formatters.size() > 0; assert printFormatter != null; - List<DateTimeFormatter> parsers = new ArrayList<>(formatters.size()); - List<DateTimeFormatter> roundUpParsers = new ArrayList<>(formatters.size()); + List<OpenSearchDateTimeFormatter> parsers = new ArrayList<>(formatters.size()); + List<OpenSearchDateTimeFormatter> roundUpParsers = new ArrayList<>(formatters.size()); assert printFormatter instanceof JavaDateFormatter; JavaDateFormatter javaPrintFormatter = (JavaDateFormatter) printFormatter; - DateTimeFormatter printer = javaPrintFormatter.getPrinter(); + OpenSearchDateTimePrinter printer = javaPrintFormatter.getPrinter(); for (DateFormatter formatter : formatters) { assert formatter instanceof JavaDateFormatter; JavaDateFormatter javaDateFormatter = (JavaDateFormatter) formatter; @@ -227,9 +232,9 @@ public static DateFormatter combined( private JavaDateFormatter( String format, String printFormat, - DateTimeFormatter printer, - List<DateTimeFormatter> roundUpParsers, - List<DateTimeFormatter> parsers, + OpenSearchDateTimePrinter printer, + List<OpenSearchDateTimeFormatter> roundUpParsers, + List<OpenSearchDateTimeFormatter> parsers, Boolean canCacheLastParsedFormatter ) { this.format = format; @@ -245,6 +250,15 @@ private JavaDateFormatter( DateTimeFormatter printer, List<DateTimeFormatter> 
roundUpParsers, List<DateTimeFormatter> parsers + ) { + this(format, format, wrapFormatter(printer), wrapAllFormatters(roundUpParsers), wrapAllFormatters(parsers), false); + } + + private JavaDateFormatter( + String format, + OpenSearchDateTimePrinter printer, + List<OpenSearchDateTimeFormatter> roundUpParsers, + List<OpenSearchDateTimeFormatter> parsers ) { this(format, format, printer, roundUpParsers, parsers, false); } @@ -253,7 +267,7 @@ JavaDateFormatter getRoundupParser() { return roundupParser; } - DateTimeFormatter getPrinter() { + OpenSearchDateTimePrinter getPrinter() { return printer; } @@ -265,7 +279,7 @@ public TemporalAccessor parse(String input) { try { return doParse(input); - } catch (DateTimeParseException e) { + } catch (DateTimeException e) { throw new IllegalArgumentException("failed to parse date field [" + input + "] with format [" + format + "]", e); } } @@ -289,14 +303,14 @@ private TemporalAccessor doParse(String input) { Object object = null; if (canCacheLastParsedFormatter && lastParsedformatter != null) { ParsePosition pos = new ParsePosition(0); - object = lastParsedformatter.toFormat().parseObject(input, pos); + object = lastParsedformatter.parseObject(input, pos); if (parsingSucceeded(object, input, pos)) { return (TemporalAccessor) object; } } - for (DateTimeFormatter formatter : parsers) { + for (OpenSearchDateTimeFormatter formatter : parsers) { ParsePosition pos = new ParsePosition(0); - object = formatter.toFormat().parseObject(input, pos); + object = formatter.parseObject(input, pos); if (parsingSucceeded(object, input, pos)) { lastParsedformatter = formatter; return (TemporalAccessor) object; @@ -312,16 +326,28 @@ private boolean parsingSucceeded(Object object, String input, ParsePosition pos) return object != null && pos.getIndex() == input.length(); } + private static OpenSearchDateTimeFormatter wrapFormatter(DateTimeFormatter formatter) { + return new OpenSearchDateTimeFormatter(formatter); + } + + private static OpenSearchDateTimeFormatter[] wrapAllFormatters(DateTimeFormatter... 
formatters) { + return Arrays.stream(formatters).map(JavaDateFormatter::wrapFormatter).toArray(OpenSearchDateTimeFormatter[]::new); + } + + private static List<OpenSearchDateTimeFormatter> wrapAllFormatters(List<DateTimeFormatter> formatters) { + return formatters.stream().map(JavaDateFormatter::wrapFormatter).collect(Collectors.toList()); + } + @Override public DateFormatter withZone(ZoneId zoneId) { // shortcurt to not create new objects unnecessarily if (zoneId.equals(zone())) { return this; } - List<DateTimeFormatter> parsers = new CopyOnWriteArrayList<>( + List<OpenSearchDateTimeFormatter> parsers = new ArrayList<>( this.parsers.stream().map(p -> p.withZone(zoneId)).collect(Collectors.toList()) ); - List<DateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() + List<OpenSearchDateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() .stream() .map(p -> p.withZone(zoneId)) .collect(Collectors.toList()); @@ -334,10 +360,10 @@ public DateFormatter withLocale(Locale locale) { if (locale.equals(locale())) { return this; } - List<DateTimeFormatter> parsers = new CopyOnWriteArrayList<>( + List<OpenSearchDateTimeFormatter> parsers = new ArrayList<>( this.parsers.stream().map(p -> p.withLocale(locale)).collect(Collectors.toList()) ); - List<DateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() + List<OpenSearchDateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() .stream() .map(p -> p.withLocale(locale)) .collect(Collectors.toList()); @@ -396,7 +422,7 @@ public String toString() { return String.format(Locale.ROOT, "format[%s] locale[%s]", format, locale()); } - Collection<DateTimeFormatter> getParsers() { + Collection<OpenSearchDateTimeFormatter> getParsers() { return parsers; } } diff --git a/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java new file mode 100644 index 0000000000000..3a629d8843949 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java @@ -0,0 +1,85 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.time; + +import java.text.Format; +import java.text.ParsePosition; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalQuery; +import java.util.Locale; + +/** +* Wrapper class for DateTimeFormatter{@link java.time.format.DateTimeFormatter} +* to allow for custom implementations for datetime parsing/formatting + */ +class OpenSearchDateTimeFormatter implements OpenSearchDateTimePrinter { + private final DateTimeFormatter formatter; + + public OpenSearchDateTimeFormatter(String pattern) { + this.formatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + } + + public OpenSearchDateTimeFormatter(String pattern, Locale locale) { + this.formatter = DateTimeFormatter.ofPattern(pattern, locale); + } + + public OpenSearchDateTimeFormatter(DateTimeFormatter formatter) { + this.formatter = formatter; + } + + public OpenSearchDateTimeFormatter withLocale(Locale locale) { + return new OpenSearchDateTimeFormatter(getFormatter().withLocale(locale)); + } + + public OpenSearchDateTimeFormatter withZone(ZoneId zoneId) { + return new OpenSearchDateTimeFormatter(getFormatter().withZone(zoneId)); + } + + public String format(TemporalAccessor temporal) { + return this.getFormatter().format(temporal); + } + + public TemporalAccessor parse(CharSequence text, ParsePosition position) { + return this.getFormatter().parse(text, position); + } + + public TemporalAccessor parse(CharSequence text) { + return this.getFormatter().parse(text); + } + + public <T> T parse(CharSequence text, TemporalQuery<T> query) { + return this.getFormatter().parse(text, query); + } + + public ZoneId getZone() { + return this.getFormatter().getZone(); + } + + public Locale getLocale() { + return this.getFormatter().getLocale(); + } + + public TemporalAccessor parse(String input) { + return formatter.parse(input); + } + + public DateTimeFormatter getFormatter() { + return formatter; + } + + public Format toFormat() { + return getFormatter().toFormat(); + } + + public Object parseObject(String text, ParsePosition pos) { + return getFormatter().toFormat().parseObject(text, pos); + } +} diff --git a/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java new file mode 100644 index 0000000000000..350bae21b22b1 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java @@ -0,0 +1,30 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.time; + +import java.time.ZoneId; +import java.time.temporal.TemporalAccessor; +import java.util.Locale; + +/** + * Interface for {@link java.time.format.DateTimeFormatter} + * to allow for custom implementations for datetime formatting + */ +interface OpenSearchDateTimePrinter { + + public OpenSearchDateTimePrinter withLocale(Locale locale); + + public OpenSearchDateTimePrinter withZone(ZoneId zoneId); + + public String format(TemporalAccessor temporal); + + public Locale getLocale(); + + public ZoneId getZone(); +} diff --git a/server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java b/server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java new file mode 100644 index 0000000000000..98b87efd2380b --- /dev/null +++ b/server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java @@ -0,0 +1,428 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Based on code from the Internet Time Utility project (https://github.com/ethlo/itu) under the Apache License, version 2.0. + * Copyright (C) 2017 Morten Haraldsen (ethlo) + * Modifications (C) OpenSearch Contributors. All Rights Reserved. + */ + +package org.opensearch.common.time; + +import java.text.ParsePosition; +import java.time.DateTimeException; +import java.time.OffsetDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeParseException; +import java.time.temporal.TemporalAccessor; +import java.util.Arrays; +import java.util.Locale; + +/** + * Defines a close profile of the RFC3339 datetime format, where the date is mandatory and the time is optional. + * <p> + * The returned formatter can only be used for parsing; printing is unsupported. + * <p> + * This parser can parse zoned datetimes. + * The parser is strict by default, thus time string {@code 24:00} cannot be parsed. + * <p> + * It accepts formats described by the following syntax: + * <pre> + * Year: + * YYYY (eg 1997) + * Year and month: + * YYYY-MM (eg 1997-07) + * Complete date: + * YYYY-MM-DD (eg 1997-07-16) + * Complete date plus hours and minutes: + * YYYY-MM-DDThh:mmTZD (eg 1997-07-16T19:20+01:00) + * Complete date plus hours, minutes and seconds: + * YYYY-MM-DDThh:mm:ssTZD (eg 1997-07-16T19:20:30+01:00) + * Complete date plus hours, minutes, seconds and a decimal fraction of a second + * YYYY-MM-DDThh:mm:ss.sTZD (eg 1997-07-16T19:20:30.45+01:00) + * YYYY-MM-DDThh:mm:ss,sTZD (eg 1997-07-16T19:20:30,45+01:00) + * where: + * + * YYYY = four-digit year + * MM = two-digit month (01=January, etc.)
+ * DD = two-digit day of month (01 through 31) + * hh = two digits of hour (00 through 23) (am/pm NOT allowed) + * mm = two digits of minute (00 through 59) + * ss = two digits of second (00 through 59) + * s = one or more (max 9) digits representing a decimal fraction of a second + * TZD = time zone designator (Z or z or +hh:mm or -hh:mm) + * </pre> + */ +final class RFC3339CompatibleDateTimeFormatter extends OpenSearchDateTimeFormatter { + public static final char DATE_SEPARATOR = '-'; + public static final char TIME_SEPARATOR = ':'; + public static final char SEPARATOR_UPPER = 'T'; + private static final char PLUS = '+'; + private static final char MINUS = '-'; + private static final char SEPARATOR_LOWER = 't'; + private static final char SEPARATOR_SPACE = ' '; + private static final char FRACTION_SEPARATOR_1 = '.'; + private static final char FRACTION_SEPARATOR_2 = ','; + private static final char ZULU_UPPER = 'Z'; + private static final char ZULU_LOWER = 'z'; + + private ZoneId zone; + + public RFC3339CompatibleDateTimeFormatter(String pattern) { + super(pattern); + } + + public RFC3339CompatibleDateTimeFormatter(java.time.format.DateTimeFormatter formatter) { + super(formatter); + } + + public RFC3339CompatibleDateTimeFormatter(java.time.format.DateTimeFormatter formatter, ZoneId zone) { + super(formatter); + this.zone = zone; + } + + @Override + public OpenSearchDateTimeFormatter withZone(ZoneId zoneId) { + return new RFC3339CompatibleDateTimeFormatter(getFormatter().withZone(zoneId), zoneId); + } + + @Override + public OpenSearchDateTimeFormatter withLocale(Locale locale) { + return new RFC3339CompatibleDateTimeFormatter(getFormatter().withLocale(locale)); + } + + @Override + public Object parseObject(String text, ParsePosition pos) { + try { + return parse(text); + } catch (DateTimeException e) { + return null; + } + } + + @Override + public TemporalAccessor parse(final String dateTime) { + OffsetDateTime parsedDatetime = parse(dateTime, new ParsePosition(0)); + return zone == null ?
parsedDatetime : parsedDatetime.atZoneSameInstant(zone); + } + + public OffsetDateTime parse(String date, ParsePosition pos) { + if (date == null) { + throw new IllegalArgumentException("date cannot be null"); + } + + final int len = date.length() - pos.getIndex(); + if (len <= 0) { + throw new DateTimeParseException("out of bound parse position", date, pos.getIndex()); + } + final char[] chars = date.substring(pos.getIndex()).toCharArray(); + + // Date portion + + // YEAR + final int years = getYear(chars, pos); + if (4 == len) { + return OffsetDateTime.of(years, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + } + + // MONTH + consumeChar(chars, pos, DATE_SEPARATOR); + final int months = getMonth(chars, pos); + if (7 == len) { + return OffsetDateTime.of(years, months, 1, 0, 0, 0, 0, ZoneOffset.UTC); + } + + // DAY + consumeChar(chars, pos, DATE_SEPARATOR); + final int days = getDay(chars, pos); + if (10 == len) { + return OffsetDateTime.of(years, months, days, 0, 0, 0, 0, ZoneOffset.UTC); + } + + // HOURS + consumeChar(chars, pos, SEPARATOR_UPPER, SEPARATOR_LOWER, SEPARATOR_SPACE); + final int hours = getHour(chars, pos); + + // MINUTES + consumeChar(chars, pos, TIME_SEPARATOR); + final int minutes = getMinute(chars, pos); + if (16 == len) { + throw new DateTimeParseException("No timezone offset information", new String(chars), pos.getIndex()); + } + + // SECONDS or TIMEZONE + return handleTime(chars, pos, years, months, days, hours, minutes); + } + + private static boolean isDigit(char c) { + return (c >= '0' && c <= '9'); + } + + private static int digit(char c) { + return c - '0'; + } + + private static int readInt(final char[] strNum, ParsePosition pos, int n) { + int start = pos.getIndex(), end = start + n; + if (end > strNum.length) { + pos.setErrorIndex(end); + throw new DateTimeParseException("Unexpected end of expression at position " + strNum.length, new String(strNum), end); + } + + int result = 0; + for (int i = start; i < end; i++) { + final char c = strNum[i]; + if (isDigit(c) == false) { + pos.setErrorIndex(i); + throw new DateTimeParseException("Character " + c + " is not a digit", new String(strNum), i); + } + int digit = digit(c); + result = result * 10 + digit; + } + pos.setIndex(end); + return result; + } + + private static int readIntUnchecked(final char[] strNum, ParsePosition pos, int n) { + int start = pos.getIndex(), end = start + n; + int result = 0; + for (int i = start; i < end; i++) { + final char c = strNum[i]; + int digit = digit(c); + result = result * 10 + digit; + } + pos.setIndex(end); + return result; + } + + private static int getHour(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getMinute(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getDay(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static boolean isValidOffset(char[] chars, int offset) { + return offset < chars.length; + } + + private static void consumeChar(char[] chars, ParsePosition pos, char expected) { + int offset = pos.getIndex(); + if (isValidOffset(chars, offset) == false) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + + if (chars[offset] != expected) { + throw new DateTimeParseException("Expected character " + expected + " at position " + offset, new String(chars), offset); + } + pos.setIndex(offset + 1); + } + + private static void consumeNextChar(char[] chars, ParsePosition pos) { + int offset = 
pos.getIndex(); + if (isValidOffset(chars, offset) == false) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + pos.setIndex(offset + 1); + } + + private static boolean checkPositionContains(char[] chars, ParsePosition pos, char... expected) { + int offset = pos.getIndex(); + if (offset >= chars.length) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + + boolean found = false; + for (char e : expected) { + if (chars[offset] == e) { + found = true; + break; + } + } + return found; + } + + private static void consumeChar(char[] chars, ParsePosition pos, char... expected) { + int offset = pos.getIndex(); + if (offset >= chars.length) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + + boolean found = false; + for (char e : expected) { + if (chars[offset] == e) { + found = true; + pos.setIndex(offset + 1); + break; + } + } + if (!found) { + throw new DateTimeParseException( + "Expected character " + Arrays.toString(expected) + " at position " + offset, + new String(chars), + offset + ); + } + } + + private static void assertNoMoreChars(char[] chars, ParsePosition pos) { + if (chars.length > pos.getIndex()) { + throw new DateTimeParseException("Trailing junk data after position " + pos.getIndex(), new String(chars), pos.getIndex()); + } + } + + private static ZoneOffset parseTimezone(char[] chars, ParsePosition pos) { + int offset = pos.getIndex(); + final int left = chars.length - offset; + if (checkPositionContains(chars, pos, ZULU_LOWER, ZULU_UPPER)) { + consumeNextChar(chars, pos); + assertNoMoreChars(chars, pos); + return ZoneOffset.UTC; + } + + if (left != 6) { + throw new DateTimeParseException("Invalid timezone offset", new String(chars, offset, left), offset); + } + + final char sign = chars[offset]; + consumeNextChar(chars, pos); + int hours = getHour(chars, pos); + consumeChar(chars, pos, TIME_SEPARATOR); + int minutes = getMinute(chars, pos); + if (sign == MINUS) { + if (hours == 0 && minutes == 0) { + throw new DateTimeParseException("Unknown 'Local Offset Convention' date-time not allowed", new String(chars), offset); + } + hours = -hours; + minutes = -minutes; + } else if (sign != PLUS) { + throw new DateTimeParseException("Invalid character starting at position " + offset, new String(chars), offset); + } + + return ZoneOffset.ofHoursMinutes(hours, minutes); + } + + private static OffsetDateTime handleTime(char[] chars, ParsePosition pos, int year, int month, int day, int hour, int minute) { + switch (chars[pos.getIndex()]) { + case TIME_SEPARATOR: + consumeChar(chars, pos, TIME_SEPARATOR); + return handleSeconds(year, month, day, hour, minute, chars, pos); + + case PLUS: + case MINUS: + case ZULU_UPPER: + case ZULU_LOWER: + final ZoneOffset zoneOffset = parseTimezone(chars, pos); + return OffsetDateTime.of(year, month, day, hour, minute, 0, 0, zoneOffset); + } + throw new DateTimeParseException("Unexpected character " + chars[pos.getIndex()] + " at position " + pos.getIndex(), new String(chars), pos.getIndex()); + } + + private static int getMonth(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getYear(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 4); + } + + private static int getSeconds(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getFractions(final char[] chars, final ParsePosition pos, final int len) { + final int fractions; +
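// Scale the parsed digits to nanoseconds below: len digits represent a fraction in units of 10^-len seconds, so the parsed value is multiplied by 10^(9 - len). +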
fractions = readIntUnchecked(chars, pos, len); + switch (len) { + case 0: + throw new DateTimeParseException("Must have at least 1 fraction digit", new String(chars), pos.getIndex()); + case 1: + return fractions * 100_000_000; + case 2: + return fractions * 10_000_000; + case 3: + return fractions * 1_000_000; + case 4: + return fractions * 100_000; + case 5: + return fractions * 10_000; + case 6: + return fractions * 1_000; + case 7: + return fractions * 100; + case 8: + return fractions * 10; + default: + return fractions; + } + } + + public static int indexOfNonDigit(final char[] text, int offset) { + for (int i = offset; i < text.length; i++) { + if (isDigit(text[i]) == false) { + return i; + } + } + return -1; + } + + public static void consumeDigits(final char[] text, ParsePosition pos) { + final int idx = indexOfNonDigit(text, pos.getIndex()); + if (idx == -1) { + pos.setErrorIndex(text.length); + } else { + pos.setIndex(idx); + } + } + + private static OffsetDateTime handleSeconds(int year, int month, int day, int hour, int minute, char[] chars, ParsePosition pos) { + // From here the specification is more lenient + final int seconds = getSeconds(chars, pos); + int currPos = pos.getIndex(); + final int remaining = chars.length - currPos; + if (remaining == 0) { + // No offset + throw new DateTimeParseException("No timezone offset information", new String(chars), pos.getIndex()); + } + + ZoneOffset offset = null; + int fractions = 0; + if (remaining == 1 && checkPositionContains(chars, pos, ZULU_LOWER, ZULU_UPPER)) { + consumeNextChar(chars, pos); + // Do nothing we are done + offset = ZoneOffset.UTC; + assertNoMoreChars(chars, pos); + } else if (remaining >= 1 && checkPositionContains(chars, pos, FRACTION_SEPARATOR_1, FRACTION_SEPARATOR_2)) { + // We have fractional seconds; + consumeNextChar(chars, pos); + ParsePosition initPosition = new ParsePosition(pos.getIndex()); + consumeDigits(chars, pos); + if (pos.getErrorIndex() == -1) { + // We have an end of fractions + final int len = pos.getIndex() - initPosition.getIndex(); + fractions = getFractions(chars, initPosition, len); + offset = parseTimezone(chars, pos); + } else { + throw new DateTimeParseException("No timezone offset information", new String(chars), pos.getIndex()); + } + } else if (remaining >= 1 && checkPositionContains(chars, pos, PLUS, MINUS)) { + // No fractional sections + offset = parseTimezone(chars, pos); + } else { + throw new DateTimeParseException("Unexpected character at position " + (pos.getIndex()), new String(chars), pos.getIndex()); + } + + return OffsetDateTime.of(year, month, day, hour, minute, seconds, fractions, offset); + } +} diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java index d4ab161527cc0..075dc9934e130 100644 --- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java +++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java @@ -38,12 +38,6 @@ public class FeatureFlags { */ public static final String IDENTITY = "opensearch.experimental.feature.identity.enabled"; - /** - * Gates the functionality of concurrently searching the segments - * Once the feature is ready for release, this feature flag can be removed. - */ - public static final String CONCURRENT_SEGMENT_SEARCH = "opensearch.experimental.feature.concurrent_segment_search.enabled"; - /** * Gates the functionality of telemetry framework. 
*/ @@ -60,6 +54,11 @@ public class FeatureFlags { */ public static final String WRITEABLE_REMOTE_INDEX = "opensearch.experimental.feature.writeable_remote_index.enabled"; + /** + * Gates the optimization to enable bloom filters for doc id lookup. + */ + public static final String DOC_ID_FUZZY_SET = "opensearch.experimental.optimize_doc_id_lookup.fuzzy_set.enabled"; + /** * Should store the settings from opensearch.yml. */ @@ -105,12 +104,6 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) { public static final Setting<Boolean> TELEMETRY_SETTING = Setting.boolSetting(TELEMETRY, false, Property.NodeScope); - public static final Setting<Boolean> CONCURRENT_SEGMENT_SEARCH_SETTING = Setting.boolSetting( - CONCURRENT_SEGMENT_SEARCH, - false, - Property.NodeScope - ); - public static final Setting<Boolean> DATETIME_FORMATTER_CACHING_SETTING = Setting.boolSetting( DATETIME_FORMATTER_CACHING, true, @@ -122,4 +115,6 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) { false, Property.NodeScope ); + + public static final Setting<Boolean> DOC_ID_FUZZY_SET_SETTING = Setting.boolSetting(DOC_ID_FUZZY_SET, false, Property.NodeScope); } diff --git a/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java b/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java index 86e7227cb6c85..fe053a26329e4 100644 --- a/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java +++ b/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java @@ -118,10 +118,17 @@ public ReorganizingLongHash(final long initialCapacity, final float loadFactor, mask = capacity - 1; grow = (long) (capacity * loadFactor); size = 0; - - table = bigArrays.newLongArray(capacity, false); - table.fill(0, capacity, -1); // -1 represents an empty slot - keys = bigArrays.newLongArray(initialCapacity, false); + try { + table = bigArrays.newLongArray(capacity, false); + table.fill(0, capacity, -1); // -1 represents an empty slot + keys = bigArrays.newLongArray(initialCapacity, false); + } finally { + if (table == null || keys == null) { + // it's important to close the arrays initialized above to prevent memory leak + // refer: https://github.com/opensearch-project/OpenSearch/issues/10154 + Releasables.closeWhileHandlingException(table, keys); + } + } } /** diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java index ec1024bbe5f30..6e45c3fb7b58d 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java @@ -393,6 +393,7 @@ static class OpenSearchThreadFactory implements ThreadFactory { final AtomicInteger threadNumber = new AtomicInteger(1); final String namePrefix; + @SuppressWarnings("removal") OpenSearchThreadFactory(String namePrefix) { this.namePrefix = namePrefix; SecurityManager s = System.getSecurityManager(); @@ -446,6 +447,30 @@ public boolean offer(E e) { } } + /** + * Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2. + */ + @Override + public void put(E e) { + super.offer(e); + } + + /** + * Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2. 
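+     * Delegates directly to {@code super.offer(e)}, ignoring the timeout, so the call bypasses this queue's own {@code offer} override.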
+ */ + @Override + public boolean offer(E e, long timeout, TimeUnit unit) { + return super.offer(e); + } + + /** + * Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2. + */ + @Override + public boolean add(E e) { + return super.offer(e); + } + } /** diff --git a/server/src/main/java/org/opensearch/env/NodeEnvironment.java b/server/src/main/java/org/opensearch/env/NodeEnvironment.java index c7ba5eb040a1f..2748938d8b761 100644 --- a/server/src/main/java/org/opensearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/opensearch/env/NodeEnvironment.java @@ -199,6 +199,8 @@ public String toString() { private final NodeMetadata nodeMetadata; + private final IndexStoreListener indexStoreListener; + /** * Maximum number of data nodes that should run in an environment. */ @@ -295,18 +297,23 @@ public void close() { } } + public NodeEnvironment(Settings settings, Environment environment) throws IOException { + this(settings, environment, IndexStoreListener.EMPTY); + } + /** * Setup the environment. * @param settings settings from opensearch.yml */ - public NodeEnvironment(Settings settings, Environment environment) throws IOException { - if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) { + public NodeEnvironment(Settings settings, Environment environment, IndexStoreListener indexStoreListener) throws IOException { + if (DiscoveryNode.nodeRequiresLocalStorage(settings) == false) { nodePaths = null; fileCacheNodePath = null; sharedDataPath = null; locks = null; nodeLockId = -1; nodeMetadata = new NodeMetadata(generateNodeId(settings), Version.CURRENT); + this.indexStoreListener = IndexStoreListener.EMPTY; return; } boolean success = false; @@ -385,6 +392,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce } this.nodeMetadata = loadNodeMetadata(settings, logger, nodePaths); + this.indexStoreListener = indexStoreListener; success = true; } finally { if (success == false) { @@ -577,6 +585,9 @@ public static void acquireFSLockForPaths(IndexSettings indexSettings, Path... 
sh public void deleteShardDirectoryUnderLock(ShardLock lock, IndexSettings indexSettings) throws IOException { final ShardId shardId = lock.getShardId(); assert isShardLocked(shardId) : "shard " + shardId + " is not locked"; + + indexStoreListener.beforeShardPathDeleted(shardId, indexSettings, this); + final Path[] paths = availableShardPaths(shardId); logger.trace("acquiring locks for {}, paths: [{}]", shardId, paths); acquireFSLockForPaths(indexSettings, paths); @@ -653,6 +664,8 @@ public void deleteIndexDirectorySafe(Index index, long lockTimeoutMS, IndexSetti * @param indexSettings settings for the index being deleted */ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettings) throws IOException { + indexStoreListener.beforeIndexPathDeleted(index, indexSettings, this); + final Path[] indexPaths = indexPaths(index); logger.trace("deleting index {} directory, paths({}): [{}]", index, indexPaths.length, indexPaths); IOUtils.rm(indexPaths); @@ -663,6 +676,18 @@ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettin } } + private void deleteIndexFileCacheDirectory(Index index) { + final Path indexCachePath = fileCacheNodePath().fileCachePath.resolve(index.getUUID()); + logger.trace("deleting index {} file cache directory, path: [{}]", index, indexCachePath); + if (Files.exists(indexCachePath)) { + try { + IOUtils.rm(indexCachePath); + } catch (IOException e) { + logger.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e); + } + } + } + /** * Tries to lock all local shards for the given index. If any of the shard locks can't be acquired * a {@link ShardLockObtainFailedException} is thrown and all previously acquired locks are released. @@ -1387,4 +1412,18 @@ private static void tryWriteTempFile(Path path) throws IOException { } } } + + /** + * A listener that is executed on per-index and per-shard store events, like deleting shard path + * + * @opensearch.internal + */ + public interface IndexStoreListener { + default void beforeShardPathDeleted(ShardId shardId, IndexSettings indexSettings, NodeEnvironment env) {} + + default void beforeIndexPathDeleted(Index index, IndexSettings indexSettings, NodeEnvironment env) {} + + IndexStoreListener EMPTY = new IndexStoreListener() { + }; + } } diff --git a/server/src/main/java/org/opensearch/gateway/GatewayModule.java b/server/src/main/java/org/opensearch/gateway/GatewayModule.java index 59ec0243c88c9..847ba01737332 100644 --- a/server/src/main/java/org/opensearch/gateway/GatewayModule.java +++ b/server/src/main/java/org/opensearch/gateway/GatewayModule.java @@ -47,6 +47,7 @@ protected void configure() { bind(GatewayService.class).asEagerSingleton(); bind(TransportNodesListGatewayMetaState.class).asEagerSingleton(); bind(TransportNodesListGatewayStartedShards.class).asEagerSingleton(); + bind(TransportNodesListGatewayStartedShardsBatch.class).asEagerSingleton(); bind(LocalAllocateDangledIndices.class).asEagerSingleton(); } } diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java b/server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java new file mode 100644 index 0000000000000..403e3e96fa209 --- /dev/null +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java @@ -0,0 +1,114 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or 
a + * compatible open source license. + */ + +package org.opensearch.gateway; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.index.shard.ShardPath; +import org.opensearch.index.shard.ShardStateMetadata; +import org.opensearch.index.store.Store; +import org.opensearch.indices.IndicesService; + +import java.io.IOException; + +/** + * This class has the common code used in {@link TransportNodesListGatewayStartedShards} and + * {@link TransportNodesListGatewayStartedShardsBatch} to get the shard info on the local node. + * <p> + * This class should not be used to add more functions and will be removed when + * {@link TransportNodesListGatewayStartedShards} is deprecated and all the code is moved to + * {@link TransportNodesListGatewayStartedShardsBatch}. + * + * @opensearch.internal + */ +public class TransportNodesGatewayStartedShardHelper { + public static TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard getShardInfoOnLocalNode( + Logger logger, + final ShardId shardId, + NamedXContentRegistry namedXContentRegistry, + NodeEnvironment nodeEnv, + IndicesService indicesService, + String shardDataPathInRequest, + Settings settings, + ClusterService clusterService + ) throws IOException { + logger.trace("{} loading local shard state info", shardId); + ShardStateMetadata shardStateMetadata = ShardStateMetadata.FORMAT.loadLatestState( + logger, + namedXContentRegistry, + nodeEnv.availableShardPaths(shardId) + ); + if (shardStateMetadata != null) { + if (indicesService.getShardOrNull(shardId) == null + && shardStateMetadata.indexDataLocation == ShardStateMetadata.IndexDataLocation.LOCAL) { + final String customDataPath; + if (shardDataPathInRequest != null) { + customDataPath = shardDataPathInRequest; + } else { + // TODO: Fallback for BWC with older OpenSearch versions. + // Remove once request.getCustomDataPath() always returns non-null + final IndexMetadata metadata = clusterService.state().metadata().index(shardId.getIndex()); + if (metadata != null) { + customDataPath = new IndexSettings(metadata, settings).customDataPath(); + } else { + logger.trace("{} node doesn't have meta data for the requested index", shardId); + throw new OpenSearchException("node doesn't have meta data for index " + shardId.getIndex()); + } + } + // we don't have an open shard on the store, validate the files on disk are openable + ShardPath shardPath = null; + try { + shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, customDataPath); + if (shardPath == null) { + throw new IllegalStateException(shardId + " no shard path found"); + } + Store.tryOpenIndex(shardPath.resolveIndex(), shardId, nodeEnv::shardLock, logger); + } catch (Exception exception) { + final ShardPath finalShardPath = shardPath; + logger.trace( + () -> new ParameterizedMessage( + "{} can't open index for shard [{}] in path [{}]", + shardId, + shardStateMetadata, + (finalShardPath != null) ?
finalShardPath.resolveIndex() : "" + ), + exception + ); + String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null; + return new TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard( + allocationId, + shardStateMetadata.primary, + null, + exception + ); + } + } + + logger.debug("{} shard state info found: [{}]", shardId, shardStateMetadata); + String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null; + final IndexShard shard = indicesService.getShardOrNull(shardId); + return new TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard( + allocationId, + shardStateMetadata.primary, + shard != null ? shard.getLatestReplicationCheckpoint() : null + ); + } + logger.trace("{} no local shard info found", shardId); + return new TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard(null, false, null); + } +} diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java index 601a5c671d67c..0ba872aab9974 100644 --- a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java @@ -32,7 +32,6 @@ package org.opensearch.gateway; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.ActionType; @@ -43,7 +42,6 @@ import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.action.support.nodes.TransportNodesAction; import org.opensearch.cluster.ClusterName; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; @@ -55,11 +53,6 @@ import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.NodeEnvironment; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.shard.IndexShard; -import org.opensearch.index.shard.ShardPath; -import org.opensearch.index.shard.ShardStateMetadata; -import org.opensearch.index.store.Store; import org.opensearch.indices.IndicesService; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; import org.opensearch.indices.store.ShardAttributes; @@ -72,6 +65,8 @@ import java.util.Map; import java.util.Objects; +import static org.opensearch.gateway.TransportNodesGatewayStartedShardHelper.getShardInfoOnLocalNode; + /** * This transport action is used to fetch the shard version from each node during primary allocation in {@link GatewayAllocator}. 
* We use this to find out which node holds the latest shard version and which of them used to be a primary in order to allocate @@ -159,72 +154,23 @@ protected NodesGatewayStartedShards newResponse( @Override protected NodeGatewayStartedShards nodeOperation(NodeRequest request) { try { - final ShardId shardId = request.getShardId(); - logger.trace("{} loading local shard state info", shardId); - ShardStateMetadata shardStateMetadata = ShardStateMetadata.FORMAT.loadLatestState( + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard shardInfo = getShardInfoOnLocalNode( logger, + request.getShardId(), namedXContentRegistry, - nodeEnv.availableShardPaths(request.shardId) + nodeEnv, + indicesService, + request.getCustomDataPath(), + settings, + clusterService + ); + return new NodeGatewayStartedShards( + clusterService.localNode(), + shardInfo.allocationId(), + shardInfo.primary(), + shardInfo.replicationCheckpoint(), + shardInfo.storeException() ); - if (shardStateMetadata != null) { - if (indicesService.getShardOrNull(shardId) == null - && shardStateMetadata.indexDataLocation == ShardStateMetadata.IndexDataLocation.LOCAL) { - final String customDataPath; - if (request.getCustomDataPath() != null) { - customDataPath = request.getCustomDataPath(); - } else { - // TODO: Fallback for BWC with older OpenSearch versions. - // Remove once request.getCustomDataPath() always returns non-null - final IndexMetadata metadata = clusterService.state().metadata().index(shardId.getIndex()); - if (metadata != null) { - customDataPath = new IndexSettings(metadata, settings).customDataPath(); - } else { - logger.trace("{} node doesn't have meta data for the requests index", shardId); - throw new OpenSearchException("node doesn't have meta data for index " + shardId.getIndex()); - } - } - // we don't have an open shard on the store, validate the files on disk are openable - ShardPath shardPath = null; - try { - shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, customDataPath); - if (shardPath == null) { - throw new IllegalStateException(shardId + " no shard path found"); - } - Store.tryOpenIndex(shardPath.resolveIndex(), shardId, nodeEnv::shardLock, logger); - } catch (Exception exception) { - final ShardPath finalShardPath = shardPath; - logger.trace( - () -> new ParameterizedMessage( - "{} can't open index for shard [{}] in path [{}]", - shardId, - shardStateMetadata, - (finalShardPath != null) ? finalShardPath.resolveIndex() : "" - ), - exception - ); - String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null; - return new NodeGatewayStartedShards( - clusterService.localNode(), - allocationId, - shardStateMetadata.primary, - null, - exception - ); - } - } - - logger.debug("{} shard state info found: [{}]", shardId, shardStateMetadata); - String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null; - final IndexShard shard = indicesService.getShardOrNull(shardId); - return new NodeGatewayStartedShards( - clusterService.localNode(), - allocationId, - shardStateMetadata.primary, - shard != null ? 
shard.getLatestReplicationCheckpoint() : null - ); - } - logger.trace("{} no local shard info found", shardId); - return new NodeGatewayStartedShards(clusterService.localNode(), null, false, null); } catch (Exception e) { throw new OpenSearchException("failed to load started shards", e); } diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java new file mode 100644 index 0000000000000..bc327c1b85748 --- /dev/null +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java @@ -0,0 +1,401 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gateway; + +import org.opensearch.OpenSearchException; +import org.opensearch.action.ActionType; +import org.opensearch.action.FailedNodeException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.nodes.BaseNodeResponse; +import org.opensearch.action.support.nodes.BaseNodesRequest; +import org.opensearch.action.support.nodes.BaseNodesResponse; +import org.opensearch.action.support.nodes.TransportNodesAction; +import org.opensearch.cluster.ClusterName; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.indices.IndicesService; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.indices.store.ShardAttributes; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportRequest; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.opensearch.gateway.TransportNodesGatewayStartedShardHelper.getShardInfoOnLocalNode; + +/** + * This transport action is used to fetch a batch of unassigned shard versions from each node during primary allocation in {@link GatewayAllocator}. + * We use this to find out which node holds the latest shard version and which of them used to be a primary in order to allocate + * shards after node or cluster restarts.
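+ * <p> + * A rough caller sketch (illustration only; {@code shardAttributesMap}, {@code dataNodes} and {@code logger} are assumed to exist): + * <pre> + * action.list(shardAttributesMap, dataNodes, ActionListener.wrap( + *     response -> response.getNodes().forEach(nodeResponse -> nodeResponse.getNodeGatewayStartedShardsBatch() + *         .forEach((shardId, shardInfo) -> logger.trace("{} on {} primary [{}]", shardId, nodeResponse.getNode(), shardInfo.primary()))), + *     e -> logger.warn("batched started shards fetch failed", e))); + * </pre>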
+ * + * @opensearch.internal + */ +public class TransportNodesListGatewayStartedShardsBatch extends TransportNodesAction< + TransportNodesListGatewayStartedShardsBatch.Request, + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch, + TransportNodesListGatewayStartedShardsBatch.NodeRequest, + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShardsBatch> + implements + AsyncShardFetch.Lister< + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch, + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShardsBatch> { + + public static final String ACTION_NAME = "internal:gateway/local/started_shards_batch"; + public static final ActionType<NodesGatewayStartedShardsBatch> TYPE = new ActionType<>( + ACTION_NAME, + NodesGatewayStartedShardsBatch::new + ); + + private final Settings settings; + private final NodeEnvironment nodeEnv; + private final IndicesService indicesService; + private final NamedXContentRegistry namedXContentRegistry; + + @Inject + public TransportNodesListGatewayStartedShardsBatch( + Settings settings, + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + NodeEnvironment env, + IndicesService indicesService, + NamedXContentRegistry namedXContentRegistry + ) { + super( + ACTION_NAME, + threadPool, + clusterService, + transportService, + actionFilters, + Request::new, + NodeRequest::new, + ThreadPool.Names.FETCH_SHARD_STARTED, + NodeGatewayStartedShardsBatch.class + ); + this.settings = settings; + this.nodeEnv = env; + this.indicesService = indicesService; + this.namedXContentRegistry = namedXContentRegistry; + } + + @Override + public void list( + Map<ShardId, ShardAttributes> shardAttributesMap, + DiscoveryNode[] nodes, + ActionListener<NodesGatewayStartedShardsBatch> listener + ) { + execute(new Request(nodes, shardAttributesMap), listener); + } + + @Override + protected NodeRequest newNodeRequest(Request request) { + return new NodeRequest(request); + } + + @Override + protected NodeGatewayStartedShardsBatch newNodeResponse(StreamInput in) throws IOException { + return new NodeGatewayStartedShardsBatch(in); + } + + @Override + protected NodesGatewayStartedShardsBatch newResponse( + Request request, + List<NodeGatewayStartedShardsBatch> responses, + List<FailedNodeException> failures + ) { + return new NodesGatewayStartedShardsBatch(clusterService.getClusterName(), responses, failures); + } + + /** + * This function is similar to the nodeOperation method of {@link TransportNodesListGatewayStartedShards}; we loop over + * the shards here and populate the data about the shards held by the local node. + * + * @param request request containing the map of shards and their attributes.
+ * @return NodeGatewayStartedShardsBatch containing the data about the primary shards held by the local node + */ + @Override + protected NodeGatewayStartedShardsBatch nodeOperation(NodeRequest request) { + Map<ShardId, NodeGatewayStartedShard> shardsOnNode = new HashMap<>(); + for (ShardAttributes shardAttr : request.shardAttributes.values()) { + final ShardId shardId = shardAttr.getShardId(); + try { + shardsOnNode.put( + shardId, + getShardInfoOnLocalNode( + logger, + shardId, + namedXContentRegistry, + nodeEnv, + indicesService, + shardAttr.getCustomDataPath(), + settings, + clusterService + ) + ); + } catch (Exception e) { + shardsOnNode.put( + shardId, + new NodeGatewayStartedShard(null, false, null, new OpenSearchException("failed to load started shards", e)) + ); + } + } + return new NodeGatewayStartedShardsBatch(clusterService.localNode(), shardsOnNode); + } + + /** + * This is used in constructing the request for making the transport request to a set of other nodes. + * Refer to the {@link TransportNodesAction} class start method. + * + * @opensearch.internal + */ + public static class Request extends BaseNodesRequest<Request> { + private final Map<ShardId, ShardAttributes> shardAttributes; + + public Request(StreamInput in) throws IOException { + super(in); + shardAttributes = in.readMap(ShardId::new, ShardAttributes::new); + } + + public Request(DiscoveryNode[] nodes, Map<ShardId, ShardAttributes> shardAttributes) { + super(nodes); + this.shardAttributes = Objects.requireNonNull(shardAttributes); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(shardAttributes, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o)); + } + + public Map<ShardId, ShardAttributes> getShardAttributes() { + return shardAttributes; + } + } + + /** + * Responses received from the set of other nodes are combined into this class and sent back to the caller + * of this transport request. Refer to {@link TransportNodesAction}. + * + * @opensearch.internal + */ + public static class NodesGatewayStartedShardsBatch extends BaseNodesResponse<NodeGatewayStartedShardsBatch> { + + public NodesGatewayStartedShardsBatch(StreamInput in) throws IOException { + super(in); + } + + public NodesGatewayStartedShardsBatch( + ClusterName clusterName, + List<NodeGatewayStartedShardsBatch> nodes, + List<FailedNodeException> failures + ) { + super(clusterName, nodes, failures); + } + + @Override + protected List<NodeGatewayStartedShardsBatch> readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeGatewayStartedShardsBatch::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List<NodeGatewayStartedShardsBatch> nodes) throws IOException { + out.writeList(nodes); + } + } + + /** + * NodeRequest class is for deserializing the request received by this node from another node for this transport action.
This is used in {@link TransportNodesAction}. + * + * @opensearch.internal + */ + public static class NodeRequest extends TransportRequest { + private final Map<ShardId, ShardAttributes> shardAttributes; + + public NodeRequest(StreamInput in) throws IOException { + super(in); + shardAttributes = in.readMap(ShardId::new, ShardAttributes::new); + } + + public NodeRequest(Request request) { + this.shardAttributes = Objects.requireNonNull(request.getShardAttributes()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(shardAttributes, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o)); + } + } + + /** + * This class encapsulates the metadata about a started shard that needs to be persisted or sent between nodes. + * It is used in {@link NodeGatewayStartedShardsBatch} to construct the response for each node, instead of + * {@link TransportNodesListGatewayStartedShards.NodeGatewayStartedShards}, because it does not store the extra + * {@link DiscoveryNode} object that {@link TransportNodesListGatewayStartedShards.NodeGatewayStartedShards} carries, + * which reduces the memory footprint of the response. + * + * @opensearch.internal + */ + public static class NodeGatewayStartedShard { + private final String allocationId; + private final boolean primary; + private final Exception storeException; + private final ReplicationCheckpoint replicationCheckpoint; + + public NodeGatewayStartedShard(StreamInput in) throws IOException { + allocationId = in.readOptionalString(); + primary = in.readBoolean(); + if (in.readBoolean()) { + storeException = in.readException(); + } else { + storeException = null; + } + if (in.readBoolean()) { + replicationCheckpoint = new ReplicationCheckpoint(in); + } else { + replicationCheckpoint = null; + } + } + + public NodeGatewayStartedShard(String allocationId, boolean primary, ReplicationCheckpoint replicationCheckpoint) { + this(allocationId, primary, replicationCheckpoint, null); + } + + public NodeGatewayStartedShard( + String allocationId, + boolean primary, + ReplicationCheckpoint replicationCheckpoint, + Exception storeException + ) { + this.allocationId = allocationId; + this.primary = primary; + this.replicationCheckpoint = replicationCheckpoint; + this.storeException = storeException; + } + + public String allocationId() { + return this.allocationId; + } + + public boolean primary() { + return this.primary; + } + + public ReplicationCheckpoint replicationCheckpoint() { + return this.replicationCheckpoint; + } + + public Exception storeException() { + return this.storeException; + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(allocationId); + out.writeBoolean(primary); + if (storeException != null) { + out.writeBoolean(true); + out.writeException(storeException); + } else { + out.writeBoolean(false); + } + if (replicationCheckpoint != null) { + out.writeBoolean(true); + replicationCheckpoint.writeTo(out); + } else { + out.writeBoolean(false); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + NodeGatewayStartedShard that = (NodeGatewayStartedShard) o; + + return primary == that.primary + && Objects.equals(allocationId, that.allocationId) + && Objects.equals(storeException, that.storeException) + && Objects.equals(replicationCheckpoint, that.replicationCheckpoint); + } + + @Override + public int hashCode() { + int result = (allocationId != null ?
allocationId.hashCode() : 0); + result = 31 * result + (primary ? 1 : 0); + result = 31 * result + (storeException != null ? storeException.hashCode() : 0); + result = 31 * result + (replicationCheckpoint != null ? replicationCheckpoint.hashCode() : 0); + return result; + } + + @Override + public String toString() { + StringBuilder buf = new StringBuilder(); + buf.append("NodeGatewayStartedShards[").append("allocationId=").append(allocationId).append(",primary=").append(primary); + if (storeException != null) { + buf.append(",storeException=").append(storeException); + } + if (replicationCheckpoint != null) { + buf.append(",ReplicationCheckpoint=").append(replicationCheckpoint.toString()); + } + buf.append("]"); + return buf.toString(); + } + } + + /** + * This is the response from a single node; it is used in {@link NodesGatewayStartedShardsBatch} for creating + * the node-to-response mapping for this transport request. + * Refer to the {@link TransportNodesAction} start method. + * + * @opensearch.internal + */ + public static class NodeGatewayStartedShardsBatch extends BaseNodeResponse { + private final Map<ShardId, NodeGatewayStartedShard> nodeGatewayStartedShardsBatch; + + public Map<ShardId, NodeGatewayStartedShard> getNodeGatewayStartedShardsBatch() { + return nodeGatewayStartedShardsBatch; + } + + public NodeGatewayStartedShardsBatch(StreamInput in) throws IOException { + super(in); + this.nodeGatewayStartedShardsBatch = in.readMap(ShardId::new, NodeGatewayStartedShard::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(nodeGatewayStartedShardsBatch, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o)); + } + + public NodeGatewayStartedShardsBatch(DiscoveryNode node, Map<ShardId, NodeGatewayStartedShard> nodeGatewayStartedShardsBatch) { + super(node); + this.nodeGatewayStartedShardsBatch = nodeGatewayStartedShardsBatch; + } + } +} diff --git a/server/src/main/java/org/opensearch/index/IndexSettings.java b/server/src/main/java/org/opensearch/index/IndexSettings.java index 00e765d73f77f..36e48b2590a4e 100644 --- a/server/src/main/java/org/opensearch/index/IndexSettings.java +++ b/server/src/main/java/org/opensearch/index/IndexSettings.java @@ -65,7 +65,9 @@ import java.util.function.UnaryOperator; import static org.opensearch.Version.V_2_7_0; +import static org.opensearch.common.util.FeatureFlags.DOC_ID_FUZZY_SET_SETTING; import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY; +import static org.opensearch.index.codec.fuzzy.FuzzySetParameters.DEFAULT_FALSE_POSITIVE_PROBABILITY; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING; @@ -269,6 +271,17 @@ public static IndexMergePolicy fromString(String text) { Property.IndexScope ); + /** + * Index setting describing the maximum number of nested scopes in queries. + * The default maximum is 20. A value of 1 means one level of nesting.
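+ * For example, a {@code nested} query wrapped inside another {@code nested} query has a nesting depth of 2.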
+ */ + public static final Setting<Integer> MAX_NESTED_QUERY_DEPTH_SETTING = Setting.intSetting( + "index.query.max_nested_depth", + 20, + 1, + Property.Dynamic, + Property.IndexScope + ); /** * Index setting describing for NGramTokenizer and NGramTokenFilter * the maximum difference between @@ -658,6 +671,22 @@ public static IndexMergePolicy fromString(String text) { Property.Dynamic ); + public static final Setting<Boolean> INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING = Setting.boolSetting( + "index.optimize_doc_id_lookup.fuzzy_set.enabled", + false, + Property.IndexScope, + Property.Dynamic + ); + + public static final Setting<Double> INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING = Setting.doubleSetting( + "index.optimize_doc_id_lookup.fuzzy_set.false_positive_probability", + DEFAULT_FALSE_POSITIVE_PROBABILITY, + 0.01, + 0.50, + Property.IndexScope, + Property.Dynamic + ); + public static final TimeValue DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL = new TimeValue(650, TimeUnit.MILLISECONDS); public static final TimeValue MINIMUM_REMOTE_TRANSLOG_BUFFER_INTERVAL = TimeValue.ZERO; public static final Setting<TimeValue> INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING = Setting.timeSetting( @@ -747,6 +776,8 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { private volatile TimeValue searchIdleAfter; private volatile int maxAnalyzedOffset; private volatile int maxTermsCount; + + private volatile int maxNestedQueryDepth; private volatile String defaultPipeline; private volatile String requiredPipeline; private volatile boolean searchThrottled; @@ -787,6 +818,16 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { */ private volatile UnaryOperator<MergePolicy> mergeOnFlushPolicy; + /** + * Is fuzzy set enabled for doc id + */ + private volatile boolean enableFuzzySetForDocId; + + /** + * False positive probability to use while creating fuzzy set. + */ + private volatile double docIdFuzzySetFalsePositiveProbability; + /** * Returns the default search fields for this index. */ @@ -902,6 +943,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti maxSlicesPerPit = scopedSettings.get(MAX_SLICES_PER_PIT); maxAnalyzedOffset = scopedSettings.get(MAX_ANALYZED_OFFSET_SETTING); maxTermsCount = scopedSettings.get(MAX_TERMS_COUNT_SETTING); + maxNestedQueryDepth = scopedSettings.get(MAX_NESTED_QUERY_DEPTH_SETTING); maxRegexLength = scopedSettings.get(MAX_REGEX_LENGTH_SETTING); this.tieredMergePolicyProvider = new TieredMergePolicyProvider(logger, this); this.logByteSizeMergePolicyProvider = new LogByteSizeMergePolicyProvider(logger, this); @@ -926,6 +968,13 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti * Now this sortField (IndexSort) is stored in SegmentInfo and we need to maintain backward compatibility for them. 
*/ widenIndexSortType = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).before(V_2_7_0); + + boolean isOptimizeDocIdLookupUsingFuzzySetFeatureEnabled = FeatureFlags.isEnabled(DOC_ID_FUZZY_SET_SETTING); + if (isOptimizeDocIdLookupUsingFuzzySetFeatureEnabled) { + enableFuzzySetForDocId = scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING); + docIdFuzzySetFalsePositiveProbability = scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING); + } + scopedSettings.addSettingsUpdateConsumer( TieredMergePolicyProvider.INDEX_COMPOUND_FORMAT_SETTING, tieredMergePolicyProvider::setNoCFSRatio @@ -1007,6 +1056,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti scopedSettings.addSettingsUpdateConsumer(MAX_REFRESH_LISTENERS_PER_SHARD, this::setMaxRefreshListeners); scopedSettings.addSettingsUpdateConsumer(MAX_ANALYZED_OFFSET_SETTING, this::setHighlightMaxAnalyzedOffset); scopedSettings.addSettingsUpdateConsumer(MAX_TERMS_COUNT_SETTING, this::setMaxTermsCount); + scopedSettings.addSettingsUpdateConsumer(MAX_NESTED_QUERY_DEPTH_SETTING, this::setMaxNestedQueryDepth); scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_SCROLL, this::setMaxSlicesPerScroll); scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_PIT, this::setMaxSlicesPerPit); scopedSettings.addSettingsUpdateConsumer(DEFAULT_FIELD_SETTING, this::setDefaultFields); @@ -1032,6 +1082,11 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti this::setRemoteTranslogUploadBufferInterval ); scopedSettings.addSettingsUpdateConsumer(INDEX_REMOTE_TRANSLOG_KEEP_EXTRA_GEN_SETTING, this::setRemoteTranslogKeepExtraGen); + scopedSettings.addSettingsUpdateConsumer(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING, this::setEnableFuzzySetForDocId); + scopedSettings.addSettingsUpdateConsumer( + INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING, + this::setDocIdFuzzySetFalsePositiveProbability + ); } private void setSearchIdleAfter(TimeValue searchIdleAfter) { @@ -1517,6 +1572,17 @@ private void setMaxTermsCount(int maxTermsCount) { this.maxTermsCount = maxTermsCount; } + /** + * @return max level of nested queries and documents + */ + public int getMaxNestedQueryDepth() { + return this.maxNestedQueryDepth; + } + + private void setMaxNestedQueryDepth(int maxNestedQueryDepth) { + this.maxNestedQueryDepth = maxNestedQueryDepth; + } + /** * Returns the maximum number of allowed script_fields to retrieve in a search request */ @@ -1801,4 +1867,36 @@ public void setDefaultSearchPipeline(String defaultSearchPipeline) { public boolean shouldWidenIndexSortType() { return this.widenIndexSortType; } + + public boolean isEnableFuzzySetForDocId() { + return enableFuzzySetForDocId; + } + + public void setEnableFuzzySetForDocId(boolean enableFuzzySetForDocId) { + verifyFeatureToSetDocIdFuzzySetSetting(enabled -> this.enableFuzzySetForDocId = enabled, enableFuzzySetForDocId); + } + + public double getDocIdFuzzySetFalsePositiveProbability() { + return docIdFuzzySetFalsePositiveProbability; + } + + public void setDocIdFuzzySetFalsePositiveProbability(double docIdFuzzySetFalsePositiveProbability) { + verifyFeatureToSetDocIdFuzzySetSetting( + fpp -> this.docIdFuzzySetFalsePositiveProbability = fpp, + docIdFuzzySetFalsePositiveProbability + ); + } + + private static <T> void verifyFeatureToSetDocIdFuzzySetSetting(Consumer<T> settingUpdater, T val) { + if (FeatureFlags.isEnabled(DOC_ID_FUZZY_SET_SETTING)) { + settingUpdater.accept(val); + } else { + throw new 
IllegalArgumentException( + "Fuzzy set for optimizing doc id lookup " + + "cannot be enabled with feature flag [" + + FeatureFlags.DOC_ID_FUZZY_SET + + "] set to false" + ); + } + } } diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index dc28ad2d6dc07..1ad17f121560c 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -39,10 +39,16 @@ import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.common.lucene.Lucene; +import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat; +import org.opensearch.index.codec.fuzzy.FuzzySetFactory; +import org.opensearch.index.codec.fuzzy.FuzzySetParameters; import org.opensearch.index.mapper.CompletionFieldMapper; +import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; +import java.util.Map; + /** * {@link PerFieldMappingPostingFormatCodec This postings format} is the default * {@link PostingsFormat} for OpenSearch. It utilizes the @@ -57,6 +63,8 @@ public class PerFieldMappingPostingFormatCodec extends Lucene99Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); + private final FuzzySetFactory fuzzySetFactory; + private PostingsFormat docIdPostingsFormat; static { assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMappingPostingFormatCodec.class) @@ -67,6 +75,12 @@ public PerFieldMappingPostingFormatCodec(Mode compressionMode, MapperService map super(compressionMode); this.mapperService = mapperService; this.logger = logger; + fuzzySetFactory = new FuzzySetFactory( + Map.of( + IdFieldMapper.NAME, + new FuzzySetParameters(() -> mapperService.getIndexSettings().getDocIdFuzzySetFalsePositiveProbability()) + ) + ); } @Override @@ -76,6 +90,11 @@ public PostingsFormat getPostingsFormatForField(String field) { logger.warn("no index mapper found for field: [{}] returning default postings format", field); } else if (fieldType instanceof CompletionFieldMapper.CompletionFieldType) { return CompletionFieldMapper.CompletionFieldType.postingsFormat(); + } else if (IdFieldMapper.NAME.equals(field) && mapperService.getIndexSettings().isEnableFuzzySetForDocId()) { + if (docIdPostingsFormat == null) { + docIdPostingsFormat = new FuzzyFilterPostingsFormat(super.getPostingsFormatForField(field), fuzzySetFactory); + } + return docIdPostingsFormat; } return super.getPostingsFormatForField(field); } diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java new file mode 100644 index 0000000000000..09976297361fa --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java @@ -0,0 +1,61 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.CheckedSupplier;
+import org.opensearch.common.hash.T1ha1;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * Encapsulates behaviour common to fuzzy set implementations.
+ */
+public abstract class AbstractFuzzySet implements FuzzySet {
+
+    /**
+     * Add an item to this fuzzy set.
+     * @param value The value to be added
+     */
+    protected abstract void add(BytesRef value);
+
+    /**
+     * Add all items to the underlying set.
+     * Implementations can choose to perform this using an optimized strategy based on the type of set.
+     * @param valuesIteratorProvider Supplier for an iterator over all values which should be added to the set.
+     */
+    protected void addAll(CheckedSupplier<Iterator<BytesRef>, IOException> valuesIteratorProvider) throws IOException {
+        Iterator<BytesRef> values = valuesIteratorProvider.get();
+        while (values.hasNext()) {
+            add(values.next());
+        }
+    }
+
+    public Result contains(BytesRef val) {
+        return containsHash(generateKey(val));
+    }
+
+    protected abstract Result containsHash(long hash);
+
+    protected long generateKey(BytesRef value) {
+        return T1ha1.hash(value.bytes, value.offset, value.length, 0L);
+    }
+
+    protected void assertAllElementsExist(CheckedSupplier<Iterator<BytesRef>, IOException> iteratorProvider) throws IOException {
+        Iterator<BytesRef> iter = iteratorProvider.get();
+        int cnt = 0;
+        while (iter.hasNext()) {
+            BytesRef item = iter.next();
+            assert contains(item) == Result.MAYBE
+                : "Expected Filter to return positive response for elements added to it. Elements matched: " + cnt;
+            cnt++;
+        }
+    }
+}
diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java
new file mode 100644
index 0000000000000..b8a8352183ca8
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java
@@ -0,0 +1,150 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Based on code from the Apache Lucene project (https://github.com/apache/lucene) under the Apache License, version 2.0.
+ * Copyright 2001-2022 The Apache Software Foundation
+ * Modifications (C) OpenSearch Contributors. All Rights Reserved.
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.RamUsageEstimator;
+import org.opensearch.common.CheckedSupplier;
+import org.opensearch.common.util.io.IOUtils;
+import org.opensearch.core.Assertions;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * The code is based on Lucene's implementation of Bloom Filter.
+ * It represents a subset of the Lucene implementation needed for OpenSearch use cases.
+ * Since the Lucene implementation is marked experimental,
+ * this aims to ensure we can provide a bwc implementation during upgrades.
+ */
+public class BloomFilter extends AbstractFuzzySet {
+
+    private static final Logger logger = LogManager.getLogger(BloomFilter.class);
+
+    // The sizes of BitSet used are all numbers that, when expressed in binary form,
+    // are all ones (i.e. of the form 2^n - 1). This is to enable fast downsizing from one bitset to another
+    // by simply ANDing each set index in one bitset with the size of the target bitset
+    // - this provides a fast modulo of the number. Values previously accumulated in
+    // a large bitset and then mapped to a smaller set can be looked up using a single
+    // AND operation of the query term's hash rather than needing to perform a 2-step
+    // translation of the query term that mirrors the stored content's reprojections.
+    static final int[] usableBitSetSizes;
+
+    static {
+        usableBitSetSizes = new int[26];
+        for (int i = 0; i < usableBitSetSizes.length; i++) {
+            usableBitSetSizes[i] = (1 << (i + 6)) - 1;
+        }
+    }
+
+    private final LongArrayBackedBitSet bitset;
+    private final int setSize;
+    private final int hashCount;
+
+    BloomFilter(long maxDocs, double maxFpp, CheckedSupplier<Iterator<BytesRef>, IOException> fieldIteratorProvider) throws IOException {
+        int setSize = (int) Math.ceil((maxDocs * Math.log(maxFpp)) / Math.log(1 / Math.pow(2, Math.log(2))));
+        setSize = getNearestSetSize(setSize < Integer.MAX_VALUE / 2 ? 2 * setSize : Integer.MAX_VALUE);
+        int optimalK = (int) Math.round(((double) setSize / maxDocs) * Math.log(2));
+        this.bitset = new LongArrayBackedBitSet(setSize);
+        this.setSize = setSize;
+        this.hashCount = optimalK;
+        addAll(fieldIteratorProvider);
+        if (Assertions.ENABLED) {
+            assertAllElementsExist(fieldIteratorProvider);
+        }
+        logger.debug("Bloom filter created with fpp: {}, setSize: {}, hashCount: {}", maxFpp, setSize, hashCount);
+    }
+
+    BloomFilter(IndexInput in) throws IOException {
+        hashCount = in.readInt();
+        setSize = in.readInt();
+        this.bitset = new LongArrayBackedBitSet(in);
+    }
+
+    @Override
+    public void writeTo(DataOutput out) throws IOException {
+        out.writeInt(hashCount);
+        out.writeInt(setSize);
+        bitset.writeTo(out);
+    }
+
+    private static int getNearestSetSize(int maxNumberOfBits) {
+        assert maxNumberOfBits > 0 : "Provided size estimate for bloom filter is illegal (<=0) : " + maxNumberOfBits;
+        int result = usableBitSetSizes[0];
+        for (int i = 0; i < usableBitSetSizes.length; i++) {
+            if (usableBitSetSizes[i] <= maxNumberOfBits) {
+                result = usableBitSetSizes[i];
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public SetType setType() {
+        return SetType.BLOOM_FILTER_V1;
+    }
+
+    @Override
+    public Result containsHash(long hash) {
+        int msb = (int) (hash >>> Integer.SIZE);
+        int lsb = (int) hash;
+        for (int i = 0; i < hashCount; i++) {
+            int bloomPos = (lsb + i * msb);
+            if (!mayContainValue(bloomPos)) {
+                return Result.NO;
+            }
+        }
+        return Result.MAYBE;
+    }
+
+    protected void add(BytesRef value) {
+        long hash = generateKey(value);
+        int msb = (int) (hash >>> Integer.SIZE);
+        int lsb = (int) hash;
+        for (int i = 0; i < hashCount; i++) {
+            // Bitmasking with setSize is effectively a modulo operation since set sizes are always of the form 2^n - 1 (an all-ones mask)
+            int bloomPos = (lsb + i * msb) & setSize;
+            bitset.set(bloomPos);
+        }
+    }
+
+    @Override
+    public boolean isSaturated() {
+        long numBitsSet = bitset.cardinality();
+        // Don't bother saving bitsets if >90% of bits are set - we don't want to
+        // throw any more memory at this problem.
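+        // With ~90% of the bits set, each of the hashCount probes passes with probability ~0.9,
+        // so the effective false positive rate (roughly 0.9^hashCount) is far above any configured fpp.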
+        return (float) numBitsSet / (float) setSize > 0.9f;
+    }
+
+    @Override
+    public long ramBytesUsed() {
+        return RamUsageEstimator.sizeOf(bitset.ramBytesUsed());
+    }
+
+    private boolean mayContainValue(int aHash) {
+        // Set sizes are always of the form 2^n - 1, so ANDing with the size acts as a fast modulo
+        int pos = aHash & setSize;
+        return bitset.get(pos);
+    }
+
+    @Override
+    public void close() throws IOException {
+        IOUtils.close(bitset);
+    }
+}
diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java
new file mode 100644
index 0000000000000..01f8054fc91be
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java
@@ -0,0 +1,492 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Based on code from the Apache Lucene project (https://github.com/apache/lucene) under the Apache License, version 2.0.
+ * Copyright 2001-2022 The Apache Software Foundation
+ * Modifications (C) OpenSearch Contributors. All Rights Reserved.
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.codecs.FieldsConsumer;
+import org.apache.lucene.codecs.FieldsProducer;
+import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.PostingsFormat;
+import org.apache.lucene.index.BaseTermsEnum;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.ImpactsEnum;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.SegmentReadState;
+import org.apache.lucene.index.SegmentWriteState;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.automaton.CompiledAutomaton;
+import org.opensearch.common.util.io.IOUtils;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Based on Lucene's BloomFilterPostingsFormat.
+ * The discussion with the Lucene community that led to the decision to keep this implementation
+ * in OpenSearch is captured here: https://github.com/apache/lucene/issues/12986
+ *
+ * The class deals with persisting the bloom filter through the postings format,
+ * and reading the field via a bloom filter fronted terms enum (to reduce disk seeks when requested values are absent).
+ * The class must be revisited during Lucene upgrades; there are bwc tests present to verify the format continues to work after an upgrade.
+ */
+
+public final class FuzzyFilterPostingsFormat extends PostingsFormat {
+
+    private static final Logger logger = LogManager.getLogger(FuzzyFilterPostingsFormat.class);
+
+    /**
+     * This name is stored in headers. If changing the implementation for the format, this name/version should be updated
+     * so that reads can work as expected.
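+     * Readers validate the header via {@link org.apache.lucene.codecs.CodecUtil#checkIndexHeader} using this name
+     * and the version constants below.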
+     */
+    public static final String FUZZY_FILTER_CODEC_NAME = "FuzzyFilterCodec99";
+
+    public static final int VERSION_START = 0;
+    public static final int VERSION_CURRENT = VERSION_START;
+
+    /** Extension of Fuzzy Filters file */
+    public static final String FUZZY_FILTER_FILE_EXTENSION = "fzd";
+
+    private final PostingsFormat delegatePostingsFormat;
+    private final FuzzySetFactory fuzzySetFactory;
+
+    public FuzzyFilterPostingsFormat(PostingsFormat delegatePostingsFormat, FuzzySetFactory fuzzySetFactory) {
+        super(FUZZY_FILTER_CODEC_NAME);
+        this.delegatePostingsFormat = delegatePostingsFormat;
+        this.fuzzySetFactory = fuzzySetFactory;
+    }
+
+    // Needed for SPI
+    public FuzzyFilterPostingsFormat() {
+        this(null, null);
+    }
+
+    @Override
+    public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
+        if (delegatePostingsFormat == null) {
+            throw new UnsupportedOperationException(
+                "Error - " + getClass().getName() + " has been constructed without a choice of PostingsFormat"
+            );
+        }
+        FieldsConsumer fieldsConsumer = delegatePostingsFormat.fieldsConsumer(state);
+        return new FuzzyFilteredFieldsConsumer(fieldsConsumer, state);
+    }
+
+    @Override
+    public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
+        return new FuzzyFilteredFieldsProducer(state);
+    }
+
+    static class FuzzyFilteredFieldsProducer extends FieldsProducer {
+        private FieldsProducer delegateFieldsProducer;
+        HashMap<String, FuzzySet> fuzzySetsByFieldName = new HashMap<>();
+        private List<Closeable> closeables = new ArrayList<>();
+
+        public FuzzyFilteredFieldsProducer(SegmentReadState state) throws IOException {
+            String fuzzyFilterFileName = IndexFileNames.segmentFileName(
+                state.segmentInfo.name,
+                state.segmentSuffix,
+                FUZZY_FILTER_FILE_EXTENSION
+            );
+            IndexInput filterIn = null;
+            boolean success = false;
+            try {
+                // Using IndexInput directly instead of ChecksumIndexInput since we want to support RandomAccessInput
+                filterIn = state.directory.openInput(fuzzyFilterFileName, state.context);
+
+                CodecUtil.checkIndexHeader(
+                    filterIn,
+                    FUZZY_FILTER_CODEC_NAME,
+                    VERSION_START,
+                    VERSION_CURRENT,
+                    state.segmentInfo.getId(),
+                    state.segmentSuffix
+                );
+                // Load the delegate postings format
+                PostingsFormat delegatePostingsFormat = PostingsFormat.forName(filterIn.readString());
+                this.delegateFieldsProducer = delegatePostingsFormat.fieldsProducer(state);
+                int numFilters = filterIn.readInt();
+                for (int i = 0; i < numFilters; i++) {
+                    int fieldNum = filterIn.readInt();
+                    FuzzySet set = FuzzySetFactory.deserializeFuzzySet(filterIn);
+                    closeables.add(set);
+                    FieldInfo fieldInfo = state.fieldInfos.fieldInfo(fieldNum);
+                    fuzzySetsByFieldName.put(fieldInfo.name, set);
+                }
+                CodecUtil.retrieveChecksum(filterIn);
+
+                // Validates the checksum of the entire filter file up front; revisit if this
+                // turns out to be a performance concern for large filters.
+                CodecUtil.checksumEntireFile(filterIn);
+                success = true;
+                closeables.add(filterIn);
+            } finally {
+                if (!success) {
+                    IOUtils.closeWhileHandlingException(filterIn, delegateFieldsProducer);
+                }
+            }
+        }
+
+        @Override
+        public Iterator<String> iterator() {
+            return delegateFieldsProducer.iterator();
+        }
+
+        @Override
+        public void close() throws IOException {
+            // Close the fuzzy sets (and the filter input tracked in closeables) before the delegate producer.
+            IOUtils.closeWhileHandlingException(closeables);
+            delegateFieldsProducer.close();
+        }
+
+        @Override
+        public Terms terms(String field) throws IOException {
+            FuzzySet filter = fuzzySetsByFieldName.get(field);
+            if (filter == null) {
+                return delegateFieldsProducer.terms(field);
+            } else {
+                Terms result = delegateFieldsProducer.terms(field);
+                if (result == null) {
+                    return null;
+                }
+                return new FuzzyFilteredTerms(result, filter);
+            }
+        }
+
+        @Override
+        public int size() {
+            return delegateFieldsProducer.size();
+        }
+
+        static class FuzzyFilteredTerms extends Terms {
+            private Terms delegateTerms;
+            private FuzzySet filter;
+
+            public FuzzyFilteredTerms(Terms terms, FuzzySet filter) {
+                this.delegateTerms = terms;
+                this.filter = filter;
+            }
+
+            @Override
+            public TermsEnum intersect(CompiledAutomaton compiled, final BytesRef startTerm) throws IOException {
+                return delegateTerms.intersect(compiled, startTerm);
+            }
+
+            @Override
+            public TermsEnum iterator() throws IOException {
+                return new FilterAppliedTermsEnum(delegateTerms, filter);
+            }
+
+            @Override
+            public long size() throws IOException {
+                return delegateTerms.size();
+            }
+
+            @Override
+            public long getSumTotalTermFreq() throws IOException {
+                return delegateTerms.getSumTotalTermFreq();
+            }
+
+            @Override
+            public long getSumDocFreq() throws IOException {
+                return delegateTerms.getSumDocFreq();
+            }
+
+            @Override
+            public int getDocCount() throws IOException {
+                return delegateTerms.getDocCount();
+            }
+
+            @Override
+            public boolean hasFreqs() {
+                return delegateTerms.hasFreqs();
+            }
+
+            @Override
+            public boolean hasOffsets() {
+                return delegateTerms.hasOffsets();
+            }
+
+            @Override
+            public boolean hasPositions() {
+                return delegateTerms.hasPositions();
+            }
+
+            @Override
+            public boolean hasPayloads() {
+                return delegateTerms.hasPayloads();
+            }
+
+            @Override
+            public BytesRef getMin() throws IOException {
+                return delegateTerms.getMin();
+            }
+
+            @Override
+            public BytesRef getMax() throws IOException {
+                return delegateTerms.getMax();
+            }
+        }
+
+        static final class FilterAppliedTermsEnum extends BaseTermsEnum {
+
+            private Terms delegateTerms;
+            private TermsEnum delegateTermsEnum;
+            private final FuzzySet filter;
+
+            public FilterAppliedTermsEnum(Terms delegateTerms, FuzzySet filter) throws IOException {
+                this.delegateTerms = delegateTerms;
+                this.filter = filter;
+            }
+
+            void reset(Terms delegateTerms) throws IOException {
+                this.delegateTerms = delegateTerms;
+                this.delegateTermsEnum = null;
+            }
+
+            private TermsEnum delegate() throws IOException {
+                if (delegateTermsEnum == null) {
+                    /* pull the iterator only if we really need it -
+                     * this can be a relatively heavy operation depending on the
+                     * delegate postings format and the underlying directory
+                     * (clone IndexInput) */
+                    delegateTermsEnum = delegateTerms.iterator();
+                }
+                return delegateTermsEnum;
+            }
+
+            @Override
+            public BytesRef next() throws IOException {
+                return delegate().next();
+            }
+
+            @Override
+            public boolean seekExact(BytesRef text) throws IOException {
+                // The magical fail-fast speed up that is the entire point of all of
+                // this code - save a disk seek if there is a match on an in-memory
+                // structure that may occasionally give a false positive but is
+                // guaranteed to give no false negatives
+                if (filter.contains(text) == FuzzySet.Result.NO) {
+                    return false;
+                }
+                return delegate().seekExact(text);
+            }
+
+            @Override
+            public SeekStatus seekCeil(BytesRef text) throws IOException {
+                return delegate().seekCeil(text);
+            }
+
+            @Override
+            public void seekExact(long ord) throws IOException {
+                delegate().seekExact(ord);
+            }
+
+            @Override
+            public BytesRef term() throws IOException {
+                return delegate().term();
+            }
+
+            @Override
+            public long ord() throws IOException {
+                return delegate().ord();
+            }
+
+            @Override
+            public int docFreq() throws IOException {
+                return delegate().docFreq();
+            }
+
+            @Override
+            public long totalTermFreq() throws IOException {
+                return delegate().totalTermFreq();
+            }
+
+            @Override
+            public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException {
+                return delegate().postings(reuse, flags);
+            }
+
+            @Override
+            public ImpactsEnum impacts(int flags) throws IOException {
+                return delegate().impacts(flags);
+            }
+
+            @Override
+            public String toString() {
+                return getClass().getSimpleName() + "(filter=" + filter.toString() + ")";
+            }
+        }
+
+        @Override
+        public void checkIntegrity() throws IOException {
+            delegateFieldsProducer.checkIntegrity();
+        }
+
+        @Override
+        public String toString() {
+            return getClass().getSimpleName() + "(fields=" + fuzzySetsByFieldName.size() + ",delegate=" + delegateFieldsProducer + ")";
+        }
+    }
+
+    class FuzzyFilteredFieldsConsumer extends FieldsConsumer {
+        private FieldsConsumer delegateFieldsConsumer;
+        private Map<FieldInfo, FuzzySet> fuzzySets = new HashMap<>();
+        private SegmentWriteState state;
+        private List<Closeable> closeables = new ArrayList<>();
+
+        public FuzzyFilteredFieldsConsumer(FieldsConsumer fieldsConsumer, SegmentWriteState state) {
+            this.delegateFieldsConsumer = fieldsConsumer;
+            this.state = state;
+        }
+
+        @Override
+        public void write(Fields fields, NormsProducer norms) throws IOException {
+
+            // Delegate must write first: it may have opened files
+            // on creating the class
+            // (e.g. Lucene41PostingsConsumer), and write() will
+            // close them; alternatively, if we delayed pulling
+            // the fields consumer until here, we could do it
+            // afterwards:
+            delegateFieldsConsumer.write(fields, norms);
+
+            for (String field : fields) {
+                Terms terms = fields.terms(field);
+                if (terms == null) {
+                    continue;
+                }
+                FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
+                FuzzySet fuzzySet = fuzzySetFactory.createFuzzySet(state.segmentInfo.maxDoc(), fieldInfo.name, () -> iterator(terms));
+                if (fuzzySet == null) {
+                    break;
+                }
+                assert fuzzySets.containsKey(fieldInfo) == false;
+                closeables.add(fuzzySet);
+                fuzzySets.put(fieldInfo, fuzzySet);
+            }
+        }
+
+        private Iterator<BytesRef> iterator(Terms terms) throws IOException {
+            TermsEnum termIterator = terms.iterator();
+            return new Iterator<>() {
+
+                private BytesRef currentTerm;
+                private PostingsEnum postingsEnum;
+
+                @Override
+                public boolean hasNext() {
+                    try {
+                        do {
+                            currentTerm = termIterator.next();
+                            if (currentTerm == null) {
+                                return false;
+                            }
+                            postingsEnum = termIterator.postings(postingsEnum, 0);
+                            if (postingsEnum.nextDoc() != PostingsEnum.NO_MORE_DOCS) {
+                                return true;
+                            }
+                        } while (true);
+                    } catch (IOException ex) {
+                        throw new IllegalStateException("Cannot read terms: " + termIterator.attributes(), ex);
+                    }
+                }
+
+                @Override
+                public BytesRef next() {
+                    return currentTerm;
+                }
+            };
+        }
+
+        private boolean closed;
+
+        @Override
+        public void close() throws IOException {
+            if (closed) {
+                return;
+            }
+            closed = true;
+            delegateFieldsConsumer.close();
+
+            // Now we are done accumulating values for these fields
+            List<Map.Entry<FieldInfo, FuzzySet>> nonSaturatedSets = new ArrayList<>();
+
+            for (Map.Entry<FieldInfo, FuzzySet> entry : fuzzySets.entrySet()) {
+                FuzzySet fuzzySet = entry.getValue();
+                if (!fuzzySet.isSaturated()) {
+                    nonSaturatedSets.add(entry);
+                }
+            }
+            String fuzzyFilterFileName = IndexFileNames.segmentFileName(
+                state.segmentInfo.name,
+                state.segmentSuffix,
+                FUZZY_FILTER_FILE_EXTENSION
+            );
+            try (IndexOutput fuzzyFilterFileOutput = state.directory.createOutput(fuzzyFilterFileName, state.context)) {
+                logger.trace(
+                    "Writing fuzzy filter postings with version: {} for segment: {}",
+                    VERSION_CURRENT,
+                    state.segmentInfo.toString()
+                );
+                CodecUtil.writeIndexHeader(
+                    fuzzyFilterFileOutput,
+                    FUZZY_FILTER_CODEC_NAME,
+                    VERSION_CURRENT,
+                    state.segmentInfo.getId(),
+                    state.segmentSuffix
+                );
+
+                // remember the name of the postings format we will delegate to
+                fuzzyFilterFileOutput.writeString(delegatePostingsFormat.getName());
+
+                // First field in the output file is the number of fields+sets saved
+                fuzzyFilterFileOutput.writeInt(nonSaturatedSets.size());
+                for (Map.Entry<FieldInfo, FuzzySet> entry : nonSaturatedSets) {
+                    FieldInfo fieldInfo = entry.getKey();
+                    FuzzySet fuzzySet = entry.getValue();
+                    saveAppropriatelySizedFuzzySet(fuzzyFilterFileOutput, fuzzySet, fieldInfo);
+                }
+                CodecUtil.writeFooter(fuzzyFilterFileOutput);
+            }
+            // We are done with large bitsets so no need to keep them hanging around
+            fuzzySets.clear();
+            IOUtils.closeWhileHandlingException(closeables);
+        }
+
+        private void saveAppropriatelySizedFuzzySet(IndexOutput fileOutput, FuzzySet fuzzySet, FieldInfo fieldInfo) throws IOException {
+            fileOutput.writeInt(fieldInfo.number);
+            fileOutput.writeString(fuzzySet.setType().getSetName());
+            fuzzySet.writeTo(fileOutput);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "FuzzyFilterPostingsFormat(" + delegatePostingsFormat + ")";
+    }
+}
diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java
new file mode 100644
index 0000000000000..df443ffbca33d
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java
@@ -0,0 +1,98 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.CheckedFunction;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Fuzzy Filter interface
+ */
+public interface FuzzySet extends Accountable, Closeable {
+
+    /**
+     * Name used for a codec to be aware of what fuzzy set has been used.
+     */
+    SetType setType();
+
+    /**
+     * @param value the item whose membership needs to be checked.
+     */
+    Result contains(BytesRef value);
+
+    boolean isSaturated();
+
+    void writeTo(DataOutput out) throws IOException;
+
+    /**
+     * Enum to represent result of membership check on a fuzzy set.
+     */
+    enum Result {
+        /**
+         * A definite no for the set membership of an item.
+         */
+        NO,
+
+        /**
+         * Fuzzy sets cannot guarantee whether a given item is present in the set due to the data being stored in
+         * a lossy format (e.g. fingerprint, hash).
+         * Hence, we return a response denoting that the item may be present.
+         */
+        MAYBE
+    }
+
+    /**
+     * Enum to declare supported properties and mappings for a fuzzy set implementation.
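+     * Each constant carries the name persisted in the postings file, the deserializer used to read the set back,
+     * and at least one alias.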
+     */
+    enum SetType {
+        BLOOM_FILTER_V1("bloom_filter_v1", BloomFilter::new, List.of("bloom_filter"));
+
+        /**
+         * Name persisted in postings file. This will be used when reading to determine the fuzzy set implementation.
+         */
+        private final String setName;
+
+        /**
+         * Deserializer for reading the actual fuzzy set implementation back into a Java object.
+         */
+        private final CheckedFunction<IndexInput, ? extends FuzzySet, IOException> deserializer;
+
+        SetType(String setName, CheckedFunction<IndexInput, ? extends FuzzySet, IOException> deserializer, List<String> aliases) {
+            if (aliases.size() < 1) {
+                throw new IllegalArgumentException("Alias list is empty. Could not create Set Type: " + setName);
+            }
+            this.setName = setName;
+            this.deserializer = deserializer;
+        }
+
+        public String getSetName() {
+            return setName;
+        }
+
+        public CheckedFunction<IndexInput, ? extends FuzzySet, IOException> getDeserializer() {
+            return deserializer;
+        }
+
+        public static SetType from(String name) {
+            for (SetType type : SetType.values()) {
+                if (type.setName.equals(name)) {
+                    return type;
+                }
+            }
+            throw new IllegalArgumentException("There is no implementation for fuzzy set: " + name);
+        }
+    }
+}
diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java
new file mode 100644
index 0000000000000..5d1fd03f099d4
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java
@@ -0,0 +1,49 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.CheckedSupplier;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * Factory class to create fuzzy sets.
+ * Supports bloom filters for now. More sets can be added as required.
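+ * The factory is keyed by field name; see {@link FuzzySetParameters} for the per-field configuration.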
+ */ +public class FuzzySetFactory { + + private final Map<String, FuzzySetParameters> setTypeForField; + + public FuzzySetFactory(Map<String, FuzzySetParameters> setTypeForField) { + this.setTypeForField = setTypeForField; + } + + public FuzzySet createFuzzySet(int maxDocs, String fieldName, CheckedSupplier<Iterator<BytesRef>, IOException> iteratorProvider) + throws IOException { + FuzzySetParameters params = setTypeForField.get(fieldName); + if (params == null) { + throw new IllegalArgumentException("No fuzzy set defined for field: " + fieldName); + } + switch (params.getSetType()) { + case BLOOM_FILTER_V1: + return new BloomFilter(maxDocs, params.getFalsePositiveProbability(), iteratorProvider); + default: + throw new IllegalArgumentException("No Implementation for set type: " + params.getSetType()); + } + } + + public static FuzzySet deserializeFuzzySet(IndexInput in) throws IOException { + FuzzySet.SetType setType = FuzzySet.SetType.from(in.readString()); + return setType.getDeserializer().apply(in); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java new file mode 100644 index 0000000000000..7bb96e7c34f0b --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import java.util.function.Supplier; + +/** + * Wrapper for params to create a fuzzy set. + */ +public class FuzzySetParameters { + private final Supplier<Double> falsePositiveProbabilityProvider; + private final FuzzySet.SetType setType; + + public static final double DEFAULT_FALSE_POSITIVE_PROBABILITY = 0.2047d; + + public FuzzySetParameters(Supplier<Double> falsePositiveProbabilityProvider) { + this.falsePositiveProbabilityProvider = falsePositiveProbabilityProvider; + this.setType = FuzzySet.SetType.BLOOM_FILTER_V1; + } + + public double getFalsePositiveProbability() { + return falsePositiveProbabilityProvider.get(); + } + + public FuzzySet.SetType getSetType() { + return setType; + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java new file mode 100644 index 0000000000000..08d6059c1e82e --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.RamUsageEstimator; +import org.opensearch.OpenSearchException; +import org.opensearch.common.util.LongArray; + +import java.io.IOException; + +/** + * A Long array backed by RandomAccessInput. + * This implementation supports read operations only. 
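+ * Mutating calls such as {@code set}, {@code increment} and {@code fill} throw {@link UnsupportedOperationException}.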
+ */
+class IndexInputImmutableLongArray implements LongArray {
+
+    private final RandomAccessInput input;
+    private final long size;
+
+    IndexInputImmutableLongArray(long size, RandomAccessInput input) {
+        this.size = size;
+        this.input = input;
+    }
+
+    @Override
+    public void close() {}
+
+    @Override
+    public long size() {
+        return size;
+    }
+
+    @Override
+    public synchronized long get(long index) {
+        try {
+            // Multiplying by 8 since each long is 8 bytes, and we need to get the long value at (index * 8) in the
+            // RandomAccessInput being accessed.
+            return input.readLong(index << 3);
+        } catch (IOException ex) {
+            throw new OpenSearchException(ex);
+        }
+    }
+
+    @Override
+    public long set(long index, long value) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public long increment(long index, long inc) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void fill(long fromIndex, long toIndex, long value) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public long ramBytesUsed() {
+        return RamUsageEstimator.shallowSizeOfInstance(IndexInputImmutableLongArray.class);
+    }
+}
diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java
new file mode 100644
index 0000000000000..bd4936aeec366
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java
@@ -0,0 +1,105 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.Accountable;
+import org.opensearch.common.util.BigArrays;
+import org.opensearch.common.util.LongArray;
+import org.opensearch.common.util.io.IOUtils;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * A bitset backed by a long-indexed array.
+ */
+class LongArrayBackedBitSet implements Accountable, Closeable {
+
+    private long underlyingArrayLength = 0L;
+    private LongArray longArray;
+
+    /**
+     * Constructor which uses an on heap array. This should be used during construction of the bitset.
+     * @param capacity The maximum capacity to provision for the bitset.
+     */
+    LongArrayBackedBitSet(long capacity) {
+        // Since the bitset is backed by a long array, we only need 1 element for every 64 bits in the underlying array.
+        underlyingArrayLength = (capacity >> 6) + 1L;
+        this.longArray = BigArrays.NON_RECYCLING_INSTANCE.withCircuitBreaking().newLongArray(underlyingArrayLength);
+    }
+
+    /**
+     * Constructor which uses Lucene's IndexInput to read the bitset into a read-only buffer.
+     * @param in IndexInput containing the serialized bitset.
+     * @throws IOException
+     */
+    LongArrayBackedBitSet(IndexInput in) throws IOException {
+        underlyingArrayLength = in.readLong();
+        // Multiplying by 8 since the length above is of the long array, so we will have
+        // 8 times the number of bytes in our stream.
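+        // (e.g. an underlyingArrayLength of 4 longs corresponds to a 32 byte slice of the input)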
+ long streamLength = underlyingArrayLength << 3; + this.longArray = new IndexInputImmutableLongArray(underlyingArrayLength, in.randomAccessSlice(in.getFilePointer(), streamLength)); + in.skipBytes(streamLength); + } + + public void writeTo(DataOutput out) throws IOException { + out.writeLong(underlyingArrayLength); + for (int idx = 0; idx < underlyingArrayLength; idx++) { + out.writeLong(longArray.get(idx)); + } + } + + /** + * This is an O(n) operation, and will iterate over all the elements in the underlying long array + * to determine cardinality of the set. + * @return number of set bits in the bitset. + */ + public long cardinality() { + long tot = 0; + for (int i = 0; i < underlyingArrayLength; ++i) { + tot += Long.bitCount(longArray.get(i)); + } + return tot; + } + + /** + * Retrieves whether the bit is set or not at the given index. + * @param index the index to look up for the bit + * @return true if bit is set, false otherwise + */ + public boolean get(long index) { + long i = index >> 6; // div 64 + long val = longArray.get(i); + long bitmask = 1L << index; + return (val & bitmask) != 0; + } + + /** + * Sets the bit at the given index. + * @param index the index to set the bit at. + */ + public void set(long index) { + long wordNum = index >> 6; // div 64 + long bitmask = 1L << index; + long val = longArray.get(wordNum); + longArray.set(wordNum, val | bitmask); + } + + @Override + public long ramBytesUsed() { + return 128L + longArray.ramBytesUsed(); + } + + @Override + public void close() throws IOException { + IOUtils.close(longArray); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java new file mode 100644 index 0000000000000..7aeac68cd192a --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+/** Classes responsible for handling all fuzzy codecs and operations */
+package org.opensearch.index.codec.fuzzy;
diff --git a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java
index d4a97f0267222..34aecfc62b8b2 100644
--- a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java
+++ b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java
@@ -41,6 +41,7 @@
 import org.opensearch.core.xcontent.ToXContentFragment;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.ReplicationStats;
+import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat;
 import org.opensearch.index.remote.RemoteSegmentStats;
 
 import java.io.IOException;
@@ -95,7 +96,8 @@ public class SegmentsStats implements Writeable, ToXContentFragment {
         Map.entry("tvx", "Term Vector Index"),
         Map.entry("tvd", "Term Vector Documents"),
         Map.entry("tvf", "Term Vector Fields"),
-        Map.entry("liv", "Live Documents")
+        Map.entry("liv", "Live Documents"),
+        Map.entry(FuzzyFilterPostingsFormat.FUZZY_FILTER_FILE_EXTENSION, "Fuzzy Filter")
     );
 
     public SegmentsStats() {
diff --git a/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java b/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java
index a2a70e280187a..3a2504ce92158 100644
--- a/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java
+++ b/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java
@@ -43,6 +43,9 @@
  * aggregations, which only use {@link #advanceExact(int)} and
  * {@link #longValue()}.
  *
+ * When optimizations based on point values are used, {@link #advance(int)}
+ * and, optionally, {@link #cost()} have to be implemented as well.
+ * * @opensearch.internal */ public abstract class AbstractNumericDocValues extends NumericDocValues { diff --git a/server/src/main/java/org/opensearch/index/fielddata/FieldData.java b/server/src/main/java/org/opensearch/index/fielddata/FieldData.java index e09de53dc05f7..6db6bbccacae5 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/FieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/FieldData.java @@ -37,6 +37,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BytesRef; import org.opensearch.common.Numbers; import org.opensearch.common.geo.GeoPoint; @@ -76,6 +77,10 @@ public double doubleValue() throws IOException { throw new UnsupportedOperationException(); } + @Override + public int advance(int target) throws IOException { + return DocIdSetIterator.NO_MORE_DOCS; + } }; } @@ -561,6 +566,10 @@ public boolean advanceExact(int doc) throws IOException { return values.advanceExact(doc); } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } /** @@ -591,6 +600,10 @@ public int docValueCount() { return values.docValueCount(); } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } /** @@ -622,6 +635,12 @@ public long longValue() throws IOException { public int docID() { return docID; } + + @Override + public int advance(int target) throws IOException { + docID = values.advance(target); + return docID; + } } /** @@ -683,6 +702,11 @@ public boolean advanceExact(int target) throws IOException { public long longValue() throws IOException { return value; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } @@ -715,6 +739,11 @@ public boolean advanceExact(int target) throws IOException { public long longValue() throws IOException { return value.longValue(); } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } @@ -742,6 +771,11 @@ public boolean advanceExact(int target) throws IOException { public double doubleValue() throws IOException { return value; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java index b0f3400acfb3d..f69cfacaf35d4 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java @@ -71,6 +71,11 @@ public long longValue() throws IOException { public int docID() { return docID; } + + @Override + public int advance(int target) throws IOException { + return NumericDoubleValues.this.advance(target); + } }; } @@ -95,6 +100,23 @@ public long longValue() throws IOException { public int docID() { return docID; } + + @Override + public int advance(int target) throws IOException { + return NumericDoubleValues.this.advance(target); + } }; } + + /** + * Advances to the first beyond the current whose document number is greater than or equal to + * <i>target</i>, and returns the document number itself. 
Exhausts the iterator and returns {@link + * org.apache.lucene.search.DocIdSetIterator#NO_MORE_DOCS} if <i>target</i> is greater than the highest document number in the set. + * + * This method is being used by {@link org.apache.lucene.search.comparators.NumericComparator.NumericLeafComparator} when point values optimization kicks + * in and is implemented by most numeric types. + */ + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java index 4ee494ffb30aa..816445bb319f1 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java @@ -69,4 +69,8 @@ public double nextValue() throws IOException { return in.doubleValue(); } + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java index 39aca38c331ea..e2739e462dea5 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java @@ -74,4 +74,9 @@ public NumericDoubleValues getDoubleValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } + } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java index 150e114d342de..98a44c246f654 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java @@ -67,4 +67,8 @@ public NumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java index 1bae845c9b0d2..279a78ac51adf 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java @@ -72,4 +72,8 @@ public SortedNumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java index dce1aff9cc94f..be9064751b5f0 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java @@ -70,4 +70,15 @@ protected SortedNumericDoubleValues() {} */ public abstract int docValueCount(); + /** + * Advances to 
the first beyond the current whose document number is greater than or equal to + * <i>target</i>, and returns the document number itself. Exhausts the iterator and returns {@link + * org.apache.lucene.search.DocIdSetIterator#NO_MORE_DOCS} if <i>target</i> is greater than the highest document number in the set. + * + * This method is being used by {@link org.apache.lucene.search.comparators.NumericComparator.NumericLeafComparator} when point values optimization kicks + * in and is implemented by most numeric types. + */ + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java index 8d17146760d9e..d9e9dd6a293fd 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java @@ -42,4 +42,8 @@ public NumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java index 90b49e19a8954..63c7e6162cc55 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java @@ -47,4 +47,8 @@ public SortedNumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java index b70752df9e826..0019a41e67c02 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -336,6 +336,11 @@ public double doubleValue() throws IOException { public boolean advanceExact(int doc) throws IOException { return in.advanceExact(doc); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** @@ -364,6 +369,11 @@ public double nextValue() throws IOException { public int docValueCount() { return in.docValueCount(); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** @@ -434,6 +444,11 @@ public double doubleValue() throws IOException { public boolean advanceExact(int doc) throws IOException { return in.advanceExact(doc); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** @@ -462,6 +477,11 @@ public double nextValue() throws IOException { public int docValueCount() { return in.docValueCount(); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** diff --git a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java index 2a677d8bc1352..db8da8a949d6f 100644 --- 
a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java @@ -36,7 +36,9 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; @@ -222,25 +224,48 @@ protected Object parseSourceValue(Object value) { @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { - failIfNotIndexed(); + failIfNotIndexedAndNoDocValues(); + Query query; if (value instanceof InetAddress) { - return InetAddressPoint.newExactQuery(name(), (InetAddress) value); + query = InetAddressPoint.newExactQuery(name(), (InetAddress) value); } else { if (value instanceof BytesRef) { value = ((BytesRef) value).utf8ToString(); } + String term = value.toString(); + if (term.contains("/")) { + final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term); + query = InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2()); + } else { + InetAddress address = InetAddresses.forString(term); + query = InetAddressPoint.newExactQuery(name(), address); + } + } + if (isSearchable() && hasDocValues()) { + String term = value.toString(); + if (term.contains("/")) { + final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term); + return InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2()); + } + return new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowExactQuery(name(), new BytesRef(((PointRangeQuery) query).getLowerPoint())) + ); + } + if (hasDocValues()) { String term = value.toString(); if (term.contains("/")) { final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term); return InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2()); } - InetAddress address = InetAddresses.forString(term); - return InetAddressPoint.newExactQuery(name(), address); + return SortedSetDocValuesField.newSlowExactQuery(name(), new BytesRef(((PointRangeQuery) query).getLowerPoint())); } + return query; } @Override public Query termsQuery(List<?> values, QueryShardContext context) { + failIfNotIndexedAndNoDocValues(); InetAddress[] addresses = new InetAddress[values.size()]; int i = 0; for (Object value : values) { @@ -265,14 +290,32 @@ public Query termsQuery(List<?> values, QueryShardContext context) { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - failIfNotIndexed(); - return rangeQuery( - lowerTerm, - upperTerm, - includeLower, - includeUpper, - (lower, upper) -> InetAddressPoint.newRangeQuery(name(), lower, upper) - ); + failIfNotIndexedAndNoDocValues(); + return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, (lower, upper) -> { + Query query = InetAddressPoint.newRangeQuery(name(), lower, upper); + if (isSearchable() && hasDocValues()) { + return new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ); + } + if (hasDocValues()) { + return SortedSetDocValuesField.newSlowRangeQuery( + 
((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ); + } + return query; + }); } /** diff --git a/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java index ac4fde7f06b16..3f97b3918a126 100644 --- a/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java @@ -322,10 +322,13 @@ protected Query doToQuery(QueryShardContext context) throws IOException { try { context.setParentFilter(parentFilter); context.nestedScope().nextLevel(nestedObjectMapper); - innerQuery = this.query.toQuery(context); + try { + innerQuery = this.query.toQuery(context); + } finally { + context.nestedScope().previousLevel(); + } } finally { context.setParentFilter(previousParentFilter); - context.nestedScope().previousLevel(); } // ToParentBlockJoinQuery requires that the inner query only matches documents diff --git a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java index eba3ed076e82b..5069f055a89bb 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java @@ -256,7 +256,7 @@ private QueryShardContext( this.bitsetFilterCache = bitsetFilterCache; this.indexFieldDataService = indexFieldDataLookup; this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); - this.nestedScope = new NestedScope(); + this.nestedScope = new NestedScope(indexSettings); this.scriptService = scriptService; this.indexSettings = indexSettings; this.searcher = searcher; @@ -270,7 +270,7 @@ private void reset() { allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); this.lookup = null; this.namedQueries.clear(); - this.nestedScope = new NestedScope(); + this.nestedScope = new NestedScope(indexSettings); } public IndexAnalyzers getIndexAnalyzers() { diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java index cfc44d4434d3b..1c693f9761240 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java @@ -560,6 +560,11 @@ public boolean needsScores() { protected NumericDoubleValues distance(LeafReaderContext context) { final SortedNumericDoubleValues doubleValues = fieldData.load(context).getDoubleValues(); return FieldData.replaceMissing(mode.select(new SortingNumericDoubleValues() { + @Override + public int advance(int target) throws IOException { + return doubleValues.advance(target); + } + @Override public boolean advanceExact(int docId) throws IOException { if (doubleValues.advanceExact(docId)) { diff --git a/server/src/main/java/org/opensearch/index/query/support/NestedScope.java b/server/src/main/java/org/opensearch/index/query/support/NestedScope.java index 51abe389ad686..488768c32d17f 100644 --- a/server/src/main/java/org/opensearch/index/query/support/NestedScope.java +++ b/server/src/main/java/org/opensearch/index/query/support/NestedScope.java @@ -33,6 +33,7 @@ package org.opensearch.index.query.support; import 
org.opensearch.common.annotation.PublicApi;
+import org.opensearch.index.IndexSettings;
 import org.opensearch.index.mapper.ObjectMapper;
 
 import java.util.Deque;
@@ -47,6 +48,11 @@ public final class NestedScope {
 
     private final Deque<ObjectMapper> levelStack = new LinkedList<>();
 
+    private final IndexSettings indexSettings;
+
+    public NestedScope(IndexSettings indexSettings) {
+        this.indexSettings = indexSettings;
+    }
 
     /**
      * @return For the current nested level returns the object mapper that belongs to that
@@ -60,7 +66,21 @@ public ObjectMapper getObjectMapper() {
      */
    public ObjectMapper nextLevel(ObjectMapper level) {
        ObjectMapper previous = levelStack.peek();
-        levelStack.push(level);
+        if (levelStack.size() < indexSettings.getMaxNestedQueryDepth()) {
+            levelStack.push(level);
+        } else {
+            throw new IllegalArgumentException(
+                "The depth of the nested query ["
+                    + (levelStack.size() + 1)
+                    + "] has exceeded "
+                    + "the allowed maximum of ["
+                    + indexSettings.getMaxNestedQueryDepth()
+                    + "]. "
+                    + "This maximum can be set by changing the ["
+                    + IndexSettings.MAX_NESTED_QUERY_DEPTH_SETTING.getKey()
+                    + "] index level setting."
+            );
+        }
        return previous;
    }
diff --git a/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java b/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java
index fe9440813b94f..92436a09a4e7e 100644
--- a/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java
+++ b/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java
@@ -301,12 +301,17 @@ public Map<String, Long> getLatestLocalFileNameLengthMap() {
     }
 
     /**
-     * Updates the latestLocalFileNameLengthMap by adding file name and it's size to the map. The method is given a function as an argument which is used for determining the file size (length in bytes). This method is also provided the collection of segment files which are the latest refresh local segment files. This method also removes the stale segment files from the map that are not part of the input segment files.
+     * Updates the latestLocalFileNameLengthMap by adding each file name and its size to the map.
+     * The method is given a function as an argument which is used for determining the file size (length in bytes).
+     * This method is also provided the collection of segment files which are the latest refreshed local segment files.
+     * This method also removes the stale segment files from the map that are not part of the input segment files.
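+     * The returned map is an unmodifiable view over the tracker's internal state.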
* * @param segmentFiles list of local refreshed segment files * @param fileSizeFunction function is used to determine the file size in bytes + * + * @return updated map of local segment files and filesize */ - public void updateLatestLocalFileNameLengthMap( + public Map<String, Long> updateLatestLocalFileNameLengthMap( Collection<String> segmentFiles, CheckedFunction<String, Long, IOException> fileSizeFunction ) { @@ -332,6 +337,7 @@ public void updateLatestLocalFileNameLengthMap( // Remove keys from the fileSizeMap that do not exist in the latest segment files latestLocalFileNameLengthMap.entrySet().removeIf(entry -> fileSet.contains(entry.getKey()) == false); computeBytesLag(); + return Collections.unmodifiableMap(latestLocalFileNameLengthMap); } public void addToLatestUploadedFiles(String file) { diff --git a/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java b/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java index b13f1eb04a941..576e00f8f30d1 100644 --- a/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java +++ b/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java @@ -38,7 +38,6 @@ import org.opensearch.common.Nullable; import org.opensearch.common.annotation.PublicApi; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; @@ -466,12 +465,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.humanReadableField(Fields.QUERY_TIME_IN_MILLIS, Fields.QUERY_TIME, getQueryTime()); builder.field(Fields.QUERY_CURRENT, queryCurrent); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - builder.field(Fields.CONCURRENT_QUERY_TOTAL, concurrentQueryCount); - builder.humanReadableField(Fields.CONCURRENT_QUERY_TIME_IN_MILLIS, Fields.CONCURRENT_QUERY_TIME, getConcurrentQueryTime()); - builder.field(Fields.CONCURRENT_QUERY_CURRENT, concurrentQueryCurrent); - builder.field(Fields.CONCURRENT_AVG_SLICE_COUNT, getConcurrentAvgSliceCount()); - } + builder.field(Fields.CONCURRENT_QUERY_TOTAL, concurrentQueryCount); + builder.humanReadableField(Fields.CONCURRENT_QUERY_TIME_IN_MILLIS, Fields.CONCURRENT_QUERY_TIME, getConcurrentQueryTime()); + builder.field(Fields.CONCURRENT_QUERY_CURRENT, concurrentQueryCurrent); + builder.field(Fields.CONCURRENT_AVG_SLICE_COUNT, getConcurrentAvgSliceCount()); builder.field(Fields.FETCH_TOTAL, fetchCount); builder.humanReadableField(Fields.FETCH_TIME_IN_MILLIS, Fields.FETCH_TIME, getFetchTime()); diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index cbb246219546b..977155a1cbb72 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -1697,7 +1697,8 @@ ReplicationCheckpoint computeReplicationCheckpoint(SegmentInfos segmentInfos) th } final ReplicationCheckpoint latestReplicationCheckpoint = getLatestReplicationCheckpoint(); if (latestReplicationCheckpoint.getSegmentInfosVersion() == segmentInfos.getVersion() - && latestReplicationCheckpoint.getSegmentsGen() == segmentInfos.getGeneration()) { + && latestReplicationCheckpoint.getSegmentsGen() == segmentInfos.getGeneration() + && latestReplicationCheckpoint.getPrimaryTerm() == getOperationPrimaryTerm()) { 
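+            // A failover can bump the primary term without changing the segment infos version or generation,
+            // so the cached checkpoint is only reused when the primary term also matches.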
return latestReplicationCheckpoint; } final Map<String, StoreFileMetadata> metadataMap = store.getSegmentMetadataMap(segmentInfos); @@ -2014,7 +2015,7 @@ public void close(String reason, boolean flushEngine, boolean deleted) throws IO /* ToDo : Fix this https://github.com/opensearch-project/OpenSearch/issues/8003 */ - private RemoteSegmentStoreDirectory getRemoteDirectory() { + public RemoteSegmentStoreDirectory getRemoteDirectory() { assert indexSettings.isRemoteStoreEnabled(); assert remoteStore.directory() instanceof FilterDirectory : "Store.directory is not an instance of FilterDirectory"; FilterDirectory remoteStoreDirectory = (FilterDirectory) remoteStore.directory(); @@ -2024,23 +2025,35 @@ private RemoteSegmentStoreDirectory getRemoteDirectory() { } /** - Returns true iff it is able to verify that remote segment store - is in sync with local + * Returns true iff it is able to verify that remote segment store + * is in sync with local */ boolean isRemoteSegmentStoreInSync() { assert indexSettings.isRemoteStoreEnabled(); try { RemoteSegmentStoreDirectory directory = getRemoteDirectory(); if (directory.readLatestMetadataFile() != null) { - // verifying that all files except EXCLUDE_FILES are uploaded to the remote Collection<String> uploadFiles = directory.getSegmentsUploadedToRemoteStore().keySet(); - SegmentInfos segmentInfos = store.readLastCommittedSegmentsInfo(); - Collection<String> localFiles = segmentInfos.files(true); - if (uploadFiles.containsAll(localFiles)) { - return true; + try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = getSegmentInfosSnapshot()) { + Collection<String> localSegmentInfosFiles = segmentInfosGatedCloseable.get().files(true); + Set<String> localFiles = new HashSet<>(localSegmentInfosFiles); + // verifying that all files except EXCLUDE_FILES are uploaded to the remote + localFiles.removeAll(RemoteStoreRefreshListener.EXCLUDE_FILES); + if (uploadFiles.containsAll(localFiles)) { + return true; + } + logger.debug( + () -> new ParameterizedMessage( + "RemoteSegmentStoreSyncStatus localSize={} remoteSize={}", + localFiles.size(), + uploadFiles.size() + ) + ); } } - } catch (IOException e) { + } catch (AlreadyClosedException e) { + throw e; + } catch (Throwable e) { logger.error("Exception while reading latest metadata", e); } return false; diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java index d96a7e7c95ecf..7bb80b736693f 100644 --- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java +++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java @@ -41,6 +41,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.Iterator; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -171,13 +172,33 @@ private boolean shouldSync(boolean didRefresh, boolean skipPrimaryTermCheck) { // When the shouldSync is called the first time, then 1st condition on primary term is true. But after that // we update the primary term and the same condition would not evaluate to true again in syncSegments. // Below check ensures that if there is commit, then that gets picked up by both 1st and 2nd shouldSync call. 
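/*
 * [Editor's aside, illustrative only, not part of the change] The remote-store sync checks used
 * by these conditions reduce to a set-difference test: every local segment file, minus the
 * EXCLUDE_FILES, must already be present among the files uploaded to the remote store. A minimal,
 * self-contained sketch of that idea (hypothetical names; the real checks live in
 * IndexShard#isRemoteSegmentStoreInSync above and the listener-local variant below):
 */
import java.util.HashSet;
import java.util.Set;

class RemoteSyncCheckSketch {
    // True iff every local file that is not excluded has been uploaded to the remote store.
    static boolean isInSync(Set<String> localFiles, Set<String> uploadedFiles, Set<String> excludeFiles) {
        Set<String> required = new HashSet<>(localFiles); // copy so the caller's set is untouched
        required.removeAll(excludeFiles);                  // excluded files are never uploaded
        return uploadedFiles.containsAll(required);
    }
}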
- || isRefreshAfterCommitSafe(); + || isRefreshAfterCommitSafe() + || isRemoteSegmentStoreInSync() == false; if (shouldSync || skipPrimaryTermCheck) { return shouldSync; } return this.primaryTerm != indexShard.getOperationPrimaryTerm(); } + /** + * Checks if all files present in the local store are uploaded to the remote store or are part of the excluded files. + * + * Different from IndexShard#isRemoteSegmentStoreInSync as it uses the uploaded-files cache in RemoteDirectory + * and doesn't make a remote store call. It also doesn't throw an exception when the store is getting closed. + * + * @return true iff all the local files are uploaded to the remote store. + */ + boolean isRemoteSegmentStoreInSync() { + try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) { + return segmentInfosGatedCloseable.get().files(true).stream().allMatch(this::skipUpload); + } catch (Throwable throwable) { + logger.error("Throwable thrown during isRemoteSegmentStoreInSync", throwable); + } + return false; + } + /* @return false if retry is needed */ @@ -209,13 +230,25 @@ private boolean syncSegments() { try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) { SegmentInfos segmentInfos = segmentInfosGatedCloseable.get(); final ReplicationCheckpoint checkpoint = indexShard.computeReplicationCheckpoint(segmentInfos); + if (checkpoint.getPrimaryTerm() != indexShard.getOperationPrimaryTerm()) { + throw new IllegalStateException( + String.format( + Locale.ROOT, + "primaryTerm mismatch during segments upload to remote store [%s] != [%s]", + checkpoint.getPrimaryTerm(), + indexShard.getOperationPrimaryTerm() + ) + ); + } // Capture replication checkpoint before uploading the segments as upload can take some time and checkpoint can // move.
long lastRefreshedCheckpoint = ((InternalEngine) indexShard.getEngine()).lastRefreshedCheckpoint(); Collection<String> localSegmentsPostRefresh = segmentInfos.files(true); // Create a map of file name to size and update the refresh segment tracker - updateLocalSizeMapAndTracker(localSegmentsPostRefresh); + Map<String, Long> localSegmentsSizeMap = updateLocalSizeMapAndTracker(localSegmentsPostRefresh).entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); CountDownLatch latch = new CountDownLatch(1); ActionListener<Void> segmentUploadsCompletedListener = new LatchedActionListener<>(new ActionListener<>() { @Override @@ -231,6 +264,7 @@ public void onResponse(Void unused) { refreshClockTimeMs, refreshSeqNo, lastRefreshedCheckpoint, + localSegmentsSizeMap, checkpoint ); // At this point since we have uploaded new segments, segment infos and segment metadata file, @@ -251,7 +285,7 @@ public void onFailure(Exception e) { }, latch); // Start the segments files upload - uploadNewSegments(localSegmentsPostRefresh, segmentUploadsCompletedListener); + uploadNewSegments(localSegmentsPostRefresh, localSegmentsSizeMap, segmentUploadsCompletedListener); latch.await(); } catch (EngineException e) { logger.warn("Exception while reading SegmentInfosSnapshot", e); @@ -295,10 +329,11 @@ private void onSuccessfulSegmentsSync( long refreshClockTimeMs, long refreshSeqNo, long lastRefreshedCheckpoint, + Map<String, Long> localFileSizeMap, ReplicationCheckpoint checkpoint ) { // Update latest uploaded segment files name in segment tracker - segmentTracker.setLatestUploadedFiles(segmentTracker.getLatestLocalFileNameLengthMap().keySet()); + segmentTracker.setLatestUploadedFiles(localFileSizeMap.keySet()); // Update the remote refresh time and refresh seq no updateRemoteRefreshTimeAndSeqNo(refreshTimeMs, refreshClockTimeMs, refreshSeqNo); // Reset the backoffDelayIterator for the future failures @@ -371,7 +406,11 @@ void uploadMetadata(Collection<String> localSegmentsPostRefresh, SegmentInfos se } } - private void uploadNewSegments(Collection<String> localSegmentsPostRefresh, ActionListener<Void> listener) { + private void uploadNewSegments( + Collection<String> localSegmentsPostRefresh, + Map<String, Long> localSegmentsSizeMap, + ActionListener<Void> listener + ) { Collection<String> filteredFiles = localSegmentsPostRefresh.stream().filter(file -> !skipUpload(file)).collect(Collectors.toList()); if (filteredFiles.size() == 0) { logger.debug("No new segments to upload in uploadNewSegments"); @@ -385,7 +424,7 @@ private void uploadNewSegments(Collection<String> localSegmentsPostRefresh, Acti for (String src : filteredFiles) { // Initializing listener here to ensure that the stats increment operations are thread-safe - UploadListener statsListener = createUploadListener(); + UploadListener statsListener = createUploadListener(localSegmentsSizeMap); ActionListener<Void> aggregatedListener = ActionListener.wrap(resp -> { statsListener.onSuccess(src); batchUploadListener.onResponse(resp); @@ -444,9 +483,11 @@ private void updateRemoteRefreshTimeAndSeqNo(long refreshTimeMs, long refreshClo * Updates map of file name to size of the input segment files in the segment tracker. Uses {@code storeDirectory.fileLength(file)} to get the size. * * @param segmentFiles list of segment files that are part of the most recent local refresh. 
+ * + * @return updated map of local segment file names to file sizes */ - private void updateLocalSizeMapAndTracker(Collection<String> segmentFiles) { - segmentTracker.updateLatestLocalFileNameLengthMap(segmentFiles, storeDirectory::fileLength); + private Map<String, Long> updateLocalSizeMapAndTracker(Collection<String> segmentFiles) { + return segmentTracker.updateLatestLocalFileNameLengthMap(segmentFiles, storeDirectory::fileLength); } private void updateFinalStatusInSegmentTracker(boolean uploadStatus, long bytesBeforeUpload, long startTimeInNS) { @@ -521,22 +562,24 @@ private boolean isLocalOrSnapshotRecovery() { /** * Creates an {@link UploadListener} containing the stats population logic which would be triggered before and after segment upload events + * + * @param fileSizeMap updated map of current snapshot of local segments to their sizes */ - private UploadListener createUploadListener() { + private UploadListener createUploadListener(Map<String, Long> fileSizeMap) { return new UploadListener() { private long uploadStartTime = 0; @Override public void beforeUpload(String file) { // Start tracking the upload bytes started - segmentTracker.addUploadBytesStarted(segmentTracker.getLatestLocalFileNameLengthMap().get(file)); + segmentTracker.addUploadBytesStarted(fileSizeMap.get(file)); uploadStartTime = System.currentTimeMillis(); } @Override public void onSuccess(String file) { // Track upload success - segmentTracker.addUploadBytesSucceeded(segmentTracker.getLatestLocalFileNameLengthMap().get(file)); + segmentTracker.addUploadBytesSucceeded(fileSizeMap.get(file)); segmentTracker.addToLatestUploadedFiles(file); segmentTracker.addUploadTimeInMillis(Math.max(1, System.currentTimeMillis() - uploadStartTime)); } @@ -544,7 +587,7 @@ public void onSuccess(String file) { @Override public void onFailure(String file) { // Track upload failure - segmentTracker.addUploadBytesFailed(segmentTracker.getLatestLocalFileNameLengthMap().get(file)); + segmentTracker.addUploadBytesFailed(fileSizeMap.get(file)); segmentTracker.addUploadTimeInMillis(Math.max(1, System.currentTimeMillis() - uploadStartTime)); } }; diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 5b1940bb1d9a5..3faef2da05320 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -38,11 +38,13 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.search.Sort; +import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.opensearch.ExceptionsHelper; +import org.opensearch.OpenSearchException; import org.opensearch.action.StepListener; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.MappingMetadata; @@ -191,7 +193,8 @@ void recoverFromLocalShards( // just trigger a merge to do housekeeping on the // copied segments - we will also see them in stats etc.
indexShard.getEngine().forceMerge(false, -1, false, false, false, UUIDs.randomBase64UUID()); - if (indexShard.isRemoteTranslogEnabled()) { + if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) { + waitForRemoteStoreSync(indexShard); if (indexShard.isRemoteSegmentStoreInSync() == false) { throw new IndexShardRecoveryException( indexShard.shardId(), @@ -432,7 +435,8 @@ void recoverFromSnapshotAndRemoteStore( } indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); - if (indexShard.isRemoteTranslogEnabled()) { + if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) { + waitForRemoteStoreSync(indexShard); if (indexShard.isRemoteSegmentStoreInSync() == false) { listener.onFailure(new IndexShardRestoreFailedException(shardId, "Failed to upload to remote segment store")); return; @@ -717,7 +721,8 @@ private void restore( } indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); - if (indexShard.isRemoteTranslogEnabled()) { + if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) { + waitForRemoteStoreSync(indexShard); if (indexShard.isRemoteSegmentStoreInSync() == false) { listener.onFailure(new IndexShardRestoreFailedException(shardId, "Failed to upload to remote segment store")); return; @@ -791,4 +796,31 @@ private void bootstrap(final IndexShard indexShard, final Store store) throws IO ); store.associateIndexWithNewTranslog(translogUUID); } + + /* + Blocks the calling thread, waiting for the remote store to sync, until the internal remote upload timeout elapses. + */ + private void waitForRemoteStoreSync(IndexShard indexShard) { + if (indexShard.shardRouting.primary() == false) { + return; + } + long startNanos = System.nanoTime(); + + while (System.nanoTime() - startNanos < indexShard.getRecoverySettings().internalRemoteUploadTimeout().nanos()) { + try { + if (indexShard.isRemoteSegmentStoreInSync()) { + break; + } else { + try { + Thread.sleep(TimeValue.timeValueMinutes(1).millis()); + } catch (InterruptedException ie) { + throw new OpenSearchException("Interrupted waiting for completion of [{}]", ie); + } + } + } catch (AlreadyClosedException e) { + // There is no point in waiting as the shard is now closed.
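/*
 * [Editor's aside, illustrative only, not part of the change] waitForRemoteStoreSync above is a
 * bounded poll: check the condition, sleep, and re-check until the configured timeout elapses,
 * bailing out early if the shard is closed. A generic, self-contained sketch of the pattern
 * (hypothetical names, simplified interrupt handling):
 */
import java.util.function.BooleanSupplier;

class BoundedPollSketch {
    // Polls `condition` every `pollMillis` until it holds or `timeoutNanos` elapses.
    static boolean pollUntil(BooleanSupplier condition, long timeoutNanos, long pollMillis) throws InterruptedException {
        final long startNanos = System.nanoTime();
        while (System.nanoTime() - startNanos < timeoutNanos) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(pollMillis); // back off between checks
        }
        return condition.getAsBoolean(); // final check at the deadline
    }
}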
+ return; + } + } + } } diff --git a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java index 9c1e902606cab..dab99fd25b192 100644 --- a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java +++ b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java @@ -164,7 +164,7 @@ public RemoteSegmentMetadata init() throws IOException { */ public RemoteSegmentMetadata initializeToSpecificCommit(long primaryTerm, long commitGeneration, String acquirerId) throws IOException { String metadataFilePrefix = MetadataFilenameUtils.getMetadataFilePrefixForCommit(primaryTerm, commitGeneration); - String metadataFile = ((RemoteStoreMetadataLockManager) mdLockManager).fetchLock(metadataFilePrefix, acquirerId); + String metadataFile = ((RemoteStoreMetadataLockManager) mdLockManager).fetchLockedMetadataFile(metadataFilePrefix, acquirerId); RemoteSegmentMetadata remoteSegmentMetadata = readMetadataFile(metadataFile); if (remoteSegmentMetadata != null) { this.segmentsUploadedToRemoteStore = new ConcurrentHashMap<>(remoteSegmentMetadata.getMetadata()); @@ -749,20 +749,16 @@ public void deleteStaleSegments(int lastNMetadataFilesToKeep) throws IOException lastNMetadataFilesToKeep, sortedMetadataFileList.size() ); - List<String> metadataFilesToBeDeleted = metadataFilesEligibleToDelete.stream().filter(metadataFile -> { - try { - return !isLockAcquired(metadataFile); - } catch (IOException e) { - logger.error( - "skipping metadata file (" - + metadataFile - + ") deletion for this run," - + " as checking lock for metadata is failing with error: " - + e - ); - return false; - } - }).collect(Collectors.toList()); + Set<String> allLockFiles; + try { + allLockFiles = ((RemoteStoreMetadataLockManager) mdLockManager).fetchLockedMetadataFiles(MetadataFilenameUtils.METADATA_PREFIX); + } catch (Exception e) { + logger.error("Exception while fetching segment metadata lock files, skipping deleteStaleSegments", e); + return; + } + List<String> metadataFilesToBeDeleted = metadataFilesEligibleToDelete.stream() + .filter(metadataFile -> allLockFiles.contains(metadataFile) == false) + .collect(Collectors.toList()); sortedMetadataFileList.removeAll(metadataFilesToBeDeleted); logger.debug( diff --git a/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java b/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java index 5ebd00f59ef49..9c29e03c225e4 100644 --- a/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java +++ b/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java @@ -21,6 +21,7 @@ import java.util.Collection; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; /** @@ -75,7 +76,7 @@ public void release(LockInfo lockInfo) throws IOException { } } - public String fetchLock(String filenamePrefix, String acquirerId) throws IOException { + public String fetchLockedMetadataFile(String filenamePrefix, String acquirerId) throws IOException { Collection<String> lockFiles = lockDirectory.listFilesByPrefix(filenamePrefix); List<String> lockFilesForAcquirer = lockFiles.stream() .filter(lockFile -> acquirerId.equals(FileLockInfo.LockFileUtils.getAcquirerIdFromLock(lockFile))) @@ -88,6 +89,11 @@ public String fetchLock(String filenamePrefix, String acquirerId) throws 
IOExcep return lockFilesForAcquirer.get(0); } + public Set<String> fetchLockedMetadataFiles(String filenamePrefix) throws IOException { + Collection<String> lockFiles = lockDirectory.listFilesByPrefix(filenamePrefix); + return lockFiles.stream().map(FileLockInfo.LockFileUtils::getFileToLockNameFromLock).collect(Collectors.toSet()); + } + /** * Checks whether a given file have any lock on it or not. * @param lockInfo File Lock Info instance for which we need to check if lock is acquired. diff --git a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java index fb89e651e7616..0261ab24dfa7a 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java +++ b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java @@ -11,16 +11,13 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.opensearch.common.settings.Settings; +import org.opensearch.common.inject.Provider; import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; import org.opensearch.env.NodeEnvironment; -import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; -import org.opensearch.index.shard.IndexEventListener; import org.opensearch.index.shard.ShardPath; -import org.opensearch.indices.cluster.IndicesClusterStateService; import java.io.IOException; import java.nio.file.DirectoryStream; @@ -30,79 +27,90 @@ import static org.opensearch.index.store.remote.directory.RemoteSnapshotDirectoryFactory.LOCAL_STORE_LOCATION; /** - * IndexEventListener to clean up file cache when the index is deleted. The cached entries will be eligible + * IndexStoreListener to clean up file cache when the index is deleted. The cached entries will be eligible * for eviction when the shard is deleted, but this listener deterministically removes entries from memory and * from disk at the time of shard deletion as opposed to waiting for the cache to need to perform eviction. * * @opensearch.internal */ -public class FileCacheCleaner implements IndexEventListener { - private static final Logger log = LogManager.getLogger(FileCacheCleaner.class); +public class FileCacheCleaner implements NodeEnvironment.IndexStoreListener { + private static final Logger logger = LogManager.getLogger(FileCacheCleaner.class); - private final NodeEnvironment nodeEnvironment; - private final FileCache fileCache; + private final Provider<FileCache> fileCacheProvider; - public FileCacheCleaner(NodeEnvironment nodeEnvironment, FileCache fileCache) { - this.nodeEnvironment = nodeEnvironment; - this.fileCache = fileCache; + public FileCacheCleaner(Provider<FileCache> fileCacheProvider) { + this.fileCacheProvider = fileCacheProvider; } /** - * before shard deleted and after shard closed, cleans up the corresponding index file path entries from FC. - * @param shardId The shard id - * @param settings the shards index settings + * Before the shard path is deleted, cleans up the corresponding index file path entries from the FileCache and deletes the + * corresponding shard file cache path.
+ * + * @param shardId the shard id + * @param indexSettings the index settings + * @param nodeEnvironment the node environment */ @Override - public void beforeIndexShardDeleted(ShardId shardId, Settings settings) { + public void beforeShardPathDeleted(ShardId shardId, IndexSettings indexSettings, NodeEnvironment nodeEnvironment) { + if (indexSettings.isRemoteSnapshot()) { + final ShardPath shardPath = ShardPath.loadFileCachePath(nodeEnvironment, shardId); + cleanupShardFileCache(shardPath); + deleteShardFileCacheDirectory(shardPath); + } + } + + /** + * Cleans up the corresponding index file path entries from FileCache + * + * @param shardPath the shard path + */ + private void cleanupShardFileCache(ShardPath shardPath) { try { - if (isRemoteSnapshot(settings)) { - final ShardPath shardPath = ShardPath.loadFileCachePath(nodeEnvironment, shardId); - final Path localStorePath = shardPath.getDataPath().resolve(LOCAL_STORE_LOCATION); - try (DirectoryStream<Path> ds = Files.newDirectoryStream(localStorePath)) { - for (Path subPath : ds) { - fileCache.remove(subPath.toRealPath()); - } + final FileCache fc = fileCacheProvider.get(); + assert fc != null; + final Path localStorePath = shardPath.getDataPath().resolve(LOCAL_STORE_LOCATION); + try (DirectoryStream<Path> ds = Files.newDirectoryStream(localStorePath)) { + for (Path subPath : ds) { + fc.remove(subPath.toRealPath()); } } } catch (IOException ioe) { - log.error(() -> new ParameterizedMessage("Error removing items from cache during shard deletion {}", shardId), ioe); + logger.error( + () -> new ParameterizedMessage("Error removing items from cache during shard deletion {}", shardPath.getShardId()), + ioe + ); } } - @Override - public void afterIndexShardDeleted(ShardId shardId, Settings settings) { - if (isRemoteSnapshot(settings)) { - final Path path = ShardPath.loadFileCachePath(nodeEnvironment, shardId).getDataPath(); - try { - if (Files.exists(path)) { - IOUtils.rm(path); - } - } catch (IOException e) { - log.error(() -> new ParameterizedMessage("Failed to delete cache path for shard {}", shardId), e); + private void deleteShardFileCacheDirectory(ShardPath shardPath) { + final Path path = shardPath.getDataPath(); + try { + if (Files.exists(path)) { + IOUtils.rm(path); } + } catch (IOException e) { + logger.error(() -> new ParameterizedMessage("Failed to delete cache path for shard {}", shardPath.getShardId()), e); } } + /** + * Before the index path is deleted, deletes the corresponding index file cache path.
+ * + * @param index the index + * @param indexSettings the index settings + * @param nodeEnvironment the node environment + */ @Override - public void afterIndexRemoved( - Index index, - IndexSettings indexSettings, - IndicesClusterStateService.AllocatedIndices.IndexRemovalReason reason - ) { - if (isRemoteSnapshot(indexSettings.getSettings()) - && reason == IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED) { + public void beforeIndexPathDeleted(Index index, IndexSettings indexSettings, NodeEnvironment nodeEnvironment) { + if (indexSettings.isRemoteSnapshot()) { final Path indexCachePath = nodeEnvironment.fileCacheNodePath().fileCachePath.resolve(index.getUUID()); if (Files.exists(indexCachePath)) { try { IOUtils.rm(indexCachePath); } catch (IOException e) { - log.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e); + logger.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e); } } } } - - private static boolean isRemoteSnapshot(Settings settings) { - return IndexModule.Type.REMOTE_SNAPSHOT.match(settings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey())); - } } diff --git a/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java b/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java index c9469283ee921..9250e73e08509 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java +++ b/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java @@ -75,6 +75,7 @@ public IndexInput fetchBlob(BlobFetchRequest blobFetchRequest) throws IOExceptio } } + @SuppressWarnings("removal") private static FileCachedIndexInput createIndexInput(FileCache fileCache, BlobContainer blobContainer, BlobFetchRequest request) { // We need to do a privileged action here in order to fetch from remote // and write to the local file cache in case this is invoked as a side diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index db5b93f073b03..c83f2a4c5cd5d 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -135,7 +135,6 @@ import org.opensearch.index.shard.IndexingOperationListener; import org.opensearch.index.shard.IndexingStats; import org.opensearch.index.shard.IndexingStats.Stats.DocStatusStats; -import org.opensearch.index.store.remote.filecache.FileCacheCleaner; import org.opensearch.index.translog.InternalTranslogFactory; import org.opensearch.index.translog.RemoteBlobStoreInternalTranslogFactory; import org.opensearch.index.translog.TranslogFactory; @@ -362,7 +361,6 @@ public class IndicesService extends AbstractLifecycleComponent private final BiFunction<IndexSettings, ShardRouting, TranslogFactory> translogFactorySupplier; private volatile TimeValue clusterDefaultRefreshInterval; private volatile TimeValue clusterRemoteTranslogBufferInterval; - private final FileCacheCleaner fileCacheCleaner; private final SearchRequestStats searchRequestStats; @@ -395,7 +393,6 @@ public IndicesService( Map<String, IndexStorePlugin.RecoveryStateFactory> recoveryStateFactories, IndexStorePlugin.DirectoryFactory remoteDirectoryFactory, Supplier<RepositoriesService> repositoriesServiceSupplier, - FileCacheCleaner fileCacheCleaner, SearchRequestStats searchRequestStats, @Nullable RemoteStoreStatsTrackerFactory 
remoteStoreStatsTrackerFactory, RecoverySettings recoverySettings @@ -450,7 +447,6 @@ public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, lon this.directoryFactories = directoryFactories; this.recoveryStateFactories = recoveryStateFactories; - this.fileCacheCleaner = fileCacheCleaner; // doClose() is called when shutting down a node, yet there might still be ongoing requests // that we need to wait for before closing some resources such as the caches. In order to // avoid closing these resources while ongoing requests are still being processed, we use a @@ -766,7 +762,6 @@ public void onStoreClosed(ShardId shardId) { }; finalListeners.add(onStoreClose); finalListeners.add(oldShardsStats); - finalListeners.add(fileCacheCleaner); final IndexService indexService = createIndexService( CREATE_INDEX, indexMetadata, diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java index 5351ae7fe08dd..2b41eb125d808 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java @@ -46,6 +46,8 @@ import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.common.unit.ByteSizeValue; +import java.util.concurrent.TimeUnit; + /** * Settings for the recovery mechanism * @@ -176,6 +178,13 @@ public class RecoverySettings { Property.Dynamic ); + public static final Setting<TimeValue> INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT = Setting.timeSetting( + "indices.recovery.internal_remote_upload_timeout", + new TimeValue(1, TimeUnit.HOURS), + Property.Dynamic, + Property.NodeScope + ); + // choose 512KB-16B to ensure that the resulting byte[] is not a humongous allocation in G1. 
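/*
 * [Editor's aside, illustrative only, not part of the change] Worked arithmetic for the comment
 * above (the DEFAULT_CHUNK_SIZE constant follows): 512 * 1024 - 16 = 524,272 bytes of array data,
 * leaving roughly 16 bytes for the byte[] object header so the whole allocation lands at about
 * 512 KiB rather than above it. G1 typically classifies objects of half a region size or larger
 * as "humongous", so keeping chunks near or below 512 KiB avoids that path for common region
 * sizes (the exact threshold depends on the configured G1 region size):
 */
class ChunkSizeSketch {
    public static void main(String[] args) {
        final int chunkBytes = 512 * 1024 - 16; // matches the constant defined below
        System.out.println(chunkBytes);                    // 524272
        System.out.println(chunkBytes + 16 == 512 * 1024); // true: data plus a ~16-byte header is 512 KiB
    }
}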
public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512 * 1024 - 16, ByteSizeUnit.BYTES); @@ -193,6 +202,7 @@ public class RecoverySettings { private volatile int minRemoteSegmentMetadataFiles; private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE; + private volatile TimeValue internalRemoteUploadTimeout; public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { this.retryDelayStateSync = INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.get(settings); @@ -216,6 +226,7 @@ public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { } logger.debug("using max_bytes_per_sec[{}]", maxBytesPerSec); + this.internalRemoteUploadTimeout = INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT.get(settings); clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec); clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_CONCURRENT_FILE_CHUNKS_SETTING, this::setMaxConcurrentFileChunks); @@ -237,6 +248,8 @@ public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { CLUSTER_REMOTE_INDEX_SEGMENT_METADATA_RETENTION_MAX_COUNT_SETTING, this::setMinRemoteSegmentMetadataFiles ); + clusterSettings.addSettingsUpdateConsumer(INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT, this::setInternalRemoteUploadTimeout); + } public RateLimiter rateLimiter() { @@ -267,6 +280,10 @@ public TimeValue internalActionLongTimeout() { return internalActionLongTimeout; } + public TimeValue internalRemoteUploadTimeout() { + return internalRemoteUploadTimeout; + } + public ByteSizeValue getChunkSize() { return chunkSize; } @@ -298,6 +315,10 @@ public void setInternalActionLongTimeout(TimeValue internalActionLongTimeout) { this.internalActionLongTimeout = internalActionLongTimeout; } + public void setInternalRemoteUploadTimeout(TimeValue internalRemoteUploadTimeout) { + this.internalRemoteUploadTimeout = internalRemoteUploadTimeout; + } + private void setMaxBytesPerSec(ByteSizeValue maxBytesPerSec) { this.maxBytesPerSec = maxBytesPerSec; if (maxBytesPerSec.getBytes() <= 0) { diff --git a/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java b/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java index 02fc8feefd698..a17779810239a 100644 --- a/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java +++ b/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java @@ -8,16 +8,14 @@ package org.opensearch.indices.replication; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionListenerResponseHandler; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.indices.recovery.RecoverySettings; -import org.opensearch.indices.recovery.RetryableTransportClient; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportRequestOptions; import org.opensearch.transport.TransportService; @@ -35,9 +33,7 @@ */ public class PrimaryShardReplicationSource implements SegmentReplicationSource { - private static final Logger logger = LogManager.getLogger(PrimaryShardReplicationSource.class); - - private final 
RetryableTransportClient transportClient; + private final TransportService transportService; private final DiscoveryNode sourceNode; private final DiscoveryNode targetNode; @@ -52,12 +48,7 @@ public PrimaryShardReplicationSource( DiscoveryNode sourceNode ) { this.targetAllocationId = targetAllocationId; - this.transportClient = new RetryableTransportClient( - transportService, - sourceNode, - recoverySettings.internalActionRetryTimeout(), - logger - ); + this.transportService = transportService; this.sourceNode = sourceNode; this.targetNode = targetNode; this.recoverySettings = recoverySettings; @@ -69,10 +60,14 @@ public void getCheckpointMetadata( ReplicationCheckpoint checkpoint, ActionListener<CheckpointInfoResponse> listener ) { - final Writeable.Reader<CheckpointInfoResponse> reader = CheckpointInfoResponse::new; - final ActionListener<CheckpointInfoResponse> responseListener = ActionListener.map(listener, r -> r); final CheckpointInfoRequest request = new CheckpointInfoRequest(replicationId, targetAllocationId, targetNode, checkpoint); - transportClient.executeRetryableAction(GET_CHECKPOINT_INFO, request, responseListener, reader); + transportService.sendRequest( + sourceNode, + GET_CHECKPOINT_INFO, + request, + TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionRetryTimeout()).build(), + new ActionListenerResponseHandler<>(listener, CheckpointInfoResponse::new, ThreadPool.Names.GENERIC) + ); } @Override @@ -88,8 +83,6 @@ public void getSegmentFiles( // MultiFileWriter takes care of progress tracking for downloads in this scenario // TODO: Move state management and tracking into replication methods and use chunking and data // copy mechanisms only from MultiFileWriter - final Writeable.Reader<GetSegmentFilesResponse> reader = GetSegmentFilesResponse::new; - final ActionListener<GetSegmentFilesResponse> responseListener = ActionListener.map(listener, r -> r); final GetSegmentFilesRequest request = new GetSegmentFilesRequest( replicationId, targetAllocationId, @@ -97,20 +90,17 @@ public void getSegmentFiles( filesToFetch, checkpoint ); - final TransportRequestOptions options = TransportRequestOptions.builder() - .withTimeout(recoverySettings.internalActionLongTimeout()) - .build(); - transportClient.executeRetryableAction(GET_SEGMENT_FILES, request, options, responseListener, reader); + transportService.sendRequest( + sourceNode, + GET_SEGMENT_FILES, + request, + TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionLongTimeout()).build(), + new ActionListenerResponseHandler<>(listener, GetSegmentFilesResponse::new, ThreadPool.Names.GENERIC) + ); } @Override public String getDescription() { return sourceNode.getName(); } - - @Override - public void cancel() { - transportClient.cancel(); - } - } diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java index cc71ef816e525..af764556b7549 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java @@ -83,6 +83,16 @@ protected void closeInternal() { } } + @Override + protected void onCancel(String reason) { + try { + notifyListener(new ReplicationFailedException(reason), false); + } finally { + source.cancel(); + cancellableThreads.cancel(reason); + } + } + @Override protected String getPrefix() { return REPLICATION_PREFIX + 
UUIDs.randomBase64UUID() + "."; @@ -320,16 +330,4 @@ private void finalizeReplication(CheckpointInfoResponse checkpointInfoResponse) } } } - - /** - * Trigger a cancellation, this method will not close the target a subsequent call to #fail is required from target service. - */ - @Override - public void cancel(String reason) { - if (finished.get() == false) { - logger.trace(new ParameterizedMessage("Cancelling replication for target {}", description())); - cancellableThreads.cancel(reason); - source.cancel(); - } - } } diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java index d6db154a4e0e3..f28f829545d59 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java @@ -84,10 +84,6 @@ public class SegmentReplicationTargetService extends AbstractLifecycleComponent private final ClusterService clusterService; private final TransportService transportService; - public ReplicationRef<SegmentReplicationTarget> get(long replicationId) { - return onGoingReplications.get(replicationId); - } - /** * The internal actions * @@ -158,6 +154,7 @@ protected void doStart() { @Override protected void doStop() { if (DiscoveryNode.isDataNode(clusterService.getSettings())) { + assert onGoingReplications.size() == 0 : "Replication collection should be empty on shutdown"; clusterService.removeListener(this); } } @@ -201,7 +198,7 @@ public void clusterChanged(ClusterChangedEvent event) { @Override public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { if (indexShard != null && indexShard.indexSettings().isSegRepEnabled()) { - onGoingReplications.requestCancel(indexShard.shardId(), "Shard closing"); + onGoingReplications.cancelForShard(indexShard.shardId(), "Shard closing"); latestReceivedCheckpoint.remove(shardId); } } @@ -223,7 +220,7 @@ public void afterIndexShardStarted(IndexShard indexShard) { @Override public void shardRoutingChanged(IndexShard indexShard, @Nullable ShardRouting oldRouting, ShardRouting newRouting) { if (oldRouting != null && indexShard.indexSettings().isSegRepEnabled() && oldRouting.primary() == false && newRouting.primary()) { - onGoingReplications.requestCancel(indexShard.shardId(), "Shard has been promoted to primary"); + onGoingReplications.cancelForShard(indexShard.shardId(), "Shard has been promoted to primary"); latestReceivedCheckpoint.remove(indexShard.shardId()); } } @@ -255,6 +252,14 @@ public SegmentReplicationState getSegmentReplicationState(ShardId shardId) { .orElseGet(() -> getlatestCompletedEventSegmentReplicationState(shardId)); } + public ReplicationRef<SegmentReplicationTarget> get(long replicationId) { + return onGoingReplications.get(replicationId); + } + + public SegmentReplicationTarget get(ShardId shardId) { + return onGoingReplications.getOngoingReplicationTarget(shardId); + } + /** * Invoked when a new checkpoint is received from a primary shard. * It checks if a new checkpoint should be processed or not and starts replication if needed. 
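/*
 * [Editor's aside, illustrative only, not part of the change] A distilled sketch of the "replay
 * the latest received checkpoint" pattern that the next hunk hardens: remember only the newest
 * checkpoint per shard, and before replaying it, verify the shard still exists, since a shard is
 * removed from the service's collection before it is actually closed (hypothetical types/names):
 */
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class CheckpointReplaySketch {
    interface Shard {
        boolean exists();
        void startReplication(long checkpoint);
    }

    private final Map<String, Long> latestReceived = new ConcurrentHashMap<>();

    void onNewCheckpoint(String shardId, long checkpoint) {
        latestReceived.merge(shardId, checkpoint, Math::max); // keep only the newest checkpoint
    }

    void replayIfNeeded(String shardId, Shard shard) {
        final Long latest = latestReceived.get(shardId);
        if (latest != null && shard.exists()) { // guard against shards already being closed
            shard.startReplication(latest);
        }
    }
}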
@@ -454,7 +459,13 @@ protected boolean processLatestReceivedCheckpoint(IndexShard replicaShard, Threa latestPublishedCheckpoint ) ); - Runnable runnable = () -> onNewCheckpoint(latestReceivedCheckpoint.get(replicaShard.shardId()), replicaShard); + Runnable runnable = () -> { + // If we retry, ensure the shard is not in the process of being closed. + // It will be removed from the indicesService collection before the shard is actually marked as closed. + if (indicesService.getShardOrNull(replicaShard.shardId()) != null) { + onNewCheckpoint(latestReceivedCheckpoint.get(replicaShard.shardId()), replicaShard); + } + }; // Checks if we are using same thread and forks if necessary. if (thread == Thread.currentThread()) { threadPool.generic().execute(runnable); @@ -548,9 +559,6 @@ public ReplicationRunner(long replicationId) { @Override public void onFailure(Exception e) { - try (final ReplicationRef<SegmentReplicationTarget> ref = onGoingReplications.get(replicationId)) { - logger.error(() -> new ParameterizedMessage("Error during segment replication, {}", ref.get().description()), e); - } onGoingReplications.fail(replicationId, new ReplicationFailedException("Unexpected Error during replication", e), false); } diff --git a/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java index 5185b740d90cb..a2c2137130587 100644 --- a/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java @@ -387,6 +387,7 @@ private static <T> Map<String, T> readMap(String processorType, String processor /** * Returns and removes the specified property as an {@link Object} from the specified configuration map. + * If the property is missing, an {@link OpenSearchParseException} is thrown. */ public static Object readObject(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) { Object value = configuration.remove(propertyName); @@ -396,6 +397,13 @@ public static Object readObject(String processorType, String processorTag, Map<S return value; } + /** + * Returns and removes the specified property as an {@link Object} from the specified configuration map. + * If the property is missing, {@code null} is returned.
+ */ + public static Object readOptionalObject(Map<String, Object> configuration, String propertyName) { + return configuration.remove(propertyName); + } + public static OpenSearchException newConfigurationException( String processorType, String processorTag, diff --git a/server/src/main/java/org/opensearch/ingest/IngestDocument.java b/server/src/main/java/org/opensearch/ingest/IngestDocument.java index 10e9e64db561e..d975b0014de1f 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/opensearch/ingest/IngestDocument.java @@ -757,7 +757,7 @@ public static <K, V> Map<K, V> deepCopyMap(Map<K, V> source) { return (Map<K, V>) deepCopy(source); } - private static Object deepCopy(Object value) { + public static Object deepCopy(Object value) { if (value instanceof Map) { Map<?, ?> mapValue = (Map<?, ?>) value; Map<Object, Object> copy = new HashMap<>(mapValue.size()); diff --git a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java index dc27ab0fb91c2..e3290bfec6905 100644 --- a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java @@ -216,6 +216,7 @@ private static boolean usingBundledJdkOrJre() { } } + @SuppressWarnings("removal") public static JvmInfo jvmInfo() { SecurityManager sm = System.getSecurityManager(); if (sm != null) { diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 8510122c39fcb..547f610f4a752 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -527,7 +527,11 @@ protected Node( */ this.environment = new Environment(settings, initialEnvironment.configDir(), Node.NODE_LOCAL_STORAGE_SETTING.get(settings)); Environment.assertEquivalent(initialEnvironment, this.environment); - nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + if (DiscoveryNode.isSearchNode(settings) == false) { + nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + } else { + nodeEnvironment = new NodeEnvironment(settings, environment, new FileCacheCleaner(this::fileCache)); + } logger.info( "node name [{}], node ID [{}], cluster name [{}], roles {}", NODE_NAME_SETTING.get(tmpSettings), @@ -678,7 +682,6 @@ protected Node( ); // File cache will be initialized by the node once circuit breakers are in place. 
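/*
 * [Editor's aside, illustrative only, not part of the change] This initialization ordering is why
 * FileCacheCleaner now receives a Provider<FileCache> (see the FileCacheCleaner and Node hunks
 * above): the cleaner is constructed before the file cache exists, so the dependency is resolved
 * lazily at use time. A minimal sketch of the deferred-dependency pattern, using
 * java.util.function.Supplier as a stand-in for the injected Provider (hypothetical names):
 */
import java.util.function.Supplier;

class DeferredDependencySketch {
    static class Cache {
        void remove(String key) { /* evict an entry */ }
    }

    private final Supplier<Cache> cacheProvider;

    DeferredDependencySketch(Supplier<Cache> cacheProvider) {
        this.cacheProvider = cacheProvider; // safe to capture even before the cache is initialized
    }

    void cleanup(String key) {
        final Cache cache = cacheProvider.get(); // dereferenced only when actually needed
        if (cache != null) {
            cache.remove(key);
        }
    }
}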
initializeFileCache(settings, circuitBreakerService.getBreaker(CircuitBreaker.REQUEST)); - final FileCacheCleaner fileCacheCleaner = new FileCacheCleaner(nodeEnvironment, fileCache); final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool, fileCache); pluginsService.filterPlugins(CircuitBreakerPlugin.class).forEach(plugin -> { @@ -812,7 +815,6 @@ protected Node( recoveryStateFactories, remoteDirectoryFactory, repositoriesServiceReference::get, - fileCacheCleaner, searchRequestStats, remoteStoreStatsTrackerFactory, recoverySettings diff --git a/server/src/main/java/org/opensearch/plugins/PluginInfo.java b/server/src/main/java/org/opensearch/plugins/PluginInfo.java index dc8fd6e604d72..79e57b3e8a0e8 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/opensearch/plugins/PluginInfo.java @@ -32,20 +32,28 @@ package org.opensearch.plugins; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.json.JsonReadFeature; + import org.opensearch.Version; import org.opensearch.bootstrap.JarHell; import org.opensearch.common.annotation.PublicApi; +import org.opensearch.common.xcontent.json.JsonXContentParser; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.semver.SemverRange; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -65,11 +73,15 @@ public class PluginInfo implements Writeable, ToXContentObject { public static final String OPENSEARCH_PLUGIN_PROPERTIES = "plugin-descriptor.properties"; public static final String OPENSEARCH_PLUGIN_POLICY = "plugin-security.policy"; + private static final JsonFactory jsonFactory = new JsonFactory().configure( + JsonReadFeature.ALLOW_UNQUOTED_FIELD_NAMES.mappedFeature(), + true + ); private final String name; private final String description; private final String version; - private final Version opensearchVersion; + private final List<SemverRange> opensearchVersionRanges; private final String javaVersion; private final String classname; private final String customFolderName; @@ -99,11 +111,41 @@ public PluginInfo( String customFolderName, List<String> extendedPlugins, boolean hasNativeController + ) { + this( + name, + description, + version, + List.of(SemverRange.fromString(opensearchVersion.toString())), + javaVersion, + classname, + customFolderName, + extendedPlugins, + hasNativeController + ); + } + + public PluginInfo( + String name, + String description, + String version, + List<SemverRange> opensearchVersionRanges, + String javaVersion, + String classname, + String customFolderName, + List<String> extendedPlugins, + boolean hasNativeController ) { this.name = name; this.description = description; this.version = version; - this.opensearchVersion = opensearchVersion; + // Ensure only one range is specified (for now) + if (opensearchVersionRanges.size() != 1) { + throw new IllegalArgumentException( + "Exactly one range is allowed to be specified in dependencies for the 
plugin [" + name + "]" + ); + } + this.opensearchVersionRanges = opensearchVersionRanges; this.javaVersion = javaVersion; this.classname = classname; this.customFolderName = customFolderName; @@ -152,11 +194,16 @@ public PluginInfo( * @param in the stream * @throws IOException if an I/O exception occurred reading the plugin info from the stream */ + @SuppressWarnings("unchecked") public PluginInfo(final StreamInput in) throws IOException { this.name = in.readString(); this.description = in.readString(); this.version = in.readString(); - this.opensearchVersion = in.readVersion(); + if (in.getVersion().onOrAfter(Version.V_2_13_0)) { + this.opensearchVersionRanges = (List<SemverRange>) in.readGenericValue(); + } else { + this.opensearchVersionRanges = List.of(new SemverRange(in.readVersion(), SemverRange.RangeOperator.DEFAULT)); + } this.javaVersion = in.readString(); this.classname = in.readString(); this.customFolderName = in.readString(); @@ -169,7 +216,15 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeString(name); out.writeString(description); out.writeString(version); - out.writeVersion(opensearchVersion); + if (out.getVersion().onOrAfter(Version.V_2_13_0)) { + out.writeGenericValue(opensearchVersionRanges); + } else { + /* + This works for currently supported range notations (=,~) + As more notations get added, then a suitable version must be picked. + */ + out.writeVersion(opensearchVersionRanges.get(0).getRangeVersion()); + } out.writeString(javaVersion); out.writeString(classname); if (customFolderName != null) { @@ -214,10 +269,49 @@ public static PluginInfo readFromProperties(final Path path) throws IOException } final String opensearchVersionString = propsMap.remove("opensearch.version"); - if (opensearchVersionString == null) { - throw new IllegalArgumentException("property [opensearch.version] is missing for plugin [" + name + "]"); + final String dependenciesValue = propsMap.remove("dependencies"); + if (opensearchVersionString == null && dependenciesValue == null) { + throw new IllegalArgumentException( + "Either [opensearch.version] or [dependencies] property must be specified for the plugin [" + name + "]" + ); + } + if (opensearchVersionString != null && dependenciesValue != null) { + throw new IllegalArgumentException( + "Only one of [opensearch.version] or [dependencies] property can be specified for the plugin [" + name + "]" + ); + } + + final List<SemverRange> opensearchVersionRanges = new ArrayList<>(); + if (opensearchVersionString != null) { + opensearchVersionRanges.add(SemverRange.fromString(opensearchVersionString)); + } else { + Map<String, String> dependenciesMap; + try ( + final JsonXContentParser parser = new JsonXContentParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + jsonFactory.createParser(dependenciesValue) + ) + ) { + dependenciesMap = parser.mapStrings(); + } + if (dependenciesMap.size() != 1) { + throw new IllegalArgumentException( + "Exactly one dependency is allowed to be specified in plugin descriptor properties: " + dependenciesMap + ); + } + if (dependenciesMap.keySet().stream().noneMatch(s -> s.equals("opensearch"))) { + throw new IllegalArgumentException("Only opensearch is allowed to be specified as a plugin dependency: " + dependenciesMap); + } + String[] ranges = dependenciesMap.get("opensearch").split(","); + if (ranges.length != 1) { + throw new IllegalArgumentException( + "Exactly one range is allowed to be specified in dependencies for the plugin [\" + name + \"]" + ); + } 
+ opensearchVersionRanges.add(SemverRange.fromString(ranges[0].trim())); } - final Version opensearchVersion = Version.fromString(opensearchVersionString); + final String javaVersionString = propsMap.remove("java.version"); if (javaVersionString == null) { throw new IllegalArgumentException("property [java.version] is missing for plugin [" + name + "]"); @@ -273,7 +367,7 @@ public static PluginInfo readFromProperties(final Path path) throws IOException name, description, version, - opensearchVersion, + opensearchVersionRanges, javaVersionString, classname, customFolderName, @@ -337,12 +431,26 @@ public String getVersion() { } /** - * The version of OpenSearch the plugin was built for. + * The list of OpenSearch version ranges the plugin is compatible with. * - * @return an OpenSearch version + * @return a list of OpenSearch version ranges */ - public Version getOpenSearchVersion() { - return opensearchVersion; + public List<SemverRange> getOpenSearchVersionRanges() { + return opensearchVersionRanges; + } + + /** + * Pretty print the semver ranges and return the string. + * @return semver ranges string + */ + public String getOpenSearchVersionRangesString() { + if (opensearchVersionRanges == null || opensearchVersionRanges.isEmpty()) { + return ""; + } + if (opensearchVersionRanges.size() == 1) { + return opensearchVersionRanges.get(0).toString(); + } + return opensearchVersionRanges.stream().map(Object::toString).collect(Collectors.joining(",", "[", "]")); } /** @@ -378,7 +486,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws { builder.field("name", name); builder.field("version", version); - builder.field("opensearch_version", opensearchVersion); + builder.field("opensearch_version", opensearchVersionRanges); builder.field("java_version", javaVersion); builder.field("description", description); builder.field("classname", classname); @@ -432,7 +540,7 @@ public String toString(String prefix) { .append("\n") .append(prefix) .append("OpenSearch Version: ") - .append(opensearchVersion) + .append(getOpenSearchVersionRangesString()) .append("\n") .append(prefix) .append("Java Version: ") diff --git a/server/src/main/java/org/opensearch/plugins/PluginSecurity.java b/server/src/main/java/org/opensearch/plugins/PluginSecurity.java index e7d92016d4082..1bf8642d1112f 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginSecurity.java +++ b/server/src/main/java/org/opensearch/plugins/PluginSecurity.java @@ -135,6 +135,7 @@ static String formatPermission(Permission permission) { /** * Parses plugin policy into a set of permissions. Each permission is formatted for output to users. 
*/ + @SuppressWarnings("removal") public static Set<String> parsePermissions(Path file, Path tmpDir) throws IOException { // create a zero byte file for "comparison" // this is necessary because the default policy impl automatically grants two permissions: diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java index cc9cc5b5b5fbf..a6eefd2f4fd17 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginsService.java +++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java @@ -52,6 +52,7 @@ import org.opensearch.core.common.Strings; import org.opensearch.core.service.ReportingService; import org.opensearch.index.IndexModule; +import org.opensearch.semver.SemverRange; import org.opensearch.threadpool.ExecutorBuilder; import org.opensearch.transport.TransportSettings; @@ -387,12 +388,12 @@ public static List<Path> findPluginDirs(final Path rootPath) throws IOException * Verify the given plugin is compatible with the current OpenSearch installation. */ static void verifyCompatibility(PluginInfo info) { - if (info.getOpenSearchVersion().equals(Version.CURRENT) == false) { + if (!isPluginVersionCompatible(info, Version.CURRENT)) { throw new IllegalArgumentException( "Plugin [" + info.getName() + "] was built for OpenSearch version " - + info.getOpenSearchVersion() + + info.getOpenSearchVersionRangesString() + " but version " + Version.CURRENT + " is running" @@ -401,6 +402,16 @@ static void verifyCompatibility(PluginInfo info) { JarHell.checkJavaVersion(info.getName(), info.getJavaVersion()); } + public static boolean isPluginVersionCompatible(final PluginInfo pluginInfo, final Version coreVersion) { + // Core version must satisfy the semver range in plugin info + for (SemverRange range : pluginInfo.getOpenSearchVersionRanges()) { + if (!range.isSatisfiedBy(coreVersion)) { + return false; + } + } + return true; + } + static void checkForFailedPluginRemovals(final Path pluginsDirectory) throws IOException { /* * Check for the existence of a marker file that indicates any plugins are in a garbage state from a failed attempt to remove the @@ -682,6 +693,7 @@ static void checkBundleJarHell(Set<URL> classpath, Bundle bundle, Map<String, Se } } + @SuppressWarnings("removal") private Plugin loadBundle(Bundle bundle, Map<String, Plugin> loaded) { String name = bundle.plugin.getName(); diff --git a/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java b/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java index cc4d3c006d84c..afb6e530b0eec 100644 --- a/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java @@ -39,6 +39,7 @@ import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.plugins.RepositoryPlugin; import org.opensearch.repositories.fs.FsRepository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -71,6 +72,11 @@ public RepositoriesModule( metadata -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings) ); + factories.put( + ReloadableFsRepository.TYPE, + metadata -> new ReloadableFsRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings) + ); + for (RepositoryPlugin repoPlugin : repoPlugins) { Map<String, Repository.Factory> newRepoTypes = repoPlugin.getRepositories( 
env, diff --git a/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java b/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java index c06c805a39396..e8020a432a58a 100644 --- a/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java +++ b/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java @@ -8,18 +8,52 @@ package org.opensearch.repositories.fs; +import org.opensearch.OpenSearchException; import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Randomness; +import org.opensearch.common.blobstore.BlobContainer; +import org.opensearch.common.blobstore.BlobPath; +import org.opensearch.common.blobstore.BlobStore; +import org.opensearch.common.blobstore.fs.FsBlobContainer; +import org.opensearch.common.blobstore.fs.FsBlobStore; +import org.opensearch.common.settings.Setting; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.indices.recovery.RecoverySettings; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.util.Random; + /** - * Extension of {@link FsRepository} that can be reloaded inplace + * Extension of {@link FsRepository} that can be reloaded in-place; it also supports failing operations and slowing them down * * @opensearch.internal */ public class ReloadableFsRepository extends FsRepository { + public static final String TYPE = "reloadable-fs"; + + private final FailSwitch fail; + private final SlowDownWriteSwitch slowDown; + + public static final Setting<Integer> REPOSITORIES_FAILRATE_SETTING = Setting.intSetting( + "repositories.fail.rate", + 0, + 0, + 100, + Setting.Property.NodeScope + ); + + public static final Setting<Integer> REPOSITORIES_SLOWDOWN_SETTING = Setting.intSetting( + "repositories.slowdown", + 0, + 0, + 100, + Setting.Property.NodeScope + ); + /** * Constructs a shared file system repository that is reloadable in-place. */ @@ -31,6 +65,11 @@ public ReloadableFsRepository( RecoverySettings recoverySettings ) { super(metadata, environment, namedXContentRegistry, clusterService, recoverySettings); + fail = new FailSwitch(); + fail.failRate(REPOSITORIES_FAILRATE_SETTING.get(metadata.settings())); + slowDown = new SlowDownWriteSwitch(); + slowDown.setSleepSeconds(REPOSITORIES_SLOWDOWN_SETTING.get(metadata.settings())); + readRepositoryMetadata(); } @Override @@ -40,12 +79,124 @@ public boolean isReloadable() { @Override public void reload(RepositoryMetadata repositoryMetadata) { - if (isReloadable() == false) { - return; - } - super.reload(repositoryMetadata); + readRepositoryMetadata(); validateLocation(); readMetadata(); } + + private void readRepositoryMetadata() { + fail.failRate(REPOSITORIES_FAILRATE_SETTING.get(metadata.settings())); + slowDown.setSleepSeconds(REPOSITORIES_SLOWDOWN_SETTING.get(metadata.settings())); + } + + protected BlobStore createBlobStore() throws Exception { + final String location = REPOSITORIES_LOCATION_SETTING.get(getMetadata().settings()); + final Path locationFile = environment.resolveRepoFile(location); + return new ThrowingBlobStore(bufferSize, locationFile, isReadOnly(), fail, slowDown); + } + + // A random integer from min to max (inclusive).
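+ // e.g. FailSwitch#fail below draws randomIntBetween(1, 100) and fails when the draw is at or below the configured fail rate.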
+ public static int randomIntBetween(int min, int max) { + Random random = Randomness.get(); + return random.nextInt(max - min + 1) + min; + } + + static class FailSwitch { + private volatile int failRate; + private volatile boolean onceFailedFailAlways = false; + + public boolean fail() { + final int rnd = randomIntBetween(1, 100); + boolean fail = rnd <= failRate; + if (fail && onceFailedFailAlways) { + failAlways(); + } + return fail; + } + + public void failAlways() { + failRate = 100; + } + + public void failRate(int rate) { + failRate = rate; + } + + public void onceFailedFailAlways() { + onceFailedFailAlways = true; + } + } + + static class SlowDownWriteSwitch { + private volatile int sleepSeconds; + + public void setSleepSeconds(int sleepSeconds) { + this.sleepSeconds = sleepSeconds; + } + + public int getSleepSeconds() { + return sleepSeconds; + } + } + + private static class ThrowingBlobStore extends FsBlobStore { + + private final FailSwitch fail; + private final SlowDownWriteSwitch slowDown; + + public ThrowingBlobStore(int bufferSizeInBytes, Path path, boolean readonly, FailSwitch fail, SlowDownWriteSwitch slowDown) + throws IOException { + super(bufferSizeInBytes, path, readonly); + this.fail = fail; + this.slowDown = slowDown; + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + try { + return new ThrowingBlobContainer(this, path, buildAndCreate(path), fail, slowDown); + } catch (IOException ex) { + throw new OpenSearchException("failed to create blob container", ex); + } + } + } + + private static class ThrowingBlobContainer extends FsBlobContainer { + + private final FailSwitch fail; + private final SlowDownWriteSwitch slowDown; + + public ThrowingBlobContainer(FsBlobStore blobStore, BlobPath blobPath, Path path, FailSwitch fail, SlowDownWriteSwitch slowDown) { + super(blobStore, blobPath, path); + this.fail = fail; + this.slowDown = slowDown; + } + + @Override + public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, boolean failIfAlreadyExists) + throws IOException { + checkFailRateAndSleep(blobName); + super.writeBlobAtomic(blobName, inputStream, blobSize, failIfAlreadyExists); + } + + private void checkFailRateAndSleep(String blobName) throws IOException { + if (fail.fail() && blobName.contains(".dat") == false) { + throw new IOException("blob container throwing error"); + } + if (slowDown.getSleepSeconds() > 0) { + try { + Thread.sleep(slowDown.getSleepSeconds() * 1000L); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + checkFailRateAndSleep(blobName); + super.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + } + } } diff --git a/server/src/main/java/org/opensearch/rest/MethodHandlers.java b/server/src/main/java/org/opensearch/rest/MethodHandlers.java index 8c29bf2e66036..30221705e1aba 100644 --- a/server/src/main/java/org/opensearch/rest/MethodHandlers.java +++ b/server/src/main/java/org/opensearch/rest/MethodHandlers.java @@ -6,82 +6,24 @@ * compatible open source license. */ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - package org.opensearch.rest; -import org.opensearch.common.Nullable; +import org.opensearch.common.annotation.PublicApi; -import java.util.HashMap; -import java.util.Map; import java.util.Set; /** - * Encapsulate multiple handlers for the same path, allowing different handlers for different HTTP verbs. - * - * @opensearch.api + * A collection of REST method handlers. */ -final class MethodHandlers { - - private final String path; - private final Map<RestRequest.Method, RestHandler> methodHandlers; - - MethodHandlers(String path, RestHandler handler, RestRequest.Method... methods) { - this.path = path; - this.methodHandlers = new HashMap<>(methods.length); - for (RestRequest.Method method : methods) { - methodHandlers.put(method, handler); - } - } - - /** - * Add a handler for an additional array of methods. Note that {@code MethodHandlers} - * does not allow replacing the handler for an already existing method. - */ - MethodHandlers addMethods(RestHandler handler, RestRequest.Method... methods) { - for (RestRequest.Method method : methods) { - RestHandler existing = methodHandlers.putIfAbsent(method, handler); - if (existing != null) { - throw new IllegalArgumentException("Cannot replace existing handler for [" + path + "] for method: " + method); - } - } - return this; - } - +@PublicApi(since = "2.12.0") +public interface MethodHandlers { /** - * Returns the handler for the given method or {@code null} if none exists. + * Return a set of all valid HTTP methods for the particular path. */ - @Nullable - RestHandler getHandler(RestRequest.Method method) { - return methodHandlers.get(method); - } + Set<RestRequest.Method> getValidMethods(); /** - * Return a set of all valid HTTP methods for the particular path + * Returns the relative HTTP path of the set of method handlers. 
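+ * For example, a handler registered at {@code /{index}/_mapping} returns that route template here.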
*/ - Set<RestRequest.Method> getValidMethods() { - return methodHandlers.keySet(); - } + String getPath(); } diff --git a/server/src/main/java/org/opensearch/rest/RestController.java b/server/src/main/java/org/opensearch/rest/RestController.java index cc48b59699a17..95abb9b3daeca 100644 --- a/server/src/main/java/org/opensearch/rest/RestController.java +++ b/server/src/main/java/org/opensearch/rest/RestController.java @@ -65,6 +65,7 @@ import java.io.IOException; import java.io.InputStream; import java.net.URI; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -107,7 +108,7 @@ public class RestController implements HttpServerTransport.Dispatcher { } } - private final PathTrie<MethodHandlers> handlers = new PathTrie<>(RestUtils.REST_DECODER); + private final PathTrie<RestMethodHandlers> handlers = new PathTrie<>(RestUtils.REST_DECODER); private final UnaryOperator<RestHandler> handlerWrapper; @@ -144,6 +145,16 @@ public RestController( ); } + /** + * Returns an iterator over registered REST method handlers. + * @return {@link Iterator} of {@link MethodHandlers} + */ + public Iterator<MethodHandlers> getAllHandlers() { + List<MethodHandlers> methodHandlers = new ArrayList<>(); + handlers.retrieveAll().forEachRemaining(methodHandlers::add); + return methodHandlers.iterator(); + } + /** * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request. * @@ -221,7 +232,7 @@ protected void registerHandler(RestRequest.Method method, String path, RestHandl private void registerHandlerNoWrap(RestRequest.Method method, String path, RestHandler maybeWrappedHandler) { handlers.insertOrUpdate( path, - new MethodHandlers(path, maybeWrappedHandler, method), + new RestMethodHandlers(path, maybeWrappedHandler, method), (mHandlers, newMHandler) -> mHandlers.addMethods(maybeWrappedHandler, method) ); } @@ -392,10 +403,10 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel // Resolves the HTTP method and fails if the method is invalid requestMethod = request.method(); // Loop through all possible handlers, attempting to dispatch the request - Iterator<MethodHandlers> allHandlers = getAllHandlers(request.params(), rawPath); + Iterator<RestMethodHandlers> allHandlers = getAllRestMethodHandlers(request.params(), rawPath); while (allHandlers.hasNext()) { final RestHandler handler; - final MethodHandlers handlers = allHandlers.next(); + final RestMethodHandlers handlers = allHandlers.next(); if (handlers == null) { handler = null; } else { @@ -423,7 +434,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel handleBadRequest(uri, requestMethod, channel); } - Iterator<MethodHandlers> getAllHandlers(@Nullable Map<String, String> requestParamsRef, String rawPath) { + Iterator<RestMethodHandlers> getAllRestMethodHandlers(@Nullable Map<String, String> requestParamsRef, String rawPath) { final Supplier<Map<String, String>> paramsSupplier; if (requestParamsRef == null) { paramsSupplier = () -> null; @@ -561,7 +572,7 @@ private boolean handleAuthenticateUser(final RestRequest request, final RestChan */ private Set<RestRequest.Method> getValidHandlerMethodSet(String rawPath) { Set<RestRequest.Method> validMethods = new HashSet<>(); - Iterator<MethodHandlers> allHandlers = getAllHandlers(null, rawPath); + Iterator<RestMethodHandlers> allHandlers = getAllRestMethodHandlers(null, rawPath); while (allHandlers.hasNext()) { final MethodHandlers methodHandlers = 
allHandlers.next(); if (methodHandlers != null) { diff --git a/server/src/main/java/org/opensearch/rest/RestMethodHandlers.java b/server/src/main/java/org/opensearch/rest/RestMethodHandlers.java new file mode 100644 index 0000000000000..a430d8ace447c --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/RestMethodHandlers.java @@ -0,0 +1,92 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.rest; + +import org.opensearch.common.Nullable; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +/** + * Encapsulate multiple handlers for the same path, allowing different handlers for different HTTP verbs. + */ +final class RestMethodHandlers implements MethodHandlers { + + private final String path; + private final Map<RestRequest.Method, RestHandler> methodHandlers; + + RestMethodHandlers(String path, RestHandler handler, RestRequest.Method... methods) { + this.path = path; + this.methodHandlers = new HashMap<>(methods.length); + for (RestRequest.Method method : methods) { + methodHandlers.put(method, handler); + } + } + + /** + * Add a handler for an additional array of methods. Note that {@code MethodHandlers} + * does not allow replacing the handler for an already existing method. + */ + public RestMethodHandlers addMethods(RestHandler handler, RestRequest.Method... methods) { + for (RestRequest.Method method : methods) { + RestHandler existing = methodHandlers.putIfAbsent(method, handler); + if (existing != null) { + throw new IllegalArgumentException("Cannot replace existing handler for [" + path + "] for method: " + method); + } + } + return this; + } + + /** + * Returns the handler for the given method or {@code null} if none exists. + */ + @Nullable + public RestHandler getHandler(RestRequest.Method method) { + return methodHandlers.get(method); + } + + /** + * Return a set of all valid HTTP methods for the particular path. + */ + public Set<RestRequest.Method> getValidMethods() { + return methodHandlers.keySet(); + } + + /** + * Returns the relative HTTP path of the set of method handlers. 
+ */ + public String getPath() { + return path; + } +} diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java index 28edba4db387d..138f9fdf5c813 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java @@ -53,7 +53,7 @@ public final class RestRemoteClusterInfoAction extends BaseRestHandler { @Override public List<Route> routes() { - return singletonList(new Route(GET, "_remote/info")); + return singletonList(new Route(GET, "/_remote/info")); } @Override diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java index 8fdf000139d89..0d805f5f3bfb8 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java @@ -66,10 +66,10 @@ public class RestPutMappingAction extends BaseRestHandler { public List<Route> routes() { return unmodifiableList( asList( - new Route(POST, "/{index}/_mapping/"), - new Route(PUT, "/{index}/_mapping/"), - new Route(POST, "/{index}/_mappings/"), - new Route(PUT, "/{index}/_mappings/") + new Route(POST, "/{index}/_mapping"), + new Route(PUT, "/{index}/_mapping"), + new Route(POST, "/{index}/_mappings"), + new Route(PUT, "/{index}/_mappings") ) ); } diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java index 23cc1cb507072..9dc711f804144 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java @@ -54,7 +54,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.ActionListener; import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.Strings; @@ -582,31 +581,29 @@ protected Table getTableWithHeader(final RestRequest request) { "sibling:pri;alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops" ); table.addCell("pri.search.query_total", "default:false;text-align:right;desc:total query phase ops"); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell( - "search.concurrent_query_current", - "sibling:pri;alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops" - ); - table.addCell("pri.search.concurrent_query_current", "default:false;text-align:right;desc:current concurrent query phase ops"); + table.addCell( + "search.concurrent_query_current", + "sibling:pri;alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops" + ); + table.addCell("pri.search.concurrent_query_current", "default:false;text-align:right;desc:current concurrent query phase ops"); - table.addCell( - "search.concurrent_query_time", - "sibling:pri;alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase" - ); - 
table.addCell("pri.search.concurrent_query_time", "default:false;text-align:right;desc:time spent in concurrent query phase"); + table.addCell( + "search.concurrent_query_time", + "sibling:pri;alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase" + ); + table.addCell("pri.search.concurrent_query_time", "default:false;text-align:right;desc:time spent in concurrent query phase"); - table.addCell( - "search.concurrent_query_total", - "sibling:pri;alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total query phase ops" - ); - table.addCell("pri.search.concurrent_query_total", "default:false;text-align:right;desc:total query phase ops"); + table.addCell( + "search.concurrent_query_total", + "sibling:pri;alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total query phase ops" + ); + table.addCell("pri.search.concurrent_query_total", "default:false;text-align:right;desc:total query phase ops"); - table.addCell( - "search.concurrent_avg_slice_count", - "sibling:pri;alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency" - ); - table.addCell("pri.search.concurrent_avg_slice_count", "default:false;text-align:right;desc:average query concurrency"); - } + table.addCell( + "search.concurrent_avg_slice_count", + "sibling:pri;alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency" + ); + table.addCell("pri.search.concurrent_avg_slice_count", "default:false;text-align:right;desc:average query concurrency"); table.addCell( "search.scroll_current", @@ -916,19 +913,17 @@ Table buildTable( table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getQueryCount()); table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getQueryCount()); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCurrent()); - table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCurrent()); + table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCurrent()); + table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCurrent()); - table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryTime()); - table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryTime()); + table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryTime()); + table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryTime()); - table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCount()); - table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCount()); + table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCount()); + table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCount()); - table.addCell(totalStats.getSearch() == null ? 
null : totalStats.getSearch().getTotal().getConcurrentAvgSliceCount()); - table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentAvgSliceCount()); - } + table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentAvgSliceCount()); + table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentAvgSliceCount()); table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getScrollCurrent()); table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getScrollCurrent()); diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java index dd3e0ba836557..e11012a23fce7 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java @@ -47,7 +47,6 @@ import org.opensearch.common.Table; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.network.NetworkAddress; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; @@ -304,24 +303,22 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("search.query_current", "alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops"); table.addCell("search.query_time", "alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase"); table.addCell("search.query_total", "alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops"); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell( - "search.concurrent_query_current", - "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops" - ); - table.addCell( - "search.concurrent_query_time", - "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase" - ); - table.addCell( - "search.concurrent_query_total", - "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops" - ); - table.addCell( - "search.concurrent_avg_slice_count", - "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency" - ); - } + table.addCell( + "search.concurrent_query_current", + "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops" + ); + table.addCell( + "search.concurrent_query_time", + "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase" + ); + table.addCell( + "search.concurrent_query_total", + "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops" + ); + table.addCell( + "search.concurrent_avg_slice_count", + "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency" + ); table.addCell("search.scroll_current", "alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts"); table.addCell( "search.scroll_time", @@ -548,12 +545,10 @@ Table buildTable( table.addCell(searchStats == null ? 
null : searchStats.getTotal().getQueryCurrent()); table.addCell(searchStats == null ? null : searchStats.getTotal().getQueryTime()); table.addCell(searchStats == null ? null : searchStats.getTotal().getQueryCount()); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCurrent()); - table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryTime()); - table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCount()); - table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentAvgSliceCount()); - } + table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCurrent()); + table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryTime()); + table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCount()); + table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentAvgSliceCount()); table.addCell(searchStats == null ? null : searchStats.getTotal().getScrollCurrent()); table.addCell(searchStats == null ? null : searchStats.getTotal().getScrollTime()); table.addCell(searchStats == null ? null : searchStats.getTotal().getScrollCount()); diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java index d0d00e4c4596a..4cd10c6874e0a 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java @@ -44,7 +44,6 @@ import org.opensearch.common.Table; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.index.cache.query.QueryCacheStats; import org.opensearch.index.engine.CommitStats; @@ -220,24 +219,22 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("search.query_current", "alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops"); table.addCell("search.query_time", "alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase"); table.addCell("search.query_total", "alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops"); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell( - "search.concurrent_query_current", - "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops" - ); - table.addCell( - "search.concurrent_query_time", - "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase" - ); - table.addCell( - "search.concurrent_query_total", - "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops" - ); - table.addCell( - "search.concurrent_avg_slice_count", - "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency" - ); - } + table.addCell( + "search.concurrent_query_current", + "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops" + ); + table.addCell( + "search.concurrent_query_time", + 
"alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase" + ); + table.addCell( + "search.concurrent_query_total", + "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops" + ); + table.addCell( + "search.concurrent_avg_slice_count", + "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency" + ); table.addCell("search.scroll_current", "alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts"); table.addCell( "search.scroll_time", @@ -419,13 +416,11 @@ Table buildTable(RestRequest request, ClusterStateResponse state, IndicesStatsRe table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getQueryCurrent())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getQueryTime())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getQueryCount())); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCurrent())); - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryTime())); - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCount())); - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentAvgSliceCount())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCurrent())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryTime())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCount())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentAvgSliceCount())); - } table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getScrollCurrent())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getScrollTime())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getScrollCount())); diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java index 080366e536da1..80dc34c4d5d68 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java @@ -86,10 +86,13 @@ public class RestSearchAction extends BaseRestHandler { */ public static final String TOTAL_HITS_AS_INT_PARAM = "rest_total_hits_as_int"; public static final String TYPED_KEYS_PARAM = "typed_keys"; + public static final String INCLUDE_NAMED_QUERIES_SCORE_PARAM = "include_named_queries_score"; private static final Set<String> RESPONSE_PARAMS; static { - final Set<String> responseParams = new HashSet<>(Arrays.asList(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM)); + final Set<String> responseParams = new HashSet<>( + Arrays.asList(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM) + ); RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } @@ -209,6 +212,7 @@ public static void parseSearchRequest( searchRequest.pipeline(request.param("search_pipeline")); checkRestTotalHits(request, searchRequest); + 
request.paramAsBoolean(INCLUDE_NAMED_QUERIES_SCORE_PARAM, false); if (searchRequest.pointInTimeBuilder() != null) { preparePointInTime(searchRequest, request, namedWriteableRegistry); @@ -286,6 +290,10 @@ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuil searchSourceBuilder.trackScores(request.paramAsBoolean("track_scores", false)); } + if (request.hasParam("include_named_queries_score")) { + searchSourceBuilder.includeNamedQueriesScores(request.paramAsBoolean("include_named_queries_score", false)); + } + if (request.hasParam("track_total_hits")) { if (Booleans.isBoolean(request.param("track_total_hits"))) { searchSourceBuilder.trackTotalHits(request.paramAsBoolean("track_total_hits", true)); diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index 960b46d68977b..061aa2f6e5896 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -50,7 +50,6 @@ import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; @@ -149,6 +148,8 @@ final class DefaultSearchContext extends SearchContext { private SortAndFormats sort; private Float minimumScore; private boolean trackScores = false; // when sorting, track scores as well... + + private boolean includeNamedQueriesScore = false; private int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; private FieldDoc searchAfter; private CollapseContext collapse; @@ -636,6 +637,17 @@ public boolean trackScores() { return this.trackScores; } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + @Override + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore; + } + @Override public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { this.trackTotalHitsUpTo = trackTotalHitsUpTo; @@ -950,9 +962,7 @@ public BucketCollectorProcessor bucketCollectorProcessor() { * false: otherwise */ private boolean evaluateConcurrentSegmentSearchSettings(Executor concurrentSearchExecutor) { - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH) - && (clusterService != null) - && (concurrentSearchExecutor != null)) { + if ((clusterService != null) && (concurrentSearchExecutor != null)) { return indexService.getIndexSettings() .getSettings() .getAsBoolean( diff --git a/server/src/main/java/org/opensearch/search/MultiValueMode.java b/server/src/main/java/org/opensearch/search/MultiValueMode.java index ca088203733c6..a99da674836f2 100644 --- a/server/src/main/java/org/opensearch/search/MultiValueMode.java +++ b/server/src/main/java/org/opensearch/search/MultiValueMode.java @@ -685,6 +685,11 @@ public boolean advanceExact(int target) throws IOException { public double doubleValue() throws IOException { return this.value; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } } @@ -745,6 +750,11 @@ public boolean advanceExact(int parentDoc) throws IOException { public double doubleValue() throws IOException { return 
lastEmittedValue; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } diff --git a/server/src/main/java/org/opensearch/search/SearchHit.java b/server/src/main/java/org/opensearch/search/SearchHit.java index 10e65fca3afb5..a3db7e0893a3d 100644 --- a/server/src/main/java/org/opensearch/search/SearchHit.java +++ b/server/src/main/java/org/opensearch/search/SearchHit.java @@ -64,19 +64,21 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.seqno.SequenceNumbers; +import org.opensearch.rest.action.search.RestSearchAction; import org.opensearch.search.fetch.subphase.highlight.HighlightField; import org.opensearch.search.lookup.SourceLookup; import org.opensearch.transport.RemoteClusterAware; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; @@ -120,7 +122,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do private SearchSortValues sortValues = SearchSortValues.EMPTY; - private String[] matchedQueries = Strings.EMPTY_ARRAY; + private Map<String, Float> matchedQueries = new HashMap<>(); private Explanation explanation; @@ -203,10 +205,20 @@ public SearchHit(StreamInput in) throws IOException { sortValues = new SearchSortValues(in); size = in.readVInt(); - if (size > 0) { - matchedQueries = new String[size]; + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (size > 0) { + Map<String, Float> tempMap = in.readMap(StreamInput::readString, StreamInput::readFloat); + matchedQueries = tempMap.entrySet() + .stream() + .sorted(Map.Entry.comparingByKey()) + .collect( + Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new) + ); + } + } else { + matchedQueries = new LinkedHashMap<>(size); for (int i = 0; i < size; i++) { - matchedQueries[i] = in.readString(); + matchedQueries.put(in.readString(), Float.NaN); } } // we call the setter here because that also sets the local index parameter @@ -224,36 +236,6 @@ public SearchHit(StreamInput in) throws IOException { } } - private Map<String, DocumentField> readFields(StreamInput in) throws IOException { - Map<String, DocumentField> fields; - int size = in.readVInt(); - if (size == 0) { - fields = emptyMap(); - } else if (size == 1) { - DocumentField hitField = new DocumentField(in); - fields = singletonMap(hitField.getName(), hitField); - } else { - fields = new HashMap<>(size); - for (int i = 0; i < size; i++) { - DocumentField field = new DocumentField(in); - fields.put(field.getName(), field); - } - fields = unmodifiableMap(fields); - } - return fields; - } - - private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException { - if (fields == null) { - out.writeVInt(0); - } else { - out.writeVInt(fields.size()); - for (DocumentField field : fields.values()) { - field.writeTo(out); - } - } - } - private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); @Override @@ -286,11 +268,13 @@ public void writeTo(StreamOutput out) throws IOException { } sortValues.writeTo(out); - if (matchedQueries.length == 0) { - 
out.writeVInt(0); + out.writeVInt(matchedQueries.size()); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (!matchedQueries.isEmpty()) { + out.writeMap(matchedQueries, StreamOutput::writeString, StreamOutput::writeFloat); + } } else { - out.writeVInt(matchedQueries.length); - for (String matchedFilter : matchedQueries) { + for (String matchedFilter : matchedQueries.keySet()) { out.writeString(matchedFilter); } } @@ -458,11 +442,11 @@ public DocumentField field(String fieldName) { } /* - * Adds a new DocumentField to the map in case both parameters are not null. - * */ + * Adds a new DocumentField to the map if both parameters are non-null. + * */ public void setDocumentField(String fieldName, DocumentField field) { if (fieldName == null || field == null) return; - if (documentFields.size() == 0) this.documentFields = new HashMap<>(); + if (documentFields.isEmpty()) this.documentFields = new HashMap<>(); this.documentFields.put(fieldName, field); } @@ -475,7 +459,7 @@ public DocumentField removeDocumentField(String fieldName) { * were required to be loaded. */ public Map<String, DocumentField> getFields() { - if (metaFields.size() > 0 || documentFields.size() > 0) { + if (!metaFields.isEmpty() || !documentFields.isEmpty()) { final Map<String, DocumentField> fields = new HashMap<>(); fields.putAll(metaFields); fields.putAll(documentFields); @@ -560,14 +544,45 @@ public String getClusterAlias() { } public void matchedQueries(String[] matchedQueries) { - this.matchedQueries = matchedQueries; + if (matchedQueries != null) { + for (String query : matchedQueries) { + this.matchedQueries.put(query, Float.NaN); + } + } + } + + public void matchedQueriesWithScores(Map<String, Float> matchedQueries) { + if (matchedQueries != null) { + this.matchedQueries = matchedQueries; + } } /** * The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries. */ public String[] getMatchedQueries() { - return this.matchedQueries; + return matchedQueries == null ? new String[0] : matchedQueries.keySet().toArray(new String[0]); + } + + /** + * Returns the score of the provided named query if it matches. + * <p> + * If the 'include_named_queries_score' parameter is not set, this method will return {@link Float#NaN} + * for each named query instead of a numerical score. + * </p> + * + * @param name The name of the query to retrieve the score for. + * @return The score of the named query, or {@link Float#NaN} if 'include_named_queries_score' is not set. + */ + public Float getMatchedQueryScore(String name) { + return getMatchedQueriesAndScores().get(name); + } + + /** + * @return The map of the named queries that matched and their associated score. + */ + public Map<String, Float> getMatchedQueriesAndScores() { + return matchedQueries == null ?
Collections.emptyMap() : matchedQueries; } /** @@ -654,7 +669,7 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t for (DocumentField field : metaFields.values()) { // ignore empty metadata fields - if (field.getValues().size() == 0) { + if (field.getValues().isEmpty()) { continue; } // _ignored is the only multi-valued meta field @@ -670,10 +685,10 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t } if (documentFields.isEmpty() == false && // ignore fields all together if they are all empty - documentFields.values().stream().anyMatch(df -> df.getValues().size() > 0)) { + documentFields.values().stream().anyMatch(df -> !df.getValues().isEmpty())) { builder.startObject(Fields.FIELDS); for (DocumentField field : documentFields.values()) { - if (field.getValues().size() > 0) { + if (!field.getValues().isEmpty()) { field.toXContent(builder, params); } } @@ -687,12 +702,21 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t builder.endObject(); } sortValues.toXContent(builder, params); - if (matchedQueries.length > 0) { - builder.startArray(Fields.MATCHED_QUERIES); - for (String matchedFilter : matchedQueries) { - builder.value(matchedFilter); + if (!matchedQueries.isEmpty()) { + boolean includeMatchedQueriesScore = params.paramAsBoolean(RestSearchAction.INCLUDE_NAMED_QUERIES_SCORE_PARAM, false); + if (includeMatchedQueriesScore) { + builder.startObject(Fields.MATCHED_QUERIES); + for (Map.Entry<String, Float> entry : matchedQueries.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); + } else { + builder.startArray(Fields.MATCHED_QUERIES); + for (String matchedFilter : matchedQueries.keySet()) { + builder.value(matchedFilter); + } + builder.endArray(); } - builder.endArray(); } if (getExplanation() != null) { builder.field(Fields._EXPLANATION); @@ -797,7 +821,27 @@ public static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, (p, c) -> parseInnerHits(p), new ParseField(Fields.INNER_HITS) ); - parser.declareStringArray((map, list) -> map.put(Fields.MATCHED_QUERIES, list), new ParseField(Fields.MATCHED_QUERIES)); + parser.declareField((p, map, context) -> { + XContentParser.Token token = p.currentToken(); + Map<String, Float> matchedQueries = new LinkedHashMap<>(); + if (token == XContentParser.Token.START_OBJECT) { + String fieldName = null; + while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token.isValue()) { + matchedQueries.put(fieldName, p.floatValue()); + } + } + } else if (token == XContentParser.Token.START_ARRAY) { + while (p.nextToken() != XContentParser.Token.END_ARRAY) { + matchedQueries.put(p.text(), Float.NaN); + } + } else { + throw new IllegalStateException("expected object or array but got [" + token + "]"); + } + map.put(Fields.MATCHED_QUERIES, matchedQueries); + }, new ParseField(Fields.MATCHED_QUERIES), ObjectParser.ValueType.OBJECT_ARRAY); parser.declareField( (map, list) -> map.put(Fields.SORT, list), SearchSortValues::fromXContent, @@ -828,7 +872,7 @@ public static SearchHit createFromMap(Map<String, Object> values) { assert shardId.getIndexName().equals(index); searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE)); } else { - // these fields get set anyways when setting the shard target, + // these fields get set anyway when setting the shard target, // but we set them 
explicitly when we don't have enough info to rebuild the shard target searchHit.index = index; searchHit.clusterAlias = clusterAlias; @@ -842,10 +886,7 @@ public static SearchHit createFromMap(Map<String, Object> values) { searchHit.sourceRef(get(SourceFieldMapper.NAME, values, null)); searchHit.explanation(get(Fields._EXPLANATION, values, null)); searchHit.setInnerHits(get(Fields.INNER_HITS, values, null)); - List<String> matchedQueries = get(Fields.MATCHED_QUERIES, values, null); - if (matchedQueries != null) { - searchHit.matchedQueries(matchedQueries.toArray(new String[0])); - } + searchHit.matchedQueriesWithScores(get(Fields.MATCHED_QUERIES, values, null)); return searchHit; } @@ -965,7 +1006,7 @@ public boolean equals(Object obj) { && Objects.equals(documentFields, other.documentFields) && Objects.equals(metaFields, other.metaFields) && Objects.equals(getHighlightFields(), other.getHighlightFields()) - && Arrays.equals(matchedQueries, other.matchedQueries) + && Objects.equals(getMatchedQueriesAndScores(), other.getMatchedQueriesAndScores()) && Objects.equals(explanation, other.explanation) && Objects.equals(shard, other.shard) && Objects.equals(innerHits, other.innerHits) @@ -985,7 +1026,7 @@ public int hashCode() { documentFields, metaFields, getHighlightFields(), - Arrays.hashCode(matchedQueries), + getMatchedQueriesAndScores(), explanation, shard, innerHits, diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index 62d397de58187..88218896dceae 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -39,7 +39,6 @@ import org.opensearch.common.geo.ShapesAvailability; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.ParseFieldRegistry; import org.opensearch.core.ParseField; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; @@ -1279,7 +1278,7 @@ private SearchPlugin.ExecutorServiceProvider registerIndexSearcherExecutorProvid } } - if (provider == null && FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { + if (provider == null) { provider = (ThreadPool threadPool) -> threadPool.executor(INDEX_SEARCHER); } return provider; diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 2c85fcbb25f35..62eb597e387e6 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -250,7 +250,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv public static final Setting<Boolean> CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING = Setting.boolSetting( "search.concurrent_segment_search.enabled", - true, + false, Property.Dynamic, Property.NodeScope ); @@ -1274,6 +1274,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc } } context.trackScores(source.trackScores()); + context.includeNamedQueriesScore(source.includeNamedQueriesScore()); if (source.trackTotalHitsUpTo() != null && source.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_ACCURATE && context.scrollContext() != null) { diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java 
b/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java new file mode 100644 index 0000000000000..e587b7f169e5f --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java @@ -0,0 +1,497 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.FieldExistsQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PointRangeQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.NumericUtils; +import org.opensearch.common.Rounding; +import org.opensearch.common.lucene.search.function.FunctionScoreQuery; +import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.DocCountFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.query.DateRangeIncludingNowQuery; +import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig; +import org.opensearch.search.aggregations.bucket.composite.RoundingValuesSource; +import org.opensearch.search.aggregations.bucket.histogram.LongBounds; +import org.opensearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.OptionalLong; +import java.util.function.BiConsumer; +import java.util.function.Function; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; + +/** + * Utility class to help rewrite aggregations into filters. + * Instead of the aggregation collecting documents one by one, a filter can count all matching documents in one pass. + * <p> + * Currently supported rewrite: + * <ul> + * <li> date histogram: date range filter.
+ * Applied: DateHistogramAggregator, AutoDateHistogramAggregator, CompositeAggregator </li> + * </ul> + * + * @opensearch.internal + */ +public final class FastFilterRewriteHelper { + + private FastFilterRewriteHelper() {} + + private static final Logger logger = LogManager.getLogger(FastFilterRewriteHelper.class); + + private static final int MAX_NUM_FILTER_BUCKETS = 1024; + private static final Map<Class<?>, Function<Query, Query>> queryWrappers; + + // Initialize the wrapper map for unwrapping the query + static { + queryWrappers = new HashMap<>(); + queryWrappers.put(ConstantScoreQuery.class, q -> ((ConstantScoreQuery) q).getQuery()); + queryWrappers.put(FunctionScoreQuery.class, q -> ((FunctionScoreQuery) q).getSubQuery()); + queryWrappers.put(DateRangeIncludingNowQuery.class, q -> ((DateRangeIncludingNowQuery) q).getQuery()); + queryWrappers.put(IndexOrDocValuesQuery.class, q -> ((IndexOrDocValuesQuery) q).getIndexQuery()); + } + + /** + * Recursively unwraps the query into its concrete form + * for applying the optimization + */ + private static Query unwrapIntoConcreteQuery(Query query) { + while (queryWrappers.containsKey(query.getClass())) { + query = queryWrappers.get(query.getClass()).apply(query); + } + + return query; + } + + /** + * Finds the global min and max bounds of the field for the shard across all segments + * + * @return null if the field is empty or not indexed + */ + private static long[] getShardBounds(final SearchContext context, final String fieldName) throws IOException { + final List<LeafReaderContext> leaves = context.searcher().getIndexReader().leaves(); + long min = Long.MAX_VALUE, max = Long.MIN_VALUE; + for (LeafReaderContext leaf : leaves) { + final PointValues values = leaf.reader().getPointValues(fieldName); + if (values != null) { + min = Math.min(min, NumericUtils.sortableBytesToLong(values.getMinPackedValue(), 0)); + max = Math.max(max, NumericUtils.sortableBytesToLong(values.getMaxPackedValue(), 0)); + } + } + + if (min == Long.MAX_VALUE || max == Long.MIN_VALUE) { + return null; + } + return new long[] { min, max }; + } + + /** + * Finds the min and max bounds of the field for the segment + * + * @return null if the field is empty or not indexed + */ + private static long[] getSegmentBounds(final LeafReaderContext context, final String fieldName) throws IOException { + long min = Long.MAX_VALUE, max = Long.MIN_VALUE; + final PointValues values = context.reader().getPointValues(fieldName); + if (values != null) { + min = Math.min(min, NumericUtils.sortableBytesToLong(values.getMinPackedValue(), 0)); + max = Math.max(max, NumericUtils.sortableBytesToLong(values.getMaxPackedValue(), 0)); + } + + if (min == Long.MAX_VALUE || max == Long.MIN_VALUE) { + return null; + } + return new long[] { min, max }; + } + + /** + * This method also acts as a pre-condition check for the optimization + * + * @return null if the processed query is not of the expected form + */ + public static long[] getDateHistoAggBounds(final SearchContext context, final String fieldName) throws IOException { + final Query cq = unwrapIntoConcreteQuery(context.query()); + if (cq instanceof PointRangeQuery) { + final PointRangeQuery prq = (PointRangeQuery) cq; + final long[] indexBounds = getShardBounds(context, fieldName); + if (indexBounds == null) return null; + return getBoundsWithRangeQuery(prq, fieldName, indexBounds); + } else if (cq instanceof MatchAllDocsQuery) { + return getShardBounds(context, fieldName); + } else if (cq instanceof FieldExistsQuery) { + // when a range query covers all
values of a shard, it will be rewritten into a field-exists query + if (((FieldExistsQuery) cq).getField().equals(fieldName)) { + return getShardBounds(context, fieldName); + } + } + + return null; + } + + private static long[] getBoundsWithRangeQuery(PointRangeQuery prq, String fieldName, long[] indexBounds) { + // Ensure that the query and aggregation are on the same field + if (prq.getField().equals(fieldName)) { + // Minimum bound for aggregation is the max between query and global + long lower = Math.max(NumericUtils.sortableBytesToLong(prq.getLowerPoint(), 0), indexBounds[0]); + // Maximum bound for aggregation is the min between query and global + long upper = Math.min(NumericUtils.sortableBytesToLong(prq.getUpperPoint(), 0), indexBounds[1]); + if (lower > upper) { + return null; + } + return new long[] { lower, upper }; + } + + return null; + } + + /** + * Creates the date range filters for aggregations using the interval, min/max + * bounds and prepared rounding + */ + private static Weight[] createFilterForAggregations( + final SearchContext context, + final DateFieldMapper.DateFieldType fieldType, + final long interval, + final Rounding.Prepared preparedRounding, + long low, + final long high + ) throws IOException { + // Calculate the number of buckets using range and interval + long roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); + long prevRounded = roundedLow; + int bucketCount = 0; + while (roundedLow <= fieldType.convertNanosToMillis(high)) { + bucketCount++; + if (bucketCount > MAX_NUM_FILTER_BUCKETS) { + logger.debug("Max number of filters reached [{}], skip the fast filter optimization", MAX_NUM_FILTER_BUCKETS); + return null; + } + // The rounding below is needed because adding the interval can produce + // non-rounded values for intervals like calendar month + roundedLow = preparedRounding.round(roundedLow + interval); + if (prevRounded == roundedLow) break; // prevents getting into an infinite loop + prevRounded = roundedLow; + } + + Weight[] filters = null; + if (bucketCount > 0) { + filters = new Weight[bucketCount]; + roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); + + int i = 0; + while (i < bucketCount) { + // Calculate the lower bucket bound + final byte[] lower = new byte[8]; + NumericUtils.longToSortableBytes(i == 0 ? low : fieldType.convertRoundedMillisToNanos(roundedLow), lower, 0); + + // Calculate the upper bucket bound + roundedLow = preparedRounding.round(roundedLow + interval); + final byte[] upper = new byte[8]; + NumericUtils.longToSortableBytes(i + 1 == bucketCount ?
high : + // Subtract -1 if the minimum is roundedLow as roundedLow itself + // is included in the next bucket + fieldType.convertRoundedMillisToNanos(roundedLow) - 1, upper, 0); + + filters[i++] = context.searcher().createWeight(new PointRangeQuery(fieldType.name(), lower, upper, 1) { + @Override + protected String toString(int dimension, byte[] value) { + return Long.toString(LongPoint.decodeDimension(value, 0)); + } + }, ScoreMode.COMPLETE_NO_SCORES, 1); + } + } + + return filters; + } + + /** + * Context object for fast filter optimization + * <p> + * Usage: first set aggregation type, then check isRewriteable, then buildFastFilter + */ + public static class FastFilterContext { + private boolean rewriteable = false; + private Weight[] filters = null; + private boolean filtersBuiltAtShardLevel = false; + + private AggregationType aggregationType; + private final SearchContext context; + + public FastFilterContext(SearchContext context) { + this.context = context; + } + + public AggregationType getAggregationType() { + return aggregationType; + } + + public void setAggregationType(AggregationType aggregationType) { + this.aggregationType = aggregationType; + } + + public boolean isRewriteable(final Object parent, final int subAggLength) { + boolean rewriteable = aggregationType.isRewriteable(parent, subAggLength); + logger.debug("Fast filter rewriteable: {} for shard {}", rewriteable, context.indexShard().shardId()); + this.rewriteable = rewriteable; + return rewriteable; + } + + public void buildFastFilter() throws IOException { + assert filters == null : "Filters should only be built once, but they are already built"; + this.filters = this.aggregationType.buildFastFilter(context); + if (filters != null) { + logger.debug("Fast filter built for shard {}", context.indexShard().shardId()); + filtersBuiltAtShardLevel = true; + } + } + + /** + * Built filters for a segment + */ + public Weight[] buildFastFilter(LeafReaderContext leaf) throws IOException { + Weight[] filters = this.aggregationType.buildFastFilter(leaf, context); + if (filters != null) { + logger.debug("Fast filter built for shard {} segment {}", context.indexShard().shardId(), leaf.ord); + } + return filters; + } + } + + /** + * Different types have different pre-conditions, filter building logic, etc. 
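+     * <p>
+     * A rough wiring sketch from an aggregator's point of view (the aggregation type
+     * name below is illustrative, not part of this interface):
+     * <pre>
+     * FastFilterContext fastFilterContext = new FastFilterContext(searchContext);
+     * fastFilterContext.setAggregationType(new SomeDateHistogramType(fieldType, false, false));
+     * if (fastFilterContext.isRewriteable(parent, subAggregators.length)) {
+     *     fastFilterContext.buildFastFilter(); // shard-level filters, built once
+     * }
+     * </pre>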
+     */
+    interface AggregationType {
+
+        boolean isRewriteable(Object parent, int subAggLength);
+
+        Weight[] buildFastFilter(SearchContext ctx) throws IOException;
+
+        Weight[] buildFastFilter(LeafReaderContext leaf, SearchContext ctx) throws IOException;
+
+        default int getSize() {
+            return Integer.MAX_VALUE;
+        }
+    }
+
+    /**
+     * For date histogram aggregation
+     */
+    public static abstract class AbstractDateHistogramAggregationType implements AggregationType {
+        private final MappedFieldType fieldType;
+        private final boolean missing;
+        private final boolean hasScript;
+        private LongBounds hardBounds;
+
+        public AbstractDateHistogramAggregationType(MappedFieldType fieldType, boolean missing, boolean hasScript) {
+            this.fieldType = fieldType;
+            this.missing = missing;
+            this.hasScript = hasScript;
+        }
+
+        public AbstractDateHistogramAggregationType(MappedFieldType fieldType, boolean missing, boolean hasScript, LongBounds hardBounds) {
+            this(fieldType, missing, hasScript);
+            this.hardBounds = hardBounds;
+        }
+
+        @Override
+        public boolean isRewriteable(Object parent, int subAggLength) {
+            if (parent == null && subAggLength == 0 && !missing && !hasScript) {
+                if (fieldType != null && fieldType instanceof DateFieldMapper.DateFieldType) {
+                    return fieldType.isSearchable();
+                }
+            }
+            return false;
+        }
+
+        @Override
+        public Weight[] buildFastFilter(SearchContext context) throws IOException {
+            long[] bounds = getDateHistoAggBounds(context, fieldType.name());
+            logger.debug("Bounds are {} for shard {}", bounds, context.indexShard().shardId());
+            return buildFastFilter(context, bounds);
+        }
+
+        @Override
+        public Weight[] buildFastFilter(LeafReaderContext leaf, SearchContext context) throws IOException {
+            long[] bounds = getSegmentBounds(leaf, fieldType.name());
+            logger.debug("Bounds are {} for shard {} segment {}", bounds, context.indexShard().shardId(), leaf.ord);
+            return buildFastFilter(context, bounds);
+        }
+
+        private Weight[] buildFastFilter(SearchContext context, long[] bounds) throws IOException {
+            bounds = processHardBounds(bounds);
+            if (bounds == null) {
+                return null;
+            }
+            assert bounds[0] <= bounds[1] : "Low bound should not be greater than high bound";
+
+            final Rounding rounding = getRounding(bounds[0], bounds[1]);
+            final OptionalLong intervalOpt = Rounding.getInterval(rounding);
+            if (intervalOpt.isEmpty()) {
+                return null;
+            }
+            final long interval = intervalOpt.getAsLong();
+
+            // process the after key of composite agg
+            processAfterKey(bounds, interval);
+
+            return FastFilterRewriteHelper.createFilterForAggregations(
+                context,
+                (DateFieldMapper.DateFieldType) fieldType,
+                interval,
+                getRoundingPrepared(),
+                bounds[0],
+                bounds[1]
+            );
+        }
+
+        protected abstract Rounding getRounding(final long low, final long high);
+
+        protected abstract Rounding.Prepared getRoundingPrepared();
+
+        protected void processAfterKey(long[] bound, long interval) {}
+
+        protected long[] processHardBounds(long[] bounds) {
+            if (bounds != null) {
+                // Update min/max limit if user specified any hard bounds
+                if (hardBounds != null) {
+                    if (hardBounds.getMin() > bounds[0]) {
+                        bounds[0] = hardBounds.getMin();
+                    }
+                    if (hardBounds.getMax() - 1 < bounds[1]) {
+                        bounds[1] = hardBounds.getMax() - 1; // hard bounds max is exclusive
+                    }
+                    if (bounds[0] > bounds[1]) {
+                        return null;
+                    }
+                }
+            }
+            return bounds;
+        }
+
+        public DateFieldMapper.DateFieldType getFieldType() {
+            assert fieldType instanceof DateFieldMapper.DateFieldType;
+            return (DateFieldMapper.DateFieldType) fieldType;
+        }
+    }
+
+    public static boolean isCompositeAggRewriteable(CompositeValuesSourceConfig[] sourceConfigs) {
+        return sourceConfigs.length == 1 && sourceConfigs[0].valuesSource() instanceof RoundingValuesSource;
+    }
+
+    public static long getBucketOrd(long bucketOrd) {
+        if (bucketOrd < 0) { // already seen
+            bucketOrd = -1 - bucketOrd;
+        }
+
+        return bucketOrd;
+    }
+
+    /**
+     * Try to get the bucket doc counts from the fast filters for the aggregation
+     * <p>
+     * Usage: invoked at segment level, in getLeafCollector of the aggregator
+     *
+     * @param incrementDocCount takes in the bucket key value and the bucket count
+     */
+    public static boolean tryFastFilterAggregation(
+        final LeafReaderContext ctx,
+        FastFilterContext fastFilterContext,
+        final BiConsumer<Long, Integer> incrementDocCount
+    ) throws IOException {
+        if (fastFilterContext == null) return false;
+        if (!fastFilterContext.rewriteable) {
+            return false;
+        }
+
+        NumericDocValues docCountValues = DocValues.getNumeric(ctx.reader(), DocCountFieldMapper.NAME);
+        if (docCountValues.nextDoc() != NO_MORE_DOCS) {
+            logger.debug(
+                "Shard {} segment {} has at least one document with _doc_count field, skip fast filter optimization",
+                fastFilterContext.context.indexShard().shardId(),
+                ctx.ord
+            );
+            return false;
+        }
+
+        // if no filters were built at shard level (see getDateHistoAggBounds for possible reasons),
+        // check if the query is functionally match-all at segment level
+        if (!fastFilterContext.filtersBuiltAtShardLevel && !segmentMatchAll(fastFilterContext.context, ctx)) {
+            return false;
+        }
+        Weight[] filters = fastFilterContext.filters;
+        if (filters == null) {
+            logger.debug(
+                "Shard {} segment {} functionally matches all documents. Building the fast filter",
+                fastFilterContext.context.indexShard().shardId(),
+                ctx.ord
+            );
+            filters = fastFilterContext.buildFastFilter(ctx);
+            if (filters == null) {
+                return false;
+            }
+        }
+
+        final int[] counts = new int[filters.length];
+        int i;
+        for (i = 0; i < filters.length; i++) {
+            counts[i] = filters[i].count(ctx);
+            if (counts[i] == -1) {
+                // Cannot use the optimization if any of the counts
+                // is -1 indicating the segment might have deleted documents
+                return false;
+            }
+        }
+
+        int s = 0;
+        int size = fastFilterContext.aggregationType.getSize();
+        for (i = 0; i < filters.length; i++) {
+            if (counts[i] > 0) {
+                long bucketKey = i; // the index of filters is the key for filters aggregation
+                if (fastFilterContext.aggregationType instanceof AbstractDateHistogramAggregationType) {
+                    final DateFieldMapper.DateFieldType fieldType =
+                        ((AbstractDateHistogramAggregationType) fastFilterContext.aggregationType).getFieldType();
+                    bucketKey = fieldType.convertNanosToMillis(
+                        NumericUtils.sortableBytesToLong(((PointRangeQuery) filters[i].getQuery()).getLowerPoint(), 0)
+                    );
+                }
+                incrementDocCount.accept(bucketKey, counts[i]);
+                s++;
+                if (s > size) {
+                    break;
+                }
+            }
+        }
+
+        logger.debug("Fast filter optimization applied to shard {} segment {}", fastFilterContext.context.indexShard().shardId(), ctx.ord);
+        return true;
+    }
+
+    private static boolean segmentMatchAll(SearchContext ctx, LeafReaderContext leafCtx) throws IOException {
+        Weight weight = ctx.searcher().createWeight(ctx.query(), ScoreMode.COMPLETE_NO_SCORES, 1f);
+        return weight != null && weight.count(leafCtx) == leafCtx.reader().numDocs();
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java
b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index 5e8791441d83a..e57acba5bc0ad 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -44,7 +44,9 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregatorFactories; import org.opensearch.search.aggregations.AggregatorFactory; +import org.opensearch.search.aggregations.bucket.filter.FilterAggregatorFactory; import org.opensearch.search.aggregations.bucket.nested.NestedAggregatorFactory; +import org.opensearch.search.aggregations.bucket.nested.ReverseNestedAggregatorFactory; import org.opensearch.search.aggregations.support.ValuesSourceRegistry; import java.io.IOException; @@ -240,14 +242,16 @@ public BucketCardinality bucketCardinality() { * this aggregator or the instance of the parent's factory that is incompatible with * the composite aggregation. */ - private AggregatorFactory checkParentIsNullOrNested(AggregatorFactory factory) { + private static AggregatorFactory checkParentIsSafe(AggregatorFactory factory) { if (factory == null) { return null; - } else if (factory instanceof NestedAggregatorFactory) { - return checkParentIsNullOrNested(factory.getParent()); - } else { - return factory; - } + } else if (factory instanceof NestedAggregatorFactory + || factory instanceof FilterAggregatorFactory + || factory instanceof ReverseNestedAggregatorFactory) { + return checkParentIsSafe(factory.getParent()); + } else { + return factory; + } } private static void validateSources(List<CompositeValuesSourceBuilder<?>> sources) { @@ -278,7 +282,7 @@ protected AggregatorFactory doBuild( AggregatorFactory parent, AggregatorFactories.Builder subfactoriesBuilder ) throws IOException { - AggregatorFactory invalid = checkParentIsNullOrNested(parent); + AggregatorFactory invalid = checkParentIsSafe(parent); if (invalid != null) { throw new IllegalArgumentException( "[composite] aggregation cannot be used with a parent aggregation of" diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index 317c2a357bac5..b97c814cdf645 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -56,7 +56,9 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.RoaringDocIdSet; +import org.opensearch.common.Rounding; import org.opensearch.common.lease.Releasables; import org.opensearch.index.IndexSortConfig; import org.opensearch.lucene.queries.SearchAfterSortedDocQuery; @@ -71,7 +73,9 @@ import org.opensearch.search.aggregations.MultiBucketCollector; import org.opensearch.search.aggregations.MultiBucketConsumerService; import org.opensearch.search.aggregations.bucket.BucketsAggregator; +import org.opensearch.search.aggregations.bucket.FastFilterRewriteHelper; import org.opensearch.search.aggregations.bucket.missing.MissingOrder; +import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; 
 import org.opensearch.search.internal.SearchContext;
 import org.opensearch.search.searchafter.SearchAfterBuilder;
 import org.opensearch.search.sort.SortAndFormats;
@@ -80,6 +84,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.LongUnaryOperator;
@@ -111,6 +116,10 @@ final class CompositeAggregator extends BucketsAggregator {
 
     private boolean earlyTerminated;
 
+    private final FastFilterRewriteHelper.FastFilterContext fastFilterContext;
+    private LongKeyedBucketOrds bucketOrds = null;
+    private Rounding.Prepared preparedRounding = null;
+
     CompositeAggregator(
         String name,
         AggregatorFactories factories,
@@ -154,12 +163,64 @@ final class CompositeAggregator extends BucketsAggregator {
         }
         this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size, rawAfterKey);
         this.rawAfterKey = rawAfterKey;
+
+        fastFilterContext = new FastFilterRewriteHelper.FastFilterContext(context);
+        if (!FastFilterRewriteHelper.isCompositeAggRewriteable(sourceConfigs)) return;
+        fastFilterContext.setAggregationType(new CompositeAggregationType());
+        if (fastFilterContext.isRewriteable(parent, subAggregators.length)) {
+            // bucketOrds is used for saving date histogram results
+            bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), CardinalityUpperBound.ONE);
+            preparedRounding = ((CompositeAggregationType) fastFilterContext.getAggregationType()).getRoundingPrepared();
+            fastFilterContext.buildFastFilter();
+        }
+    }
+
+    /**
+     * Currently the filter rewrite is only supported for date histograms
+     */
+    private class CompositeAggregationType extends FastFilterRewriteHelper.AbstractDateHistogramAggregationType {
+        private final RoundingValuesSource valuesSource;
+        private long afterKey = -1L;
+
+        public CompositeAggregationType() {
+            super(sourceConfigs[0].fieldType(), sourceConfigs[0].missingBucket(), sourceConfigs[0].hasScript());
+            this.valuesSource = (RoundingValuesSource) sourceConfigs[0].valuesSource();
+            if (rawAfterKey != null) {
+                assert rawAfterKey.size() == 1 && formats.size() == 1;
+                this.afterKey = formats.get(0).parseLong(rawAfterKey.get(0).toString(), false, () -> {
+                    throw new IllegalArgumentException("now() is not supported in [after] key");
+                });
+            }
+        }
+
+        public Rounding getRounding(final long low, final long high) {
+            return valuesSource.getRounding();
+        }
+
+        public Rounding.Prepared getRoundingPrepared() {
+            return valuesSource.getPreparedRounding();
+        }
+
+        @Override
+        protected void processAfterKey(long[] bound, long interval) {
+            // afterKey is the last bucket key in the previous response, and the bucket key
+            // is the minimum of all values in the bucket, so we need to add the interval
+            if (afterKey != -1L) {
+                bound[0] = afterKey + interval;
+            }
+        }
+
+        @Override
+        public int getSize() {
+            return size;
+        }
     }
 
     @Override
     protected void doClose() {
         try {
             Releasables.close(queue);
+            Releasables.close(bucketOrds);
         } finally {
             Releasables.close(sources);
         }
@@ -187,12 +248,14 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
         }
 
         int num = Math.min(size, queue.size());
-        final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num];
+        InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num];
+
        long[] bucketOrdsToCollect = new long[queue.size()];
        for (int i = 0; i < queue.size(); i++) {
            bucketOrdsToCollect[i] = i;
        }
        InternalAggregations[] subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect);
+
        while (queue.size() > 0) {
            int slot = queue.pop();
            CompositeKey key = queue.toCompositeKey(slot);
@@ -208,6 +271,43 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I
                aggs
            );
        }
+
+        // Build results from the fast filter optimization
+        if (bucketOrds != null) {
+            // CompositeKey is the value of the bucket key
+            final Map<CompositeKey, InternalComposite.InternalBucket> bucketMap = new HashMap<>();
+            // Some segments may not be optimized, so buckets may contain results from the queue.
+            for (InternalComposite.InternalBucket internalBucket : buckets) {
+                bucketMap.put(internalBucket.getRawKey(), internalBucket);
+            }
+            // Loop over the buckets in the bucketOrds, and populate the map accordingly
+            LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(0);
+            while (ordsEnum.next()) {
+                Long bucketKeyValue = ordsEnum.value();
+                CompositeKey key = new CompositeKey(bucketKeyValue);
+                if (bucketMap.containsKey(key)) {
+                    long docCount = bucketDocCount(ordsEnum.ord()) + bucketMap.get(key).getDocCount();
+                    bucketMap.get(key).setDocCount(docCount);
+                } else {
+                    InternalComposite.InternalBucket bucket = new InternalComposite.InternalBucket(
+                        sourceNames,
+                        formats,
+                        key,
+                        reverseMuls,
+                        missingOrders,
+                        bucketDocCount(ordsEnum.ord()),
+                        buildEmptySubAggregations()
+                    );
+                    bucketMap.put(key, bucket);
+                }
+            }
+            // since a map is not a sorted structure, sort it before transforming back to buckets
+            List<InternalComposite.InternalBucket> bucketList = new ArrayList<>(bucketMap.values());
+            CollectionUtil.introSort(bucketList, InternalComposite.InternalBucket::compareKey);
+            buckets = bucketList.subList(0, Math.min(size, bucketList.size())).toArray(InternalComposite.InternalBucket[]::new);
+            num = buckets.length;
+        }
+
        CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null;
        return new InternalAggregation[] {
            new InternalComposite(
@@ -296,7 +396,7 @@ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException
            if (indexSortField.getReverse() != (source.reverseMul == -1)) {
                if (i == 0) {
-                    // the leading index sort matches the leading source field but the order is reversed
+                    // the leading index sort matches the leading source field, but the order is reversed,
                    // so we don't check the other sources.
                    return new Sort(indexSortField);
                }
@@ -304,8 +404,8 @@ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException
            }
            sortFields.add(indexSortField);
            if (sourceConfig.valuesSource() instanceof RoundingValuesSource) {
-                // the rounding "squashes" many values together, that breaks the ordering of sub-values
-                // so we ignore subsequent source even if they match the index sort.
+                // the rounding "squashes" many values together, which breaks the ordering of sub-values,
+                // so we ignore the subsequent sources even if they match the index sort.
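+                // (illustrative: with a day rounding, 2020-11-17T10:00 and 2020-11-17T11:00 both
+                // collapse to the bucket key 2020-11-17, so the order of the raw values is lost)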
                break;
            }
        }
@@ -448,6 +548,16 @@ private void processLeafFromQuery(LeafReaderContext ctx, Sort indexSortPrefix) t
 
    @Override
    protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
+        boolean optimized = FastFilterRewriteHelper.tryFastFilterAggregation(
+            ctx,
+            fastFilterContext,
+            (key, count) -> incrementBucketDocCount(
+                FastFilterRewriteHelper.getBucketOrd(bucketOrds.add(0, preparedRounding.round(key))),
+                count
+            )
+        );
+        if (optimized) throw new CollectionTerminatedException();
+
        finishLeaf();
 
        boolean fillDocIdSet = deferredCollectors != NO_OP_COLLECTOR;
@@ -477,9 +587,10 @@ protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucket
            docIdSetBuilder = new RoaringDocIdSet.Builder(ctx.reader().maxDoc());
        }
        if (rawAfterKey != null && sortPrefixLen > 0) {
-            // We have an after key and index sort is applicable so we jump directly to the doc
-            // that is after the index sort prefix using the rawAfterKey and we start collecting
-            // document from there.
+            // We have an after key and index sort is applicable, so we jump directly to the doc
+            // after the index sort prefix using the rawAfterKey and we start collecting
+            // documents from there.
+            assert indexSortPrefix != null;
            processLeafFromQuery(ctx, indexSortPrefix);
            throw new CollectionTerminatedException();
        } else {
@@ -507,6 +618,8 @@ public void collect(int doc, long bucket) throws IOException {
                try {
                    long docCount = docCountProvider.getDocCount(doc);
                    if (queue.addIfCompetitive(indexSortPrefix, docCount)) {
+                        // one doc may contain multiple values; we iterate over them and collect one by one,
+                        // so the same doc can appear multiple times here
                        if (builder != null && lastDoc != doc) {
                            builder.add(doc);
                            lastDoc = doc;
@@ -569,7 +682,7 @@ private LeafBucketCollector getSecondPassCollector(LeafBucketCollector subCollec
        @Override
        public void collect(int doc, long zeroBucket) throws IOException {
            assert zeroBucket == 0;
-            Integer slot = queue.compareCurrent();
+            Integer slot = queue.getCurrentSlot();
            if (slot != null) {
                // The candidate key is a top bucket.
                // We can defer the collection of this document/bucket to the sub collector
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java
index 5ddeb22d33a6f..338ebdc66eef7 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java
@@ -44,7 +44,7 @@
 *
 * @opensearch.internal
 */
-class CompositeKey implements Writeable {
+public class CompositeKey implements Writeable {
    private final Comparable[] values;
 
    CompositeKey(Comparable... values) {
@@ -64,11 +64,11 @@ Comparable[] values() {
        return values;
    }
 
-    int size() {
+    public int size() {
        return values.length;
    }
 
-    Comparable get(int pos) {
+    public Comparable get(int pos) {
        assert pos < values.length;
        return values[pos];
    }
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
index 6ee1682a7b196..2c4d451322bca 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
@@ -47,6 +47,8 @@
 
/**
 * A specialized {@link PriorityQueue} implementation for composite buckets.
+ * Think of it as a max heap that holds the slots of the top (smallest) buckets in order.
+ * Each slot holds the values of the composite bucket key it represents.
 *
 * @opensearch.internal
 */
@@ -77,7 +79,7 @@ public int hashCode() {
 
    private final BigArrays bigArrays;
    private final int maxSize;
-    private final Map<Slot, Integer> map;
+    private final Map<Slot, Integer> map; // to quickly find the slot for a value
    private final SingleDimensionValuesSource<?>[] arrays;
 
    private LongArray docCounts;
@@ -108,7 +110,7 @@ public int hashCode() {
 
    @Override
    protected boolean lessThan(Integer a, Integer b) {
-        return compare(a, b) > 0;
+        return compare(a, b) > 0; // max heap
    }
 
    /**
@@ -119,10 +121,10 @@ boolean isFull() {
    }
 
    /**
-     * Compares the current candidate with the values in the queue and returns
+     * Tries to get the slot of the current/candidate values in the queue and returns
     * the slot if the candidate is already in the queue or null if the candidate is not present.
     */
-    Integer compareCurrent() {
+    Integer getCurrentSlot() {
        return map.get(new Slot(CANDIDATE_SLOT));
    }
 
@@ -281,32 +283,34 @@ boolean addIfCompetitive(long inc) {
     */
    boolean addIfCompetitive(int indexSortSourcePrefix, long inc) {
        // checks if the candidate key is competitive
-        Integer topSlot = compareCurrent();
-        if (topSlot != null) {
+        Integer curSlot = getCurrentSlot();
+        if (curSlot != null) {
            // this key is already in the top N, skip it
-            docCounts.increment(topSlot, inc);
+            docCounts.increment(curSlot, inc);
            return true;
        }
+
        if (afterKeyIsSet) {
            int cmp = compareCurrentWithAfter();
            if (cmp <= 0) {
                if (indexSortSourcePrefix < 0 && cmp == indexSortSourcePrefix) {
-                    // the leading index sort is in the reverse order of the leading source
+                    // the leading index sort order and the leading source order are both reversed,
                    // so we can early terminate when we reach a document that is smaller
                    // than the after key (collected on a previous page).
                    throw new CollectionTerminatedException();
                }
-                // key was collected on a previous page, skip it (>= afterKey).
+                // the key was collected on a previous page, skip it.
                return false;
            }
        }
+
+        // the heap is full, check if the candidate key is larger than the max heap top
        if (size() >= maxSize) {
-            // the tree map is full, check if the candidate key should be kept
            int cmp = compare(CANDIDATE_SLOT, top());
            if (cmp > 0) {
                if (cmp <= indexSortSourcePrefix) {
-                    // index sort guarantees that there is no key greater or equal than the
-                    // current one in the subsequent documents so we can early terminate.
+                    // index sort guarantees the following documents will have a key larger than the current candidate,
+                    // so we can early terminate.
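+                    // e.g. with an ascending index sort on the leading source, every doc after this one
+                    // in the segment has a key at least as large as this non-competitive candidate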
                    throw new CollectionTerminatedException();
                }
                // the candidate key is not competitive, skip it.
@@ -324,7 +328,7 @@ boolean addIfCompetitive(int indexSortSourcePrefix, long inc) {
        } else {
            newSlot = size();
        }
-        // move the candidate key to its new slot
+        // move the candidate key to its new slot by copying its values to the new slot
        copyCurrent(newSlot, inc);
        map.put(new Slot(newSlot), newSlot);
        add(newSlot);
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java
index 788a4ddc15374..5289b3a34ab34 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java
@@ -156,7 +156,7 @@ public MissingOrder missingOrder() {
    /**
     * Returns true if the source contains a script that can change the value.
     */
-    protected boolean hasScript() {
+    public boolean hasScript() {
        return hasScript;
    }
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
index fd94ba355238a..3926ce9bbecb7 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
@@ -298,7 +298,7 @@ public static void register(ValuesSourceRegistry.Builder builder) {
            // TODO once composite is plugged in to the values source registry or at least understands Date values source types use it
            // here
            Rounding.Prepared preparedRounding = rounding.prepareForUnknown();
-            RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding);
+            RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding, rounding);
            // is specified in the builder.
            final DocValueFormat docValueFormat = format == null ?
DocValueFormat.RAW : valuesSourceConfig.format(); final MappedFieldType fieldType = valuesSourceConfig.fieldType(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java index 9f8a4cff5f3fc..43f1ad32a66f4 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java @@ -339,7 +339,7 @@ public static class InternalBucket extends InternalMultiBucketAggregation.Intern KeyComparable<InternalBucket> { private final CompositeKey key; - private final long docCount; + private long docCount; private final InternalAggregations aggregations; private final transient int[] reverseMuls; private final transient MissingOrder[] missingOrders; @@ -436,6 +436,10 @@ public long getDocCount() { return docCount; } + public void setDocCount(long docCount) { + this.docCount = docCount; + } + @Override public Aggregations getAggregations() { return aggregations; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 3d6730203b6ae..dc130eb54c0ea 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -68,6 +68,7 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReade // no value for the field return DocIdSet.EMPTY; } + long lowerBucket = Long.MIN_VALUE; Comparable lowerValue = queue.getLowerValueLeadSource(); if (lowerValue != null) { @@ -76,7 +77,6 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReade } lowerBucket = (Long) lowerValue; } - long upperBucket = Long.MAX_VALUE; Comparable upperValue = queue.getUpperValueLeadSource(); if (upperValue != null) { @@ -85,6 +85,7 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReade } upperBucket = (Long) upperValue; } + DocIdSetBuilder builder = fillDocIdSet ? 
new DocIdSetBuilder(context.reader().maxDoc(), values, field) : null; Visitor visitor = new Visitor(context, queue, builder, values.getBytesPerDimension(), lowerBucket, upperBucket); try { @@ -146,6 +147,7 @@ public void visit(int docID, byte[] packedValue) throws IOException { } long bucket = bucketFunction.applyAsLong(packedValue); + // process previous bucket when new bucket appears if (first == false && bucket != lastBucket) { final DocIdSet docIdSet = bucketDocsBuilder.build(); if (processBucket(queue, context, docIdSet.iterator(), lastBucket, builder) && @@ -182,13 +184,13 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue return PointValues.Relation.CELL_OUTSIDE_QUERY; } } - if (upperBucket != Long.MAX_VALUE) { long minBucket = bucketFunction.applyAsLong(minPackedValue); if (minBucket > upperBucket) { return PointValues.Relation.CELL_OUTSIDE_QUERY; } } + return PointValues.Relation.CELL_CROSSES_QUERY; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java index 89315724ff9ed..3f5cf919f1755 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java @@ -47,17 +47,19 @@ * * @opensearch.internal */ -class RoundingValuesSource extends ValuesSource.Numeric { +public class RoundingValuesSource extends ValuesSource.Numeric { private final ValuesSource.Numeric vs; - private final Rounding.Prepared rounding; + private final Rounding.Prepared preparedRounding; + private final Rounding rounding; /** - * - * @param vs The original values source - * @param rounding How to round the values + * @param vs The original values source + * @param preparedRounding How to round the values + * @param rounding The rounding strategy */ - RoundingValuesSource(Numeric vs, Rounding.Prepared rounding) { + RoundingValuesSource(Numeric vs, Rounding.Prepared preparedRounding, Rounding rounding) { this.vs = vs; + this.preparedRounding = preparedRounding; this.rounding = rounding; } @@ -71,8 +73,16 @@ public boolean isBigInteger() { return false; } + public Rounding.Prepared getPreparedRounding() { + return preparedRounding; + } + + public Rounding getRounding() { + return rounding; + } + public long round(long value) { - return rounding.round(value); + return preparedRounding.round(value); } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index a0a636c121e12..db21b384c77ea 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -56,7 +56,7 @@ public class FilterAggregatorFactory extends AggregatorFactory { private Weight weight; - private Query filter; + private final Query filter; public FilterAggregatorFactory( String name, @@ -85,7 +85,7 @@ public Weight getWeight() { try { weight = contextSearcher.createWeight(contextSearcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f); } catch (IOException e) { - throw new AggregationInitializationException("Failed to initialse filter", e); + throw new AggregationInitializationException("Failed 
to initialise filter", e); } } return weight; @@ -98,7 +98,7 @@ public Aggregator createInternal( CardinalityUpperBound cardinality, Map<String, Object> metadata ) throws IOException { - return new FilterAggregator(name, () -> this.getWeight(), factories, searchContext, parent, cardinality, metadata); + return new FilterAggregator(name, this::getWeight, factories, searchContext, parent, cardinality, metadata); } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index a71c15d551927..12aefc540e75c 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -33,8 +33,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; import org.apache.lucene.util.CollectionUtil; import org.opensearch.common.Rounding; import org.opensearch.common.Rounding.Prepared; @@ -42,7 +42,7 @@ import org.opensearch.common.util.IntArray; import org.opensearch.common.util.LongArray; import org.opensearch.core.common.util.ByteArray; -import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.Aggregator; import org.opensearch.search.aggregations.AggregatorFactories; @@ -53,6 +53,7 @@ import org.opensearch.search.aggregations.LeafBucketCollectorBase; import org.opensearch.search.aggregations.bucket.DeferableBucketAggregator; import org.opensearch.search.aggregations.bucket.DeferringBucketCollector; +import org.opensearch.search.aggregations.bucket.FastFilterRewriteHelper; import org.opensearch.search.aggregations.bucket.MergingBucketsDeferringCollector; import org.opensearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; @@ -127,14 +128,14 @@ static AutoDateHistogramAggregator build( * {@link MergingBucketsDeferringCollector#mergeBuckets(long[])}. 
*/ private MergingBucketsDeferringCollector deferringCollector; - private final Weight[] filters; - private final DateFieldMapper.DateFieldType fieldType; protected final RoundingInfo[] roundingInfos; protected final int targetBuckets; protected int roundingIdx; protected Rounding.Prepared preparedRounding; + private final FastFilterRewriteHelper.FastFilterContext fastFilterContext; + private AutoDateHistogramAggregator( String name, AggregatorFactories factories, @@ -156,45 +157,53 @@ private AutoDateHistogramAggregator( this.roundingPreparer = roundingPreparer; this.preparedRounding = prepareRounding(0); - FilterRewriteHelper.FilterContext filterContext = FilterRewriteHelper.buildFastFilterContext( - parent(), - subAggregators.length, - context, - b -> getMinimumRounding(b[0], b[1]), - // Passing prepared rounding as supplier to ensure the correct prepared - // rounding is set as it is done during getMinimumRounding - () -> preparedRounding, - valuesSourceConfig, - fc -> FilterRewriteHelper.getAggregationBounds(context, fc.field()) + fastFilterContext = new FastFilterRewriteHelper.FastFilterContext(context); + fastFilterContext.setAggregationType( + new AutoHistogramAggregationType( + valuesSourceConfig.fieldType(), + valuesSourceConfig.missing() != null, + valuesSourceConfig.script() != null + ) ); - if (filterContext != null) { - fieldType = filterContext.fieldType; - filters = filterContext.filters; - } else { - fieldType = null; - filters = null; + if (fastFilterContext.isRewriteable(parent, subAggregators.length)) { + fastFilterContext.buildFastFilter(); } } - private Rounding getMinimumRounding(final long low, final long high) { - // max - min / targetBuckets = bestDuration - // find the right innerInterval this bestDuration belongs to - // since we cannot exceed targetBuckets, bestDuration should go up, - // so the right innerInterval should be an upper bound - long bestDuration = (high - low) / targetBuckets; - while (roundingIdx < roundingInfos.length - 1) { - final RoundingInfo curRoundingInfo = roundingInfos[roundingIdx]; - final int temp = curRoundingInfo.innerIntervals[curRoundingInfo.innerIntervals.length - 1]; - // If the interval duration is covered by the maximum inner interval, - // we can start with this outer interval for creating the buckets - if (bestDuration <= temp * curRoundingInfo.roughEstimateDurationMillis) { - break; + private class AutoHistogramAggregationType extends FastFilterRewriteHelper.AbstractDateHistogramAggregationType { + + public AutoHistogramAggregationType(MappedFieldType fieldType, boolean missing, boolean hasScript) { + super(fieldType, missing, hasScript); + } + + @Override + protected Rounding getRounding(final long low, final long high) { + // max - min / targetBuckets = bestDuration + // find the right innerInterval this bestDuration belongs to + // since we cannot exceed targetBuckets, bestDuration should go up, + // so the right innerInterval should be an upper bound + long bestDuration = (high - low) / targetBuckets; + // reset so this function is idempotent + roundingIdx = 0; + while (roundingIdx < roundingInfos.length - 1) { + final RoundingInfo curRoundingInfo = roundingInfos[roundingIdx]; + final int temp = curRoundingInfo.innerIntervals[curRoundingInfo.innerIntervals.length - 1]; + // If the interval duration is covered by the maximum inner interval, + // we can start with this outer interval for creating the buckets + if (bestDuration <= temp * curRoundingInfo.roughEstimateDurationMillis) { + break; + } + roundingIdx++; } - 
roundingIdx++; + + preparedRounding = prepareRounding(roundingIdx); + return roundingInfos[roundingIdx].rounding; } - preparedRounding = prepareRounding(roundingIdx); - return roundingInfos[roundingIdx].rounding; + @Override + protected Prepared getRoundingPrepared() { + return preparedRounding; + } } protected abstract LongKeyedBucketOrds getBucketOrds(); @@ -226,28 +235,21 @@ public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBuc return LeafBucketCollector.NO_OP_COLLECTOR; } + boolean optimized = FastFilterRewriteHelper.tryFastFilterAggregation( + ctx, + fastFilterContext, + (key, count) -> incrementBucketDocCount( + FastFilterRewriteHelper.getBucketOrd(getBucketOrds().add(0, preparedRounding.round(key))), + count + ) + ); + if (optimized) throw new CollectionTerminatedException(); + final SortedNumericDocValues values = valuesSource.longValues(ctx); final LeafBucketCollector iteratingCollector = getLeafCollector(values, sub); - - // Need to be declared as final and array for usage within the - // LeafBucketCollectorBase subclass below - final boolean[] useOpt = new boolean[1]; - useOpt[0] = filters != null; - return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { - // Try fast filter aggregation if the filters have been created - // Skip if tried before and gave incorrect/incomplete results - if (useOpt[0]) { - useOpt[0] = FilterRewriteHelper.tryFastFilterAggregation(ctx, filters, fieldType, (key, count) -> { - incrementBucketDocCount( - FilterRewriteHelper.getBucketOrd(getBucketOrds().add(owningBucketOrd, preparedRounding.round(key))), - count - ); - }); - } - iteratingCollector.collect(doc, owningBucketOrd); } }; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 8437e1dce9fe0..0e830106c8284 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -33,13 +33,13 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; import org.apache.lucene.util.CollectionUtil; import org.opensearch.common.Nullable; import org.opensearch.common.Rounding; import org.opensearch.common.lease.Releasables; -import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.Aggregator; import org.opensearch.search.aggregations.AggregatorFactories; @@ -49,8 +49,8 @@ import org.opensearch.search.aggregations.LeafBucketCollector; import org.opensearch.search.aggregations.LeafBucketCollectorBase; import org.opensearch.search.aggregations.bucket.BucketsAggregator; +import org.opensearch.search.aggregations.bucket.FastFilterRewriteHelper; import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; -import org.opensearch.search.aggregations.support.FieldContext; import org.opensearch.search.aggregations.support.ValuesSource; import org.opensearch.search.aggregations.support.ValuesSourceConfig; import 
org.opensearch.search.internal.SearchContext; @@ -81,9 +81,9 @@ class DateHistogramAggregator extends BucketsAggregator implements SizedBucketAg private final long minDocCount; private final LongBounds extendedBounds; private final LongBounds hardBounds; - private final Weight[] filters; private final LongKeyedBucketOrds bucketOrds; - private final DateFieldMapper.DateFieldType fieldType; + + private final FastFilterRewriteHelper.FastFilterContext fastFilterContext; DateHistogramAggregator( String name, @@ -116,34 +116,35 @@ class DateHistogramAggregator extends BucketsAggregator implements SizedBucketAg bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), cardinality); - FilterRewriteHelper.FilterContext filterContext = FilterRewriteHelper.buildFastFilterContext( - parent, - subAggregators.length, - context, - x -> rounding, - () -> preparedRounding, - valuesSourceConfig, - this::computeBounds + fastFilterContext = new FastFilterRewriteHelper.FastFilterContext(context); + fastFilterContext.setAggregationType( + new DateHistogramAggregationType( + valuesSourceConfig.fieldType(), + valuesSourceConfig.missing() != null, + valuesSourceConfig.script() != null, + hardBounds + ) ); - if (filterContext != null) { - fieldType = filterContext.fieldType; - filters = filterContext.filters; - } else { - filters = null; - fieldType = null; + if (fastFilterContext.isRewriteable(parent, subAggregators.length)) { + fastFilterContext.buildFastFilter(); } } - private long[] computeBounds(final FieldContext fieldContext) throws IOException { - final long[] bounds = FilterRewriteHelper.getAggregationBounds(context, fieldContext.field()); - if (bounds != null) { - // Update min/max limit if user specified any hard bounds - if (hardBounds != null) { - bounds[0] = Math.max(bounds[0], hardBounds.getMin()); - bounds[1] = Math.min(bounds[1], hardBounds.getMax() - 1); // hard bounds max is exclusive - } + private class DateHistogramAggregationType extends FastFilterRewriteHelper.AbstractDateHistogramAggregationType { + + public DateHistogramAggregationType(MappedFieldType fieldType, boolean missing, boolean hasScript, LongBounds hardBounds) { + super(fieldType, missing, hasScript, hardBounds); + } + + @Override + protected Rounding getRounding(long low, long high) { + return rounding; + } + + @Override + protected Rounding.Prepared getRoundingPrepared() { + return preparedRounding; } - return bounds; } @Override @@ -160,26 +161,20 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol return LeafBucketCollector.NO_OP_COLLECTOR; } - // Need to be declared as final and array for usage within the - // LeafBucketCollectorBase subclass below - final boolean[] useOpt = new boolean[1]; - useOpt[0] = filters != null; + boolean optimized = FastFilterRewriteHelper.tryFastFilterAggregation( + ctx, + fastFilterContext, + (key, count) -> incrementBucketDocCount( + FastFilterRewriteHelper.getBucketOrd(bucketOrds.add(0, preparedRounding.round(key))), + count + ) + ); + if (optimized) throw new CollectionTerminatedException(); SortedNumericDocValues values = valuesSource.longValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { - // Try fast filter aggregation if the filters have been created - // Skip if tried before and gave incorrect/incomplete results - if (useOpt[0]) { - useOpt[0] = FilterRewriteHelper.tryFastFilterAggregation(ctx, filters, fieldType, (key, count) -> { - incrementBucketDocCount( - 
FilterRewriteHelper.getBucketOrd(bucketOrds.add(owningBucketOrd, preparedRounding.round(key))), - count - ); - }); - } - if (values.advanceExact(doc)) { int valuesCount = values.docValueCount(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java deleted file mode 100644 index 29cecd5b382cd..0000000000000 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.search.aggregations.bucket.histogram; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.PointValues; -import org.apache.lucene.search.CollectionTerminatedException; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.PointRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.NumericUtils; -import org.opensearch.common.CheckedFunction; -import org.opensearch.common.Rounding; -import org.opensearch.common.lucene.search.function.FunctionScoreQuery; -import org.opensearch.index.mapper.DateFieldMapper; -import org.opensearch.index.query.DateRangeIncludingNowQuery; -import org.opensearch.search.aggregations.support.FieldContext; -import org.opensearch.search.aggregations.support.ValuesSourceConfig; -import org.opensearch.search.internal.SearchContext; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.OptionalLong; -import java.util.function.BiConsumer; -import java.util.function.Function; -import java.util.function.Supplier; - -/** - * Helpers functions to rewrite and optimize aggregations using - * range filter queries - * - * @opensearch.internal - */ -public class FilterRewriteHelper { - - static class FilterContext { - final DateFieldMapper.DateFieldType fieldType; - final Weight[] filters; - - public FilterContext(DateFieldMapper.DateFieldType fieldType, Weight[] filters) { - this.fieldType = fieldType; - this.filters = filters; - } - } - - private static final int MAX_NUM_FILTER_BUCKETS = 1024; - private static final Map<Class<?>, Function<Query, Query>> queryWrappers; - - // Initialize the wrappers map for unwrapping the query - static { - queryWrappers = new HashMap<>(); - queryWrappers.put(ConstantScoreQuery.class, q -> ((ConstantScoreQuery) q).getQuery()); - queryWrappers.put(FunctionScoreQuery.class, q -> ((FunctionScoreQuery) q).getSubQuery()); - queryWrappers.put(DateRangeIncludingNowQuery.class, q -> ((DateRangeIncludingNowQuery) q).getQuery()); - queryWrappers.put(IndexOrDocValuesQuery.class, q -> ((IndexOrDocValuesQuery) q).getIndexQuery()); - } - - /** - * Recursively unwraps query into the concrete form - * for applying the optimization - */ - private static Query unwrapIntoConcreteQuery(Query query) { - while (queryWrappers.containsKey(query.getClass())) { - query = queryWrappers.get(query.getClass()).apply(query); - } - - return query; - } - - /** - * Finds the min and max bounds for segments 
within the passed search context - */ - private static long[] getIndexBoundsFromLeaves(final SearchContext context, final String fieldName) throws IOException { - final List<LeafReaderContext> leaves = context.searcher().getIndexReader().leaves(); - long min = Long.MAX_VALUE, max = Long.MIN_VALUE; - // Since the query does not specify bounds for aggregation, we can - // build the global min/max from local min/max within each segment - for (LeafReaderContext leaf : leaves) { - final PointValues values = leaf.reader().getPointValues(fieldName); - if (values != null) { - min = Math.min(min, NumericUtils.sortableBytesToLong(values.getMinPackedValue(), 0)); - max = Math.max(max, NumericUtils.sortableBytesToLong(values.getMaxPackedValue(), 0)); - } - } - - if (min == Long.MAX_VALUE || max == Long.MIN_VALUE) return null; - - return new long[] { min, max }; - } - - static long[] getAggregationBounds(final SearchContext context, final String fieldName) throws IOException { - final Query cq = unwrapIntoConcreteQuery(context.query()); - final long[] indexBounds = getIndexBoundsFromLeaves(context, fieldName); - if (cq instanceof PointRangeQuery) { - final PointRangeQuery prq = (PointRangeQuery) cq; - // Ensure that the query and aggregation are on the same field - if (prq.getField().equals(fieldName)) { - return new long[] { - // Minimum bound for aggregation is the max between query and global - Math.max(NumericUtils.sortableBytesToLong(prq.getLowerPoint(), 0), indexBounds[0]), - // Maximum bound for aggregation is the min between query and global - Math.min(NumericUtils.sortableBytesToLong(prq.getUpperPoint(), 0), indexBounds[1]) }; - } - } else if (cq instanceof MatchAllDocsQuery) { - return indexBounds; - } - - return null; - } - - /** - * Creates the range query filters for aggregations using the interval, min/max - * bounds and the rounding values - */ - private static Weight[] createFilterForAggregations( - final SearchContext context, - final Rounding rounding, - final Rounding.Prepared preparedRounding, - final String field, - final DateFieldMapper.DateFieldType fieldType, - final long low, - final long high - ) throws IOException { - final OptionalLong intervalOpt = Rounding.getInterval(rounding); - if (intervalOpt.isEmpty()) { - return null; - } - - final long interval = intervalOpt.getAsLong(); - // Calculate the number of buckets using range and interval - long roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); - long prevRounded = roundedLow; - int bucketCount = 0; - while (roundedLow <= fieldType.convertNanosToMillis(high)) { - bucketCount++; - // Below rounding is needed as the interval could return in - // non-rounded values for something like calendar month - roundedLow = preparedRounding.round(roundedLow + interval); - if (prevRounded == roundedLow) break; - prevRounded = roundedLow; - } - - Weight[] filters = null; - if (bucketCount > 0 && bucketCount <= MAX_NUM_FILTER_BUCKETS) { - int i = 0; - filters = new Weight[bucketCount]; - roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); - while (i < bucketCount) { - // Calculate the lower bucket bound - final byte[] lower = new byte[8]; - NumericUtils.longToSortableBytes(i == 0 ? 
low : fieldType.convertRoundedMillisToNanos(roundedLow), lower, 0); - // Calculate the upper bucket bound - final byte[] upper = new byte[8]; - roundedLow = preparedRounding.round(roundedLow + interval); - // Subtract -1 if the minimum is roundedLow as roundedLow itself - // is included in the next bucket - NumericUtils.longToSortableBytes( - i + 1 == bucketCount ? high : fieldType.convertRoundedMillisToNanos(roundedLow) - 1, - upper, - 0 - ); - filters[i++] = context.searcher().createWeight(new PointRangeQuery(field, lower, upper, 1) { - @Override - protected String toString(int dimension, byte[] value) { - return null; - } - }, ScoreMode.COMPLETE_NO_SCORES, 1); - } - } - - return filters; - } - - static FilterContext buildFastFilterContext( - final Object parent, - final int subAggLength, - SearchContext context, - Function<long[], Rounding> roundingFunction, - Supplier<Rounding.Prepared> preparedRoundingSupplier, - ValuesSourceConfig valuesSourceConfig, - CheckedFunction<FieldContext, long[], IOException> computeBounds - ) throws IOException { - // Create the filters for fast aggregation only if the query is instance - // of point range query and there aren't any parent/sub aggregations - if (parent == null && subAggLength == 0 && valuesSourceConfig.missing() == null && valuesSourceConfig.script() == null) { - final FieldContext fieldContext = valuesSourceConfig.fieldContext(); - if (fieldContext != null) { - final String fieldName = fieldContext.field(); - final long[] bounds = computeBounds.apply(fieldContext); - if (bounds != null) { - assert fieldContext.fieldType() instanceof DateFieldMapper.DateFieldType; - final DateFieldMapper.DateFieldType fieldType = (DateFieldMapper.DateFieldType) fieldContext.fieldType(); - final Rounding rounding = roundingFunction.apply(bounds); - final Weight[] filters = FilterRewriteHelper.createFilterForAggregations( - context, - rounding, - preparedRoundingSupplier.get(), - fieldName, - fieldType, - bounds[0], - bounds[1] - ); - return new FilterContext(fieldType, filters); - } - } - } - return null; - } - - static long getBucketOrd(long bucketOrd) { - if (bucketOrd < 0) { // already seen - bucketOrd = -1 - bucketOrd; - } - - return bucketOrd; - } - - static boolean tryFastFilterAggregation( - final LeafReaderContext ctx, - final Weight[] filters, - final DateFieldMapper.DateFieldType fieldType, - final BiConsumer<Long, Integer> incrementDocCount - ) throws IOException { - final int[] counts = new int[filters.length]; - int i; - for (i = 0; i < filters.length; i++) { - counts[i] = filters[i].count(ctx); - if (counts[i] == -1) { - // Cannot use the optimization if any of the counts - // is -1 indicating the segment might have deleted documents - return false; - } - } - - for (i = 0; i < filters.length; i++) { - if (counts[i] > 0) { - incrementDocCount.accept( - fieldType.convertNanosToMillis( - NumericUtils.sortableBytesToLong(((PointRangeQuery) filters[i].getQuery()).getLowerPoint(), 0) - ), - counts[i] - ); - } - } - throw new CollectionTerminatedException(); - } -} diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java index da1d9961ed81b..d21737a8366b2 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java @@ -227,6 +227,10 @@ public String toString() { return "anon SortedNumericDoubleValues of [" 
+ super.toString() + "]"; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java index 1a76183ac1a2d..3ce1f0447dfcc 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java @@ -576,6 +576,11 @@ public boolean advanceExact(int target) throws IOException { } return false; } + + @Override + public int advance(int target) throws IOException { + return doubleValues.advance(target); + } } } diff --git a/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java index 434e630893f25..bdd92a5baa115 100644 --- a/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java @@ -117,6 +117,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure"); public static final ParseField SORT_FIELD = new ParseField("sort"); public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores"); + public static final ParseField INCLUDE_NAMED_QUERIES_SCORE = new ParseField("include_named_queries_score"); public static final ParseField TRACK_TOTAL_HITS_FIELD = new ParseField("track_total_hits"); public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost"); public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations"); @@ -175,6 +176,8 @@ public static HighlightBuilder highlight() { private boolean trackScores = false; + private Boolean includeNamedQueriesScore; + private Integer trackTotalHitsUpTo; private SearchAfterBuilder searchAfterBuilder; @@ -276,6 +279,9 @@ public SearchSourceBuilder(StreamInput in) throws IOException { searchPipelineSource = in.readMap(); } } + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + includeNamedQueriesScore = in.readOptionalBoolean(); + } } @Override @@ -341,6 +347,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(searchPipelineSource); } } + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + out.writeOptionalBoolean(includeNamedQueriesScore); + } } /** @@ -568,6 +577,22 @@ public SearchSourceBuilder trackScores(boolean trackScores) { return this; } + /** + * Applies when there are named queries: sets whether their scores should be returned along with the matched query names. + * Defaults to {@code false}. + */ + public SearchSourceBuilder includeNamedQueriesScores(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + /** + * Indicates whether scores will be returned for every matched named query. + */ + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore != null && includeNamedQueriesScore; + } + /** * Indicates whether scores will be tracked for this request.
*/ @@ -1103,6 +1128,7 @@ private SearchSourceBuilder shallowCopy( rewrittenBuilder.terminateAfter = terminateAfter; rewrittenBuilder.timeout = timeout; rewrittenBuilder.trackScores = trackScores; + rewrittenBuilder.includeNamedQueriesScore = includeNamedQueriesScore; rewrittenBuilder.trackTotalHitsUpTo = trackTotalHitsUpTo; rewrittenBuilder.version = version; rewrittenBuilder.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; @@ -1155,6 +1181,8 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th explain = parser.booleanValue(); } else if (TRACK_SCORES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { trackScores = parser.booleanValue(); + } else if (INCLUDE_NAMED_QUERIES_SCORE.match(currentFieldName, parser.getDeprecationHandler())) { + includeNamedQueriesScore = parser.booleanValue(); } else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.VALUE_BOOLEAN || (token == XContentParser.Token.VALUE_STRING && Booleans.isBoolean(parser.text()))) { @@ -1418,6 +1446,10 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.field(TRACK_SCORES_FIELD.getPreferredName(), true); } + if (includeNamedQueriesScore != null) { + builder.field(INCLUDE_NAMED_QUERIES_SCORE.getPreferredName(), includeNamedQueriesScore); + } + if (trackTotalHitsUpTo != null) { builder.field(TRACK_TOTAL_HITS_FIELD.getPreferredName(), trackTotalHitsUpTo); } @@ -1749,6 +1781,7 @@ public int hashCode() { terminateAfter, timeout, trackScores, + includeNamedQueriesScore, version, seqNoAndPrimaryTerm, profile, @@ -1791,6 +1824,7 @@ public boolean equals(Object obj) { && Objects.equals(terminateAfter, other.terminateAfter) && Objects.equals(timeout, other.timeout) && Objects.equals(trackScores, other.trackScores) + && Objects.equals(includeNamedQueriesScore, other.includeNamedQueriesScore) && Objects.equals(version, other.version) && Objects.equals(seqNoAndPrimaryTerm, other.seqNoAndPrimaryTerm) && Objects.equals(profile, other.profile) diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchContext.java b/server/src/main/java/org/opensearch/search/fetch/FetchContext.java index 7e36ace9e2112..5be3733106655 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchContext.java @@ -188,6 +188,10 @@ public boolean fetchScores() { return searchContext.sort() != null && searchContext.trackScores(); } + public boolean includeNamedQueriesScore() { + return searchContext.includeNamedQueriesScore(); + } + /** * Configuration for returning inner hits */ diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index a842c0f1adc6e..1698f41caaf2b 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -91,7 +91,7 @@ /** * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified - * after reducing all of the matches returned by the query phase + * after reducing all the matches returned by the query phase * * @opensearch.api */ diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java index 6c589438d6b4c..406d9c8b4bc03 100644 --- 
a/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -28,12 +28,12 @@ * Modifications Copyright OpenSearch Contributors. See * GitHub history for details. */ - package org.opensearch.search.fetch.subphase; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -45,6 +45,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -67,25 +68,69 @@ public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOExcept if (namedQueries.isEmpty()) { return null; } + + Map<String, Weight> weights = prepareWeights(context, namedQueries); + + return context.includeNamedQueriesScore() ? createScoringProcessor(weights) : createNonScoringProcessor(weights); + } + + private Map<String, Weight> prepareWeights(FetchContext context, Map<String, Query> namedQueries) throws IOException { Map<String, Weight> weights = new HashMap<>(); + ScoreMode scoreMode = context.includeNamedQueriesScore() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; for (Map.Entry<String, Query> entry : namedQueries.entrySet()) { - weights.put( - entry.getKey(), - context.searcher().createWeight(context.searcher().rewrite(entry.getValue()), ScoreMode.COMPLETE_NO_SCORES, 1) - ); + weights.put(entry.getKey(), context.searcher().createWeight(context.searcher().rewrite(entry.getValue()), scoreMode, 1)); } + return weights; + } + + private FetchSubPhaseProcessor createScoringProcessor(Map<String, Weight> weights) { return new FetchSubPhaseProcessor() { + final Map<String, Scorer> matchingScorers = new HashMap<>(); + + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + matchingScorers.clear(); + for (Map.Entry<String, Weight> entry : weights.entrySet()) { + ScorerSupplier scorerSupplier = entry.getValue().scorerSupplier(readerContext); + if (scorerSupplier != null) { + Scorer scorer = scorerSupplier.get(0L); + if (scorer != null) { + matchingScorers.put(entry.getKey(), scorer); + } + } + } + } + + @Override + public void process(HitContext hitContext) throws IOException { + Map<String, Float> matches = new LinkedHashMap<>(); + int docId = hitContext.docId(); + for (Map.Entry<String, Scorer> entry : matchingScorers.entrySet()) { + Scorer scorer = entry.getValue(); + if (scorer.iterator().docID() < docId) { + scorer.iterator().advance(docId); + } + if (scorer.iterator().docID() == docId) { + matches.put(entry.getKey(), scorer.score()); + } + } + hitContext.hit().matchedQueriesWithScores(matches); + } + }; + } - final Map<String, Bits> matchingIterators = new HashMap<>(); + private FetchSubPhaseProcessor createNonScoringProcessor(Map<String, Weight> weights) { + return new FetchSubPhaseProcessor() { + final Map<String, Bits> matchingBits = new HashMap<>(); @Override public void setNextReader(LeafReaderContext readerContext) throws IOException { - matchingIterators.clear(); + matchingBits.clear(); for (Map.Entry<String, Weight> entry : weights.entrySet()) { - ScorerSupplier ss = entry.getValue().scorerSupplier(readerContext); - if (ss != null) { - Bits matchingBits = 
Lucene.asSequentialAccessBits(readerContext.reader().maxDoc(), ss); - matchingIterators.put(entry.getKey(), matchingBits); + ScorerSupplier scorerSupplier = entry.getValue().scorerSupplier(readerContext); + if (scorerSupplier != null) { + Bits bits = Lucene.asSequentialAccessBits(readerContext.reader().maxDoc(), scorerSupplier); + matchingBits.put(entry.getKey(), bits); } } } @@ -93,15 +138,14 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { @Override public void process(HitContext hitContext) { List<String> matches = new ArrayList<>(); - int doc = hitContext.docId(); - for (Map.Entry<String, Bits> iterator : matchingIterators.entrySet()) { - if (iterator.getValue().get(doc)) { - matches.add(iterator.getKey()); + int docId = hitContext.docId(); + for (Map.Entry<String, Bits> entry : matchingBits.entrySet()) { + if (entry.getValue().get(docId)) { + matches.add(entry.getKey()); } } hitContext.hit().matchedQueries(matches.toArray(new String[0])); } }; } - } diff --git a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java index 151ef97a2a141..3a3b46366a6d2 100644 --- a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java @@ -340,6 +340,14 @@ public FieldDoc searchAfter() { return in.searchAfter(); } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + return in.includeNamedQueriesScore(includeNamedQueriesScore); + } + + @Override + public boolean includeNamedQueriesScore() { + return in.includeNamedQueriesScore(); + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { return in.parsedPostFilter(postFilter); } diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java index 02837da64dafd..cd8f9f8410d50 100644 --- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java @@ -305,6 +305,29 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { public abstract boolean trackScores(); + /** + * Sets whether named queries' scores should be included in the search results. + * By default, scores from named queries are not included. + * + * @param includeNamedQueriesScore true to include scores from named queries, false otherwise. + * @return this context, for method chaining. + */ + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + // Default implementation does nothing and returns this for chaining. + // Implementations of SearchContext should override this method to actually store the value. + return this; + } + + /** + * Checks if scores from named queries are included in the search results. + * + * @return true if scores from named queries are included, false otherwise. + */ + public boolean includeNamedQueriesScore() { + // Default implementation returns false. + // Implementations of SearchContext should override this method to return the actual value.
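+ // Illustrative usage (hypothetical caller, not part of this change): the flag is normally set through the search source, e.g. new SearchSourceBuilder().query(QueryBuilders.termQuery("field", "value").queryName("my_query")).includeNamedQueriesScores(true), or via "include_named_queries_score": true in the JSON body; concrete contexts override this pair of methods to store and expose the parsed flag.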
+ return false; + } + public abstract SearchContext trackTotalHitsUpTo(int trackTotalHits); /** diff --git a/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java b/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java index 55315013ea8c9..b2c97baf78d91 100644 --- a/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java @@ -82,6 +82,8 @@ public class SubSearchContext extends FilteredSearchContext { private boolean explain; private boolean trackScores; + + private boolean includeNamedQueriesScore; private boolean version; private boolean seqNoAndPrimaryTerm; @@ -234,6 +236,17 @@ public boolean trackScores() { return trackScores; } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + @Override + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore; + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { throw new UnsupportedOperationException("Not supported"); diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java index 3bd1c5118b5fb..70cbd8d7ad6c3 100644 --- a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java @@ -78,6 +78,7 @@ public void setDocument(int docId) { this.docId = docId; } + @SuppressWarnings("removal") @Override public ScriptDocValues<?> get(Object key) { // assume its a string... diff --git a/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java b/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java index 631ace41090d7..19a59e9f7bebe 100644 --- a/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java +++ b/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.Query; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.AggregationProcessor; import org.opensearch.search.internal.ContextIndexSearcher; import org.opensearch.search.internal.SearchContext; @@ -33,9 +32,7 @@ public class QueryPhaseSearcherWrapper implements QueryPhaseSearcher { public QueryPhaseSearcherWrapper() { this.defaultQueryPhaseSearcher = new QueryPhase.DefaultQueryPhaseSearcher(); - this.concurrentQueryPhaseSearcher = FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH) - ? 
new ConcurrentQueryPhaseSearcher() - : null; + this.concurrentQueryPhaseSearcher = new ConcurrentQueryPhaseSearcher(); } /** @@ -58,10 +55,8 @@ public boolean searchWith( boolean hasTimeout ) throws IOException { if (searchContext.shouldUseConcurrentSearch()) { - LOGGER.debug("Using concurrent search over segments (experimental) for request with context id {}", searchContext.id()); return concurrentQueryPhaseSearcher.searchWith(searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout); } else { - LOGGER.debug("Using non-concurrent search over segments for request with context id {}", searchContext.id()); return defaultQueryPhaseSearcher.searchWith(searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout); } } @@ -74,13 +69,8 @@ public boolean searchWith( @Override public AggregationProcessor aggregationProcessor(SearchContext searchContext) { if (searchContext.shouldUseConcurrentSearch()) { - LOGGER.debug( - "Using concurrent aggregation processor over segments (experimental) for request with context id {}", - searchContext.id() - ); return concurrentQueryPhaseSearcher.aggregationProcessor(searchContext); } else { - LOGGER.debug("Using non-concurrent aggregation processor over segments for request with context id {}", searchContext.id()); return defaultQueryPhaseSearcher.aggregationProcessor(searchContext); } } } diff --git a/server/src/main/java/org/opensearch/snapshots/RestoreService.java b/server/src/main/java/org/opensearch/snapshots/RestoreService.java index 9d2c7eb882fa1..bf2c7fc74be92 100644 --- a/server/src/main/java/org/opensearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/opensearch/snapshots/RestoreService.java @@ -121,6 +121,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_VERSION_UPGRADED; import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY; import static org.opensearch.common.util.set.Sets.newHashSet; +import static org.opensearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.opensearch.index.store.remote.directory.RemoteSnapshotDirectory.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY_MINIMUM_VERSION; import static org.opensearch.index.store.remote.filecache.FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING; import static org.opensearch.node.Node.NODE_SEARCH_CACHE_SIZE_SETTING; @@ -226,6 +227,16 @@ public RestoreService( */ public void restoreSnapshot(final RestoreSnapshotRequest request, final ActionListener<RestoreCompletionResponse> listener) { try { + // Setting INDEX_STORE_TYPE_SETTING to REMOTE_SNAPSHOT is intended to be a system-managed index setting that is configured when + // restoring a snapshot and should not be set manually by the user. + String storeTypeSetting = request.indexSettings().get(INDEX_STORE_TYPE_SETTING.getKey()); + if (storeTypeSetting != null && storeTypeSetting.equals(RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT.toString())) { + throw new SnapshotRestoreException( + request.repository(), + request.snapshot(), + "cannot restore remote snapshot with index settings \"index.store.type\" set to \"remote_snapshot\". Instead use \"storage_type\": \"remote_snapshot\" as argument to restore."
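+ // Illustrative request bodies for the snapshot _restore API (assumed JSON shapes): rejected: { "index_settings": { "index.store.type": "remote_snapshot" } }; accepted: { "storage_type": "remote_snapshot" }.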
+ ); + } // Read snapshot info and metadata from the repository final String repositoryName = request.repository(); Repository repository = repositoriesService.repository(repositoryName); diff --git a/server/src/main/java/org/opensearch/telemetry/TelemetrySettings.java b/server/src/main/java/org/opensearch/telemetry/TelemetrySettings.java index 24dcab98c8870..4b8897a318531 100644 --- a/server/src/main/java/org/opensearch/telemetry/TelemetrySettings.java +++ b/server/src/main/java/org/opensearch/telemetry/TelemetrySettings.java @@ -66,7 +66,6 @@ public class TelemetrySettings { private volatile boolean tracingEnabled; private volatile double samplingProbability; - private final boolean tracingFeatureEnabled; private final boolean metricsFeatureEnabled; @@ -98,6 +97,7 @@ public void setSamplingProbability(double samplingProbability) { /** * Get sampling ratio + * @return the sampling probability */ public double getSamplingProbability() { return samplingProbability; } @@ -110,4 +110,5 @@ public boolean isTracingFeatureEnabled() { public boolean isMetricsFeatureEnabled() { return metricsFeatureEnabled; } + } diff --git a/server/src/main/java/org/opensearch/telemetry/tracing/AttributeNames.java b/server/src/main/java/org/opensearch/telemetry/tracing/AttributeNames.java index b6b2cf360d1c5..6a97914b04ebc 100644 --- a/server/src/main/java/org/opensearch/telemetry/tracing/AttributeNames.java +++ b/server/src/main/java/org/opensearch/telemetry/tracing/AttributeNames.java @@ -40,6 +40,11 @@ private AttributeNames() { */ public static final String HTTP_URI = "http.uri"; + /** + * HTTP request query parameters. + */ + public static final String HTTP_REQ_QUERY_PARAMS = "url.query"; + /** * Rest Request ID. */ diff --git a/server/src/main/java/org/opensearch/telemetry/tracing/SpanBuilder.java b/server/src/main/java/org/opensearch/telemetry/tracing/SpanBuilder.java index 1dce422943b7a..70658c5d71bf3 100644 --- a/server/src/main/java/org/opensearch/telemetry/tracing/SpanBuilder.java +++ b/server/src/main/java/org/opensearch/telemetry/tracing/SpanBuilder.java @@ -11,6 +11,7 @@ import org.opensearch.action.bulk.BulkShardRequest; import org.opensearch.action.support.replication.ReplicatedWriteRequest; import org.opensearch.common.annotation.InternalApi; +import org.opensearch.common.collect.Tuple; import org.opensearch.core.common.Strings; import org.opensearch.http.HttpRequest; import org.opensearch.rest.RestRequest; @@ -75,7 +76,9 @@ public static SpanCreationContext from(String spanName, String nodeId, Replicate } private static String createSpanName(HttpRequest httpRequest) { - return httpRequest.method().name() + SEPARATOR + httpRequest.uri(); + Tuple<String, String> uriParts = splitUri(httpRequest.uri()); + String path = uriParts.v1(); + return httpRequest.method().name() + SEPARATOR + path; } private static Attributes buildSpanAttributes(HttpRequest httpRequest) { @@ -84,9 +87,26 @@ private static Attributes buildSpanAttributes(HttpRequest httpRequest) { .addAttribute(AttributeNames.HTTP_METHOD, httpRequest.method().name()) .addAttribute(AttributeNames.HTTP_PROTOCOL_VERSION, httpRequest.protocolVersion().name()); populateHeader(httpRequest, attributes); + + Tuple<String, String> uriParts = splitUri(httpRequest.uri()); + String query = uriParts.v2(); + if (query.isBlank() == false) { + attributes.addAttribute(AttributeNames.HTTP_REQ_QUERY_PARAMS, query); + } + return attributes; } + private static Tuple<String, String> splitUri(String uri) { + int index = uri.indexOf('?'); + if (index >= 0 && index < uri.length() - 1) {
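+ // '?' is present and not the final character: split into path and non-empty query, e.g. "/_search?size=10" becomes ("/_search", "size=10") (illustrative URI).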
+ String path = uri.substring(0, index); + String query = uri.substring(index + 1); + return new Tuple<>(path, query); + } + return new Tuple<>(uri, ""); + } + private static void populateHeader(HttpRequest httpRequest, Attributes attributes) { HEADERS_TO_BE_ADDED_AS_ATTRIBUTES.forEach(x -> { if (httpRequest.getHeaders() != null @@ -102,9 +122,8 @@ private static String createSpanName(RestRequest restRequest) { if (restRequest != null) { try { String methodName = restRequest.method().name(); - // path() does the decoding, which may give error - String path = restRequest.path(); - spanName = methodName + SEPARATOR + path; + String rawPath = restRequest.rawPath(); + spanName = methodName + SEPARATOR + rawPath; } catch (Exception e) { // swallow the exception and keep the default name. } @@ -114,9 +133,16 @@ private static String createSpanName(RestRequest restRequest) { private static Attributes buildSpanAttributes(RestRequest restRequest) { if (restRequest != null) { - return Attributes.create() + Attributes attributes = Attributes.create() .addAttribute(AttributeNames.REST_REQ_ID, restRequest.getRequestId()) .addAttribute(AttributeNames.REST_REQ_RAW_PATH, restRequest.rawPath()); + + Tuple<String, String> uriParts = splitUri(restRequest.uri()); + String query = uriParts.v2(); + if (query.isBlank() == false) { + attributes.addAttribute(AttributeNames.HTTP_REQ_QUERY_PARAMS, query); + } + return attributes; } else { return Attributes.EMPTY; } diff --git a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index 12052598d3671..0b9026b81eb4e 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -42,7 +42,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.SizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.OpenSearchThreadPoolExecutor; import org.opensearch.common.util.concurrent.ThreadContext; @@ -187,9 +186,7 @@ public static ThreadPoolType fromType(String type) { map.put(Names.REMOTE_PURGE, ThreadPoolType.SCALING); map.put(Names.REMOTE_REFRESH_RETRY, ThreadPoolType.SCALING); map.put(Names.REMOTE_RECOVERY, ThreadPoolType.SCALING); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - map.put(Names.INDEX_SEARCHER, ThreadPoolType.RESIZABLE); - } + map.put(Names.INDEX_SEARCHER, ThreadPoolType.RESIZABLE); THREAD_POOL_TYPES = Collections.unmodifiableMap(map); } @@ -282,12 +279,16 @@ public ThreadPool( TimeValue.timeValueMinutes(5) ) ); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - builders.put( + builders.put( + Names.INDEX_SEARCHER, + new ResizableExecutorBuilder( + settings, Names.INDEX_SEARCHER, - new ResizableExecutorBuilder(settings, Names.INDEX_SEARCHER, allocatedProcessors, 1000, runnableTaskListener) - ); - } + twiceAllocatedProcessors(allocatedProcessors), + 1000, + runnableTaskListener + ) + ); for (final ExecutorBuilder<?> builder : customBuilders) { if (builders.containsKey(builder.name())) { diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat index 2c92f0ecd3f51..80b1d25064885 100644 --- 
a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat @@ -1 +1,2 @@ org.apache.lucene.search.suggest.document.Completion50PostingsFormat +org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat diff --git a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java index 76129341fc9a2..a7cbbffc51ed4 100644 --- a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java @@ -338,7 +338,14 @@ public void testOnPhaseFailureAndVerifyListeners() { SearchQueryThenFetchAsyncAction action = createSearchQueryThenFetchAsyncAction(requestOperationListeners); action.start(); assertEquals(1, testListener.getPhaseCurrent(action.getSearchPhaseName())); - action.onPhaseFailure(new SearchPhase("test") { + action.onPhaseFailure(new SearchPhase("none") { + @Override + public void run() { + + } + }, "message", null); + assertEquals(1, testListener.getPhaseCurrent(action.getSearchPhaseName())); + action.onPhaseFailure(new SearchPhase(action.getName()) { @Override public void run() { @@ -352,14 +359,14 @@ public void run() { ); searchDfsQueryThenFetchAsyncAction.start(); assertEquals(1, testListener.getPhaseCurrent(searchDfsQueryThenFetchAsyncAction.getSearchPhaseName())); - searchDfsQueryThenFetchAsyncAction.onPhaseFailure(new SearchPhase("test") { + searchDfsQueryThenFetchAsyncAction.onPhaseFailure(new SearchPhase(searchDfsQueryThenFetchAsyncAction.getName()) { @Override public void run() { } }, "message", null); - assertEquals(0, testListener.getPhaseCurrent(action.getSearchPhaseName())); - assertEquals(0, testListener.getPhaseTotal(action.getSearchPhaseName())); + assertEquals(0, testListener.getPhaseCurrent(searchDfsQueryThenFetchAsyncAction.getSearchPhaseName())); + assertEquals(0, testListener.getPhaseTotal(searchDfsQueryThenFetchAsyncAction.getSearchPhaseName())); FetchSearchPhase fetchPhase = createFetchSearchPhase(); ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); @@ -368,7 +375,7 @@ public void run() { action.skipShard(searchShardIterator); action.executeNextPhase(action, fetchPhase); assertEquals(1, testListener.getPhaseCurrent(fetchPhase.getSearchPhaseName())); - action.onPhaseFailure(new SearchPhase("test") { + action.onPhaseFailure(new SearchPhase(fetchPhase.getName()) { @Override public void run() { @@ -403,6 +410,30 @@ public void run() { assertEquals(requestIds, releasedContexts); } + public void testOnPhaseStart() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); + + final List<SearchRequestOperationsListener> requestOperationListeners = new ArrayList<>(List.of(testListener)); + SearchQueryThenFetchAsyncAction action = createSearchQueryThenFetchAsyncAction(requestOperationListeners); + + action.onPhaseStart(new SearchPhase("test") { + @Override + public void run() {} + }); + action.onPhaseStart(new SearchPhase("none") { + @Override + public void run() {} + }); + assertEquals(0, testListener.getPhaseCurrent(action.getSearchPhaseName())); + + action.onPhaseStart(new SearchPhase(action.getName()) { + @Override + public void run() {} + }); + 
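// Only starting a phase whose name matches the action's own phase name increments the per-phase counter; the "test" and "none" phases above left it at zero, so exactly one start is expected here. +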
assertEquals(1, testListener.getPhaseCurrent(action.getSearchPhaseName())); + } + public void testShardNotAvailableWithDisallowPartialFailures() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false); AtomicReference<Exception> exception = new AtomicReference<>(); diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java index 78c5ba4412c68..1cb336e18b12c 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java @@ -119,13 +119,13 @@ public void testStandardListenerAndPerRequestListenerDisabled() { public SearchRequestOperationsListener createTestSearchRequestOperationsListener() { return new SearchRequestOperationsListener() { @Override - void onPhaseStart(SearchPhaseContext context) {} + protected void onPhaseStart(SearchPhaseContext context) {} @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} @Override - void onPhaseFailure(SearchPhaseContext context) {} + protected void onPhaseFailure(SearchPhaseContext context) {} }; } } diff --git a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java index 4b0bde0984ad1..e84b5213be39e 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -64,9 +64,8 @@ import org.apache.lucene.store.Directory; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -82,10 +81,10 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; -public abstract class AbstractTermVectorsTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class AbstractTermVectorsTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public AbstractTermVectorsTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public AbstractTermVectorsTestCase(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -96,11 +95,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - protected static class TestFieldSetting { public final String name; public final boolean storedOffset; diff --git a/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java b/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java index b1e27ea9c66e3..2b4d2a755f543 100644 --- 
a/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java +++ b/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java @@ -49,6 +49,7 @@ public class OpenSearchPolicyTests extends OpenSearchTestCase { /** * test restricting privileges to no permissions actually works */ + @SuppressWarnings("removal") public void testRestrictPrivileges() { assumeTrue("test requires security manager", System.getSecurityManager() != null); try { diff --git a/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java b/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java index ea4ef96ec0f77..69e561bb8fd89 100644 --- a/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java +++ b/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java @@ -72,6 +72,7 @@ public void testEnsureRegularFile() throws IOException { } /** can't execute processes */ + @SuppressWarnings("removal") public void testProcessExecution() throws Exception { assumeTrue("test requires security manager", System.getSecurityManager() != null); try { diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index cea151748bfb6..6d1f359d210ac 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -35,6 +35,7 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.ResourceAlreadyExistsException; import org.opensearch.Version; +import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.opensearch.action.admin.indices.shrink.ResizeType; @@ -133,6 +134,7 @@ import static org.opensearch.cluster.metadata.MetadataCreateIndexService.getIndexNumberOfRoutingShards; import static org.opensearch.cluster.metadata.MetadataCreateIndexService.parseV1Mappings; import static org.opensearch.cluster.metadata.MetadataCreateIndexService.resolveAndValidateAliases; +import static org.opensearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.opensearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; @@ -146,6 +148,7 @@ import static org.opensearch.node.Node.NODE_ATTRIBUTES; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -1936,6 +1939,35 @@ public void testRequestDurabilityWhenRestrictSettingTrue() { assertEquals(Translog.Durability.REQUEST, INDEX_TRANSLOG_DURABILITY_SETTING.get(indexSettings)); } + public void testIndexCreationWithIndexStoreTypeRemoteStoreThrowsException() { + // This checks that aggregateIndexSettings throws an exception when the index setting + // index.store.type is set to remote_snapshot + request = new
CreateIndexClusterStateUpdateRequest("create index", "test", "test"); + final Settings.Builder requestSettings = Settings.builder(); + requestSettings.put(INDEX_STORE_TYPE_SETTING.getKey(), RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT); + request.settings(requestSettings.build()); + final IllegalArgumentException error = expectThrows( + IllegalArgumentException.class, + () -> aggregateIndexSettings( + ClusterState.EMPTY_STATE, + request, + Settings.EMPTY, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Collections.emptySet(), + clusterSettings + ) + ); + assertThat( + error.getMessage(), + containsString( + "cannot create index with index setting \"index.store.type\" set to \"remote_snapshot\". Store type can be set to \"remote_snapshot\" only when restoring a remote snapshot by using \"storage_type\": \"remote_snapshot\"" + ) + ); + } + private IndexTemplateMetadata addMatchingTemplate(Consumer<IndexTemplateMetadata.Builder> configurator) { IndexTemplateMetadata.Builder builder = templateMetadataBuilder("template1", "te*"); configurator.accept(builder); diff --git a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java index 780d041c25d04..7a0fd76b0fbd9 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java @@ -46,8 +46,6 @@ import java.util.Iterator; import java.util.List; -import org.mockito.Mockito; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -206,40 +204,4 @@ public void testInterleavedShardIteratorReplicaFirst() { } assertEquals(shardCount, this.totalNumberOfShards); } - - public void testSwapPrimaryWithReplica() { - // Initialize all the shards for test index 1 and 2 - initPrimaries(); - startInitializingShards(TEST_INDEX_1); - startInitializingShards(TEST_INDEX_1); - startInitializingShards(TEST_INDEX_2); - startInitializingShards(TEST_INDEX_2); - - // Create primary shard count imbalance between two nodes - final RoutingNodes routingNodes = this.clusterState.getRoutingNodes(); - final RoutingNode node0 = routingNodes.node("node0"); - final RoutingNode node1 = routingNodes.node("node1"); - final List<ShardRouting> shardRoutingList = node0.shardsWithState(TEST_INDEX_1, ShardRoutingState.STARTED); - final RoutingChangesObserver routingChangesObserver = Mockito.mock(RoutingChangesObserver.class); - int swaps = 0; - - for (ShardRouting routing : shardRoutingList) { - if (routing.primary()) { - ShardRouting swap = node1.getByShardId(routing.shardId()); - routingNodes.swapPrimaryWithReplica(logger, routing, swap, routingChangesObserver); - swaps++; - } - } - Mockito.verify(routingChangesObserver, Mockito.times(swaps)).replicaPromoted(Mockito.any()); - - final List<ShardRouting> shards = node1.shardsWithState(TEST_INDEX_1, ShardRoutingState.STARTED); - int shardCount = 0; - for (ShardRouting shard : shards) { - if (shard.primary()) { - shardCount++; - } - } - - assertTrue(shardCount >= swaps); - } } diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java index a1db6cd83ab6c..6a03a1f79bcde 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java +++ 
b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java @@ -229,6 +229,11 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing return Decision.ALWAYS; } } + + @Override + public Decision canAllocateAnyShardToNode(RoutingNode node, RoutingAllocation allocation) { + return throttle ? Decision.THROTTLE : Decision.YES; + } }); Collections.shuffle(deciders, random()); return new AllocationDeciders(deciders); diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java index f2e79b319d0dd..b840b78eff448 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java @@ -47,7 +47,7 @@ public void testExcludeNodeIdMoveBlocked() { /** * Test move operations for index level allocation settings. - * Supported for local indices, not supported for remote indices. + * Supported for local indices and remote indices. */ public void testIndexLevelExclusions() throws InterruptedException { int localOnlyNodes = 7; @@ -102,8 +102,9 @@ public void testIndexLevelExclusions() throws InterruptedException { // No shard of updated local index should be on excluded local capable node assertTrue(routingTable.allShards(localIndex).stream().noneMatch(shard -> shard.currentNodeId().equals(excludedLocalOnlyNode))); - // Since remote index shards are untouched, at least one shard should - // continue to stay on the excluded remote capable node - assertTrue(routingTable.allShards(remoteIndex).stream().anyMatch(shard -> shard.currentNodeId().equals(excludedRemoteCapableNode))); + // No shard of updated remote index should be on excluded remote capable node + assertTrue( + routingTable.allShards(remoteIndex).stream().noneMatch(shard -> shard.currentNodeId().equals(excludedRemoteCapableNode)) + ); } } diff --git a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java index e8ddfde11f4cc..4fd8986d0b428 100644 --- a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java @@ -779,7 +779,9 @@ public void testSamePrinterOutput() { DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC); for (FormatNames format : FormatNames.values()) { - if (format == FormatNames.ISO8601 || format == FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS) { + if (format == FormatNames.ISO8601 + || format == FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS + || format == FormatNames.RFC3339_LENIENT) { // Nanos aren't supported by joda continue; } diff --git a/server/src/test/java/org/opensearch/common/path/PathTrieTests.java b/server/src/test/java/org/opensearch/common/path/PathTrieTests.java index e366972feeaf2..2f0618ee299b4 100644 --- a/server/src/test/java/org/opensearch/common/path/PathTrieTests.java +++ b/server/src/test/java/org/opensearch/common/path/PathTrieTests.java @@ -36,8 +36,10 @@ import org.opensearch.rest.RestUtils; import org.opensearch.test.OpenSearchTestCase; +import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ 
-286,4 +288,33 @@ public void testEscapedSlashWithinUrl() { assertThat(params.get("type"), equalTo("type")); assertThat(params.get("id"), equalTo("id")); } + + public void testRetrieveAllEmpty() { + PathTrie<String> trie = new PathTrie<>(NO_DECODER); + Iterator<String> allPaths = trie.retrieveAll(); + assertFalse(allPaths.hasNext()); + } + + public void testRetrieveAll() { + PathTrie<String> trie = new PathTrie<>(NO_DECODER); + trie.insert("{testA}", "test1"); + trie.insert("{testA}/{testB}", "test2"); + trie.insert("a/{testB}", "test3"); + trie.insert("{testA}/b", "test4"); + trie.insert("{testA}/b/c", "test5"); + + Iterator<String> iterator = trie.retrieveAll(); + assertTrue(iterator.hasNext()); + List<String> paths = new ArrayList<>(); + iterator.forEachRemaining(paths::add); + assertEquals(List.of("test1", "test4", "test5", "test2", "test3"), paths); + assertFalse(iterator.hasNext()); + } + + public void testRetrieveAllWithNullValue() { + PathTrie<String> trie = new PathTrie<>(NO_DECODER); + trie.insert("{testA}", null); + Iterator<String> iterator = trie.retrieveAll(); + assertFalse(iterator.hasNext()); + } } diff --git a/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java b/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java index 0becb6cde5e64..66c9801d16598 100644 --- a/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java +++ b/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java @@ -286,24 +286,9 @@ public void testDynamicIndexSettingsRegistration() { } public void testConcurrentSegmentSearchClusterSettings() { - // Test that we throw an exception without the feature flag - Settings settings = Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(); - SettingsException ex = expectThrows(SettingsException.class, () -> new SettingsModule(settings)); - assertEquals( - "unknown setting [" - + SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey() - + "] please check that any required plugins are installed, or check the breaking " - + "changes documentation for removed settings", - ex.getMessage() - ); - - // Test that the settings updates correctly with the feature flag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); boolean settingValue = randomBoolean(); - Settings settingsWithFeatureFlag = Settings.builder() - .put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), settingValue) - .build(); - SettingsModule settingsModule = new SettingsModule(settingsWithFeatureFlag); + Settings settings = Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), settingValue).build(); + SettingsModule settingsModule = new SettingsModule(settings); assertEquals(settingValue, SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settingsModule.getSettings())); } @@ -311,24 +296,9 @@ public void testConcurrentSegmentSearchIndexSettings() { Settings.Builder target = Settings.builder().put(Settings.EMPTY); Settings.Builder update = Settings.builder(); - // Test that we throw an exception without the feature flag SettingsModule module = new SettingsModule(Settings.EMPTY); IndexScopedSettings indexScopedSettings = module.getIndexScopedSettings(); - expectThrows( - SettingsException.class, - () -> indexScopedSettings.updateDynamicSettings( - Settings.builder().put(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(), - target, - update, - "node" - ) - ); - - //
Test that the settings updates correctly with the feature flag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); - SettingsModule moduleWithFeatureFlag = new SettingsModule(Settings.EMPTY); - IndexScopedSettings indexScopedSettingsWithFeatureFlag = moduleWithFeatureFlag.getIndexScopedSettings(); - indexScopedSettingsWithFeatureFlag.updateDynamicSettings( + indexScopedSettings.updateDynamicSettings( Settings.builder().put(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(), target, update, @@ -337,23 +307,11 @@ public void testConcurrentSegmentSearchIndexSettings() { } public void testMaxSliceCountClusterSettingsForConcurrentSearch() { - // Test that we throw an exception without the feature flag - Settings settings = Settings.builder() - .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), 2) - .build(); - SettingsException ex = expectThrows(SettingsException.class, () -> new SettingsModule(settings)); - assertTrue( - ex.getMessage() - .contains("unknown setting [" + SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey() + "]") - ); - - // Test that the settings updates correctly with the feature flag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); int settingValue = randomIntBetween(0, 10); - Settings settingsWithFeatureFlag = Settings.builder() + Settings settings = Settings.builder() .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue) .build(); - SettingsModule settingsModule = new SettingsModule(settingsWithFeatureFlag); + SettingsModule settingsModule = new SettingsModule(settings); assertEquals( settingValue, (int) SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.get(settingsModule.getSettings()) @@ -361,10 +319,10 @@ public void testMaxSliceCountClusterSettingsForConcurrentSearch() { // Test that negative value is not allowed settingValue = -1; - final Settings settingsWithFeatureFlag_2 = Settings.builder() + final Settings settings_2 = Settings.builder() .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue) .build(); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settingsWithFeatureFlag_2)); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settings_2)); assertTrue(iae.getMessage().contains(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey())); } } diff --git a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java index 681daf1755890..85c9919275c3a 100644 --- a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java @@ -249,10 +249,19 @@ public void testEpochSecondParserWithFraction() { } public void testEpochMilliParsersWithDifferentFormatters() { - DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||epoch_millis"); - TemporalAccessor accessor = formatter.parse("123"); - assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L)); - assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis")); + { + DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||epoch_millis"); + TemporalAccessor accessor = formatter.parse("123"); + 
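// "123" does not parse as a strict_date_optional_time date, so the composite formatter is expected to fall through to epoch_millis and read the value as 123 milliseconds since the epoch. +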
assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L)); + assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis")); + } + + { + DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient||epoch_millis"); + TemporalAccessor accessor = formatter.parse("123"); + assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L)); + assertThat(formatter.pattern(), is("rfc3339_lenient||epoch_millis")); + } } public void testParsersWithMultipleInternalFormats() throws Exception { @@ -317,6 +326,11 @@ public void testEqualsAndHashcode() { assertThat(epochMillisFormatter.hashCode(), is(DateFormatters.forPattern("epoch_millis").hashCode())); assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis"))); assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis"))); + + DateFormatter rfc3339Formatter = DateFormatters.forPattern("rfc3339_lenient"); + assertThat(rfc3339Formatter.hashCode(), is(DateFormatters.forPattern("rfc3339_lenient").hashCode())); + assertThat(rfc3339Formatter, sameInstance(DateFormatters.forPattern("rfc3339_lenient"))); + assertThat(rfc3339Formatter, equalTo(DateFormatters.forPattern("rfc3339_lenient"))); } public void testSupportBackwardsJava8Format() { @@ -461,6 +475,132 @@ public void testIso8601Parsing() { formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+01:00")); } + public void testRFC3339Parsing() { + DateFormatter formatter = DateFormatters.forPattern("rfc3339_lenient"); + + // timezone not allowed with just date + formatter.format(formatter.parse("2018")); + formatter.format(formatter.parse("2018-05")); + formatter.format(formatter.parse("2018-05-15")); + + formatter.format(formatter.parse("2018-05-15T17:14Z")); + formatter.format(formatter.parse("2018-05-15T17:14z")); + formatter.format(formatter.parse("2018-05-15T17:14+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14-01:00")); + + formatter.format(formatter.parse("2018-05-15T17:14:56Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56z")); + formatter.format(formatter.parse("2018-05-15T17:14:56+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56-01:00")); + + // milliseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123-01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123+01:00")); + + // microseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123456Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456-01:00")); + + // nanoseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789-01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789z")); +
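// RFC 3339 also permits lowercase 't' and 'z', which is why the lowercase variants above are accepted. +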
formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+01:00")); + + // 1994-11-05T08:15:30-05:00 corresponds to November 5, 1994, 8:15:30 am, US Eastern Standard Time. + // 1994-11-05T13:15:30Z corresponds to the same instant. + final Instant instantA = DateFormatters.from(formatter.parse("1994-11-05T08:15:30-05:00")).toInstant(); + final Instant instantB = DateFormatters.from(formatter.parse("1994-11-05T13:15:30Z")).toInstant(); + assertThat(instantA, is(instantB)); + + // Invalid dates should throw an exception + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("abc")); + assertThat(e.getMessage(), is("failed to parse date field [abc] with format [rfc3339_lenient]")); + // Invalid offset + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56-00:00")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56-00:00] with format [rfc3339_lenient]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.+00:00")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.+00:00] with format [rfc3339_lenient]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56_00:00")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56_00:00] with format [rfc3339_lenient]")); + // No offset + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56] with format [rfc3339_lenient]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.123")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.123] with format [rfc3339_lenient]")); + // No end of fraction + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.123")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.123] with format [rfc3339_lenient]")); + // Invalid fraction + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.abcZ")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.abcZ] with format [rfc3339_lenient]")); + // Invalid date + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("201805-15T17:14:56.123456+0000")); + assertThat(e.getMessage(), is("failed to parse date field [201805-15T17:14:56.123456+0000] with format [rfc3339_lenient]")); + // More than 9 digits of nanosecond resolution + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.1234567891Z")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.1234567891Z] with format [rfc3339_lenient]")); + } + + public void testRFC3339ParserWithDifferentFormatters() { + { + DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||rfc3339_lenient"); + TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56+0100"); + assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(1526400896000L)); + assertThat(formatter.pattern(), is("strict_date_optional_time||rfc3339_lenient")); + } + + { + DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient||strict_date_optional_time"); + TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56.123+0100"); +
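// Unlike the "+01:00" offsets used elsewhere, "+0100" omits the colon that strict RFC 3339 requires, so these cases exercise the lenient offset handling. +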
assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(1526400896123L)); + assertThat(formatter.pattern(), is("rfc3339_lenient||strict_date_optional_time")); + } + + { + DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient||strict_date_optional_time"); + TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56.123456789+0100"); + assertThat(DateFormatters.from(accessor).toInstant().getNano(), is(123456789)); + assertThat(formatter.pattern(), is("rfc3339_lenient||strict_date_optional_time")); + } + } + + public void testRFC3339ParserAgainstDifferentFormatters() { + DateFormatter rfc3339Formatter = DateFormatter.forPattern("rfc3339_lenient"); + { + DateFormatter isoFormatter = DateFormatter.forPattern("strict_date_optional_time"); + + assertDateTimeEquals("2018-05-15T17:14Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14-01:00", rfc3339Formatter, isoFormatter); + + assertDateTimeEquals("2018-05-15T17:14:56Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56-01:00", rfc3339Formatter, isoFormatter); + + assertDateTimeEquals("2018-05-15T17:14:56.123Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123-01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123-01:00", rfc3339Formatter, isoFormatter); + + assertDateTimeEquals("2018-05-15T17:14:56.123456Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123456789+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123456789-01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123456789+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123456789-01:00", rfc3339Formatter, isoFormatter); + } + } + public void testRoundupFormatterWithEpochDates() { assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L); // also check nanos of the epoch_millis formatter if it is rounded up to the nano second @@ -683,4 +823,10 @@ public void testCamelCaseDeprecation() { } } } + + void assertDateTimeEquals(String toTest, DateFormatter candidateParser, DateFormatter baselineParser) { + Instant gotInstant = DateFormatters.from(candidateParser.parse(toTest)).toInstant(); + Instant expectedInstant = DateFormatters.from(baselineParser.parse(toTest)).toInstant(); + assertThat(gotInstant, is(expectedInstant)); + } } diff --git a/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java b/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java index 2063cd26a9e8e..4823ce7a238e3 100644 --- a/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java +++ b/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java @@ -49,6 +49,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * Tests for OpenSearchExecutors and its components like OpenSearchAbortPolicy. 
@@ -279,6 +280,41 @@ public void testScaleDown() throws Exception { terminate(pool); } + /** + * The test case is adapted from https://bugs.openjdk.org/browse/JDK-8323659 reproducer. + */ + public void testScaleUpWithSpawningTask() throws Exception { + ThreadPoolExecutor pool = OpenSearchExecutors.newScaling( + getClass().getName() + "/" + getTestName(), + 0, + 1, + between(1, 100), + randomTimeUnit(), + OpenSearchExecutors.daemonThreadFactory("test"), + threadContext + ); + assertThat("Min property", pool.getCorePoolSize(), equalTo(0)); + assertThat("Max property", pool.getMaximumPoolSize(), equalTo(1)); + + final CountDownLatch latch = new CountDownLatch(10); + class TestTask implements Runnable { + @Override + public void run() { + latch.countDown(); + if (latch.getCount() > 0) { + pool.execute(TestTask.this); + } + } + } + pool.execute(new TestTask()); + latch.await(); + + assertThat("wrong pool size", pool.getPoolSize(), lessThanOrEqualTo(1)); + assertThat("wrong active size", pool.getActiveCount(), lessThanOrEqualTo(1)); + + terminate(pool); + } + public void testRejectionMessageAndShuttingDownFlag() throws InterruptedException { int pool = between(1, 10); int queue = between(0, 100); diff --git a/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java index 7f669934579ee..962eb743dca6e 100644 --- a/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java @@ -359,6 +359,57 @@ protected void doRun() throws Exception { env.close(); } + public void testIndexStoreListener() throws Exception { + final AtomicInteger shardCounter = new AtomicInteger(0); + final AtomicInteger indexCounter = new AtomicInteger(0); + final Index index = new Index("foo", "fooUUID"); + final ShardId shardId = new ShardId(index, 0); + final NodeEnvironment.IndexStoreListener listener = new NodeEnvironment.IndexStoreListener() { + @Override + public void beforeShardPathDeleted(ShardId inShardId, IndexSettings indexSettings, NodeEnvironment env) { + assertEquals(shardId, inShardId); + shardCounter.incrementAndGet(); + } + + @Override + public void beforeIndexPathDeleted(Index inIndex, IndexSettings indexSettings, NodeEnvironment env) { + assertEquals(index, inIndex); + indexCounter.incrementAndGet(); + } + }; + final NodeEnvironment env = newNodeEnvironment(listener); + + for (Path path : env.indexPaths(index)) { + Files.createDirectories(path.resolve("0")); + } + + for (Path path : env.indexPaths(index)) { + assertTrue(Files.exists(path.resolve("0"))); + } + assertEquals(0, shardCounter.get()); + + env.deleteShardDirectorySafe(new ShardId(index, 0), idxSettings); + + for (Path path : env.indexPaths(index)) { + assertFalse(Files.exists(path.resolve("0"))); + } + assertEquals(1, shardCounter.get()); + + for (Path path : env.indexPaths(index)) { + assertTrue(Files.exists(path)); + } + assertEquals(0, indexCounter.get()); + + env.deleteIndexDirectorySafe(index, 5000, idxSettings); + + for (Path path : env.indexPaths(index)) { + assertFalse(Files.exists(path)); + } + assertEquals(1, indexCounter.get()); + assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); + env.close(); + } + public void testStressShardLock() throws IOException, InterruptedException { class Int { int value = 0; @@ -629,6 +680,11 @@ public NodeEnvironment newNodeEnvironment() throws IOException { return newNodeEnvironment(Settings.EMPTY); } + public NodeEnvironment 
newNodeEnvironment(NodeEnvironment.IndexStoreListener listener) throws IOException { + Settings build = buildEnvSettings(Settings.EMPTY); + return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), listener); + } + @Override public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = buildEnvSettings(settings); diff --git a/server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java b/server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java new file mode 100644 index 0000000000000..92669d5bc1d92 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.store.ByteArrayDataOutput; +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.lucene.store.ByteArrayIndexInput; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; + +public class BloomFilterTests extends OpenSearchTestCase { + + public void testBloomFilterSerializationDeserialization() throws IOException { + int elementCount = randomIntBetween(1, 100); + long maxDocs = elementCount * 10L; // Keeping this high so that it ensures some bits are not set. + BloomFilter filter = new BloomFilter(maxDocs, getFpp(), () -> idIterator(elementCount)); + byte[] buffer = new byte[(int) maxDocs * 5]; + ByteArrayDataOutput out = new ByteArrayDataOutput(buffer); + + // Write in the format readable through factory + out.writeString(filter.setType().getSetName()); + filter.writeTo(out); + + FuzzySet reconstructedFilter = FuzzySetFactory.deserializeFuzzySet(new ByteArrayIndexInput("filter", buffer)); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, reconstructedFilter.setType()); + + Iterator<BytesRef> idIterator = idIterator(elementCount); + while (idIterator.hasNext()) { + BytesRef element = idIterator.next(); + assertEquals(FuzzySet.Result.MAYBE, reconstructedFilter.contains(element)); + assertEquals(FuzzySet.Result.MAYBE, filter.contains(element)); + } + } + + public void testBloomFilterIsSaturated_returnsTrue() throws IOException { + BloomFilter bloomFilter = new BloomFilter(1L, getFpp(), () -> idIterator(1000)); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, bloomFilter.setType()); + assertEquals(true, bloomFilter.isSaturated()); + } + + public void testBloomFilterIsSaturated_returnsFalse() throws IOException { + int elementCount = randomIntBetween(1, 100); + BloomFilter bloomFilter = new BloomFilter(20000, getFpp(), () -> idIterator(elementCount)); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, bloomFilter.setType()); + assertEquals(false, bloomFilter.isSaturated()); + } + + public void testBloomFilterWithLargeCapacity() throws IOException { + long maxDocs = randomLongBetween(Integer.MAX_VALUE, 5L * Integer.MAX_VALUE); + BloomFilter bloomFilter = new BloomFilter(maxDocs, getFpp(), () -> List.of(new BytesRef("bar")).iterator()); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, bloomFilter.setType()); + } + + private double getFpp() { + return randomDoubleBetween(0.01, 0.50, true); + } + + private Iterator<BytesRef> idIterator(int count) { + return new Iterator<BytesRef>() { + int cnt = count; + + @Override + public boolean hasNext() 
{ + return cnt-- > 0; + } + + @Override + public BytesRef next() { + return new BytesRef(Integer.toString(cnt)); + } + }; + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java b/server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java new file mode 100644 index 0000000000000..868c2175d0689 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BasePostingsFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; + +import java.util.TreeMap; + +public class FuzzyFilterPostingsFormatTests extends BasePostingsFormatTestCase { + + private TreeMap<String, FuzzySetParameters> params = new TreeMap<>() { + @Override + public FuzzySetParameters get(Object k) { + return new FuzzySetParameters(() -> FuzzySetParameters.DEFAULT_FALSE_POSITIVE_PROBABILITY); + } + }; + + private Codec fuzzyFilterCodec = TestUtil.alwaysPostingsFormat( + new FuzzyFilterPostingsFormat(TestUtil.getDefaultPostingsFormat(), new FuzzySetFactory(params)) + ); + + @Override + protected Codec getCodec() { + return fuzzyFilterCodec; + } +} diff --git a/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java index 1a66037d98d71..0a2435553b19e 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java @@ -32,10 +32,14 @@ package org.opensearch.index.mapper; import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.PointRangeQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; @@ -75,22 +79,41 @@ public void testTermQuery() { MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); String ip = "2001:db8::2:1"; - assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, null)); + + Query query = InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)); + + assertEquals( + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowExactQuery("field", new BytesRef(((PointRangeQuery) query).getLowerPoint())) + ), + ft.termQuery(ip, null) + ); ip = "192.168.1.7"; - assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, null)); + query = InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)); + assertEquals( + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowExactQuery("field", new BytesRef(((PointRangeQuery) query).getLowerPoint())) + ), + ft.termQuery(ip, null) + ); ip = "2001:db8::2:1"; String prefix = ip + "/64"; - 
assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null)); + + query = InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64); + assertEquals(query, ft.termQuery(prefix, null)); ip = "192.168.1.7"; prefix = ip + "/16"; - assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null)); + query = InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16); + assertEquals(query, ft.termQuery(prefix, null)); - MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, null, Collections.emptyMap()); + MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, false, null, Collections.emptyMap()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("::1", null)); - assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); } public void testTermsQuery() { @@ -118,44 +141,123 @@ public void testTermsQuery() { public void testRangeQuery() { MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); - + Query query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddressPoint.MAX_VALUE); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddressPoint.MAX_VALUE), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.2.0")); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.2.0")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true, null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.1.255")); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.1.255")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false, null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddressPoint.MAX_VALUE); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddressPoint.MAX_VALUE), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new 
BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery("2001:db8::", null, true, randomBoolean(), null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddressPoint.MAX_VALUE); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddressPoint.MAX_VALUE), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery("2001:db8::", null, false, randomBoolean(), null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddresses.forString("2001:db8::ffff")); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddresses.forString("2001:db8::ffff")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true, null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddresses.forString("2001:db8::fffe")); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddresses.forString("2001:db8::fffe")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false, null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::2"), InetAddresses.forString("2001:db8::")); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::2"), InetAddresses.forString("2001:db8::")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), // same lo/hi values but inclusive=false so this won't match anything ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false, null, null, null, null) ); @@ -178,30 +280,60 @@ public void testRangeQuery() { ) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("::fffe:ffff:ffff")); assertEquals( - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("::fffe:ffff:ffff")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), // same lo/hi values but inclusive=false so this won't match anything ft.rangeQuery("::", "0.0.0.0", true, false, null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), InetAddressPoint.MAX_VALUE); assertEquals( - 
InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), InetAddressPoint.MAX_VALUE), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), // same lo/hi values but inclusive=false so this won't match anything ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null, null, null, null) ); + query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("192.168.1.7"), InetAddresses.forString("2001:db8::")); assertEquals( // lower bound is ipv4, upper bound is ipv6 - InetAddressPoint.newRangeQuery("field", InetAddresses.forString("192.168.1.7"), InetAddresses.forString("2001:db8::")), + new IndexOrDocValuesQuery( + query, + SortedSetDocValuesField.newSlowRangeQuery( + ((PointRangeQuery) query).getField(), + new BytesRef(((PointRangeQuery) query).getLowerPoint()), + new BytesRef(((PointRangeQuery) query).getUpperPoint()), + true, + true + ) + ), ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null, null, null, null) ); - MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, null, Collections.emptyMap()); + MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, false, null, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> unsearchable.rangeQuery("::1", "2001::", true, true, null, null, null, null) ); - assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); } public void testFetchSourceValue() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java index 62337264bc0b1..29efd64e5c751 100644 --- a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java @@ -34,12 +34,16 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexSettings; @@ -58,6 +62,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import static org.opensearch.index.IndexSettingsTests.newIndexMeta; import static org.opensearch.index.query.InnerHitBuilderTests.randomNestedInnerHits; @@ -431,4 +436,96 @@ public void testSetParentFilterInContext() throws Exception { assertNull(queryShardContext.getParentFilter()); verify(innerQueryBuilder).toQuery(queryShardContext); } + + public void testNestedDepthProhibited() throws Exception { + 
assertThrows(IllegalArgumentException.class, () -> doWithDepth(0, context -> fail("won't call"))); + } + + public void testNestedDepthAllowed() throws Exception { + ThrowingConsumer<QueryShardContext> check = (context) -> { + NestedQueryBuilder queryBuilder = new NestedQueryBuilder("nested1", new MatchAllQueryBuilder(), ScoreMode.None); + OpenSearchToParentBlockJoinQuery blockJoinQuery = (OpenSearchToParentBlockJoinQuery) queryBuilder.toQuery(context); + Optional<BooleanClause> childLeg = ((BooleanQuery) blockJoinQuery.getChildQuery()).clauses() + .stream() + .filter(c -> c.getOccur() == BooleanClause.Occur.MUST) + .findFirst(); + assertTrue(childLeg.isPresent()); + assertEquals(new MatchAllDocsQuery(), childLeg.get().getQuery()); + }; + check.accept(createShardContext()); + doWithDepth(randomIntBetween(1, 20), check); + } + + public void testNestedDepthOnceOnly() throws Exception { + doWithDepth(1, this::checkOnceNested); + } + + public void testNestedDepthDefault() throws Exception { + assertEquals(20, createShardContext().getIndexSettings().getMaxNestedQueryDepth()); + } + + private void checkOnceNested(QueryShardContext ctx) throws Exception { + { + NestedQueryBuilder depth2 = new NestedQueryBuilder( + "nested1", + new NestedQueryBuilder("nested1", new MatchAllQueryBuilder(), ScoreMode.None), + ScoreMode.None + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> depth2.toQuery(ctx)); + assertEquals( + "The depth of Nested Query is [2] has exceeded the allowed maximum of [1]. This maximum can be set by changing the [index.query.max_nested_depth] index level setting.", + e.getMessage() + ); + } + { + QueryBuilder mustBjqMustBjq = new BoolQueryBuilder().must( + new NestedQueryBuilder("nested1", new MatchAllQueryBuilder(), ScoreMode.None) + ).must(new NestedQueryBuilder("nested1", new MatchAllQueryBuilder(), ScoreMode.None)); + BooleanQuery bool = (BooleanQuery) mustBjqMustBjq.toQuery(ctx); + assertEquals( + "Can parse joins one by one without breaching depth limit", + 2, + bool.clauses().stream().filter(c -> c.getQuery() instanceof OpenSearchToParentBlockJoinQuery).count() + ); + } + } + + public void testUpdateMaxDepthSettings() throws Exception { + doWithDepth(2, (ctx) -> { + assertEquals(ctx.getIndexSettings().getMaxNestedQueryDepth(), 2); + NestedQueryBuilder depth2 = new NestedQueryBuilder( + "nested1", + new NestedQueryBuilder("nested1", new MatchAllQueryBuilder(), ScoreMode.None), + ScoreMode.None + ); + Query depth2Query = depth2.toQuery(ctx); + assertTrue(depth2Query instanceof OpenSearchToParentBlockJoinQuery); + }); + } + + void doWithDepth(int depth, ThrowingConsumer<QueryShardContext> test) throws Exception { + QueryShardContext context = createShardContext(); + int defLimit = context.getIndexSettings().getMaxNestedQueryDepth(); + assertTrue(defLimit > 0); + Settings updateSettings = Settings.builder() + .put(context.getIndexSettings().getSettings()) + .put("index.query.max_nested_depth", depth) + .build(); + context.getIndexSettings().updateIndexMetadata(IndexMetadata.builder("index").settings(updateSettings).build()); + try { + test.accept(context); + } finally { + context.getIndexSettings() + .updateIndexMetadata( + IndexMetadata.builder("index") + .settings( + Settings.builder() + .put(context.getIndexSettings().getSettings()) + .put("index.query.max_nested_depth", defLimit) + .build() + ) + .build() + ); + } + } } diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java 
b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
index 811d6a722d0f6..85878cc2e1c9d 100644
--- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
@@ -10,6 +10,7 @@
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.search.ReferenceManager;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FilterDirectory;
 import org.apache.lucene.tests.store.BaseDirectoryWrapper;
@@ -102,6 +103,16 @@ private void indexDocs(int startDocId, int numberOfDocs) throws IOException {
     public void tearDown() throws Exception {
         Directory storeDirectory = ((FilterDirectory) ((FilterDirectory) indexShard.store().directory()).getDelegate()).getDelegate();
         ((BaseDirectoryWrapper) storeDirectory).setCheckIndexOnClose(false);
+
+        for (ReferenceManager.RefreshListener refreshListener : indexShard.getEngine().config().getInternalRefreshListener()) {
+            if (refreshListener instanceof ReleasableRetryableRefreshListener) {
+                ((ReleasableRetryableRefreshListener) refreshListener).drainRefreshes();
+            }
+        }
+        if (remoteStoreRefreshListener != null) {
+            remoteStoreRefreshListener.drainRefreshes();
+        }
+
         closeShards(indexShard);
         super.tearDown();
     }
@@ -335,6 +346,7 @@ public void testRefreshSuccessOnFirstAttempt() throws Exception {
         RemoteStoreStatsTrackerFactory trackerFactory = tuple.v2();
         RemoteSegmentTransferTracker segmentTracker = trackerFactory.getRemoteSegmentTransferTracker(indexShard.shardId());
         assertNoLagAndTotalUploadsFailed(segmentTracker, 0);
+        assertTrue("remote store in sync", tuple.v1().isRemoteSegmentStoreInSync());
     }
 
     public void testRefreshSuccessOnSecondAttempt() throws Exception {
@@ -404,6 +416,20 @@ public void testRefreshSuccessOnThirdAttempt() throws Exception {
         assertNoLagAndTotalUploadsFailed(segmentTracker, 2);
     }
 
+    public void testRefreshPersistentFailure() throws Exception {
+        int succeedOnAttempt = 10;
+        CountDownLatch refreshCountLatch = new CountDownLatch(1);
+        CountDownLatch successLatch = new CountDownLatch(10);
+        Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> tuple = mockIndexShardWithRetryAndScheduleRefresh(
+            succeedOnAttempt,
+            refreshCountLatch,
+            successLatch
+        );
+        // Giving 10ms for some iterations of remote refresh upload
+        Thread.sleep(10);
+        assertFalse("remote store should not be in sync", tuple.v1().isRemoteSegmentStoreInSync());
+    }
+
     private void assertNoLagAndTotalUploadsFailed(RemoteSegmentTransferTracker segmentTracker, long totalUploadsFailed) throws Exception {
         assertBusy(() -> {
             assertEquals(0, segmentTracker.getBytesLag());
@@ -418,13 +444,40 @@ public void testTrackerData() throws Exception {
         RemoteStoreRefreshListener listener = tuple.v1();
         RemoteStoreStatsTrackerFactory trackerFactory = tuple.v2();
         RemoteSegmentTransferTracker tracker = trackerFactory.getRemoteSegmentTransferTracker(indexShard.shardId());
-        assertNoLag(tracker);
+        assertBusy(() -> assertNoLag(tracker));
         indexDocs(100, randomIntBetween(100, 200));
         indexShard.refresh("test");
         listener.afterRefresh(true);
         assertBusy(() -> assertNoLag(tracker));
     }
 
+    /**
+     * Tests that the segment upload fails when the replication checkpoint and the replication tracker disagree on the primary term
+     */
+    public void testRefreshFailedDueToPrimaryTermMismatch() throws Exception {
+        int totalAttempt = 1;
+        int checkpointPublishSucceedOnAttempt = 0;
+        // We spy on IndexShard.isPrimaryStarted() to validate that the remote upload was attempted the expected number of times.
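+        // The mocked shard (see mockIndexShardWithRetryAndScheduleRefresh below) is expected to count refreshCountLatch
+        // down once per refresh attempt, while successLatch should only count down after an upload succeeds.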
+        CountDownLatch refreshCountLatch = new CountDownLatch(totalAttempt);
+
+        // The success latch should not count down, since the latest primary term validation fails before any upload succeeds.
+        CountDownLatch successLatch = new CountDownLatch(1);
+        CountDownLatch reachedCheckpointPublishLatch = new CountDownLatch(0);
+        Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> tuple = mockIndexShardWithRetryAndScheduleRefresh(
+            totalAttempt,
+            refreshCountLatch,
+            successLatch,
+            checkpointPublishSucceedOnAttempt,
+            reachedCheckpointPublishLatch,
+            false
+        );
+
+        assertBusy(() -> assertEquals(1, tuple.v2().getRemoteSegmentTransferTracker(indexShard.shardId()).getTotalUploadsFailed()));
+        assertBusy(() -> assertEquals(0, refreshCountLatch.getCount()));
+        assertBusy(() -> assertEquals(1, successLatch.getCount()));
+        assertBusy(() -> assertEquals(0, reachedCheckpointPublishLatch.getCount()));
+    }
+
     private void assertNoLag(RemoteSegmentTransferTracker tracker) {
         assertEquals(0, tracker.getRefreshSeqNoLag());
         assertEquals(0, tracker.getBytesLag());
@@ -460,6 +513,24 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
         CountDownLatch successLatch,
         int succeedCheckpointPublishOnAttempt,
         CountDownLatch reachedCheckpointPublishLatch
+    ) throws IOException {
+        return mockIndexShardWithRetryAndScheduleRefresh(
+            succeedOnAttempt,
+            refreshCountLatch,
+            successLatch,
+            succeedCheckpointPublishOnAttempt,
+            reachedCheckpointPublishLatch,
+            true
+        );
+    }
+
+    private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIndexShardWithRetryAndScheduleRefresh(
+        int succeedOnAttempt,
+        CountDownLatch refreshCountLatch,
+        CountDownLatch successLatch,
+        int succeedCheckpointPublishOnAttempt,
+        CountDownLatch reachedCheckpointPublishLatch,
+        boolean mockPrimaryTerm
     ) throws IOException {
         // Create index shard that we will be using to mock different methods in IndexShard for the unit test
         indexShard = newStartedShard(
@@ -473,6 +544,14 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
             new InternalEngineFactory()
         );
 
+        RemoteSegmentTransferTracker tracker = indexShard.getRemoteStoreStatsTrackerFactory()
+            .getRemoteSegmentTransferTracker(indexShard.shardId());
+        try {
+            assertBusy(() -> assertTrue(tracker.getTotalUploadsSucceeded() > 0));
+        } catch (Exception e) {
+            assert false;
+        }
+
         indexDocs(1, randomIntBetween(1, 100));
 
         // Mock indexShard.store().directory()
@@ -492,6 +571,9 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
         when(remoteStore.directory()).thenReturn(remoteStoreFilterDirectory);
 
         // Mock indexShard.getOperationPrimaryTerm()
+        if (mockPrimaryTerm) {
+            when(shard.getOperationPrimaryTerm()).thenReturn(indexShard.getOperationPrimaryTerm());
+        }
         when(shard.getLatestReplicationCheckpoint()).thenReturn(indexShard.getLatestReplicationCheckpoint());
 
         // Mock indexShard.routingEntry().primary()
@@ -512,6 +594,7 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
         // Mock indexShard.getSegmentInfosSnapshot()
         doAnswer(invocation -> {
             if (counter.incrementAndGet() <= succeedOnAttempt) {
+                logger.error("Failing getSegmentInfosSnapshot on attempt {}", counter.get());
                 throw new RuntimeException("Inducing failure in upload");
             }
             return indexShard.getSegmentInfosSnapshot();
@@ -527,6 +610,7 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
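+        // The stub below counts successLatch down on each call to the mocked shard's getEngine(), so the
+        // latch count serves as a proxy for how many refresh cycles made it past the upload step.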
         doAnswer(invocation -> {
             if (Objects.nonNull(successLatch)) {
                 successLatch.countDown();
+                logger.info("Value of latch {}", successLatch.getCount());
             }
             return indexShard.getEngine();
         }).when(shard).getEngine();
 
@@ -554,7 +638,6 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
         RecoverySettings recoverySettings = mock(RecoverySettings.class);
         when(recoverySettings.getMinRemoteSegmentMetadataFiles()).thenReturn(10);
         when(shard.getRecoverySettings()).thenReturn(recoverySettings);
-        RemoteSegmentTransferTracker tracker = remoteStoreStatsTrackerFactory.getRemoteSegmentTransferTracker(indexShard.shardId());
         RemoteStoreRefreshListener refreshListener = new RemoteStoreRefreshListener(shard, emptyCheckpointPublisher, tracker);
         refreshListener.afterRefresh(true);
         return Tuple.tuple(refreshListener, remoteStoreStatsTrackerFactory);
@@ -587,6 +670,31 @@ private void verifyUploadedSegments(RemoteSegmentStoreDirectory remoteSegmentSto
             }
         }
     }
+        assertTrue(remoteStoreRefreshListener.isRemoteSegmentStoreInSync());
+    }
+
+    public void testRemoteSegmentStoreNotInSync() throws IOException {
+        setup(true, 3);
+        remoteStoreRefreshListener.afterRefresh(true);
+        try (Store remoteStore = indexShard.remoteStore()) {
+            RemoteSegmentStoreDirectory remoteSegmentStoreDirectory =
+                (RemoteSegmentStoreDirectory) ((FilterDirectory) ((FilterDirectory) remoteStore.directory()).getDelegate()).getDelegate();
+            verifyUploadedSegments(remoteSegmentStoreDirectory);
+            remoteStoreRefreshListener.isRemoteSegmentStoreInSync();
+            boolean oneFileDeleted = false;
+            // Delete any one file from remote store
+            try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) {
+                SegmentInfos segmentInfos = segmentInfosGatedCloseable.get();
+                for (String file : segmentInfos.files(true)) {
+                    if (oneFileDeleted == false && RemoteStoreRefreshListener.EXCLUDE_FILES.contains(file) == false) {
+                        remoteSegmentStoreDirectory.deleteFile(file);
+                        oneFileDeleted = true;
+                        break;
+                    }
+                }
+            }
+            assertFalse(remoteStoreRefreshListener.isRemoteSegmentStoreInSync());
+        }
     }
 }
diff --git a/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java b/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java
index c1a51bb780f61..846b975a9520e 100644
--- a/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java
@@ -270,6 +270,7 @@ public void testStatsDirWrapper() throws IOException {
         IOUtils.close(dir, target);
     }
 
+    @SuppressWarnings("removal")
     public boolean hardLinksSupported(Path path) throws IOException {
         try {
             Files.createFile(path.resolve("foo.bar"));
diff --git a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java
index 2c6c4afed69fd..7944ee681f5fc 100644
--- a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java
+++ b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java
@@ -988,6 +988,53 @@ public void testDeleteStaleCommitsActualDelete() throws Exception {
         verify(remoteMetadataDirectory).deleteFile(metadataFilename3);
     }
 
+    public void testDeleteStaleCommitsActualDeleteWithLocks() throws Exception {
+        Map<String, Map<String, String>> metadataFilenameContentMapping = populateMetadata();
+        remoteSegmentStoreDirectory.init();
+
+        // Locking one of the metadata files to ensure that it is not getting deleted.
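+        // Stale-commit deletion consults the lock manager before deleting: any metadata file reported by
+        // fetchLockedMetadataFiles (for example, one pinned by a snapshot) must survive deleteStaleSegmentsAsync.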
+        when(mdLockManager.fetchLockedMetadataFiles(any())).thenReturn(Set.of(metadataFilename2));
+
+        // populateMetadata() adds a stub to return 3 metadata files
+        // We pass lastNMetadataFilesToKeep=1 here so that the two older metadata files become deletion candidates
+        remoteSegmentStoreDirectory.deleteStaleSegmentsAsync(1);
+
+        for (String metadata : metadataFilenameContentMapping.get(metadataFilename3).values()) {
+            String uploadedFilename = metadata.split(RemoteSegmentStoreDirectory.UploadedSegmentMetadata.SEPARATOR)[1];
+            verify(remoteDataDirectory).deleteFile(uploadedFilename);
+        }
+        assertBusy(() -> assertThat(remoteSegmentStoreDirectory.canDeleteStaleCommits.get(), is(true)));
+        verify(remoteMetadataDirectory).deleteFile(metadataFilename3);
+        verify(remoteMetadataDirectory, times(0)).deleteFile(metadataFilename2);
+    }
+
+    public void testDeleteStaleCommitsNoDeletesDueToLocks() throws Exception {
+        remoteSegmentStoreDirectory.init();
+
+        // Locking all the old metadata files to ensure that none of the segment files are getting deleted.
+        when(mdLockManager.fetchLockedMetadataFiles(any())).thenReturn(Set.of(metadataFilename2, metadataFilename3));
+
+        // populateMetadata() adds a stub to return 3 metadata files
+        // We pass lastNMetadataFilesToKeep=1 here, but both older metadata files are locked, so nothing is deleted
+        remoteSegmentStoreDirectory.deleteStaleSegmentsAsync(1);
+
+        assertBusy(() -> assertThat(remoteSegmentStoreDirectory.canDeleteStaleCommits.get(), is(true)));
+        verify(remoteMetadataDirectory, times(0)).deleteFile(any());
+    }
+
+    public void testDeleteStaleCommitsExceptionWhileFetchingLocks() throws Exception {
+        remoteSegmentStoreDirectory.init();
+
+        // Simulate a failure while fetching the locked metadata files.
+        when(mdLockManager.fetchLockedMetadataFiles(any())).thenThrow(new RuntimeException("Rate limit exceeded"));
+
+        // populateMetadata() adds a stub to return 3 metadata files
+        // Since the locked files cannot be determined, no deletion should be attempted
+        remoteSegmentStoreDirectory.deleteStaleSegmentsAsync(1);
+
+        verify(remoteMetadataDirectory, times(0)).deleteFile(any());
+    }
+
     public void testDeleteStaleCommitsDeleteDedup() throws Exception {
         Map<String, Map<String, String>> metadataFilenameContentMapping = new HashMap<>(populateMetadata());
         metadataFilenameContentMapping.put(metadataFilename4, metadataFilenameContentMapping.get(metadataFilename3));
diff --git a/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java b/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java
index b4eac2c4548d5..299100b65a43e 100644
--- a/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java
+++ b/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java
@@ -17,6 +17,7 @@
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Set;
 
 import junit.framework.TestCase;
 
@@ -96,4 +97,26 @@ public void testIsAcquiredExceptionCase() { // metadata file is not passed durin
         FileLockInfo testLockInfo = FileLockInfo.getLockInfoBuilder().withAcquirerId(testAcquirerId).build();
         assertThrows(IllegalArgumentException.class, () -> remoteStoreMetadataLockManager.isAcquired(testLockInfo));
     }
+
+    public void testFetchLocksEmpty() throws IOException {
+        when(lockDirectory.listFilesByPrefix("metadata")).thenReturn(Set.of());
+        assertEquals(0, 
remoteStoreMetadataLockManager.fetchLockedMetadataFiles("metadata").size()); + } + + public void testFetchLocksNonEmpty() throws IOException { + String metadata1 = "metadata_1_2_3"; + String metadata2 = "metadata_4_5_6"; + when(lockDirectory.listFilesByPrefix("metadata")).thenReturn( + Set.of( + FileLockInfo.LockFileUtils.generateLockName(metadata1, "snapshot1"), + FileLockInfo.LockFileUtils.generateLockName(metadata2, "snapshot2") + ) + ); + assertEquals(Set.of(metadata1, metadata2), remoteStoreMetadataLockManager.fetchLockedMetadataFiles("metadata")); + } + + public void testFetchLocksException() throws IOException { + when(lockDirectory.listFilesByPrefix("metadata")).thenThrow(new IOException("Something went wrong")); + assertThrows(IOException.class, () -> remoteStoreMetadataLockManager.fetchLockedMetadataFiles("metadata")); + } } diff --git a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java index 04434fa52e555..e2a6a4011a6b7 100644 --- a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java @@ -18,7 +18,6 @@ import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.ShardPath; -import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.MatcherAssert; import org.junit.After; @@ -59,7 +58,7 @@ public class FileCacheCleanerTests extends OpenSearchTestCase { @Before public void setUpFileCache() throws IOException { env = newNodeEnvironment(SETTINGS); - cleaner = new FileCacheCleaner(env, fileCache); + cleaner = new FileCacheCleaner(() -> fileCache); files.put(SHARD_0, addFile(fileCache, env, SHARD_0)); files.put(SHARD_1, addFile(fileCache, env, SHARD_1)); MatcherAssert.assertThat(fileCache.size(), equalTo(2L)); @@ -103,12 +102,11 @@ public void testShardRemoved() { final Path cachePath = ShardPath.loadFileCachePath(env, SHARD_0).getDataPath(); assertTrue(Files.exists(cachePath)); - cleaner.beforeIndexShardDeleted(SHARD_0, SETTINGS); + cleaner.beforeShardPathDeleted(SHARD_0, INDEX_SETTINGS, env); MatcherAssert.assertThat(fileCache.size(), equalTo(1L)); assertNull(fileCache.get(files.get(SHARD_0))); assertFalse(Files.exists(files.get(SHARD_0))); assertTrue(Files.exists(files.get(SHARD_1))); - cleaner.afterIndexShardDeleted(SHARD_0, SETTINGS); assertFalse(Files.exists(cachePath)); } @@ -116,15 +114,9 @@ public void testIndexRemoved() { final Path indexCachePath = env.fileCacheNodePath().fileCachePath.resolve(SHARD_0.getIndex().getUUID()); assertTrue(Files.exists(indexCachePath)); - cleaner.beforeIndexShardDeleted(SHARD_0, SETTINGS); - cleaner.afterIndexShardDeleted(SHARD_0, SETTINGS); - cleaner.beforeIndexShardDeleted(SHARD_1, SETTINGS); - cleaner.afterIndexShardDeleted(SHARD_1, SETTINGS); - cleaner.afterIndexRemoved( - SHARD_0.getIndex(), - INDEX_SETTINGS, - IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED - ); + cleaner.beforeShardPathDeleted(SHARD_0, INDEX_SETTINGS, env); + cleaner.beforeShardPathDeleted(SHARD_1, INDEX_SETTINGS, env); + cleaner.beforeIndexPathDeleted(SHARD_0.getIndex(), INDEX_SETTINGS, env); MatcherAssert.assertThat(fileCache.size(), equalTo(0L)); assertFalse(Files.exists(indexCachePath)); } diff --git 
a/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java b/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java index 6bfab278993ed..a83e737dc25c1 100644 --- a/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java @@ -86,7 +86,6 @@ import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; -import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -715,6 +714,7 @@ public void testSimpleOperationsUpload() throws Exception { translog.setMinSeqNoToKeep(0); // This should not trim anything from local translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(2, translog.readers.size()); assertBusy(() -> { assertEquals(4, translog.allUploaded().size()); @@ -728,6 +728,7 @@ public void testSimpleOperationsUpload() throws Exception { // This should not trim tlog-2.* files from remote as we not uploading any more translog to remote translog.setMinSeqNoToKeep(1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); assertBusy(() -> { assertEquals(4, translog.allUploaded().size()); @@ -766,6 +767,7 @@ public void testMetadataFileDeletion() throws Exception { addToTranslogAndListAndUpload(translog, ops, new Translog.Index(String.valueOf(i), i, primaryTerm.get(), new byte[] { 1 })); translog.setMinSeqNoToKeep(i); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); } assertBusy(() -> assertEquals(4, translog.allUploaded().size())); @@ -776,6 +778,7 @@ public void testMetadataFileDeletion() throws Exception { addToTranslogAndListAndUpload(translog, ops, new Translog.Index(String.valueOf(i), i, primaryTerm.get(), new byte[] { 1 })); } translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1 + moreDocs, translog.readers.size()); assertBusy(() -> assertEquals(2 + 2L * moreDocs, translog.allUploaded().size())); assertBusy(() -> assertEquals(1, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR)).size())); @@ -783,6 +786,7 @@ public void testMetadataFileDeletion() throws Exception { int totalDocs = numDocs + moreDocs; translog.setMinSeqNoToKeep(totalDocs - 1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); addToTranslogAndListAndUpload( translog, @@ -791,6 +795,7 @@ public void testMetadataFileDeletion() throws Exception { ); translog.setMinSeqNoToKeep(totalDocs); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertBusy(() -> assertEquals(1, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR)).size())); // Change primary term and test the deletion of older primaries @@ -841,6 +846,7 @@ public void testDrainSync() throws Exception { translog.setMinSeqNoToKeep(0); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, 
translog.readers.size()); // Case 1 - During ongoing uploads, the available permits are 0. @@ -869,6 +875,7 @@ public void testDrainSync() throws Exception { // Case 3 - After drainSync, if trimUnreferencedReaders is attempted, we do not delete from remote store. translog.setMinSeqNoToKeep(1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); assertEquals(6, translog.allUploaded().size()); assertEquals(mdFiles, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR))); @@ -892,6 +899,7 @@ public void testDrainSync() throws Exception { translog.setMinSeqNoToKeep(3); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); assertBusy(() -> assertEquals(4, translog.allUploaded().size())); assertBusy(() -> assertEquals(1, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR)).size())); @@ -1048,7 +1056,7 @@ public void testConcurrentWriteViewsAndSnapshot() throws Throwable { final int threadId = i; writers[i] = new Thread(new AbstractRunnable() { @Override - public void doRun() throws BrokenBarrierException, InterruptedException, IOException { + public void doRun() throws Exception { barrier.await(); int counter = 0; while (run.get() && idGenerator.get() < maxOps) { @@ -1090,6 +1098,7 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep // deletionPolicy.setLocalCheckpointOfSafeCommit(localCheckpoint); translog.setMinSeqNoToKeep(localCheckpoint + 1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); } } if (id % 7 == 0) { diff --git a/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java b/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java index e4dd32e5c6f70..2531790ede4af 100644 --- a/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java @@ -15,7 +15,6 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.action.ActionListener; import org.opensearch.index.shard.IndexShard; @@ -27,12 +26,9 @@ import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.transport.CapturingTransport; import org.opensearch.transport.TransportService; -import org.junit.Assert; import java.util.Arrays; import java.util.Collections; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import static org.mockito.Mockito.mock; @@ -165,40 +161,6 @@ public void testTransportTimeoutForGetSegmentFilesAction() { assertEquals(recoverySettings.internalActionLongTimeout(), capturedRequest.options.timeout()); } - public void testGetSegmentFiles_CancelWhileRequestOpen() throws InterruptedException { - CountDownLatch latch = new CountDownLatch(1); - final ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - StoreFileMetadata testMetadata = new 
StoreFileMetadata("testFile", 1L, "checksum", Version.LATEST); - replicationSource.getSegmentFiles( - REPLICATION_ID, - checkpoint, - Arrays.asList(testMetadata), - mock(IndexShard.class), - (fileName, bytesRecovered) -> {}, - new ActionListener<>() { - @Override - public void onResponse(GetSegmentFilesResponse getSegmentFilesResponse) { - Assert.fail("onFailure response expected."); - } - - @Override - public void onFailure(Exception e) { - assertEquals(e.getClass(), CancellableThreads.ExecutionCancelledException.class); - latch.countDown(); - } - } - ); - replicationSource.cancel(); - latch.await(2, TimeUnit.SECONDS); - assertEquals("listener should have resolved in a failure", 0, latch.getCount()); - } - private DiscoveryNode newDiscoveryNode(String nodeName) { return new DiscoveryNode( nodeName, diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java index f284a425a417b..3c72dda2d8b5d 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java @@ -620,6 +620,7 @@ public void testForceSegmentSyncHandlerWithFailure_AlreadyClosedException_swallo } public void testTargetCancelledBeforeStartInvoked() { + final String cancelReason = "test"; final SegmentReplicationTarget target = new SegmentReplicationTarget( replicaShard, primaryShard.getLatestReplicationCheckpoint(), @@ -633,12 +634,12 @@ public void onReplicationDone(SegmentReplicationState state) { @Override public void onReplicationFailure(SegmentReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { // failures leave state object in last entered stage. 
- assertEquals(SegmentReplicationState.Stage.GET_CHECKPOINT_INFO, state.getStage()); - assertTrue(e.getCause() instanceof CancellableThreads.ExecutionCancelledException); + assertEquals(SegmentReplicationState.Stage.INIT, state.getStage()); + assertEquals(cancelReason, e.getMessage()); } } ); - target.cancel("test"); + target.cancel(cancelReason); sut.startReplication(target); } diff --git a/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java b/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java index b976704e8af57..7f55c9f5cc7f7 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java @@ -32,10 +32,13 @@ package org.opensearch.plugins; +import com.fasterxml.jackson.core.JsonParseException; + import org.opensearch.Version; import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.ByteBufferStreamInput; +import org.opensearch.semver.SemverRange; import org.opensearch.test.OpenSearchTestCase; import java.nio.ByteBuffer; @@ -74,6 +77,33 @@ public void testReadFromProperties() throws Exception { assertEquals("fake desc", info.getDescription()); assertEquals("1.0", info.getVersion()); assertEquals("FakePlugin", info.getClassname()); + assertEquals(Version.CURRENT.toString(), info.getOpenSearchVersionRanges().get(0).toString()); + assertThat(info.getExtendedPlugins(), empty()); + } + + public void testReadFromPropertiesWithSingleOpenSearchRange() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"~" + Version.CURRENT.toString() + "\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + PluginInfo info = PluginInfo.readFromProperties(pluginDir); + assertEquals("my_plugin", info.getName()); + assertEquals("fake desc", info.getDescription()); + assertEquals("1.0", info.getVersion()); + assertEquals("FakePlugin", info.getClassname()); + assertEquals("~" + Version.CURRENT.toString(), info.getOpenSearchVersionRanges().get(0).toString()); assertThat(info.getExtendedPlugins(), empty()); } @@ -102,6 +132,7 @@ public void testReadFromPropertiesWithFolderNameAndVersionAfter() throws Excepti assertEquals("1.0", info.getVersion()); assertEquals("FakePlugin", info.getClassname()); assertEquals("custom-folder", info.getTargetFolderName()); + assertEquals(Version.CURRENT.toString(), info.getOpenSearchVersionRanges().get(0).toString()); assertThat(info.getExtendedPlugins(), empty()); } @@ -130,11 +161,40 @@ public void testReadFromPropertiesVersionMissing() throws Exception { assertThat(e.getMessage(), containsString("[version] is missing")); } - public void testReadFromPropertiesOpenSearchVersionMissing() throws Exception { + public void testReadFromPropertiesOpenSearchVersionAndDependenciesMissing() throws Exception { Path pluginDir = createTempDir().resolve("fake-plugin"); PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc", "name", "my_plugin", "version", "1.0"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("[opensearch.version] is missing")); + assertThat( + e.getMessage(), + 
containsString("Either [opensearch.version] or [dependencies] property must be specified for the plugin ") + ); + } + + public void testReadFromPropertiesWithDependenciesAndOpenSearchVersion() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "opensearch.version", + Version.CURRENT.toString(), + "dependencies", + "{opensearch:" + Version.CURRENT.toString() + "}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat( + e.getMessage(), + containsString("Only one of [opensearch.version] or [dependencies] property can be specified for the plugin") + ); } public void testReadFromPropertiesJavaVersionMissing() throws Exception { @@ -305,7 +365,6 @@ public void testSerialize() throws Exception { ByteBufferStreamInput input = new ByteBufferStreamInput(buffer); PluginInfo info2 = new PluginInfo(input); assertThat(info2.toString(), equalTo(info.toString())); - } public void testPluginListSorted() { @@ -347,4 +406,193 @@ public void testUnknownProperties() throws Exception { assertThat(e.getMessage(), containsString("Unknown properties in plugin descriptor")); } + public void testMultipleDependencies() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"~" + Version.CURRENT.toString() + "\", dependency2:\"1.0.0\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat(e.getMessage(), containsString("Exactly one dependency is allowed to be specified in plugin descriptor properties")); + } + + public void testNonOpenSearchDependency() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{some_dependency:\"~" + Version.CURRENT.toString() + "\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat(e.getMessage(), containsString("Only opensearch is allowed to be specified as a plugin dependency")); + } + + public void testEmptyDependenciesProperty() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat(e.getMessage(), containsString("Exactly one dependency is allowed to be specified in plugin descriptor properties")); + } + + public void testInvalidDependenciesProperty() throws Exception { + Path 
pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{invalid}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + expectThrows(JsonParseException.class, () -> PluginInfo.readFromProperties(pluginDir)); + } + + public void testEmptyOpenSearchVersionInDependencies() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat(e.getMessage(), containsString("Version cannot be empty")); + } + + public void testInvalidOpenSearchVersionInDependencies() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"1.2\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat( + e.getMessage(), + containsString("the version needs to contain major, minor, and revision, and optionally the build: 1.2") + ); + } + + public void testInvalidRangeInDependencies() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"<2.2.0\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + expectThrows(NumberFormatException.class, () -> PluginInfo.readFromProperties(pluginDir)); + } + + public void testMultipleOpenSearchRangesInDependencies() throws Exception { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "dependencies", + "{opensearch:\"~1.2.3, =1.2.3\"}", + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin" + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginInfo.readFromProperties(pluginDir)); + assertThat(e.getMessage(), containsString("Exactly one range is allowed to be specified in dependencies for the plugin")); + } + + public void testMultipleOpenSearchRangesInConstructor() throws Exception { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PluginInfo( + "plugin_name", + "foo", + "dummy", + List.of( + new SemverRange(Version.CURRENT, SemverRange.RangeOperator.EQ), + new SemverRange(Version.CURRENT, SemverRange.RangeOperator.DEFAULT) + ), + "1.8", + "dummyclass", + null, + Collections.emptyList(), + randomBoolean() + ) + ); + assertThat(e.getMessage(), containsString("Exactly one range is allowed to be specified in dependencies for the plugin")); + } } diff --git 
a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index db276678ba4dd..bd9ee33856f14 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -45,8 +45,10 @@ import org.opensearch.env.Environment; import org.opensearch.env.TestEnvironment; import org.opensearch.index.IndexModule; +import org.opensearch.semver.SemverRange; import org.opensearch.test.MockLogAppender; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.VersionUtils; import org.hamcrest.Matchers; import java.io.IOException; @@ -717,6 +719,45 @@ public void testIncompatibleOpenSearchVersion() throws Exception { assertThat(e.getMessage(), containsString("was built for OpenSearch version 6.0.0")); } + public void testCompatibleOpenSearchVersionRange() { + List<SemverRange> pluginCompatibilityRange = List.of(new SemverRange(Version.CURRENT, SemverRange.RangeOperator.TILDE)); + PluginInfo info = new PluginInfo( + "my_plugin", + "desc", + "1.0", + pluginCompatibilityRange, + "1.8", + "FakePlugin", + null, + Collections.emptyList(), + false + ); + PluginsService.verifyCompatibility(info); + } + + public void testIncompatibleOpenSearchVersionRange() { + // Version.CURRENT is behind by one with respect to patch version in the range + List<SemverRange> pluginCompatibilityRange = List.of( + new SemverRange( + VersionUtils.getVersion(Version.CURRENT.major, Version.CURRENT.minor, (byte) (Version.CURRENT.revision + 1)), + SemverRange.RangeOperator.TILDE + ) + ); + PluginInfo info = new PluginInfo( + "my_plugin", + "desc", + "1.0", + pluginCompatibilityRange, + "1.8", + "FakePlugin", + null, + Collections.emptyList(), + false + ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginsService.verifyCompatibility(info)); + assertThat(e.getMessage(), containsString("was built for OpenSearch version ")); + } + public void testIncompatibleJavaVersion() throws Exception { PluginInfo info = new PluginInfo( "my_plugin", @@ -891,7 +932,10 @@ public void testExtensiblePlugin() { TestExtensiblePlugin extensiblePlugin = new TestExtensiblePlugin(); PluginsService.loadExtensions( Collections.singletonList( - Tuple.tuple(new PluginInfo("extensible", null, null, null, null, null, Collections.emptyList(), false), extensiblePlugin) + Tuple.tuple( + new PluginInfo("extensible", null, null, Version.CURRENT, null, null, Collections.emptyList(), false), + extensiblePlugin + ) ) ); @@ -902,9 +946,12 @@ public void testExtensiblePlugin() { TestPlugin testPlugin = new TestPlugin(); PluginsService.loadExtensions( Arrays.asList( - Tuple.tuple(new PluginInfo("extensible", null, null, null, null, null, Collections.emptyList(), false), extensiblePlugin), Tuple.tuple( - new PluginInfo("test", null, null, null, null, null, Collections.singletonList("extensible"), false), + new PluginInfo("extensible", null, null, Version.CURRENT, null, null, Collections.emptyList(), false), + extensiblePlugin + ), + Tuple.tuple( + new PluginInfo("test", null, null, Version.CURRENT, null, null, Collections.singletonList("extensible"), false), testPlugin ) ) @@ -1036,6 +1083,40 @@ public void testThrowingConstructor() { assertThat(e.getCause().getCause(), hasToString(containsString("test constructor failure"))); } + public void testPluginCompatibilityWithSemverRange() { + // Compatible plugin and core versions + 
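// Expected SemverRange semantics, as exercised by the cases below: a bare version or "=" matches only that exact version, while "~" also matches later patch releases within the same major.minor. +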
assertTrue(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("1.0.0"), Version.fromString("1.0.0"))); + + assertTrue(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("=1.0.0"), Version.fromString("1.0.0"))); + + assertTrue(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("~1.0.0"), Version.fromString("1.0.0"))); + + assertTrue(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("~1.0.1"), Version.fromString("1.0.2"))); + + // Incompatible plugin and core versions + assertFalse(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("1.0.0"), Version.fromString("1.0.1"))); + + assertFalse(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("=1.0.0"), Version.fromString("1.0.1"))); + + assertFalse(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("~1.0.1"), Version.fromString("1.0.0"))); + + assertFalse(PluginsService.isPluginVersionCompatible(getPluginInfoWithSemverRange("~1.0.0"), Version.fromString("1.1.0"))); + } + + private PluginInfo getPluginInfoWithSemverRange(String semverRange) { + return new PluginInfo( + "my_plugin", + "desc", + "1.0", + List.of(SemverRange.fromString(semverRange)), + "1.8", + "FakePlugin", + null, + Collections.emptyList(), + false + ); + } + private static class TestExtensiblePlugin extends Plugin implements ExtensiblePlugin { private List<TestExtensionPoint> extensions; diff --git a/server/src/test/java/org/opensearch/rest/RestControllerTests.java b/server/src/test/java/org/opensearch/rest/RestControllerTests.java index 25405afa24c16..b7239e7b59742 100644 --- a/server/src/test/java/org/opensearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/opensearch/rest/RestControllerTests.java @@ -138,6 +138,37 @@ public void teardown() throws IOException { IOUtils.close(client); } + public void testDefaultRestControllerGetAllHandlersContainsFavicon() { + final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService, identityService); + Iterator<MethodHandlers> handlers = restController.getAllHandlers(); + assertTrue(handlers.hasNext()); + MethodHandlers faviconHandler = handlers.next(); + assertEquals(faviconHandler.getPath(), "/favicon.ico"); + assertEquals(faviconHandler.getValidMethods(), Set.of(RestRequest.Method.GET)); + assertFalse(handlers.hasNext()); + } + + public void testRestControllerGetAllHandlers() { + final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService, identityService); + + restController.registerHandler(RestRequest.Method.PATCH, "/foo", mock(RestHandler.class)); + restController.registerHandler(RestRequest.Method.GET, "/foo", mock(RestHandler.class)); + + Iterator<MethodHandlers> handlers = restController.getAllHandlers(); + + assertTrue(handlers.hasNext()); + MethodHandlers rootHandler = handlers.next(); + assertEquals(rootHandler.getPath(), "/foo"); + assertEquals(rootHandler.getValidMethods(), Set.of(RestRequest.Method.GET, RestRequest.Method.PATCH)); + + assertTrue(handlers.hasNext()); + MethodHandlers faviconHandler = handlers.next(); + assertEquals(faviconHandler.getPath(), "/favicon.ico"); + assertEquals(faviconHandler.getValidMethods(), Set.of(RestRequest.Method.GET)); + + assertFalse(handlers.hasNext()); + } + public void testApplyRelevantHeaders() throws Exception { final ThreadContext threadContext = client.threadPool().getThreadContext(); 
Set<RestHeaderDefinition> headers = new HashSet<>( @@ -150,15 +181,15 @@ public void testApplyRelevantHeaders() throws Exception { restHeaders.put("header.3", Collections.singletonList("false")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<MethodHandlers>() { + when(spyRestController.getAllRestMethodHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<RestMethodHandlers>() { @Override public boolean hasNext() { return false; } @Override - public MethodHandlers next() { - return new MethodHandlers("/", (RestRequest request, RestChannel channel, NodeClient client) -> { + public RestMethodHandlers next() { + return new RestMethodHandlers("/", (RestRequest request, RestChannel channel, NodeClient client) -> { assertEquals("true", threadContext.getHeader("header.1")); assertEquals("true", threadContext.getHeader("header.2")); assertNull(threadContext.getHeader("header.3")); diff --git a/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java b/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java index 73f83a5642bb4..fa13ec2036797 100644 --- a/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java @@ -125,7 +125,7 @@ public void testBuildTable() { assertThat(headers.get(6).value, equalTo("ip")); assertThat(headers.get(7).value, equalTo("id")); assertThat(headers.get(8).value, equalTo("node")); - assertThat(headers.get(74).value, equalTo("docs.deleted")); + assertThat(headers.get(78).value, equalTo("docs.deleted")); final List<List<Table.Cell>> rows = table.getRows(); assertThat(rows.size(), equalTo(numShards)); @@ -141,9 +141,9 @@ public void testBuildTable() { assertThat(row.get(4).value, equalTo(shardStats.getStats().getDocs().getCount())); assertThat(row.get(6).value, equalTo(localNode.getHostAddress())); assertThat(row.get(7).value, equalTo(localNode.getId())); - assertThat(row.get(72).value, equalTo(shardStats.getDataPath())); - assertThat(row.get(73).value, equalTo(shardStats.getStatePath())); - assertThat(row.get(74).value, equalTo(shardStats.getStats().getDocs().getDeleted())); + assertThat(row.get(76).value, equalTo(shardStats.getDataPath())); + assertThat(row.get(77).value, equalTo(shardStats.getStatePath())); + assertThat(row.get(78).value, equalTo(shardStats.getStats().getDocs().getDeleted())); } } } diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java index 347011af98c6d..3793249d569f0 100644 --- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java @@ -54,7 +54,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; @@ -80,7 +79,6 @@ import org.opensearch.search.rescore.RescoreContext; import org.opensearch.search.slice.SliceBuilder; import 
org.opensearch.search.sort.SortAndFormats; -import org.opensearch.test.FeatureFlagSetter; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; @@ -554,8 +552,6 @@ protected Engine.Searcher acquireSearcherInternal(String source) { } public void testSearchPathEvaluationUsingSortField() throws Exception { - // enable the concurrent set FeatureFlag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); ShardSearchRequest shardSearchRequest = mock(ShardSearchRequest.class); when(shardSearchRequest.searchType()).thenReturn(SearchType.DEFAULT); ShardId shardId = new ShardId("index", UUID.randomUUID().toString(), 1); diff --git a/server/src/test/java/org/opensearch/search/SearchHitTests.java b/server/src/test/java/org/opensearch/search/SearchHitTests.java index 88d5fb38a6cb1..13b4d9f976ed5 100644 --- a/server/src/test/java/org/opensearch/search/SearchHitTests.java +++ b/server/src/test/java/org/opensearch/search/SearchHitTests.java @@ -56,11 +56,13 @@ import org.opensearch.test.AbstractWireSerializingTestCase; import org.opensearch.test.RandomObjects; import org.opensearch.test.VersionUtils; +import org.junit.Assert; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Predicate; @@ -76,6 +78,25 @@ import static org.hamcrest.Matchers.nullValue; public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> { + + private Map<String, Float> getSampleMatchedQueries() { + Map<String, Float> matchedQueries = new LinkedHashMap<>(); + matchedQueries.put("query1", 1.0f); + matchedQueries.put("query2", 0.5f); + return matchedQueries; + } + + public static SearchHit createTestItemWithMatchedQueriesScores(boolean withOptionalInnerHits, boolean withShardTarget) { + var searchHit = createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget); + int size = randomIntBetween(1, 5); // Ensure at least one matched query + Map<String, Float> matchedQueries = new LinkedHashMap<>(size); + for (int i = 0; i < size; i++) { + matchedQueries.put(randomAlphaOfLength(5), randomFloat()); + } + searchHit.matchedQueriesWithScores(matchedQueries); + return searchHit; + } + public static SearchHit createTestItem(boolean withOptionalInnerHits, boolean withShardTarget) { return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget); } @@ -129,11 +150,11 @@ public static SearchHit createTestItem(final MediaType mediaType, boolean withOp } if (randomBoolean()) { int size = randomIntBetween(0, 5); - String[] matchedQueries = new String[size]; + Map<String, Float> matchedQueries = new LinkedHashMap<>(size); for (int i = 0; i < size; i++) { - matchedQueries[i] = randomAlphaOfLength(5); + matchedQueries.put(randomAlphaOfLength(5), Float.NaN); } - hit.matchedQueries(matchedQueries); + hit.matchedQueriesWithScores(matchedQueries); } if (randomBoolean()) { hit.explanation(createExplanation(randomIntBetween(0, 5))); @@ -219,6 +240,21 @@ public void testFromXContentLenientParsing() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType); } + public void testSerializationDeserializationWithMatchedQueriesScores() throws IOException { + SearchHit searchHit = createTestItemWithMatchedQueriesScores(true, true); + SearchHit deserializedSearchHit = 
copyWriteable(searchHit, getNamedWriteableRegistry(), SearchHit::new, Version.V_3_0_0); + assertEquals(searchHit, deserializedSearchHit); + assertEquals(searchHit.getMatchedQueriesAndScores(), deserializedSearchHit.getMatchedQueriesAndScores()); + } + + public void testSerializationDeserializationWithMatchedQueriesList() throws IOException { + SearchHit searchHit = createTestItem(true, true); + SearchHit deserializedSearchHit = copyWriteable(searchHit, getNamedWriteableRegistry(), SearchHit::new, Version.V_2_12_0); + assertEquals(searchHit, deserializedSearchHit); + assertEquals(searchHit.getMatchedQueriesAndScores(), deserializedSearchHit.getMatchedQueriesAndScores()); + Assert.assertArrayEquals(searchHit.getMatchedQueries(), deserializedSearchHit.getMatchedQueries()); + } + /** * When e.g. with "stored_fields": "_none_", only "_index" and "_score" are returned. */ @@ -244,6 +280,125 @@ public void testToXContent() throws IOException { assertEquals("{\"_id\":\"id1\",\"_score\":1.5}", builder.toString()); } + public void testSerializeShardTargetWithNewVersion() throws Exception { + String clusterAlias = randomBoolean() ? null : "cluster_alias"; + SearchShardTarget target = new SearchShardTarget( + "_node_id", + new ShardId(new Index("_index", "_na_"), 0), + clusterAlias, + OriginalIndices.NONE + ); + + Map<String, SearchHits> innerHits = new HashMap<>(); + SearchHit innerHit1 = new SearchHit(0, "_id", null, null); + innerHit1.shard(target); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null); + innerInnerHit2.shard(target); + innerHits.put("1", new SearchHits(new SearchHit[] { innerInnerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHit1.setInnerHits(innerHits); + SearchHit innerHit2 = new SearchHit(0, "_id", null, null); + innerHit2.shard(target); + SearchHit innerHit3 = new SearchHit(0, "_id", null, null); + innerHit3.shard(target); + + innerHits = new HashMap<>(); + SearchHit hit1 = new SearchHit(0, "_id", null, null); + innerHits.put("1", new SearchHits(new SearchHit[] { innerHit1, innerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHits.put("2", new SearchHits(new SearchHit[] { innerHit3 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + hit1.shard(target); + hit1.setInnerHits(innerHits); + + SearchHit hit2 = new SearchHit(0, "_id", null, null); + hit2.shard(target); + + SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); + + SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, Version.V_3_0_0); + SearchShardTarget deserializedTarget = results.getAt(0).getShard(); + assertThat(deserializedTarget, equalTo(target)); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue()); + for (SearchHit hit : results) { + assertEquals(clusterAlias, hit.getClusterAlias()); + if (hit.getInnerHits() != null) { + for (SearchHits innerhits : hit.getInnerHits().values()) { + for (SearchHit innerHit : innerhits) { + assertEquals(clusterAlias, innerHit.getClusterAlias()); + } + } + } + } + assertThat(results.getAt(1).getShard(), equalTo(target)); + } + + public void 
testSerializeShardTargetWithNewVersionAndMatchedQueries() throws Exception { + String clusterAlias = randomBoolean() ? null : "cluster_alias"; + SearchShardTarget target = new SearchShardTarget( + "_node_id", + new ShardId(new Index("_index", "_na_"), 0), + clusterAlias, + OriginalIndices.NONE + ); + + Map<String, SearchHits> innerHits = new HashMap<>(); + SearchHit innerHit1 = new SearchHit(0, "_id", null, null); + innerHit1.shard(target); + innerHit1.matchedQueriesWithScores(getSampleMatchedQueries()); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null); + innerInnerHit2.shard(target); + innerHits.put("1", new SearchHits(new SearchHit[] { innerInnerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHit1.setInnerHits(innerHits); + SearchHit innerHit2 = new SearchHit(0, "_id", null, null); + innerHit2.shard(target); + innerHit2.matchedQueriesWithScores(getSampleMatchedQueries()); + SearchHit innerHit3 = new SearchHit(0, "_id", null, null); + innerHit3.shard(target); + innerHit3.matchedQueriesWithScores(getSampleMatchedQueries()); + + innerHits = new HashMap<>(); + SearchHit hit1 = new SearchHit(0, "_id", null, null); + innerHits.put("1", new SearchHits(new SearchHit[] { innerHit1, innerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHits.put("2", new SearchHits(new SearchHit[] { innerHit3 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + hit1.shard(target); + hit1.setInnerHits(innerHits); + + SearchHit hit2 = new SearchHit(0, "_id", null, null); + hit2.shard(target); + + SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); + + SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, Version.V_3_0_0); + SearchShardTarget deserializedTarget = results.getAt(0).getShard(); + assertThat(deserializedTarget, equalTo(target)); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue()); + String[] expectedMatchedQueries = new String[] { "query1", "query2" }; + String[] actualMatchedQueries = results.getAt(0).getInnerHits().get("1").getAt(0).getMatchedQueries(); + assertArrayEquals(expectedMatchedQueries, actualMatchedQueries); + + Map<String, Float> expectedMatchedQueriesAndScores = new LinkedHashMap<>(); + expectedMatchedQueriesAndScores.put("query1", 1.0f); + expectedMatchedQueriesAndScores.put("query2", 0.5f); + + Map<String, Float> actualMatchedQueriesAndScores = results.getAt(0).getInnerHits().get("1").getAt(0).getMatchedQueriesAndScores(); + assertEquals(expectedMatchedQueriesAndScores, actualMatchedQueriesAndScores); + for (SearchHit hit : results) { + assertEquals(clusterAlias, hit.getClusterAlias()); + if (hit.getInnerHits() != null) { + for (SearchHits innerhits : hit.getInnerHits().values()) { + for (SearchHit innerHit : innerhits) { + assertEquals(clusterAlias, innerHit.getClusterAlias()); + } + } + } + } + assertThat(results.getAt(1).getShard(), equalTo(target)); + } + public void testSerializeShardTarget() throws Exception { String clusterAlias = randomBoolean() ? 
null : "cluster_alias"; SearchShardTarget target = new SearchShardTarget( diff --git a/server/src/test/java/org/opensearch/search/SearchModuleTests.java b/server/src/test/java/org/opensearch/search/SearchModuleTests.java index 317253be9825f..01b8d6d8cdd72 100644 --- a/server/src/test/java/org/opensearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/opensearch/search/SearchModuleTests.java @@ -113,7 +113,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; public class SearchModuleTests extends OpenSearchTestCase { @@ -431,9 +430,7 @@ public void testDefaultQueryPhaseSearcher() { } public void testConcurrentQueryPhaseSearcher() { - Settings settings = Settings.builder().put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); - FeatureFlags.initializeFeatureFlags(settings); - SearchModule searchModule = new SearchModule(settings, Collections.emptyList()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); TestSearchContext searchContext = new TestSearchContext(null); searchContext.setConcurrentSegmentSearchEnabled(true); QueryPhase queryPhase = searchModule.getQueryPhase(); @@ -443,8 +440,6 @@ public void testConcurrentQueryPhaseSearcher() { } public void testPluginQueryPhaseSearcher() { - Settings settings = Settings.builder().put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); - FeatureFlags.initializeFeatureFlags(settings); QueryPhaseSearcher queryPhaseSearcher = (searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout) -> false; SearchPlugin plugin1 = new SearchPlugin() { @Override @@ -452,7 +447,7 @@ public Optional<QueryPhaseSearcher> getQueryPhaseSearcher() { return Optional.of(queryPhaseSearcher); } }; - SearchModule searchModule = new SearchModule(settings, Collections.singletonList(plugin1)); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.singletonList(plugin1)); QueryPhase queryPhase = searchModule.getQueryPhase(); TestSearchContext searchContext = new TestSearchContext(null); assertEquals(queryPhaseSearcher, queryPhase.getQueryPhaseSearcher()); @@ -480,18 +475,10 @@ public Optional<QueryPhaseSearcher> getQueryPhaseSearcher() { } public void testIndexSearcher() { - SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); ThreadPool threadPool = mock(ThreadPool.class); - assertNull(searchModule.getIndexSearcherExecutor(threadPool)); - verify(threadPool, times(0)).executor(ThreadPool.Names.INDEX_SEARCHER); - - // enable concurrent segment search feature flag - Settings settings = Settings.builder().put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); - FeatureFlags.initializeFeatureFlags(settings); - searchModule = new SearchModule(settings, Collections.emptyList()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); searchModule.getIndexSearcherExecutor(threadPool); verify(threadPool).executor(ThreadPool.Names.INDEX_SEARCHER); - FeatureFlags.initializeFeatureFlags(Settings.EMPTY); } public void testMultiplePluginRegisterIndexSearcherProvider() { diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 7c84078af080e..d502bab5918a8 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ 
b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -57,7 +57,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; @@ -225,11 +224,6 @@ public void onQueryPhase(SearchContext context, long tookInNanos) { } } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings() { return Settings.builder().put("search.default_search_timeout", "5s").build(); @@ -1189,7 +1183,7 @@ public void testCreateSearchContext() throws IOException { public void testConcurrentSegmentSearchSearchContext() throws IOException { Boolean[][] scenarios = { // cluster setting, index setting, concurrent search enabled? - { null, null, true }, + { null, null, false }, { null, false, false }, { null, true, true }, { true, null, true }, diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java index 21d05305eed1b..eef7e4c45849d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -37,8 +37,7 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -52,7 +51,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.is; -public abstract class ShardSizeTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class ShardSizeTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public ShardSizeTestCase(Settings dynamicSettings) { super(dynamicSettings); @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfShards() { // we need at least 2 diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index eabc4b7764eed..13a3d8145743b 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -35,10 +35,14 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import 
org.apache.lucene.search.TermQuery; import org.opensearch.OpenSearchParseException; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.missing.MissingOrder; import org.opensearch.search.aggregations.bucket.terms.StringTerms; @@ -1253,6 +1257,74 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { ); } + public void testDateHistogramSourceWithSize() throws IOException { + final List<Map<String, List<Object>>> dataset = new ArrayList<>( + Arrays.asList( + createDocument("date", asLong("2017-10-20T03:08:45")), + createDocument("date", asLong("2016-09-20T09:00:34")), + createDocument("date", asLong("2016-09-20T11:34:00")), + createDocument("date", asLong("2017-10-20T06:09:24")), + createDocument("date", asLong("2017-10-19T06:09:24")), + createDocument("long", 4L) + ) + ); + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new FieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, + () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") + .calendarInterval(DateHistogramInterval.days(1)); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).size(1); + }, + (result) -> { + assertEquals(1, result.getBuckets().size()); + assertEquals("{date=1474329600000}", result.afterKey().toString()); // 2016-09-20T00:00:00 + assertEquals("{date=1474329600000}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + } + ); + } + + public void testDateHistogramSourceWithDocCountField() throws IOException { + final List<Map<String, List<Object>>> dataset = new ArrayList<>( + Arrays.asList( + createDocument("date", asLong("2017-10-20T03:08:45"), "_doc_count", 5), + createDocument("date", asLong("2016-09-20T09:00:34")), + createDocument("date", asLong("2016-09-20T11:34:00"), "_doc_count", 2), + createDocument("date", asLong("2017-10-20T06:09:24")), + createDocument("date", asLong("2017-10-19T06:09:24"), "_doc_count", 3), + createDocument("long", 4L) + ) + ); + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new FieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, + () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") + .calendarInterval(DateHistogramInterval.days(1)); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)); + }, + (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{date=1508457600000}", result.afterKey().toString()); + assertEquals("{date=1474329600000}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(3L, result.getBuckets().get(0).getDocCount()); + assertEquals("{date=1508371200000}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(3L, result.getBuckets().get(1).getDocCount()); + assertEquals("{date=1508457600000}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(6L, result.getBuckets().get(2).getDocCount()); + } + ); + } + + public void testWithDateHistogram() throws 
IOException { final List<Map<String, List<Object>>> dataset = new ArrayList<>(); dataset.addAll( @@ -1279,7 +1351,7 @@ public void testWithDateHistogram() throws IOException { }, (result) -> { assertEquals(3, result.getBuckets().size()); - assertEquals("{date=1508457600000}", result.afterKey().toString()); + assertEquals("{date=1508457600000}", result.afterKey().toString()); // 2017-10-20T00:00:00 assertEquals("{date=1474329600000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508371200000}", result.getBuckets().get(1).getKeyAsString()); @@ -1300,9 +1372,8 @@ public void testWithDateHistogram() throws IOException { DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)); return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( - createAfterKey("date", 1474329600000L) + createAfterKey("date", 1474329600000L) // 2016-09-20T00:00:00 ); - }, (result) -> { assertEquals(2, result.getBuckets().size()); @@ -2242,21 +2313,20 @@ private <T extends Comparable<T>, V extends Comparable<T>> void testRandomTerms( Function<Object, V> transformKey ) throws IOException { int numTerms = randomIntBetween(10, 500); - List<T> terms = new ArrayList<>(); + List<T> terms = new ArrayList<>(); // possible values for the terms for (int i = 0; i < numTerms; i++) { terms.add(randomSupplier.get()); } int numDocs = randomIntBetween(100, 200); List<Map<String, List<Object>>> dataset = new ArrayList<>(); - - Set<T> valuesSet = new HashSet<>(); - Map<Comparable<?>, AtomicLong> expectedDocCounts = new HashMap<>(); + Set<T> valuesSet = new HashSet<>(); // how many different values + Map<Comparable<?>, AtomicLong> expectedDocCounts = new HashMap<>(); // how many docs for each value for (int i = 0; i < numDocs; i++) { int numValues = randomIntBetween(1, 5); Set<Object> values = new HashSet<>(); for (int j = 0; j < numValues; j++) { int rand = randomIntBetween(0, terms.size() - 1); - if (values.add(terms.get(rand))) { + if (values.add(terms.get(rand))) { // values are unique for one doc AtomicLong count = expectedDocCounts.computeIfAbsent(terms.get(rand), (k) -> new AtomicLong(0)); count.incrementAndGet(); valuesSet.add(terms.get(rand)); @@ -2264,9 +2334,8 @@ private <T extends Comparable<T>, V extends Comparable<T>> void testRandomTerms( } dataset.add(Collections.singletonMap(field, new ArrayList<>(values))); } - List<T> expected = new ArrayList<>(valuesSet); + List<T> expected = new ArrayList<>(valuesSet); // how many buckets expected Collections.sort(expected); - List<Comparable<T>> seen = new ArrayList<>(); AtomicBoolean finish = new AtomicBoolean(false); int size = randomIntBetween(1, expected.size()); @@ -2463,4 +2532,41 @@ public void testIndexSortWithDuplicate() throws Exception { ); } } + + public void testUnderFilterAggregator() throws IOException { + executeTestCase(false, false, new MatchAllDocsQuery(), Collections.emptyList(), () -> { + FilterAggregationBuilder filterAggregatorBuilder = new FilterAggregationBuilder( + "filter_mcmilterface", + new MatchAllQueryBuilder() + ); + filterAggregatorBuilder.subAggregation( + new CompositeAggregationBuilder( + "compo", + Collections.singletonList(new TermsValuesSourceBuilder("keyword").field("keyword")) + ) + ); + return filterAggregatorBuilder; + }, (ic) -> {}); + } + + public void testUnderBucketAggregator() throws IOException { + try { + executeTestCase(false, 
false, new MatchAllDocsQuery(), Collections.emptyList(), () -> { + TermsAggregationBuilder termsAggregationBuilder = AggregationBuilders.terms("terms").field("keyword"); + termsAggregationBuilder.subAggregation( + new CompositeAggregationBuilder( + "compo", + Collections.singletonList(new TermsValuesSourceBuilder("keyword").field("keyword")) + ) + ); + return termsAggregationBuilder; + }, (ic) -> {}); + fail("Should have thrown an IllegalArgumentException"); + } catch (IllegalArgumentException iae) { + assertTrue( + iae.getMessage() + .contains("[composite] aggregation cannot be used with a parent aggregation of type: [TermsAggregatorFactory]") + ); + } + } } diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index bca6623e66104..2a4fbca7a8541 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -45,6 +46,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.opensearch.common.time.DateFormatters; import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.DocCountFieldMapper; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.terms.StringTerms; @@ -1178,6 +1180,181 @@ public void testOverlappingBounds() { ); } + public void testHardBoundsNotOverlapping() throws IOException { + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .hardBounds(new LongBounds("2018-01-01", "2020-01-01")) + .field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(0, buckets.size()); + }, + false + ); + + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .hardBounds(new LongBounds("2016-01-01", "2017-01-01")) + .field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(0, buckets.size()); + }, + false + ); + + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .hardBounds(new LongBounds("2016-01-01", "2017-02-03")) + .field(AGGREGABLE_DATE), + histogram -> { + List<? 
extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(2, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + }, + false + ); + + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .hardBounds(new LongBounds("2017-02-03", "2020-01-01")) + .field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-04T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }, + false + ); + } + + public void testFilterRewriteOptimizationWithRangeQuery() throws IOException { + testSearchCase( + LongPoint.newRangeQuery(AGGREGABLE_DATE, asLong("2018-01-01"), asLong("2020-01-01")), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(0, buckets.size()); + }, + 10000, + false, + false, + true // force AGGREGABLE_DATE field to be searchable to test the filter rewrite optimization path + ); + + testSearchCase( + LongPoint.newRangeQuery(AGGREGABLE_DATE, asLong("2016-01-01"), asLong("2017-01-01")), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(0, buckets.size()); + }, + 10000, + false, + false, + true + ); + + testSearchCase( + LongPoint.newRangeQuery(AGGREGABLE_DATE, asLong("2016-01-01"), asLong("2017-02-02")), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE), + histogram -> { + List<? 
extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(2, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + }, + 10000, + false, + false, + true + ); + + testSearchCase( + LongPoint.newRangeQuery(AGGREGABLE_DATE, asLong("2017-02-03"), asLong("2020-01-01")), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-04T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }, + 10000, + false, + false, + true + ); + } + + public void testDocCountField() throws IOException { + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE), + histogram -> { + List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); + assertEquals(2, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(5, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + }, + 10000, + false, + true + ); + } + public void testIllegalInterval() throws IOException { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -1211,13 +1388,42 @@ private void testSearchCase( int maxBucket, boolean useNanosecondResolution ) throws IOException { - boolean aggregableDateIsSearchable = randomBoolean(); + testSearchCase(query, dataset, configure, verify, maxBucket, useNanosecondResolution, false); + } + + private void testSearchCase( + Query query, + List<String> dataset, + Consumer<DateHistogramAggregationBuilder> configure, + Consumer<InternalDateHistogram> verify, + int maxBucket, + boolean useNanosecondResolution, + boolean useDocCountField + ) throws IOException { + testSearchCase(query, dataset, configure, verify, maxBucket, useNanosecondResolution, useDocCountField, randomBoolean()); + } + + private void testSearchCase( + Query query, + List<String> dataset, + Consumer<DateHistogramAggregationBuilder> configure, + Consumer<InternalDateHistogram> verify, + int maxBucket, + boolean useNanosecondResolution, + boolean useDocCountField, + boolean aggregableDateIsSearchable + ) throws IOException { + logger.debug("Aggregable date is searchable {}", aggregableDateIsSearchable); DateFieldMapper.DateFieldType fieldType = aggregableDateFieldType(useNanosecondResolution, aggregableDateIsSearchable); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new 
Document(); + if (useDocCountField) { + // add the doc count field to the first document + document.add(new NumericDocValuesField(DocCountFieldMapper.NAME, 5)); + } for (String date : dataset) { long instant = asLong(date, fieldType); document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant)); diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java index e02c00005df9b..94cb4c7955a21 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -39,7 +39,6 @@ import org.opensearch.common.document.DocumentField; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -48,7 +47,7 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.geo.RandomGeoGenerator; import java.util.ArrayList; @@ -65,7 +64,7 @@ import static org.hamcrest.Matchers.equalTo; @OpenSearchIntegTestCase.SuiteScopeTestCase -public abstract class AbstractGeoTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class AbstractGeoTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value"; protected static final String MULTI_VALUED_FIELD_NAME = "geo_values"; @@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex(UNMAPPED_IDX_NAME); diff --git a/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java b/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java index ca4b7dc49f6f0..55c50b8cf854d 100644 --- a/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java +++ b/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java @@ -39,9 +39,14 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.opensearch.Version; @@ -49,6 +54,7 @@ import org.opensearch.common.lucene.search.function.ScriptScoreQuery; import org.opensearch.script.ScoreScript; import org.opensearch.script.Script; +import org.opensearch.script.ScriptType; import 
org.opensearch.search.lookup.LeafSearchLookup; import org.opensearch.search.lookup.SearchLookup; import org.opensearch.test.OpenSearchTestCase; @@ -56,6 +62,8 @@ import org.junit.Before; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import java.util.function.Function; import static org.hamcrest.CoreMatchers.containsString; @@ -177,6 +185,37 @@ public void testScriptScoreErrorOnNegativeScore() { assertTrue(e.getMessage().contains("Must be a non-negative score!")); } + public void testTwoPhaseIteratorDelegation() throws IOException { + Map<String, Object> params = new HashMap<>(); + String scriptSource = "doc['field'].value != null ? 2.0 : 0.0"; // Adjust based on actual field and logic + Script script = new Script(ScriptType.INLINE, "painless", scriptSource, params); + float minScore = 1.0f; // This should be below the score produced by the script for all docs + ScoreScript.LeafFactory factory = newFactory(script, false, explanation -> 2.0); + + Query subQuery = new MatchAllDocsQuery(); + ScriptScoreQuery scriptScoreQuery = new ScriptScoreQuery(subQuery, script, factory, minScore, "index", 0, Version.CURRENT); + + Weight weight = searcher.createWeight(searcher.rewrite(scriptScoreQuery), ScoreMode.COMPLETE, 1f); + + boolean foundMatchingDoc = false; + for (LeafReaderContext leafContext : searcher.getIndexReader().leaves()) { + Scorer scorer = weight.scorer(leafContext); + if (scorer != null) { + TwoPhaseIterator twoPhaseIterator = scorer.twoPhaseIterator(); + assertNotNull("TwoPhaseIterator should not be null", twoPhaseIterator); + DocIdSetIterator docIdSetIterator = twoPhaseIterator.approximation(); + int docId; + while ((docId = docIdSetIterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + if (twoPhaseIterator.matches()) { + foundMatchingDoc = true; + break; + } + } + } + } + assertTrue("Expected to find at least one matching document", foundMatchingDoc); + } + private ScoreScript.LeafFactory newFactory( Script script, boolean needsScore, @@ -203,5 +242,4 @@ public double execute(ExplanationHolder explanation) { } }; } - } diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 9bb1f51c51cf6..7c50e961853b5 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -188,7 +188,6 @@ import org.opensearch.index.shard.PrimaryReplicaSyncer; import org.opensearch.index.store.RemoteSegmentStoreDirectoryFactory; import org.opensearch.index.store.remote.filecache.FileCache; -import org.opensearch.index.store.remote.filecache.FileCacheCleaner; import org.opensearch.index.store.remote.filecache.FileCacheStats; import org.opensearch.indices.IndicesModule; import org.opensearch.indices.IndicesService; @@ -2037,7 +2036,6 @@ public void onFailure(final Exception e) { final MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); final SetOnce<RepositoriesService> repositoriesServiceReference = new SetOnce<>(); repositoriesServiceReference.set(repositoriesService); - FileCacheCleaner fileCacheCleaner = new FileCacheCleaner(nodeEnv, null); indicesService = new IndicesService( settings, mock(PluginsService.class), @@ -2072,7 +2070,6 @@ public void onFailure(final Exception e) { emptyMap(), new RemoteSegmentStoreDirectoryFactory(() -> repositoriesService, threadPool), repositoriesServiceReference::get, - 
fileCacheCleaner, null, new RemoteStoreStatsTrackerFactory(clusterService, settings), DefaultRecoverySettings.INSTANCE diff --git a/server/src/test/java/org/opensearch/telemetry/TelemetrySettingsTests.java b/server/src/test/java/org/opensearch/telemetry/TelemetrySettingsTests.java new file mode 100644 index 0000000000000..4c96f79b30d55 --- /dev/null +++ b/server/src/test/java/org/opensearch/telemetry/TelemetrySettingsTests.java @@ -0,0 +1,64 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.telemetry; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Set; + +import static org.opensearch.telemetry.TelemetrySettings.TRACER_ENABLED_SETTING; +import static org.opensearch.telemetry.TelemetrySettings.TRACER_SAMPLER_PROBABILITY; + +public class TelemetrySettingsTests extends OpenSearchTestCase { + + public void testSetTracingEnabledOrDisabled() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + TelemetrySettings telemetrySettings = new TelemetrySettings(Settings.EMPTY, clusterSettings); + + // Validate enabling tracing + telemetrySettings.setTracingEnabled(true); + assertTrue(telemetrySettings.isTracingEnabled()); + + // Validate disabling tracing + telemetrySettings.setTracingEnabled(false); + assertFalse(telemetrySettings.isTracingEnabled()); + } + + public void testSetSamplingProbability() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + TelemetrySettings telemetrySettings = new TelemetrySettings(Settings.EMPTY, clusterSettings); + + // Validate the default sampling probability, i.e. 1% + assertEquals(0.01, telemetrySettings.getSamplingProbability(), 0.00d); + + // Validate overriding the sampling probability to 100% + telemetrySettings.setSamplingProbability(1.00); + assertEquals(1.00, telemetrySettings.getSamplingProbability(), 0.00d); + + // Validate overriding the sampling probability to 50% + telemetrySettings.setSamplingProbability(0.50); + assertEquals(0.50, telemetrySettings.getSamplingProbability(), 0.00d); + } + + public void testGetSamplingProbability() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Set.of(TRACER_SAMPLER_PROBABILITY, TRACER_ENABLED_SETTING)); + TelemetrySettings telemetrySettings = new TelemetrySettings(Settings.EMPTY, clusterSettings); + + // Validate that the default sampling probability is 1% + assertEquals(0.01, telemetrySettings.getSamplingProbability(), 0.00d); + + clusterSettings.applySettings(Settings.builder().put("telemetry.tracer.sampler.probability", "0.02").build()); + + // Validate that the sampling probability is updated to 2% + assertEquals(0.02, telemetrySettings.getSamplingProbability(), 0.00d); + } + +} diff --git a/server/src/test/java/org/opensearch/telemetry/tracing/SpanBuilderTests.java b/server/src/test/java/org/opensearch/telemetry/tracing/SpanBuilderTests.java index b4183412cdf02..75fc6761a60ef 100644 --- a/server/src/test/java/org/opensearch/telemetry/tracing/SpanBuilderTests.java +++ b/server/src/test/java/org/opensearch/telemetry/tracing/SpanBuilderTests.java @@ -8,6 +8,8 @@ package org.opensearch.telemetry.tracing; +import 
com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.opensearch.Version; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.network.NetworkAddress; @@ -27,29 +29,64 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; public class SpanBuilderTests extends OpenSearchTestCase { + public String uri; + + public String expectedSpanName; + + public String expectedQueryParams; + + public String expectedReqRawPath; + + @ParametersFactory + public static Collection<Object[]> data() { + return Arrays.asList( + new Object[][] { + { "/_test/resource?name=John&age=25", "GET /_test/resource", "name=John&age=25", "/_test/resource" }, + { "/_test/", "GET /_test/", "", "/_test/" }, } + ); + } + + public SpanBuilderTests(String uri, String expectedSpanName, String expectedQueryParams, String expectedReqRawPath) { + this.uri = uri; + this.expectedSpanName = expectedSpanName; + this.expectedQueryParams = expectedQueryParams; + this.expectedReqRawPath = expectedReqRawPath; + } + public void testHttpRequestContext() { - HttpRequest httpRequest = createHttpRequest(); + HttpRequest httpRequest = createHttpRequest(uri); SpanCreationContext context = SpanBuilder.from(httpRequest); Attributes attributes = context.getAttributes(); - assertEquals("GET /_test", context.getSpanName()); + assertEquals(expectedSpanName, context.getSpanName()); assertEquals("true", attributes.getAttributesMap().get(AttributeNames.TRACE)); assertEquals("GET", attributes.getAttributesMap().get(AttributeNames.HTTP_METHOD)); assertEquals("HTTP_1_0", attributes.getAttributesMap().get(AttributeNames.HTTP_PROTOCOL_VERSION)); - assertEquals("/_test", attributes.getAttributesMap().get(AttributeNames.HTTP_URI)); + assertEquals(uri, attributes.getAttributesMap().get(AttributeNames.HTTP_URI)); + if (expectedQueryParams.isBlank()) { + assertNull(attributes.getAttributesMap().get(AttributeNames.HTTP_REQ_QUERY_PARAMS)); + } else { + assertEquals(expectedQueryParams, attributes.getAttributesMap().get(AttributeNames.HTTP_REQ_QUERY_PARAMS)); + } } public void testRestRequestContext() { - RestRequest restRequest = RestRequest.request(null, createHttpRequest(), null); + RestRequest restRequest = RestRequest.request(null, createHttpRequest(uri), null); SpanCreationContext context = SpanBuilder.from(restRequest); Attributes attributes = context.getAttributes(); - assertEquals("GET /_test", context.getSpanName()); - assertEquals("/_test", attributes.getAttributesMap().get(AttributeNames.REST_REQ_RAW_PATH)); + assertEquals(expectedSpanName, context.getSpanName()); + assertEquals(expectedReqRawPath, attributes.getAttributesMap().get(AttributeNames.REST_REQ_RAW_PATH)); assertNotNull(attributes.getAttributesMap().get(AttributeNames.REST_REQ_ID)); + if (expectedQueryParams.isBlank()) { + assertNull(attributes.getAttributesMap().get(AttributeNames.HTTP_REQ_QUERY_PARAMS)); + } else { + assertEquals(expectedQueryParams, attributes.getAttributesMap().get(AttributeNames.HTTP_REQ_QUERY_PARAMS)); + } } public void testRestRequestContextForNull() { @@ -97,7 +134,7 @@ public void close() { }; } - private static HttpRequest createHttpRequest() { + private static HttpRequest createHttpRequest(String uri) { return new HttpRequest() { @Override public RestRequest.Method method() { @@ -106,7 +143,7 @@ public RestRequest.Method method() { @Override public String uri() { - return "/_test"; + return uri; } @Override diff --git 
a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json index 4a4fc7d2c81b1..1ed56fa6dab4d 100644 --- a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json +++ b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json @@ -20,4 +20,4 @@ } } } -} \ No newline at end of file +} diff --git a/settings.gradle b/settings.gradle index 24ab4a7a22237..8fbf32504215b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,7 +10,7 @@ */ plugins { - id "com.gradle.enterprise" version "3.16.1" + id "com.gradle.enterprise" version "3.16.2" } ext.disableBuildCache = hasProperty('DISABLE_BUILD_CACHE') || System.getenv().containsKey('DISABLE_BUILD_CACHE') diff --git a/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java b/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java index 90f4f1ba2ceb2..5cb5cdd18f6cc 100644 --- a/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java +++ b/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.search.aggregations.bucket.filter.InternalFilter; import org.opensearch.search.aggregations.metrics.InternalMax; @@ -64,11 +63,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(DelayedShardAggregationPlugin.class); diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 7adf29792f27d..3690fbeeae1cc 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -51,6 +51,7 @@ dependencies { exclude module: "logback-core" exclude module: "logback-classic" exclude module: "avro" + exclude group: 'org.apache.kerby' } api "org.codehaus.jettison:jettison:${versions.jettison}" api "org.apache.commons:commons-compress:${versions.commonscompress}" @@ -72,6 +73,7 @@ dependencies { api "commons-net:commons-net:3.10.0" api "ch.qos.logback:logback-core:1.2.13" api "ch.qos.logback:logback-classic:1.2.13" + api 'org.apache.kerby:kerb-admin:2.0.3' runtimeOnly "com.google.guava:guava:${versions.guava}" runtimeOnly("com.squareup.okhttp3:okhttp:4.12.0") { exclude group: "com.squareup.okio" diff --git a/test/fixtures/minio-fixture/docker-compose.yml b/test/fixtures/minio-fixture/docker-compose.yml index e4d2faab9a657..539ca9471fa04 100644 --- a/test/fixtures/minio-fixture/docker-compose.yml +++ b/test/fixtures/minio-fixture/docker-compose.yml @@ -1,4 +1,4 @@ -version: '3' +version: '3.2' services: minio-fixture: build: diff --git a/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java index 43881d0660e04..933385dedcf49 100644 --- a/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java @@ -84,6 +84,7 @@ * The idea is to mimic as much as possible what happens with ES in production * mode (e.g. assign permissions and install security manager the same way) */ +@SuppressWarnings("removal") public class BootstrapForTesting { // TODO: can we share more code with the non-test side here diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 412d5235fe462..bf1c4d4c94e04 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -785,10 +785,10 @@ protected Store createRemoteStore(Path path, ShardRouting shardRouting, IndexMet protected RemoteSegmentStoreDirectory createRemoteSegmentStoreDirectory(ShardId shardId, Path path) throws IOException { NodeEnvironment.NodePath remoteNodePath = new NodeEnvironment.NodePath(path); ShardPath remoteShardPath = new ShardPath(false, remoteNodePath.resolve(shardId), remoteNodePath.resolve(shardId), shardId); - RemoteDirectory dataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex()); - RemoteDirectory metadataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex()); + RemoteDirectory dataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex().resolve("data")); + RemoteDirectory metadataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex().resolve("metadata")); RemoteStoreLockManager remoteStoreLockManager = new RemoteStoreMetadataLockManager( - new RemoteBufferedOutputDirectory(getBlobContainer(remoteShardPath.resolveIndex())) + new RemoteBufferedOutputDirectory(getBlobContainer(remoteShardPath.resolveIndex().resolve("lock_files"))) ); return new RemoteSegmentStoreDirectory(dataDirectory, metadataDirectory, remoteStoreLockManager, 
threadPool, shardId); } diff --git a/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java b/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java index 0f5e043ee1135..cc2d26a598757 100644 --- a/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java +++ b/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java @@ -31,6 +31,7 @@ * Mockito plugin which wraps the Mockito calls into priviledged execution blocks and respects * SecurityManager presence. */ +@SuppressWarnings("removal") @SuppressForbidden(reason = "allow URL#getFile() to be used in tests") public class PriviledgedMockMaker implements MockMaker { private static AccessControlContext context; diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java index 8e94f2cacf070..0b44fe447d6f8 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java @@ -36,11 +36,10 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -49,7 +48,7 @@ import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -public abstract class AbstractTermsTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class AbstractTermsTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public AbstractTermsTestCase(Settings dynamicSettings) { super(dynamicSettings); @@ -63,11 +62,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(ExecutionMode.values()).toString(); } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java index 6b5ec838f401d..466e4d1bf1742 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java @@ -14,6 +14,7 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StringField; @@ -40,13 +41,16 @@ import org.opensearch.core.index.Index; import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.DocCountFieldMapper; import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.IpFieldMapper; import org.opensearch.index.mapper.KeywordFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.NumberFieldMapper; +import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregatorTestCase; +import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.opensearch.search.aggregations.bucket.composite.InternalComposite; @@ -139,12 +143,16 @@ protected void executeTestCase( boolean useIndexSort, Query query, List<Map<String, List<Object>>> dataset, - Supplier<CompositeAggregationBuilder> create, + Supplier<? extends AggregationBuilder> create, Consumer<InternalComposite> verify ) throws IOException { Map<String, MappedFieldType> types = FIELD_TYPES.stream().collect(Collectors.toMap(MappedFieldType::name, Function.identity())); - CompositeAggregationBuilder aggregationBuilder = create.get(); - Sort indexSort = useIndexSort ? 
buildIndexSort(aggregationBuilder.sources(), types) : null; + AggregationBuilder aggregationBuilder = create.get(); + Sort indexSort = null; + if (aggregationBuilder instanceof CompositeAggregationBuilder && useIndexSort) { + CompositeAggregationBuilder cab = (CompositeAggregationBuilder) aggregationBuilder; + indexSort = buildIndexSort(cab.sources(), types); + } IndexSettings indexSettings = createIndexSettings(indexSort); try (Directory directory = newDirectory()) { IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())); @@ -180,14 +188,16 @@ protected void executeTestCase( } try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = new IndexSearcher(indexReader); - InternalComposite composite = searchAndReduce( + InternalAggregation aggregation = searchAndReduce( indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES.toArray(new MappedFieldType[0]) ); - verify.accept(composite); + if (aggregation instanceof InternalComposite) { + verify.accept((InternalComposite) aggregation); + } } } } @@ -196,6 +206,12 @@ protected void addToDocument(int id, Document doc, Map<String, List<Object>> key doc.add(new StringField("id", Integer.toString(id), Field.Store.NO)); for (Map.Entry<String, List<Object>> entry : keys.entrySet()) { final String name = entry.getKey(); + if (name.equals(DocCountFieldMapper.NAME)) { + doc.add(new IntPoint(name, (int) entry.getValue().get(0))); + // doc count field should be DocValuesType.NUMERIC + doc.add(new NumericDocValuesField(name, (int) entry.getValue().get(0))); + continue; + } for (Object value : entry.getValue()) { if (value instanceof Integer) { doc.add(new SortedNumericDocValuesField(name, (int) value)); diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java index 103b67e2782de..8c2cefa89c860 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java @@ -35,9 +35,8 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -48,11 +47,11 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; @OpenSearchIntegTestCase.SuiteScopeTestCase -public abstract class AbstractNumericTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class AbstractNumericTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { protected static long minValue, maxValue, minValues, maxValues; - public AbstractNumericTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public AbstractNumericTestCase(Settings staticSettings) { + super(staticSettings); } @ParametersFactory @@ -63,11 +62,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void 
setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java b/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java index eddcf9c738bb3..f698cd03c464f 100644 --- a/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java +++ b/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java @@ -46,6 +46,7 @@ public static synchronized void clear() { private static final Logger LOGGER = LogManager.getLogger(FeatureFlagSetter.class); private final Set<String> flags = ConcurrentCollections.newConcurrentSet(); + @SuppressWarnings("removal") @SuppressForbidden(reason = "Enables setting of feature flags") private void setFlag(String flag) { flags.add(flag); @@ -53,6 +54,7 @@ private void setFlag(String flag) { LOGGER.info("set feature_flag={}", flag); } + @SuppressWarnings("removal") @SuppressForbidden(reason = "Clears the set feature flags") private void clearAll() { for (String flag : flags) { diff --git a/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java b/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java index 328aaf8a65b1f..59eda7a665d4c 100644 --- a/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java @@ -35,6 +35,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Property; import org.apache.logging.log4j.core.filter.RegexFilter; import org.opensearch.common.logging.Loggers; import org.opensearch.common.regex.Regex; @@ -68,11 +69,19 @@ public class MockLogAppender extends AbstractAppender implements AutoCloseable { * write to a closed MockLogAppender instance. */ public static MockLogAppender createForLoggers(Logger... loggers) throws IllegalAccessException { - return createForLoggers(".*(\n.*)*", loggers); + final String callingClass = Thread.currentThread().getStackTrace()[2].getClassName(); + return createForLoggersInternal(callingClass, ".*(\n.*)*", loggers); } public static MockLogAppender createForLoggers(String filter, Logger... loggers) throws IllegalAccessException { + final String callingClass = Thread.currentThread().getStackTrace()[2].getClassName(); + return createForLoggersInternal(callingClass, filter, loggers); + } + + private static MockLogAppender createForLoggersInternal(String callingClass, String filter, Logger... loggers) + throws IllegalAccessException { final MockLogAppender appender = new MockLogAppender( + callingClass + "-mock-log-appender", RegexFilter.createFilter(filter, new String[0], false, null, null), Collections.unmodifiableList(Arrays.asList(loggers)) ); @@ -83,8 +92,8 @@ public static MockLogAppender createForLoggers(String filter, Logger... loggers) return appender; } - private MockLogAppender(RegexFilter filter, List<Logger> loggers) { - super("mock", filter, null); + private MockLogAppender(String name, RegexFilter filter, List<Logger> loggers) { + super(name, filter, null, true, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. 
When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a @@ -116,7 +125,14 @@ public void close() { for (Logger logger : loggers) { Loggers.removeAppender(logger, this); } - this.stop(); + super.stop(); + } + + @Override + public void stop() { + // MockLogAppender should be used with try-with-resources to ensure + // proper clean up ordering and should never be stopped directly. + throw new UnsupportedOperationException("Use close() to ensure proper clean up ordering"); } public interface LoggingExpectation { diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 6215e84f42676..33d5669d33297 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -32,13 +32,12 @@ package org.opensearch.test; -import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.hc.core5.http.HttpHost; import org.apache.lucene.codecs.Codec; +import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TotalHits; import org.apache.lucene.tests.util.LuceneTestCase; @@ -48,7 +47,6 @@ import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.node.hotthreads.NodeHotThreads; -import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.admin.cluster.state.ClusterStateResponse; @@ -77,7 +75,6 @@ import org.opensearch.client.ClusterAdminClient; import org.opensearch.client.Requests; import org.opensearch.client.RestClient; -import org.opensearch.client.RestClientBuilder; import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.coordination.OpenSearchNodeCommand; @@ -96,7 +93,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.Priority; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.network.NetworkAddress; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.FeatureFlagSettings; @@ -106,7 +103,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.smile.SmileXContent; @@ -120,6 +116,7 @@ import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.rest.RestStatus; import 
org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; @@ -128,8 +125,8 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.env.Environment; import org.opensearch.env.TestEnvironment; -import org.opensearch.http.HttpInfo; import org.opensearch.index.IndexModule; +import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; import org.opensearch.index.MergeSchedulerConfig; import org.opensearch.index.MockEngineFactoryPlugin; @@ -138,16 +135,17 @@ import org.opensearch.index.engine.Segment; import org.opensearch.index.mapper.CompletionFieldMapper; import org.opensearch.index.mapper.MockFieldFilterPlugin; +import org.opensearch.index.shard.IndexShard; import org.opensearch.index.store.Store; import org.opensearch.index.translog.Translog; import org.opensearch.indices.IndicesQueryCache; import org.opensearch.indices.IndicesRequestCache; +import org.opensearch.indices.IndicesService; import org.opensearch.indices.store.IndicesStore; import org.opensearch.monitor.os.OsInfo; import org.opensearch.node.NodeMocksPlugin; import org.opensearch.plugins.NetworkPlugin; import org.opensearch.plugins.Plugin; -import org.opensearch.rest.action.RestCancellableNodeClient; import org.opensearch.script.MockScriptService; import org.opensearch.search.MockSearchService; import org.opensearch.search.SearchHit; @@ -158,17 +156,15 @@ import org.opensearch.test.disruption.ServiceDisruptionScheme; import org.opensearch.test.store.MockFSIndexStore; import org.opensearch.test.telemetry.MockTelemetryPlugin; -import org.opensearch.test.telemetry.tracing.StrictCheckSpanProcessor; import org.opensearch.test.transport.MockTransportService; import org.opensearch.transport.TransportInterceptor; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestHandler; import org.opensearch.transport.TransportService; import org.hamcrest.Matchers; -import org.junit.After; import org.junit.AfterClass; -import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Rule; import java.io.IOException; import java.lang.Runtime.Version; @@ -189,13 +185,12 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.IdentityHashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.Random; import java.util.Set; -import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -209,6 +204,8 @@ import static org.opensearch.core.common.util.CollectionUtils.eagerPartition; import static org.opensearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING; import static org.opensearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; +import static org.opensearch.index.IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING; +import static org.opensearch.index.IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.test.XContentTestUtils.convertToMap; @@ -216,7 +213,6 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static 
org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoTimeout; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -334,6 +330,10 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules"; private static final boolean MOCK_MODULES_ENABLED = "true".equals(System.getProperty(TESTS_ENABLE_MOCK_MODULES, "true")); + + @Rule + public static OpenSearchTestClusterRule testClusterRule = new OpenSearchTestClusterRule(); + /** * Threshold at which indexing switches from frequently async to frequently bulk. */ @@ -369,22 +369,9 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { */ public static final String TESTS_CLUSTER_NAME = "tests.clustername"; - /** - * The current cluster depending on the configured {@link Scope}. - * By default if no {@link ClusterScope} is configured this will hold a reference to the suite cluster. - */ - private static TestCluster currentCluster; - private static RestClient restClient = null; - - private static final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>(); - - private static OpenSearchIntegTestCase INSTANCE = null; // see @SuiteScope - private static Long SUITE_SEED = null; - @BeforeClass public static void beforeClass() throws Exception { - SUITE_SEED = randomLong(); - initializeSuiteScope(); + testClusterRule.beforeClass(); } @Override @@ -394,36 +381,6 @@ protected final boolean enableWarningsCheck() { return false; } - protected final void beforeInternal() throws Exception { - final Scope currentClusterScope = getCurrentClusterScope(); - Callable<Void> setup = () -> { - cluster().beforeTest(random()); - cluster().wipe(excludeTemplates()); - randomIndexTemplate(); - return null; - }; - switch (currentClusterScope) { - case SUITE: - assert SUITE_SEED != null : "Suite seed was not initialized"; - currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED); - RandomizedContext.current().runWithPrivateRandomness(SUITE_SEED, setup); - break; - case TEST: - currentCluster = buildAndPutCluster(currentClusterScope, randomLong()); - setup.call(); - break; - } - - } - - private void printTestMessage(String message) { - if (isSuiteScopedTest(getClass()) && (getTestName().equals("<unknown>"))) { - logger.info("[{}]: {} suite", getTestClass().getSimpleName(), message); - } else { - logger.info("[{}#{}]: {} test", getTestClass().getSimpleName(), getTestName(), message); - } - } - /** * Creates a randomized index template. This template is used to pass in randomized settings on a * per index basis. Allows to enable/disable the randomization for number of shards and replicas @@ -547,85 +504,6 @@ private static Settings.Builder setRandomIndexTranslogSettings(Random random, Se return builder; } - private TestCluster buildWithPrivateContext(final Scope scope, final long seed) throws Exception { - return RandomizedContext.current().runWithPrivateRandomness(seed, () -> buildTestCluster(scope, seed)); - } - - private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed) throws Exception { - final Class<?> clazz = this.getClass(); - TestCluster testCluster = clusters.remove(clazz); // remove this cluster first - clearClusters(); // all leftovers are gone by now... 
this is really just a double safety if we miss something somewhere - switch (currentClusterScope) { - case SUITE: - if (testCluster == null) { // only build if it's not there yet - testCluster = buildWithPrivateContext(currentClusterScope, seed); - } - break; - case TEST: - // close the previous one and create a new one - IOUtils.closeWhileHandlingException(testCluster); - testCluster = buildTestCluster(currentClusterScope, seed); - break; - } - clusters.put(clazz, testCluster); - return testCluster; - } - - private static void clearClusters() throws Exception { - if (!clusters.isEmpty()) { - IOUtils.close(clusters.values()); - clusters.clear(); - } - if (restClient != null) { - restClient.close(); - restClient = null; - } - assertBusy(() -> { - int numChannels = RestCancellableNodeClient.getNumChannels(); - assertEquals( - numChannels - + " channels still being tracked in " - + RestCancellableNodeClient.class.getSimpleName() - + " while there should be none", - 0, - numChannels - ); - }); - } - - private void afterInternal(boolean afterClass) throws Exception { - final Scope currentClusterScope = getCurrentClusterScope(); - if (isInternalCluster()) { - internalCluster().clearDisruptionScheme(); - } - try { - if (cluster() != null) { - if (currentClusterScope != Scope.TEST) { - Metadata metadata = client().admin().cluster().prepareState().execute().actionGet().getState().getMetadata(); - - final Set<String> persistentKeys = new HashSet<>(metadata.persistentSettings().keySet()); - assertThat("test leaves persistent cluster metadata behind", persistentKeys, empty()); - - final Set<String> transientKeys = new HashSet<>(metadata.transientSettings().keySet()); - assertThat("test leaves transient cluster metadata behind", transientKeys, empty()); - } - ensureClusterSizeConsistency(); - ensureClusterStateConsistency(); - ensureClusterStateCanBeReadByNodeTool(); - beforeIndexDeletion(); - cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete - if (afterClass || currentClusterScope == Scope.TEST) { - cluster().close(); - } - cluster().assertAfterTest(); - } - } finally { - if (currentClusterScope == Scope.TEST) { - clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST - } - } - } - /** * @return An exclude set of index templates that will not be removed in between tests. 
*/ @@ -638,18 +516,15 @@ protected void beforeIndexDeletion() throws Exception { } public static TestCluster cluster() { - return currentCluster; + return testClusterRule.cluster(); } public static boolean isInternalCluster() { - return (currentCluster instanceof InternalTestCluster); + return testClusterRule.isInternalCluster(); } public static InternalTestCluster internalCluster() { - if (!isInternalCluster()) { - throw new UnsupportedOperationException("current test cluster is immutable"); - } - return (InternalTestCluster) currentCluster; + return testClusterRule.internalCluster().orElseThrow(() -> new UnsupportedOperationException("current test cluster is immutable")); } public ClusterService clusterService() { @@ -661,14 +536,7 @@ public static Client client() { } public static Client client(@Nullable String node) { - if (node != null) { - return internalCluster().client(node); - } - Client client = cluster().client(); - if (frequently()) { - client = new RandomizingClient(client, random()); - } - return client; + return testClusterRule.clientForNode(node); } public static Client dataNodeClient() { @@ -771,6 +639,11 @@ public Settings indexSettings() { ); } + if (randomBoolean()) { + builder.put(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING.getKey(), true); + builder.put(INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING.getKey(), randomDoubleBetween(0.01, 0.50, true)); + } + return builder.build(); } @@ -787,6 +660,9 @@ protected Settings featureFlagSettings() { } // Enabling Telemetry setting by default featureSettings.put(FeatureFlags.TELEMETRY_SETTING.getKey(), true); + + // Enabling fuzzy set for tests by default + featureSettings.put(FeatureFlags.DOC_ID_FUZZY_SET_SETTING.getKey(), true); return featureSettings.build(); } @@ -1687,14 +1563,17 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma if (dummyDocuments) { indexRandomForMultipleSlices(indicesArray); } + if (forceRefresh) { + waitForReplication(); + } } /* - * This method ingests bogus documents for the given indices such that multiple slices - * are formed. This is useful for testing with the concurrent search use-case as it creates - * multiple slices based on segment count. - * @param indices the indices in which bogus documents should be ingested - * */ + * This method ingests bogus documents for the given indices such that multiple slices + * are formed. This is useful for testing with the concurrent search use-case as it creates + * multiple slices based on segment count. + * @param indices the indices in which bogus documents should be ingested + * */ protected void indexRandomForMultipleSlices(String... indices) throws InterruptedException { Set<List<String>> bogusIds = new HashSet<>(); int refreshCount = randomIntBetween(2, 3); @@ -1928,7 +1807,7 @@ public void clearScroll(String... 
scrollIds) { assertThat(clearResponse.isSucceeded(), equalTo(true)); } - private static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> annotationClass) { + static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> annotationClass) { if (clazz == Object.class || clazz == OpenSearchIntegTestCase.class) { return null; } @@ -1939,16 +1818,6 @@ private static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> a return getAnnotation(clazz.getSuperclass(), annotationClass); } - private Scope getCurrentClusterScope() { - return getCurrentClusterScope(this.getClass()); - } - - private static Scope getCurrentClusterScope(Class<?> clazz) { - ClusterScope annotation = getAnnotation(clazz, ClusterScope.class); - // if we are not annotated assume suite! - return annotation == null ? Scope.SUITE : annotation.scope(); - } - private boolean getSupportsDedicatedClusterManagers() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null ? true : annotation.supportsDedicatedMasters(); @@ -2008,6 +1877,9 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), randomBoolean()) .putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()) // empty list disables a port scan for other nodes .putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), "file") + // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices + // when tests are run with concurrent segment search enabled + .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2) .put(featureFlagSettings()); // Enable tracer only when Telemetry Setting is enabled @@ -2015,11 +1887,6 @@ protected Settings nodeSettings(int nodeOrdinal) { builder.put(TelemetrySettings.TRACER_FEATURE_ENABLED_SETTING.getKey(), true); builder.put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true); } - if (FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING.get(featureFlagSettings)) { - // By default, for tests we will put the target slice count of 2. 
This will increase the probability of having multiple slices - // when tests are run with concurrent segment search enabled - builder.put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2); - } return builder.build(); } @@ -2270,10 +2137,9 @@ public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler( * Returns path to a random directory that can be used to create a temporary file system repo */ public Path randomRepoPath() { - if (currentCluster instanceof InternalTestCluster) { - return randomRepoPath(((InternalTestCluster) currentCluster).getDefaultSettings()); - } - throw new UnsupportedOperationException("unsupported cluster type"); + return testClusterRule.internalCluster() + .map(c -> randomRepoPath(c.getDefaultSettings())) + .orElseThrow(() -> new UnsupportedOperationException("unsupported cluster type")); } /** @@ -2347,83 +2213,9 @@ private NumShards(int numPrimaries, int numReplicas) { } } - private static boolean runTestScopeLifecycle() { - return INSTANCE == null; - } - - @Before - public final void setupTestCluster() throws Exception { - if (runTestScopeLifecycle()) { - printTestMessage("setting up"); - beforeInternal(); - printTestMessage("all set up"); - } - } - - @After - public final void cleanUpCluster() throws Exception { - // Deleting indices is going to clear search contexts implicitly so we - // need to check that there are no more in-flight search contexts before - // we remove indices - if (isInternalCluster()) { - internalCluster().setBootstrapClusterManagerNodeIndex(-1); - } - super.ensureAllSearchContextsReleased(); - if (runTestScopeLifecycle()) { - printTestMessage("cleaning up after"); - afterInternal(false); - printTestMessage("cleaned up after"); - } - } - @AfterClass public static void afterClass() throws Exception { - try { - if (runTestScopeLifecycle()) { - clearClusters(); - } else { - INSTANCE.printTestMessage("cleaning up after"); - INSTANCE.afterInternal(true); - checkStaticState(true); - } - StrictCheckSpanProcessor.validateTracingStateOnShutdown(); - } finally { - SUITE_SEED = null; - currentCluster = null; - INSTANCE = null; - } - } - - private static void initializeSuiteScope() throws Exception { - Class<?> targetClass = getTestClass(); - /* - Note we create these test class instance via reflection - since JUnit creates a new instance per test and that is also - the reason why INSTANCE is static since this entire method - must be executed in a static context. - */ - assert INSTANCE == null; - if (isSuiteScopedTest(targetClass)) { - // note we need to do this way to make sure this is reproducible - if (isSuiteScopedTestParameterized(targetClass)) { - INSTANCE = (OpenSearchIntegTestCase) targetClass.getConstructor(Settings.class).newInstance(Settings.EMPTY); - } else { - INSTANCE = (OpenSearchIntegTestCase) targetClass.getConstructor().newInstance(); - } - boolean success = false; - try { - INSTANCE.printTestMessage("setup"); - INSTANCE.beforeInternal(); - INSTANCE.setupSuiteScopeCluster(); - success = true; - } finally { - if (!success) { - afterClass(); - } - } - } else { - INSTANCE = null; - } + testClusterRule.afterClass(); } /** @@ -2455,41 +2247,8 @@ protected boolean forbidPrivateIndexSettings() { * The returned client gets automatically closed when needed, it shouldn't be closed as part of tests otherwise * it cannot be reused by other tests anymore. 
*/ - protected static synchronized RestClient getRestClient() { - if (restClient == null) { - restClient = createRestClient(); - } - return restClient; - } - - protected static RestClient createRestClient() { - return createRestClient(null, "http"); - } - - protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { - NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get(); - assertFalse(nodesInfoResponse.hasFailures()); - return createRestClient(nodesInfoResponse.getNodes(), httpClientConfigCallback, protocol); - } - - protected static RestClient createRestClient( - final List<NodeInfo> nodes, - RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, - String protocol - ) { - List<HttpHost> hosts = new ArrayList<>(); - for (NodeInfo node : nodes) { - if (node.getInfo(HttpInfo.class) != null) { - TransportAddress publishAddress = node.getInfo(HttpInfo.class).address().publishAddress(); - InetSocketAddress address = publishAddress.address(); - hosts.add(new HttpHost(protocol, NetworkAddress.format(address.getAddress()), address.getPort())); - } - } - RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[0])); - if (httpClientConfigCallback != null) { - builder.setHttpClientConfigCallback(httpClientConfigCallback); - } - return builder.build(); + protected static RestClient getRestClient() { + return testClusterRule.getRestClient(); } /** @@ -2500,20 +2259,6 @@ protected static RestClient createRestClient( */ protected void setupSuiteScopeCluster() throws Exception {} - private static boolean isSuiteScopedTest(Class<?> clazz) { - return clazz.getAnnotation(SuiteScopeTestCase.class) != null; - } - - /* - * For tests defined with, SuiteScopeTestCase return true if the - * class has a constructor that takes a single Settings parameter - * */ - private static boolean isSuiteScopedTestParameterized(Class<?> clazz) { - return Arrays.stream(clazz.getConstructors()) - .filter(x -> x.getParameterTypes().length == 1) - .anyMatch(x -> x.getParameterTypes()[0].equals(Settings.class)); - } - /** * If a test is annotated with {@link SuiteScopeTestCase} * the checks and modifications that are applied to the used test cluster are only done after all tests @@ -2622,4 +2367,96 @@ protected ClusterState getClusterState() { return client(internalCluster().getClusterManagerName()).admin().cluster().prepareState().get().getState(); } + /** + * Refreshes the given indices and, when Segment Replication is enabled, waits until active/started replica shards + * have caught up with the primary shard. + * This doesn't wait for inactive/non-started replica shards to become active/started. + */ + protected RefreshResponse refreshAndWaitForReplication(String... indices) { + RefreshResponse refreshResponse = refresh(indices); + waitForReplication(); + return refreshResponse; + } + + /** + * When Segment Replication is enabled, waits until active/started replica shards have caught up with the primary shard. + * This doesn't wait for inactive/non-started replica shards to become active/started. + */ + protected void waitForReplication(String... 
indices) { + if (indices.length == 0) { + indices = getClusterState().routingTable().indicesRouting().keySet().toArray(String[]::new); + } + try { + for (String index : indices) { + if (isSegmentReplicationEnabledForIndex(index)) { + if (isInternalCluster()) { + IndexRoutingTable indexRoutingTable = getClusterState().routingTable().index(index); + if (indexRoutingTable != null) { + assertBusy(() -> { + for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) { + final ShardRouting primaryRouting = shardRoutingTable.primaryShard(); + if (primaryRouting.state().toString().equals("STARTED")) { + if (isSegmentReplicationEnabledForIndex(index)) { + final List<ShardRouting> replicaRouting = shardRoutingTable.replicaShards(); + final IndexShard primaryShard = getIndexShard(primaryRouting, index); + for (ShardRouting replica : replicaRouting) { + if (replica.state().toString().equals("STARTED")) { + IndexShard replicaShard = getIndexShard(replica, index); + assertEquals( + "replica shards haven't caught up with primary", + getLatestSegmentInfoVersion(primaryShard), + getLatestSegmentInfoVersion(replicaShard) + ); + } + } + } + } + } + }, 30, TimeUnit.SECONDS); + } + } else { + throw new IllegalStateException( + "Segment Replication is not supported for tests using an external test cluster" + ); + } + } + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + /** + * Checks if Segment Replication is enabled on the given index. + */ + protected boolean isSegmentReplicationEnabledForIndex(String index) { + return clusterService().state().getMetadata().isSegmentReplicationEnabled(index); + } + + protected IndexShard getIndexShard(ShardRouting routing, String indexName) { + return getIndexShard(getClusterState().nodes().get(routing.currentNodeId()).getName(), routing.shardId(), indexName); + } + + /** + * Fetch IndexShard by shardId; multiple shards per node are allowed. + */ + protected IndexShard getIndexShard(String node, ShardId shardId, String indexName) { + final Index index = resolveIndex(indexName); + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); + IndexService indexService = indicesService.indexServiceSafe(index); + final Optional<Integer> id = indexService.shardIds().stream().filter(sid -> sid.equals(shardId.id())).findFirst(); + return indexService.getShard(id.get()); + } + + /** + * Fetch the latest segment infos version of the given shard. 
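+ * Matching versions on the primary and a replica indicate that the replica has caught up with the primary, which is what {@code waitForReplication} above asserts. 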
+ */ + protected long getLatestSegmentInfoVersion(IndexShard shard) { + try (final GatedCloseable<SegmentInfos> snapshot = shard.getSegmentInfosSnapshot()) { + return snapshot.get().version; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index efc29d1c254e6..45ea63e862df6 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -255,13 +255,11 @@ private Node newNode() { .put(FeatureFlags.TELEMETRY_SETTING.getKey(), true) .put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true) .put(TelemetrySettings.TRACER_FEATURE_ENABLED_SETTING.getKey(), true) - .put(nodeSettings()) // allow test cases to provide their own settings or override these - .put(featureFlagSettings); - if (FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING.get(featureFlagSettings)) { // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices // when tests are run with concurrent segment search enabled - settingsBuilder.put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2); - } + .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2) + .put(nodeSettings()) // allow test cases to provide their own settings or override these + .put(featureFlagSettings); Collection<Class<? extends Plugin>> plugins = getPlugins(); if (plugins.contains(getTestTransportPlugin()) == false) { diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index b5ff30deecf5c..96bffcf2d3692 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -83,6 +83,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateUtils; import org.opensearch.common.time.FormatNames; +import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; @@ -1095,6 +1096,38 @@ public static void assertBusy(CheckedRunnable<Exception> codeBlock, long maxWait } } + /** + * Repeatedly runs the code block, sleeping for the fixed sleep time between attempts, until no assertion trips or the max wait time elapses. 
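+ * Unlike {@code assertBusy}, which grows the wait interval between retries, the sleep interval here stays constant. 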
+ */ + public static void assertBusyWithFixedSleepTime(CheckedRunnable<Exception> codeBlock, TimeValue maxWaitTime, TimeValue sleepTime) + throws Exception { + long maxTimeInMillis = maxWaitTime.millis(); + long sleepTimeInMillis = sleepTime.millis(); + if (sleepTimeInMillis > maxTimeInMillis) { + throw new IllegalArgumentException("sleepTime must not exceed maxWaitTime"); + } + long sum = 0; + List<AssertionError> failures = new ArrayList<>(); + while (sum <= maxTimeInMillis) { + try { + codeBlock.run(); + return; + } catch (AssertionError e) { + failures.add(e); + } + sum += sleepTimeInMillis; + Thread.sleep(sleepTimeInMillis); + } + try { + codeBlock.run(); + } catch (AssertionError e) { + for (AssertionError failure : failures) { + e.addSuppressed(failure); + } + throw e; + } + } + /** * Periodically execute the supplied function until it returns true, or a timeout * is reached. This version uses a timeout of 10 seconds. If at all possible, diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java new file mode 100644 index 0000000000000..57e9ccf22ab43 --- /dev/null +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java @@ -0,0 +1,428 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.RandomizedContext; + +import org.apache.hc.core5.http.HttpHost; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.client.Client; +import org.opensearch.client.RestClient; +import org.opensearch.client.RestClientBuilder; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.Nullable; +import org.opensearch.common.network.NetworkAddress; +import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.common.transport.TransportAddress; +import org.opensearch.http.HttpInfo; +import org.opensearch.rest.action.RestCancellableNodeClient; +import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; +import org.opensearch.test.OpenSearchIntegTestCase.Scope; +import org.opensearch.test.OpenSearchIntegTestCase.SuiteScopeTestCase; +import org.opensearch.test.client.RandomizingClient; +import org.opensearch.test.telemetry.tracing.StrictCheckSpanProcessor; +import org.junit.rules.MethodRule; +import org.junit.runners.model.FrameworkMethod; +import org.junit.runners.model.MultipleFailureException; +import org.junit.runners.model.Statement; + +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.Callable; + +import static org.hamcrest.Matchers.empty; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThat; + +/** + * The JUnit {@link MethodRule} that handles test method scoped and test suite scoped clusters for integration (internal cluster) tests. The rule is + * injected into {@link OpenSearchIntegTestCase}, which every integration test suite should subclass. 
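It builds the cluster for the configured {@link Scope} before tests run and reuses or tears it down afterwards. 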
In case of the parameterized test suites, + * please subclass {@link ParameterizedStaticSettingsOpenSearchIntegTestCase} or {@link ParameterizedDynamicSettingsOpenSearchIntegTestCase}, depending + * on the way cluster settings are being managed. + */ +class OpenSearchTestClusterRule implements MethodRule { + // Maps each TestCluster instance to the exact test suite instance that triggered its creation + private final Map<TestCluster, OpenSearchIntegTestCase> suites = new IdentityHashMap<>(); + private final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>(); + private final Logger logger = LogManager.getLogger(getClass()); + + /** + * The current cluster depending on the configured {@link Scope}. + * By default if no {@link ClusterScope} is configured this will hold a reference to the suite cluster. + */ + private TestCluster currentCluster = null; + private RestClient restClient = null; + + private OpenSearchIntegTestCase suiteInstance = null; // see @SuiteScope + private Long suiteSeed = null; + + @Override + public Statement apply(Statement base, FrameworkMethod method, Object target) { + return statement(base, method, target); + } + + void beforeClass() throws Exception { + suiteSeed = OpenSearchTestCase.randomLong(); + } + + void afterClass() throws Exception { + try { + if (runTestScopeLifecycle()) { + clearClusters(); + } else { + printTestMessage("cleaning up after"); + afterInternal(true, null); + OpenSearchTestCase.checkStaticState(true); + synchronized (clusters) { + final TestCluster cluster = clusters.remove(getTestClass()); + IOUtils.closeWhileHandlingException(cluster); + if (cluster != null) { + suites.remove(cluster); + } + } + } + StrictCheckSpanProcessor.validateTracingStateOnShutdown(); + } finally { + suiteSeed = null; + currentCluster = null; + suiteInstance = null; + } + } + + TestCluster cluster() { + return currentCluster; + } + + boolean isInternalCluster() { + return (cluster() instanceof InternalTestCluster); + } + + Optional<InternalTestCluster> internalCluster() { + if (!isInternalCluster()) { + return Optional.empty(); + } else { + return Optional.of((InternalTestCluster) cluster()); + } + } + + Client clientForAnyNode() { + return clientForNode(null); + } + + Client clientForNode(@Nullable String node) { + if (node != null) { + return internalCluster().orElseThrow(() -> new UnsupportedOperationException("current test cluster is immutable")).client(node); + } + Client client = cluster().client(); + if (OpenSearchTestCase.frequently()) { + client = new RandomizingClient(client, OpenSearchTestCase.random()); + } + return client; + } + + synchronized RestClient getRestClient() { + if (restClient == null) { + restClient = createRestClient(); + } + return restClient; + } + + protected final void beforeInternal(OpenSearchIntegTestCase target) throws Exception { + final Scope currentClusterScope = getClusterScope(target.getClass()); + Callable<Void> setup = () -> { + currentCluster.beforeTest(OpenSearchTestCase.random()); + currentCluster.wipe(target.excludeTemplates()); + target.randomIndexTemplate(); + return null; + }; + switch (currentClusterScope) { + case SUITE: + assert suiteSeed != null : "Suite seed was not initialized"; + currentCluster = buildAndPutCluster(currentClusterScope, suiteSeed, target); + RandomizedContext.current().runWithPrivateRandomness(suiteSeed, setup); + break; + case TEST: + currentCluster = buildAndPutCluster(currentClusterScope, OpenSearchTestCase.randomLong(), target); + setup.call(); + break; + } + } + + protected void 
before(Object target, FrameworkMethod method) throws Throwable { + final OpenSearchIntegTestCase instance = (OpenSearchIntegTestCase) target; + initializeSuiteScope(instance, method); + + if (runTestScopeLifecycle()) { + printTestMessage("setting up", method); + beforeInternal(instance); + printTestMessage("all set up", method); + } + } + + protected void after(Object target, FrameworkMethod method) throws Exception { + final OpenSearchIntegTestCase instance = (OpenSearchIntegTestCase) target; + + // Deleting indices is going to clear search contexts implicitly so we + // need to check that there are no more in-flight search contexts before + // we remove indices + internalCluster().ifPresent(c -> c.setBootstrapClusterManagerNodeIndex(-1)); + + instance.ensureAllSearchContextsReleased(); + if (runTestScopeLifecycle()) { + printTestMessage("cleaning up after", method); + afterInternal(false, instance); + printTestMessage("cleaned up after", method); + } + } + + protected RestClient createRestClient() { + return createRestClient(null, "http"); + } + + protected RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { + NodesInfoResponse nodesInfoResponse = clientForAnyNode().admin().cluster().prepareNodesInfo().get(); + assertFalse(nodesInfoResponse.hasFailures()); + return createRestClient(nodesInfoResponse.getNodes(), httpClientConfigCallback, protocol); + } + + protected RestClient createRestClient( + final List<NodeInfo> nodes, + RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, + String protocol + ) { + List<HttpHost> hosts = new ArrayList<>(); + for (NodeInfo node : nodes) { + if (node.getInfo(HttpInfo.class) != null) { + TransportAddress publishAddress = node.getInfo(HttpInfo.class).address().publishAddress(); + InetSocketAddress address = publishAddress.address(); + hosts.add(new HttpHost(protocol, NetworkAddress.format(address.getAddress()), address.getPort())); + } + } + RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[0])); + if (httpClientConfigCallback != null) { + builder.setHttpClientConfigCallback(httpClientConfigCallback); + } + return builder.build(); + } + + private Scope getClusterScope(Class<?> clazz) { + ClusterScope annotation = OpenSearchIntegTestCase.getAnnotation(clazz, ClusterScope.class); + // if we are not annotated assume suite! + return annotation == null ? Scope.SUITE : annotation.scope(); + } + + private TestCluster buildWithPrivateContext(final Scope scope, final long seed, OpenSearchIntegTestCase target) throws Exception { + return RandomizedContext.current().runWithPrivateRandomness(seed, () -> target.buildTestCluster(scope, seed)); + } + + private static boolean isSuiteScopedTest(Class<?> clazz) { + return clazz.getAnnotation(SuiteScopeTestCase.class) != null; + } + + private static boolean hasParametersChanged( + final ParameterizedOpenSearchIntegTestCase instance, + final ParameterizedOpenSearchIntegTestCase target + ) { + return !instance.hasSameParametersAs(target); + } + + private boolean runTestScopeLifecycle() { + return suiteInstance == null; + } + + private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed, OpenSearchIntegTestCase target) throws Exception { + final Class<?> clazz = target.getClass(); + + synchronized (clusters) { + TestCluster testCluster = clusters.remove(clazz); // remove this cluster first + clearClusters(); // all leftovers are gone by now... 
this is really just a double safety if we miss something somewhere + switch (currentClusterScope) { + case SUITE: + if (testCluster != null && target instanceof ParameterizedOpenSearchIntegTestCase) { + final OpenSearchIntegTestCase instance = suites.get(testCluster); + if (instance != null) { + assert instance instanceof ParameterizedOpenSearchIntegTestCase; + if (hasParametersChanged( + (ParameterizedOpenSearchIntegTestCase) instance, + (ParameterizedOpenSearchIntegTestCase) target + )) { + IOUtils.closeWhileHandlingException(testCluster); + printTestMessage("new instance of parameterized test class, recreating test cluster for suite"); + testCluster = null; + } + } + } + + if (testCluster == null) { // only build if it's not there yet + testCluster = buildWithPrivateContext(currentClusterScope, seed, target); + suites.put(testCluster, target); + } + break; + case TEST: + // close the previous one and create a new one + IOUtils.closeWhileHandlingException(testCluster); + testCluster = target.buildTestCluster(currentClusterScope, seed); + break; + } + clusters.put(clazz, testCluster); + return testCluster; + } + } + + private void printTestMessage(String message) { + logger.info("[{}]: {} suite", getTestClass().getSimpleName(), message); + } + + private static Class<?> getTestClass() { + return OpenSearchTestCase.getTestClass(); + } + + private void printTestMessage(String message, FrameworkMethod method) { + logger.info("[{}#{}]: {} test", getTestClass().getSimpleName(), method.getName(), message); + } + + private void afterInternal(boolean afterClass, OpenSearchIntegTestCase target) throws Exception { + final Scope currentClusterScope = getClusterScope(getTestClass()); + internalCluster().ifPresent(InternalTestCluster::clearDisruptionScheme); + + OpenSearchIntegTestCase instance = suiteInstance; + if (instance == null) { + instance = target; + } + + try { + if (cluster() != null) { + if (currentClusterScope != Scope.TEST) { + Metadata metadata = clientForAnyNode().admin().cluster().prepareState().execute().actionGet().getState().getMetadata(); + + final Set<String> persistentKeys = new HashSet<>(metadata.persistentSettings().keySet()); + assertThat("test leaves persistent cluster metadata behind", persistentKeys, empty()); + + final Set<String> transientKeys = new HashSet<>(metadata.transientSettings().keySet()); + assertThat("test leaves transient cluster metadata behind", transientKeys, empty()); + } + instance.ensureClusterSizeConsistency(); + instance.ensureClusterStateConsistency(); + instance.ensureClusterStateCanBeReadByNodeTool(); + instance.beforeIndexDeletion(); + cluster().wipe(instance.excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete + if (afterClass || currentClusterScope == Scope.TEST) { + cluster().close(); + } + cluster().assertAfterTest(); + } + } finally { + if (currentClusterScope == Scope.TEST) { + clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST + } + } + } + + private void clearClusters() throws Exception { + synchronized (clusters) { + if (!clusters.isEmpty()) { + IOUtils.close(clusters.values()); + suites.clear(); + clusters.clear(); + } + } + if (restClient != null) { + restClient.close(); + restClient = null; + } + OpenSearchTestCase.assertBusy(() -> { + int numChannels = RestCancellableNodeClient.getNumChannels(); + OpenSearchTestCase.assertEquals( + numChannels + + " channels still being tracked in " + + RestCancellableNodeClient.class.getSimpleName() + + " while 
there should be none", + 0, + numChannels + ); + }); + } + + private Statement statement(final Statement base, FrameworkMethod method, Object target) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + before(target, method); + + List<Throwable> errors = new ArrayList<Throwable>(); + try { + base.evaluate(); + } catch (Throwable t) { + errors.add(t); + } finally { + try { + after(target, method); + } catch (Throwable t) { + errors.add(t); + } + } + MultipleFailureException.assertEmpty(errors); + } + }; + } + + private void initializeSuiteScope(OpenSearchIntegTestCase target, FrameworkMethod method) throws Exception { + final Class<?> targetClass = getTestClass(); + /* + Note we create these test class instances via reflection + since JUnit creates a new instance per test. + */ + if (suiteInstance != null) { + // Catching the case when parameterized test cases are run: the test class stays the same but the test instance changes. + if (target instanceof ParameterizedOpenSearchIntegTestCase) { + assert suiteInstance instanceof ParameterizedOpenSearchIntegTestCase; + if (hasParametersChanged( + (ParameterizedOpenSearchIntegTestCase) suiteInstance, + (ParameterizedOpenSearchIntegTestCase) target + )) { + printTestMessage("new instance of parameterized test class, recreating cluster scope", method); + afterClass(); + beforeClass(); + } else { + return; /* same test class instance */ + } + } else { + return; /* not a parameterized test */ + } + } + + assert suiteInstance == null; + if (isSuiteScopedTest(targetClass)) { + suiteInstance = target; + + boolean success = false; + try { + printTestMessage("setup", method); + beforeInternal(target); + suiteInstance.setupSuiteScopeCluster(); + success = true; + } finally { + if (!success) { + afterClass(); + } + } + } else { + suiteInstance = null; + } + } +}
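For context, a minimal sketch of how a test suite selects the scope this rule manages (the class and test names are hypothetical; the annotation itself appears on the test classes added later in this diff):

@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
public class MyPerTestClusterIT extends OpenSearchIntegTestCase {
    public void testAgainstFreshCluster() {
        // TEST scope: the rule builds a cluster just for this method and closes it afterwards.
        // Omit the annotation to get the default SUITE scope: one shared cluster per test class.
    }
}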
diff --git a/test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java new file mode 100644 index 0000000000000..b31dfa2bdefa5 --- /dev/null +++ b/test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java @@ -0,0 +1,66 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.test; + +import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsModule; +import org.junit.After; +import org.junit.Before; + +/** + * Base class for running the tests with parameterization using dynamic settings: the cluster will be created once before the test suite and the + * settings will be applied dynamically; note that not all settings can be changed dynamically (consider using {@link ParameterizedStaticSettingsOpenSearchIntegTestCase} + * instead). + * <p> + * Here is a simple illustration of the execution flow per parameter combination: + * <ul> + * <li><b>suite scope</b>: create cluster -> for each test method { apply settings -> run test method -> unapply settings } -> shutdown cluster</li> + * <li><b>test scope</b>: for each test method { create cluster -> apply settings -> run test method -> unapply settings -> shutdown cluster }</li> + * </ul> + */ +public abstract class ParameterizedDynamicSettingsOpenSearchIntegTestCase extends ParameterizedOpenSearchIntegTestCase { + public ParameterizedDynamicSettingsOpenSearchIntegTestCase(Settings dynamicSettings) { + super(dynamicSettings); + } + + @Before + public void beforeTests() { + SettingsModule settingsModule = new SettingsModule(settings); + for (String key : settings.keySet()) { + assertTrue( + settingsModule.getClusterSettings().isDynamicSetting(key) || settingsModule.getIndexScopedSettings().isDynamicSetting(key) + ); + } + client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settings).get(); + } + + @After + public void afterTests() { + final Settings.Builder settingsToUnset = Settings.builder(); + settings.keySet().forEach(settingsToUnset::putNull); + client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settingsToUnset).get(); + } + + @Override + boolean hasSameParametersAs(ParameterizedOpenSearchIntegTestCase obj) { + if (this == obj) { + return true; + } + + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + return true; + } +} diff --git a/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java index edda6bf5603f7..23316adf6a2d7 100644 --- a/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java @@ -9,48 +9,43 @@ package org.opensearch.test; import org.opensearch.common.settings.Settings; -import org.opensearch.common.settings.SettingsModule; -import org.junit.After; -import org.junit.Before; import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; /** - * Base class for running the tests with parameterization of the dynamic settings - * For any class that wants to use parameterization, use @ParametersFactory to generate - * different params only for dynamic settings. Refer SearchCancellationIT for an example. - * Note: this doesn't work for the parameterization of feature flag/static settings. + * Base class for running the tests with parameterization of the settings. + * For any class that wants to use parameterization, use {@link com.carrotsearch.randomizedtesting.annotations.ParametersFactory} to generate + * different parameters.
+ * + * There are two flavors of applying the parameterized settings to the cluster at the suite level: + * - static: the cluster is pre-created with the settings at startup; please subclass {@link ParameterizedStaticSettingsOpenSearchIntegTestCase}. The method + * {@link #hasSameParametersAs(ParameterizedOpenSearchIntegTestCase)} is used by the test scaffolding to detect when the test suite is instantiated with + * new parameters and the test cluster has to be recreated + * - dynamic: the cluster is created once before the test suite and the settings are applied dynamically; please subclass {@link ParameterizedDynamicSettingsOpenSearchIntegTestCase}. + * Note that not all settings can be changed dynamically. + * + * If the test suite uses test-level scope, the cluster will be recreated for each test method (applying static or dynamic settings). */ -public abstract class ParameterizedOpenSearchIntegTestCase extends OpenSearchIntegTestCase { - - private final Settings dynamicSettings; +abstract class ParameterizedOpenSearchIntegTestCase extends OpenSearchIntegTestCase { + protected final Settings settings; - public ParameterizedOpenSearchIntegTestCase(Settings dynamicSettings) { - this.dynamicSettings = dynamicSettings; - } - - @Before - public void beforeTests() { - SettingsModule settingsModule = new SettingsModule(dynamicSettings); - for (String key : dynamicSettings.keySet()) { - assertTrue( - settingsModule.getClusterSettings().isDynamicSetting(key) || settingsModule.getIndexScopedSettings().isDynamicSetting(key) - ); - } - client().admin().cluster().prepareUpdateSettings().setPersistentSettings(dynamicSettings).get(); - } - - @After - public void afterTests() { - final Settings.Builder settingsToUnset = Settings.builder(); - dynamicSettings.keySet().forEach(settingsToUnset::putNull); - client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settingsToUnset).get(); + ParameterizedOpenSearchIntegTestCase(Settings settings) { + this.settings = settings; } // This method shouldn't be called in setupSuiteScopeCluster(). Only call this method inside single test. public void indexRandomForConcurrentSearch(String... indices) throws InterruptedException { - if (dynamicSettings.get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()).equals("true")) { + if (CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) { indexRandomForMultipleSlices(indices); } } + + /** + * Compares the parameters of the two {@link ParameterizedOpenSearchIntegTestCase} test suite instances. + * This method is used by {@link OpenSearchTestClusterRule} to determine when the parameterized test suite is instantiated with + * another set of parameters and the test cluster has to be recreated to reflect that. + * @param obj instance of the {@link ParameterizedOpenSearchIntegTestCase} to compare with + * @return {@code true} if the parameters of the test suites are the same, {@code false} otherwise + */ + abstract boolean hasSameParametersAs(ParameterizedOpenSearchIntegTestCase obj); }
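To make the recreation contract concrete, here is a minimal sketch of a static-flavor suite (the class name and setting key are hypothetical; the test classes added later in this diff are complete working versions):

public class MyFeatureToggleIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
    public MyFeatureToggleIT(Settings staticSettings) {
        super(staticSettings);
    }

    @ParametersFactory
    public static Collection<Object[]> parameters() {
        // The runner instantiates the suite once per entry; between the two runs the Settings
        // differ, so hasSameParametersAs returns false and the suite cluster is rebuilt.
        return Arrays.asList(
            new Object[] { Settings.builder().put("my.feature.enabled", true).build() },
            new Object[] { Settings.builder().put("my.feature.enabled", false).build() }
        );
    }
}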
diff --git a/test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java new file mode 100644 index 0000000000000..2e97228326314 --- /dev/null +++ b/test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.test; + +import org.opensearch.common.settings.Settings; + +import java.util.Objects; + +/** + * Base class for running the tests with parameterization using static settings: the cluster will be pre-created with the settings at startup. The method + * {@link #hasSameParametersAs(ParameterizedOpenSearchIntegTestCase)} is used by the test scaffolding to detect when the test suite is instantiated with + * new parameters and the test cluster has to be recreated. + * <p> + * Here is a simple illustration of the execution flow per parameter combination: + * <ul> + * <li><b>suite scope</b>: create cluster -> for each test method { run test method } -> shutdown cluster</li> + * <li><b>test scope</b>: for each test method { create cluster -> run test method -> shutdown cluster }</li> + * </ul> + */ +public abstract class ParameterizedStaticSettingsOpenSearchIntegTestCase extends ParameterizedOpenSearchIntegTestCase { + public ParameterizedStaticSettingsOpenSearchIntegTestCase(Settings nodeSettings) { + super(nodeSettings); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(settings).build(); + } + + @Override + boolean hasSameParametersAs(ParameterizedOpenSearchIntegTestCase obj) { + if (this == obj) { + return true; + } + + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + final ParameterizedStaticSettingsOpenSearchIntegTestCase other = (ParameterizedStaticSettingsOpenSearchIntegTestCase) obj; + return Objects.equals(settings, other.settings); + } +} diff --git a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java index 2fb345f73fb06..09a72dcdc3641 100644 --- a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java @@ -107,6 +107,7 @@ public class TestSearchContext extends SearchContext { SearchShardTask task; SortAndFormats sort; boolean trackScores = false; + boolean includeNamedQueriesScore = false; int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; ContextIndexSearcher searcher; @@ -409,6 +410,17 @@ public boolean trackScores() { return trackScores; } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + @Override + public boolean includeNamedQueriesScore() { + return
includeNamedQueriesScore; + } + @Override public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { this.trackTotalHitsUpTo = trackTotalHitsUpTo; diff --git a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java index 8fb9bc5cd7c1c..8ce5afab17c00 100644 --- a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java @@ -359,4 +359,14 @@ public static Version randomPreviousCompatibleVersion(Random random, Version ver // but 7.2.0 for minimum compat return randomVersionBetween(random, version.minimumIndexCompatibilityVersion(), getPreviousVersion(version)); } + + /** + * Returns a {@link Version} with a given major, minor and revision version. + * Build version is skipped for the sake of simplicity. + */ + public static Version getVersion(byte major, byte minor, byte revision) { + StringBuilder sb = new StringBuilder(); + sb.append(major).append('.').append(minor).append('.').append(revision); + return Version.fromString(sb.toString()); + } } diff --git a/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java index 44837c37962b4..168fbd5bd0d0a 100644 --- a/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java +++ b/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java @@ -252,7 +252,7 @@ public TimeValue expectedTimeToHeal() { * returns true if some live threads were found. The caller is expected to call this method * until no more "live" are found. */ - @SuppressWarnings("deprecation") // suspends/resumes threads intentionally + @SuppressWarnings({ "deprecation", "removal" }) // suspends/resumes threads intentionally @SuppressForbidden(reason = "suspends/resumes threads intentionally") protected boolean suspendThreads(Set<Thread> nodeThreads) { Thread[] allThreads = null; @@ -360,7 +360,7 @@ protected void onBlockDetected(ThreadInfo blockedThread, @Nullable ThreadInfo bl ); } - @SuppressWarnings("deprecation") // suspends/resumes threads intentionally + @SuppressWarnings({ "deprecation", "removal" }) // suspends/resumes threads intentionally @SuppressForbidden(reason = "suspends/resumes threads intentionally") protected void resumeThreads(Set<Thread> threads) { for (Thread thread : threads) { diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java index 183214c159c14..9b0de13c35ec8 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java @@ -528,6 +528,10 @@ public static Matcher<SearchHit> hasScore(final float score) { return new OpenSearchMatchers.SearchHitHasScoreMatcher(score); } + public static Matcher<SearchHit> hasMatchedQueries(final String[] matchedQueries) { + return new OpenSearchMatchers.SearchHitMatchedQueriesMatcher(matchedQueries); + } + public static <T, V> CombinableMatcher<T> hasProperty(Function<? super T, ? extends V> property, Matcher<V> valueMatcher) { return OpenSearchMatchers.HasPropertyLambdaMatcher.hasProperty(property, valueMatcher); }
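For illustration, a minimal sketch of the new hasMatchedQueries assertion in use (the index, field, and query name are hypothetical, assuming an integration-test context with the usual static imports):

SearchResponse response = client().prepareSearch("test-index")
    .setQuery(QueryBuilders.termQuery("title", "opensearch").queryName("title-query"))
    .get();
// Order-insensitive comparison: the matcher sorts both arrays before comparing.
assertThat(response.getHits().getAt(0), hasMatchedQueries(new String[] { "title-query" }));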
diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java index 5889b7e269ed2..2be94bd53e3c1 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java @@ -38,6 +38,7 @@ import org.hamcrest.TypeSafeMatcher; import org.hamcrest.core.CombinableMatcher; +import java.util.Arrays; import java.util.function.Function; public class OpenSearchMatchers { @@ -111,6 +112,38 @@ public void describeTo(final Description description) { } } + public static class SearchHitMatchedQueriesMatcher extends TypeSafeMatcher<SearchHit> { + private String[] matchedQueries; + + public SearchHitMatchedQueriesMatcher(String[] matchedQueries) { + this.matchedQueries = matchedQueries; + } + + @Override + protected boolean matchesSafely(SearchHit searchHit) { + String[] searchHitQueries = searchHit.getMatchedQueries(); + if (matchedQueries == null || searchHitQueries == null) { + return false; + } + // sort copies so that neither the expected array nor the hit's own array is mutated + searchHitQueries = searchHitQueries.clone(); + String[] expectedQueries = matchedQueries.clone(); + Arrays.sort(searchHitQueries); + Arrays.sort(expectedQueries); + return Arrays.equals(searchHitQueries, expectedQueries); + } + + @Override + public void describeMismatchSafely(final SearchHit searchHit, final Description mismatchDescription) { + mismatchDescription.appendText(" matched queries were ").appendValue(Arrays.toString(searchHit.getMatchedQueries())); + } + + @Override + public void describeTo(final Description description) { + description.appendText("searchHit matched queries should be ").appendValue(Arrays.toString(matchedQueries)); + } + } + + public static class HasPropertyLambdaMatcher<T, V> extends FeatureMatcher<T, V> { + private final Function<? super T, ?
extends V> property; diff --git a/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java b/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java index dda413ce2818e..44daf1b1554e0 100644 --- a/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java +++ b/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java @@ -11,8 +11,10 @@ import org.opensearch.telemetry.Telemetry; import org.opensearch.telemetry.TelemetrySettings; import org.opensearch.telemetry.metrics.Counter; +import org.opensearch.telemetry.metrics.Histogram; import org.opensearch.telemetry.metrics.MetricsTelemetry; import org.opensearch.telemetry.metrics.noop.NoopCounter; +import org.opensearch.telemetry.metrics.noop.NoopHistogram; import org.opensearch.telemetry.tracing.TracingTelemetry; import org.opensearch.test.telemetry.tracing.MockTracingTelemetry; @@ -46,6 +48,11 @@ public Counter createUpDownCounter(String name, String description, String unit) return NoopCounter.INSTANCE; } + @Override + public Histogram createHistogram(String name, String description, String unit) { + return NoopHistogram.INSTANCE; + } + @Override public void close() { diff --git a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java index 3b64e044e7bf0..e43b0756e2f2b 100644 --- a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java @@ -1283,9 +1283,17 @@ public String executor() { Level.TRACE, notSeenReceived ); + final String notSeenResponseSent = ".*\\[internal:testNotSeen].*sent response.*"; + final MockLogAppender.LoggingExpectation notSeenResponseSentExpectation = new MockLogAppender.PatternSeenEventExpectation( + "sent response", + "org.opensearch.transport.TransportService.tracer", + Level.TRACE, + notSeenResponseSent + ); appender.addExpectation(notSeenSentExpectation); appender.addExpectation(notSeenReceivedExpectation); + appender.addExpectation(notSeenResponseSentExpectation); PlainTransportFuture<StringMessageResponse> future = new PlainTransportFuture<>(noopResponseHandler); serviceA.sendRequest(nodeB, "internal:testNotSeen", new StringMessageRequest(""), future); diff --git a/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..6dd14e06248a9 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +public class ParameterizedDynamicSettingsOpenSearchIntegTests extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + public ParameterizedDynamicSettingsOpenSearchIntegTests(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + public void testSettings() throws IOException { + final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(cluster.getState().getMetadata().settings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..f38c1ecd26429 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java @@ -0,0 +1,47 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +public class ParameterizedStaticSettingsOpenSearchIntegTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + + public ParameterizedStaticSettingsOpenSearchIntegTests(Settings staticSettings) { + super(staticSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + public void testSettings() throws IOException { + final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); + for (final NodeInfo node : nodes.getNodes()) { + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(node.getSettings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..1f9a7cb87ae15 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -0,0 +1,44 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + public SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + public void testSettings() throws IOException { + final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(cluster.getState().getMetadata().settings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..36ca14e453158 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java @@ -0,0 +1,47 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + public SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests(Settings staticSettings) { + super(staticSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + public void testSettings() throws IOException { + final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); + for (final NodeInfo node : nodes.getNodes()) { + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(node.getSettings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..84caebdb4302f --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -0,0 +1,44 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) +public class TestScopedParameterizedDynamicSettingsOpenSearchIntegTests extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + public TestScopedParameterizedDynamicSettingsOpenSearchIntegTests(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + public void testSettings() throws IOException { + final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(cluster.getState().getMetadata().settings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..6df8ad2c27210 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java @@ -0,0 +1,47 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) +public class TestScopedParameterizedStaticSettingsOpenSearchIntegTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + public TestScopedParameterizedStaticSettingsOpenSearchIntegTests(Settings staticSettings) { + super(staticSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + public void testSettings() throws IOException { + final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); + for (final NodeInfo node : nodes.getNodes()) { + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(node.getSettings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } + } +}