diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 6a5db93053e3b..1e3b913c5cb5a 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -28,3 +28,5 @@ BWC_VERSION:
- "2.11.1"
- "2.11.2"
- "2.12.0"
+ - "2.12.1"
+ - "2.13.0"
diff --git a/.github/ISSUE_TEMPLATE/bug_template.yml b/.github/ISSUE_TEMPLATE/bug_template.yml
index 2cd1ee8a7e688..5f0798abe0f68 100644
--- a/.github/ISSUE_TEMPLATE/bug_template.yml
+++ b/.github/ISSUE_TEMPLATE/bug_template.yml
@@ -15,7 +15,7 @@ body:
description: Choose a specific OpenSearch component your bug belongs to. If you are unsure which to select or if the component is not present, select "Other".
multiple: false
options:
- - Other
+ - # Empty first option to force selection
- Build
- Clients
- Cluster Manager
@@ -24,6 +24,7 @@ body:
- Indexing:Replication
- Indexing
- Libraries
+ - Other
- Plugins
- Search:Aggregations
- Search:Performance
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index d93ac8b590706..0159e771f7f80 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -22,7 +22,7 @@ body:
description: Choose a specific OpenSearch component your feature request belongs to. If you are unsure of which component to select or if the component is not present, select "Other".
multiple: false
options:
- - Other
+ - # Empty first option to force selection
- Build
- Clients
- Cluster Manager
@@ -31,6 +31,7 @@ body:
- Indexing:Replication
- Indexing
- Libraries
+ - Other
- Plugins
- Search:Aggregations
- Search:Performance
diff --git a/.github/ISSUE_TEMPLATE/meta.yml b/.github/ISSUE_TEMPLATE/meta.yml
new file mode 100644
index 0000000000000..b766a26bc3ff2
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/meta.yml
@@ -0,0 +1,58 @@
+name: ✨ Meta Issue
+description: An issue that collects other issues together to describe a larger project or activity.
+title: '[META]'
+labels: ['Meta, untriaged']
+body:
+ - type: textarea
+ attributes:
+ label: Please describe the end goal of this project
+ description: A clear and concise description of this project/endeavor. This should be understandable to someone with no context.
+ placeholder: Ex. Views is a way to project indices in OpenSearch; these views act as a focal point for describing the underlying data and how the data is accessed. They allow for restricting the scope and filtering the response consistently.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Supporting References
+ description: Please provide links (and descriptions!) to RFCs, design docs, etc.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Issues
+ description: Please create a list of issues that should be tracked by this meta issue, including a short description. The purpose is to provide everyone on the project with an "at a glance" update of the state of the work being tracked. If you use the format "- [ ]", it will render your list as a checklist.
+ placeholder: Ex. - [ ] https://github.com/opensearch-project/security/issues/3888 Add views to the cluster metadata schema
+ validations:
+ required: true
+ - type: dropdown
+ attributes:
+ label: Related component
+ description: Choose a specific OpenSearch component your project belongs to. If you are unsure of which component to select or if the component is not present, select "Other".
+ multiple: false
+ options:
+ - # Empty first option to force selection
+ - Build
+ - Clients
+ - Cluster Manager
+ - Extensions
+ - Indexing:Performance
+ - Indexing:Replication
+ - Indexing
+ - Libraries
+ - Other
+ - Plugins
+ - Search:Aggregations
+ - Search:Performance
+ - Search:Query Capabilities
+ - Search:Query Insights
+ - Search:Relevance
+ - Search:Remote Search
+ - Search:Resiliency
+ - Search:Searchable Snapshots
+ - Search
+ - Storage:Durability
+ - Storage:Performance
+ - Storage:Remote
+ - Storage:Snapshots
+ - Storage
+ validations:
+ required: true
diff --git a/.github/workflows/check-compatibility.yml b/.github/workflows/check-compatibility.yml
index d6c65ddd446cd..1ad44e6334059 100644
--- a/.github/workflows/check-compatibility.yml
+++ b/.github/workflows/check-compatibility.yml
@@ -53,7 +53,7 @@ jobs:
name: results.txt
- name: Find Comment
- uses: peter-evans/find-comment@v2
+ uses: peter-evans/find-comment@v3
id: fc
with:
issue-number: ${{ github.event.number }}
diff --git a/.github/workflows/create-documentation-issue.yml b/.github/workflows/create-documentation-issue.yml
index df63847f8afca..b45e053cc25c2 100644
--- a/.github/workflows/create-documentation-issue.yml
+++ b/.github/workflows/create-documentation-issue.yml
@@ -29,7 +29,7 @@ jobs:
- name: Create Issue From File
id: create-issue
- uses: peter-evans/create-issue-from-file@v4
+ uses: peter-evans/create-issue-from-file@v5
with:
title: Add documentation related to new feature
content-filepath: ./ci/documentation/issue.md
diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml
index ca026f530b4af..61962c91b4903 100644
--- a/.github/workflows/links.yml
+++ b/.github/workflows/links.yml
@@ -13,7 +13,7 @@ jobs:
- uses: actions/checkout@v4
- name: lychee Link Checker
id: lychee
- uses: lycheeverse/lychee-action@v1.8.0
+ uses: lycheeverse/lychee-action@v1.9.1
with:
args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes
fail: true
diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml
index 2f87afd372d90..fdc2bf16937b4 100644
--- a/.github/workflows/maintainer-approval.yml
+++ b/.github/workflows/maintainer-approval.yml
@@ -2,7 +2,6 @@ name: Maintainers approval
on:
pull_request_review:
- types: [submitted]
jobs:
maintainer-approved-check:
@@ -10,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: find-maintainers
- uses: actions/github-script@v7
+ uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
result-encoding: string
@@ -26,7 +25,7 @@ jobs:
return maintainersResponse.data.map(item => item.login).join(', ');
- - uses: peternied/required-approval@v1.2
+ - uses: peternied/required-approval@v1.3
with:
token: ${{ secrets.GITHUB_TOKEN }}
min-required: 1
diff --git a/.github/workflows/pull-request-checks.yml b/.github/workflows/pull-request-checks.yml
index 11998e36c2dbb..7efcf529588ed 100644
--- a/.github/workflows/pull-request-checks.yml
+++ b/.github/workflows/pull-request-checks.yml
@@ -17,7 +17,7 @@ jobs:
name: Verify Description Checklist
runs-on: ubuntu-latest
steps:
- - uses: peternied/check-pull-request-description-checklist@v1
+ - uses: peternied/check-pull-request-description-checklist@v1.1
with:
checklist-items: |
New functionality includes testing.
diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml
index c305818bdb0a9..83bf4926a8c2d 100644
--- a/.github/workflows/triage.yml
+++ b/.github/workflows/triage.yml
@@ -9,7 +9,7 @@ jobs:
if: github.repository == 'opensearch-project/OpenSearch'
runs-on: ubuntu-latest
steps:
- - uses: actions/github-script@v7
+ - uses: actions/github-script@v7.0.1
with:
script: |
const { issue, repository } = context.payload;
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index a20c671c137b2..be2a89ac931e9 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -1,28 +1,32 @@
name: Increment Version
on:
+ workflow_dispatch:
+ inputs:
+ tag:
+ description: 'the tag'
+ required: true
+ type: string
push:
tags:
- '*.*.*'
-permissions: {}
+permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+
jobs:
build:
if: github.repository == 'opensearch-project/OpenSearch'
runs-on: ubuntu-latest
steps:
- - name: GitHub App token
- id: github_app_token
- uses: tibdex/github-app-token@v2.1.0
- with:
- app_id: ${{ secrets.APP_ID }}
- private_key: ${{ secrets.APP_PRIVATE_KEY }}
- installation_id: 22958780
-
- - uses: actions/checkout@v4
- - name: Fetch Tag and Version Information
+ - name: Fetch tag and version information
run: |
TAG=$(echo "${GITHUB_REF#refs/*/}")
+ if [ -n "${{ github.event.inputs.tag }}" ]; then
+ TAG="${{ github.event.inputs.tag }}"
+ fi
CURRENT_VERSION_ARRAY=($(echo "$TAG" | tr . '\n'))
BASE=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:2}")
BASE_X=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:1}.x")
@@ -44,24 +48,22 @@ jobs:
echo "NEXT_VERSION=$NEXT_VERSION" >> $GITHUB_ENV
echo "NEXT_VERSION_UNDERSCORE=$NEXT_VERSION_UNDERSCORE" >> $GITHUB_ENV
echo "NEXT_VERSION_ID=$NEXT_VERSION_ID" >> $GITHUB_ENV
+
- uses: actions/checkout@v4
with:
ref: ${{ env.BASE }}
- token: ${{ steps.github_app_token.outputs.token }}
- - name: Increment Patch Version
- run: |
- echo Incrementing $CURRENT_VERSION to $NEXT_VERSION
- echo " - \"$CURRENT_VERSION\"" >> .ci/bwcVersions
- sed -i "s/opensearch = $CURRENT_VERSION/opensearch = $NEXT_VERSION/g" buildSrc/version.properties
- echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
- sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
- sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" libs/core/src/main/java/org/opensearch/Version.java
+ - name: Increment Patch Version on Major.Minor branch
+ uses: peternied/opensearch-core-version-updater@v1
+ with:
+ previous-version: ${{ env.CURRENT_VERSION }}
+ new-version: ${{ env.NEXT_VERSION }}
+ update-current: true
- - name: Create Pull Request
+ - name: Create PR for BASE
+ id: base_pr
uses: peter-evans/create-pull-request@v5
with:
- token: ${{ steps.github_app_token.outputs.token }}
base: ${{ env.BASE }}
branch: 'create-pull-request/patch-${{ env.BASE }}'
commit-message: Increment version to ${{ env.NEXT_VERSION }}
@@ -76,19 +78,18 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ env.BASE_X }}
- token: ${{ steps.github_app_token.outputs.token }}
- - name: Add bwc version to .X branch
- run: |
- echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
- sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
- echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
- sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+ - name: Add Patch Version on Major.X branch
+ uses: peternied/opensearch-core-version-updater@v1
+ with:
+ previous-version: ${{ env.CURRENT_VERSION }}
+ new-version: ${{ env.NEXT_VERSION }}
+ update-current: false
- - name: Create Pull Request
+ - name: Create PR for BASE_X
+ id: base_x_pr
uses: peter-evans/create-pull-request@v5
with:
- token: ${{ steps.github_app_token.outputs.token }}
base: ${{ env.BASE_X }}
branch: 'create-pull-request/patch-${{ env.BASE_X }}'
commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -103,19 +104,18 @@ jobs:
- uses: actions/checkout@v4
with:
ref: main
- token: ${{ steps.github_app_token.outputs.token }}
- - name: Add bwc version to main branch
- run: |
- echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
- sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
- echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
- sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+ - name: Add Patch Version on main branch
+ uses: peternied/opensearch-core-version-updater@v1
+ with:
+ previous-version: ${{ env.CURRENT_VERSION }}
+ new-version: ${{ env.NEXT_VERSION }}
+ update-current: false
- - name: Create Pull Request
+ - name: Create PR for main
+ id: main_pr
uses: peter-evans/create-pull-request@v5
with:
- token: ${{ steps.github_app_token.outputs.token }}
base: main
branch: 'create-pull-request/patch-main'
commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -126,3 +126,32 @@ jobs:
title: '[AUTO] [main] Add bwc version ${{ env.NEXT_VERSION }}.'
body: |
I've noticed that a new tag ${{ env.TAG }} was pushed, and added a bwc version ${{ env.NEXT_VERSION }}.
+
+ - name: Create tracking issue
+ id: create-issue
+ uses: actions/github-script@v7.0.1
+ with:
+ script: |
+ const body = `
+ ### Description
+ A new version of OpenSearch was released. To prepare for the next release, the version numbers need to be updated in all active branches of development.
+
+ ### Exit Criteria
+ Review and merge the following pull requests:
+ - [ ] ${{ steps.base_pr.outputs.pull-request-url }}
+ - [ ] ${{ steps.base_x_pr.outputs.pull-request-url }}
+ - [ ] ${{ steps.main_pr.outputs.pull-request-url }}
+
+ ### Additional Context
+ See the project-wide guidance on branching and versions [[link]](https://github.com/opensearch-project/.github/blob/main/RELEASING.md).
+ `
+ const { data: issue }= await github.rest.issues.create({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ labels: ["Build"],
+ title: "Increment version for ${{ env.NEXT_VERSION }}",
+ body: body
+ });
+ console.error(JSON.stringify(issue));
+ return issue.number;
+ result-encoding: string
diff --git a/.github/workflows/wrapper.yml b/.github/workflows/wrapper.yml
index 6dd48ca15eaa9..dcf2a09717d28 100644
--- a/.github/workflows/wrapper.yml
+++ b/.github/workflows/wrapper.yml
@@ -8,4 +8,4 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - uses: gradle/wrapper-validation-action@v1
+ - uses: gradle/wrapper-validation-action@v2
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 21e7b70e3981c..11efe39475813 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,14 +9,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add getter for path field in NestedQueryBuilder ([#4636](https://github.com/opensearch-project/OpenSearch/pull/4636))
- Allow mmap to use new JDK-19 preview APIs in Apache Lucene 9.4+ ([#5151](https://github.com/opensearch-project/OpenSearch/pull/5151))
- Add events correlation engine plugin ([#6854](https://github.com/opensearch-project/OpenSearch/issues/6854))
-- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107))
- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679), [#10664](https://github.com/opensearch-project/OpenSearch/pull/10664))
- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
- [AdmissionControl] Added changes for AdmissionControl Interceptor and AdmissionControlService for RateLimiting ([#9286](https://github.com/opensearch-project/OpenSearch/pull/9286))
- GHA to verify checklist items completion in PR descriptions ([#10800](https://github.com/opensearch-project/OpenSearch/pull/10800))
- Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) ([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625))
- [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887))
-- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378))
+- [S3 Repository] Add setting to control connection count for sync client ([#12028](https://github.com/opensearch-project/OpenSearch/pull/12028))
+- Views, simplify data access and manipulation by providing a virtual layer over one or more indices ([#11957](https://github.com/opensearch-project/OpenSearch/pull/11957))
+- Add Remote Store Migration Experimental flag and allow mixed mode clusters under same ([#11986](https://github.com/opensearch-project/OpenSearch/pull/11986))
### Dependencies
- Bump `log4j-core` from 2.18.0 to 2.19.0
@@ -46,10 +47,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Bump `org.bouncycastle:bcmail-jdk15on` to `org.bouncycastle:bcmail-jdk15to18` version 1.75 ([#8247](https://github.com/opensearch-project/OpenSearch/pull/8247))
- Bump `org.bouncycastle:bcpkix-jdk15on` to `org.bouncycastle:bcpkix-jdk15to18` version 1.75 ([#8247](https://github.com/opensearch-project/OpenSearch/pull/8247))
- Bump JNA version from 5.5 to 5.13 ([#9963](https://github.com/opensearch-project/OpenSearch/pull/9963))
-- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
- Bump `org.eclipse.jgit` from 6.5.0 to 6.7.0 ([#10147](https://github.com/opensearch-project/OpenSearch/pull/10147))
- Bump OpenTelemetry from 1.30.1 to 1.31.0 ([#10617](https://github.com/opensearch-project/OpenSearch/pull/10617))
- Bump OpenTelemetry from 1.31.0 to 1.32.0 and OpenTelemetry Semconv from 1.21.0-alpha to 1.23.1-alpha ([#11305](https://github.com/opensearch-project/OpenSearch/pull/11305))
+- Bump `org.bouncycastle:bcprov-jdk15to18` to `org.bouncycastle:bcprov-jdk18on` version 1.77 ([#12317](https://github.com/opensearch-project/OpenSearch/pull/12317))
+- Bump `org.bouncycastle:bcmail-jdk15to18` to `org.bouncycastle:bcmail-jdk18on` version 1.77 ([#12317](https://github.com/opensearch-project/OpenSearch/pull/12317))
+- Bump `org.bouncycastle:bcpkix-jdk15to18` to `org.bouncycastle:bcpkix-jdk18on` version 1.77 ([#12317](https://github.com/opensearch-project/OpenSearch/pull/12317))
### Changed
- [CCR] Add getHistoryOperationsFromTranslog method to fetch the history snapshot from translogs ([#3948](https://github.com/opensearch-project/OpenSearch/pull/3948))
@@ -59,9 +62,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Improve summary error message for invalid setting updates ([#4792](https://github.com/opensearch-project/OpenSearch/pull/4792))
- Return 409 Conflict HTTP status instead of 503 on failure to concurrently execute snapshots ([#8986](https://github.com/opensearch-project/OpenSearch/pull/5855))
- Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/))
-- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558))
- Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894))
-
+- Switched to more reliable OpenSearch Lucene snapshot location ([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728))
### Deprecated
@@ -85,119 +87,49 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix 'org.apache.hc.core5.http.ParseException: Invalid protocol version' under JDK 16+ ([#4827](https://github.com/opensearch-project/OpenSearch/pull/4827))
- Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944))
- Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993))
-- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873))
-- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249))
+- Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439))
+- Fix typo in API annotation check message ([#11836](https://github.com/opensearch-project/OpenSearch/pull/11836))
+- Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872))
+- [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035))
+- Fix Span operation names generated from RestActions ([#12005](https://github.com/opensearch-project/OpenSearch/pull/12005))
+- Fix error in RemoteSegmentStoreDirectory when debug logging is enabled ([#12328](https://github.com/opensearch-project/OpenSearch/pull/12328))
### Security
## [Unreleased 2.x]
### Added
-- [Admission control] Add Resource usage collector service and resource usage tracker ([#9890](https://github.com/opensearch-project/OpenSearch/pull/9890))
-- [Admission control] Add enhancements to FS stats to include read/write time, queue size and IO time ([#10541](https://github.com/opensearch-project/OpenSearch/pull/10541))
-- [Remote cluster state] Change file names for remote cluster state ([#10557](https://github.com/opensearch-project/OpenSearch/pull/10557))
-- [Search Pipelines] Add request-scoped state shared between processors (and three new processors) ([#9405](https://github.com/opensearch-project/OpenSearch/pull/9405))
-- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351))
-- [Remote Store] Add repository stats for remote store([#10567](https://github.com/opensearch-project/OpenSearch/pull/10567))
-- [Remote cluster state] Upload global metadata in cluster state to remote store([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404))
-- [Remote cluster state] Download functionality of global metadata from remote store ([#10535](https://github.com/opensearch-project/OpenSearch/pull/10535))
-- [Remote cluster state] Restore global metadata from remote store when local state is lost after quorum loss ([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404))
-- [Remote cluster state] Make index and global metadata upload timeout dynamic cluster settings ([#10814](https://github.com/opensearch-project/OpenSearch/pull/10814))
-- Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255))
-- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351))
-- Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670))
-- [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853))
-- Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247))
-- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248))
-- Introduce ConcurrentQueryProfiler to profile query using concurrent segment search path and support concurrency during rewrite and create weight ([10352](https://github.com/opensearch-project/OpenSearch/pull/10352))
-- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679))
-- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
-- [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672))
-- Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024))
-- Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650))
-- Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040))
-- Allowing pipeline processors to access index mapping info by passing ingest service ref as part of the processor factory parameters ([#10307](https://github.com/opensearch-project/OpenSearch/pull/10307))
-- Make number of segment metadata files in remote segment store configurable ([#11329](https://github.com/opensearch-project/OpenSearch/pull/11329))
-- Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317))
-- Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069))
-- [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175))
-- Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170))
-- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591))
+- Add support for dependencies in plugin descriptor properties with semver range ([#11441](https://github.com/opensearch-project/OpenSearch/pull/11441))
+- Add community_id ingest processor ([#12121](https://github.com/opensearch-project/OpenSearch/pull/12121))
+- Introduce query level setting `index.query.max_nested_depth` limiting nested queries ([#3268](https://github.com/opensearch-project/OpenSearch/issues/3268))
+- Add toString methods to MultiSearchRequest, MultiGetRequest and CreateIndexRequest ([#12163](https://github.com/opensearch-project/OpenSearch/pull/12163))
+- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626))
+- Add shard id property to SearchLookup for use in field types provided by plugins ([#1063](https://github.com/opensearch-project/OpenSearch/pull/1063))
### Dependencies
-- Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
-- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554))
-- Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298))
-- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630))
-- Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506))
-- Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508))
-- Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639))
-- Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297))
-- Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635))
-- Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637), [#11632](https://github.com/opensearch-project/OpenSearch/pull/11632))
-- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.0 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270))
-- Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504))
-- Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171))
-- Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271))
-- Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273))
-- Bump `netty` from 4.1.100.Final to 4.1.101.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294))
-- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.5 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163))
-- Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861))
-- Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344))
-- Bump `reactor-netty-core` from 1.1.12 to 1.1.13 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350))
-- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.1 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629))
-- Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447))
-- Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450))
-- Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445))
-- Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448))
-- Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
-- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
-- Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555))
-- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556))
-- Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557))
+- Bump `peter-evans/find-comment` from 2 to 3 ([#12288](https://github.com/opensearch-project/OpenSearch/pull/12288))
+- Bump `com.google.api.grpc:proto-google-common-protos` from 2.25.1 to 2.33.0 ([#12289](https://github.com/opensearch-project/OpenSearch/pull/12289))
+- Bump `com.squareup.okio:okio` from 3.7.0 to 3.8.0 ([#12290](https://github.com/opensearch-project/OpenSearch/pull/12290))
+- Bump `gradle/wrapper-validation-action` from 1 to 2 ([#12367](https://github.com/opensearch-project/OpenSearch/pull/12367))
+- Bump `netty` from 4.1.106.Final to 4.1.107.Final ([#12372](https://github.com/opensearch-project/OpenSearch/pull/12372))
+- Bump `opentelemetry` from 1.34.1 to 1.35.0 ([#12388](https://github.com/opensearch-project/OpenSearch/pull/12388))
+- Bump Apache Lucene from 9.9.2 to 9.10.0 ([#12392](https://github.com/opensearch-project/OpenSearch/pull/12392))
### Changed
-- Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))
-- Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036))
-- Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562))
-- Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598))
-- [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524))
-- [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642))
-- Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395))
-- Add instrumentation for indexing in transport bulk action and transport shard bulk action. ([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273))
-- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895))
-- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023))
-- Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057))
-- Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083))
-- Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087))
-- Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528))
-- Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308))
-- Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362))
-- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312))
-- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512))
-- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562))
+- Allow composite aggregation to run under a parent filter aggregation ([#11499](https://github.com/opensearch-project/OpenSearch/pull/11499))
### Deprecated
### Removed
-- Remove deprecated classes for Rounding ([#10956](https://github.com/opensearch-project/OpenSearch/issues/10956))
### Fixed
-- Fix failure in dissect ingest processor parsing empty brackets ([#9225](https://github.com/opensearch-project/OpenSearch/pull/9255))
-- Fix class_cast_exception when passing int to _version and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101))
-- Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([10370](https://github.com/opensearch-project/OpenSearch/pull/10370))
-- Fix some test methods in SimulatePipelineRequestParsingTests never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496))
-- Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737))
-- Fix SuggestSearch.testSkipDuplicates by forceing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068))
-- Delegating CachingWeightWrapper#count to internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543))
-- Fix per request latency last phase not tracked ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934))
-- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
-- Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316))
-- Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369))
-- Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167))
-- Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238))
-- Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417))
+- Fix for deserialization bug in weighted round-robin metadata ([#11679](https://github.com/opensearch-project/OpenSearch/pull/11679))
+- [Revert] [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035))
+- Add support of special WrappingSearchAsyncActionPhase so the onPhaseStart() will always be followed by onPhaseEnd() within AbstractSearchAsyncAction ([#12293](https://github.com/opensearch-project/OpenSearch/pull/12293))
+- Add a system property to configure YamlParser codepoint limits ([#12298](https://github.com/opensearch-project/OpenSearch/pull/12298))
+- Prevent read beyond slice boundary in ByteArrayIndexInput ([#10481](https://github.com/opensearch-project/OpenSearch/issues/10481))
+- Fix the "highlight.max_analyzer_offset" request parameter with "plain" highlighter ([#10919](https://github.com/opensearch-project/OpenSearch/pull/10919))
+- Warn about deprecated and ignored index.mapper.dynamic index setting ([#11193](https://github.com/opensearch-project/OpenSearch/pull/11193))
- Fix get task API does not refresh resource stats ([#11531](https://github.com/opensearch-project/OpenSearch/pull/11531))
-- Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609))
### Security
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index f9936aad0cf8c..f0851fc58d444 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -183,6 +183,12 @@ Run OpenSearch using `gradlew run`.
./gradlew run
```
+[Plugins](plugins/) may be installed by passing a `-PinstalledPlugins` property:
+
+```bash
+./gradlew run -PinstalledPlugins="['plugin1', 'plugin2']"
+```
+
That will build OpenSearch and start it, writing its log above Gradle's status message. We log a lot of stuff on startup; specifically, these lines tell you that OpenSearch is ready.
```
@@ -342,7 +348,7 @@ Please follow these formatting guidelines:
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail.
* If *absolutely* necessary, you can disable formatting for regions of code with the `// tag::NAME` and `// end::NAME` directives, but note that these are intended for use in documentation, so please make it clear what you have done, and only do this where the benefit clearly outweighs the decrease in consistency.
* Note that JavaDoc and block comments i.e. `/* ... */` are not formatted, but line comments i.e `// ...` are.
-* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, if might get called out in PR reviews as something to change.
+* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, it might get called out in PR reviews as something to change.
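+
+For example, a minimal illustration of this convention (the names here are hypothetical):
+
+```java
+// Preferred: the negation is explicit and hard to miss.
+if (indexExists == false) {
+    createIndex();
+}
+
+// Discouraged: the `!` is easy to overlook when reading quickly.
+if (!indexExists) {
+    createIndex();
+}
+```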
## Adding Dependencies
@@ -578,7 +584,7 @@ explicitly marked by an annotation should not be extended by external implementa
any time. The `@DeprecatedApi` annotation could also be added to any classes annotated with `@PublicApi` (or documented as `@opensearch.api`) or their methods that
are either changed (with replacement) or planned to be removed across major versions.
-The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`)
+The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`)
annotation. The presence of this annotation signals that the API may change at any time (major, minor or even patch releases). In general, the classes annotated with
`@PublicApi` may expose other classes or methods annotated with `@ExperimentalApi`; in such cases the backward compatibility guarantees would not apply to the latter
(please see [Experimental Development](#experimental-development) for more details).
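+
+As a minimal sketch of how these annotations are typically combined (the class and method names below are hypothetical, not real OpenSearch APIs):
+
+```java
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "2.12.0") // stable surface: external implementations may rely on it
+public class HypotheticalIndexingClient {
+
+    // Not yet stabilized: may change in any release, even though the enclosing class is public API.
+    @ExperimentalApi
+    public void hypotheticalBulkIngest() {
+        // ...
+    }
+}
+```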
diff --git a/NOTICE.txt b/NOTICE.txt
index 6c7dc983f8c7a..d463b8f28561f 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -10,3 +10,6 @@ Foundation (http://www.apache.org/).
This product includes software developed by
Joda.org (http://www.joda.org/).
+
+This product includes software developed by
+Morten Haraldsen (ethlo) (https://github.com/ethlo) under the Apache License, version 2.0.
diff --git a/TRIAGING.md b/TRIAGING.md
new file mode 100644
index 0000000000000..3917f1e1442b9
--- /dev/null
+++ b/TRIAGING.md
@@ -0,0 +1,83 @@
+The maintainers of the OpenSearch Repo seek to promote an inclusive and engaged community of contributors. To facilitate this, weekly triage meetings are open to all, and attendance is encouraged for anyone who hopes to contribute, discuss an issue, or learn more about the project. To learn more about contributing to the OpenSearch Repo, visit the [Contributing](./CONTRIBUTING.md) documentation.
+
+### Do I need to attend for my issue to be addressed/triaged?
+
+Attendance is not required for your issue to be triaged or addressed. If an issue is not accepted, it will be updated with a comment describing the next steps. All new issues are triaged weekly.
+
+You can track if your issue was triaged by watching your GitHub notifications for updates.
+
+### What happens if my issue does not get covered this time?
+
+At each meeting we seek to address all new issues. However, should we run out of time before your issue is discussed, you are always welcome to attend the next meeting or to follow up on the issue itself.
+
+### How do I join the Triage meeting?
+
+Meetings are hosted regularly at 10:00a - 10:55a Central Time every Wednesday and can be joined via [Chime](https://aws.amazon.com/chime/), with this [meeting link](https://chime.aws/1988437365).
+
+After joining the Chime meeting, you can enable your video / voice to join the discussion. If you do not have a webcam or microphone available, you can still join in via the text chat.
+
+If you have an issue you'd like to bring forward, please prepare a link to it so it can be presented and viewed by everyone in the meeting.
+
+### Is there an agenda for each week?
+
+Yes, each 55-minute meeting follows this structure:
+1. **Initial Gathering:** Feel free to turn on your video and engage in informal conversation. Shortly, a volunteer triage [facilitator](#what-is-the-role-of-the-facilitator) will begin the meeting and share their screen.
+2. **Record Attendees:** The facilitator will ask attendees to share their GitHub profile links. These links will be collected and assembled into a [tag](#how-do-triage-facilitators-tag-comments-during-the-triage-meeting) to annotate comments during the meeting.
+3. **Announcements:** Any announcements will be made at the beginning of the meeting.
+4. **Review of New Issues:** We start by reviewing all untriaged [issues](https://github.com/search?q=label%3Auntriaged+is%3Aopen++repo%3Aopensearch-project%2FOpenSearch+&type=issues&ref=advsearch&s=created&o=desc) for the OpenSearch repo.
+5. **Attendee Requests:** An opportunity for any meeting member to request consideration of an issue or pull request.
+6. **Open Discussion:** Attendees can bring up any topics not already covered by filed issues or pull requests.
+
+### What is the role of the facilitator?
+
+The facilitator is crucial in driving the meeting, ensuring a smooth flow of issues into OpenSearch for future contributions. They maintain the meeting's agenda, solicit input from attendees, and record outcomes using the triage tag as items are discussed.
+
+### Do I need to have already contributed to the project to attend a triage meeting?
+
+No prior contributions are required. All interested individuals are welcome and encouraged to attend. Triage meetings offer a fantastic opportunity for new contributors to understand the project and explore various contribution avenues.
+
+### What if I have an issue that is almost a duplicate, should I open a new one to be triaged?
+
+You can always open an [issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose), including one that you think may be a duplicate. If you believe your issue is similar but distinct from an existing one, you are encouraged to file it and explain the differences during the triage meeting.
+
+### What if I have follow-up questions on an issue?
+
+If you have an existing issue you would like to discuss, you can always comment on the issue itself. Alternatively, you are welcome to come to the triage meeting to discuss.
+
+### Is this meeting a good place to get help setting up features on my OpenSearch instance?
+
+While we are always happy to help the community, the best resource for implementation questions is [the OpenSearch forum](https://forum.opensearch.org/).
+
+There you can find answers to many common questions as well as speak with implementation experts.
+
+### What are the issue labels associated with triaging?
+
+There are several labels used to identify the 'state' of issues filed in OpenSearch.
+
+| Label | When Applied | Meaning |
+|---------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| `Untriaged` | When issues are created or re-opened. | Issues labeled as 'Untriaged' require the attention of the repository maintainers and may need to be prioritized for quicker resolution. It's crucial to keep the count of 'Untriaged' labels low to ensure all potential security issues are addressed in a timely manner. See [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) for more details on handling these issues. |
+| `Help Wanted` | Anytime. | Issues marked as 'Help Wanted' signal that they are actionable and not the current focus of the project maintainers. Community contributions are especially encouraged for these issues. |
+| `Good First Issue` | Anytime. | Issues labeled as 'Good First Issue' are small in scope and can be resolved with a single pull request. These are recommended starting points for newcomers looking to make their first contributions. |
+
+### What are the typical outcomes of a triaged issue?
+
+| Outcome | Label | Description | Canned Response |
+|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." |
+| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to open a new issue after addressing the reason." |
+| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up. | "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" |
+| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. |
+
+### Is this where I should bring up potential security vulnerabilities?
+
+Due to the sensitive nature of security vulnerabilities, please report all potential vulnerabilities directly by following the steps outlined on the [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) document.
+
+### How do triage facilitators tag comments during the triage meeting?
+
+During the triage meeting, facilitators should use the tag _[Triage - attendees [1](#Profile_link) [2](#Profile_link)]_ to indicate a collective decision. This ensures contributors know the decision came from the meeting rather than an individual and identifies participants for any follow-up queries.
+
+This tag should not be used outside triage meetings.
diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle
index 6b4634c7e791c..be4579b4e5324 100644
--- a/benchmarks/build.gradle
+++ b/benchmarks/build.gradle
@@ -84,3 +84,45 @@ spotless {
targetExclude 'src/main/generated/**/*.java'
}
}
+
+if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) {
+ // Add support for incubator modules on supported Java versions.
+ run.jvmArgs += ['--add-modules=jdk.incubator.vector']
+ run.classpath += files(jar.archiveFile)
+ run.classpath -= sourceSets.main.output
+ evaluationDependsOn(':libs:opensearch-common')
+
+ sourceSets {
+ java20 {
+ java {
+ srcDirs = ['src/main/java20']
+ }
+ }
+ }
+
+ configurations {
+ java20Implementation.extendsFrom(implementation)
+ }
+
+ dependencies {
+ java20Implementation sourceSets.main.output
+ java20Implementation project(':libs:opensearch-common').sourceSets.java20.output
+ java20AnnotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
+ }
+
+ compileJava20Java {
+ targetCompatibility = JavaVersion.VERSION_20
+ options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])
+ }
+
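+ // Package the java20 classes under META-INF/versions/20 and mark the jar as multi-release so JDK 20+ runtimes pick them up automatically.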
+ jar {
+ metaInf {
+ into 'versions/20'
+ from sourceSets.java20.output
+ }
+ manifest.attributes('Multi-Release': 'true')
+ }
+
+ // classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
+ disableTasks('forbiddenApisJava20')
+}
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java
new file mode 100644
index 0000000000000..4e995f5a5067c
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java
@@ -0,0 +1,67 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
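+// Benchmarks the cost of constructing a FuzzySet over randomly generated IDs for varying ID counts and false-positive probabilities.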
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterConstructionBenchmark {
+
+ private List<BytesRef> items;
+
+ @Param({ "1000000", "10000000", "50000000" })
+ private int numIds;
+
+ @Param({ "0.0511", "0.1023", "0.2047" })
+ private double fpp;
+
+ private FuzzySetFactory fuzzySetFactory;
+ private String fieldName;
+
+ @Setup
+ public void setupIds() {
+ this.fieldName = IdFieldMapper.NAME;
+ this.items = IntStream.range(0, numIds).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+ FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+ this.fuzzySetFactory = new FuzzySetFactory(Map.of(fieldName, parameters));
+ }
+
+ @Benchmark
+ public FuzzySet buildFilter() throws IOException {
+ return fuzzySetFactory.createFuzzySet(items.size(), fieldName, () -> items.iterator());
+ }
+}
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
new file mode 100644
index 0000000000000..383539219830e
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
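+// Benchmarks FuzzySet membership checks ("contains"), both for keys present in the set and for random keys that are almost certainly absent.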
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterLookupBenchmark {
+
+ @Param({ "50000000", "1000000" })
+ private int numItems;
+
+ @Param({ "1000000" })
+ private int searchKeyCount;
+
+ @Param({ "0.0511", "0.1023", "0.2047" })
+ private double fpp;
+
+ private FuzzySet fuzzySet;
+ private List<BytesRef> items;
+ private Random random = new Random();
+
+ @Setup
+ public void setupFilter() throws IOException {
+ String fieldName = IdFieldMapper.NAME;
+ items = IntStream.range(0, numItems).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+ FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+ fuzzySet = new FuzzySetFactory(Map.of(fieldName, parameters)).createFuzzySet(numItems, fieldName, () -> items.iterator());
+ }
+
+ @Benchmark
+ public void contains_withExistingKeys(Blackhole blackhole) throws IOException {
+ for (int i = 0; i < searchKeyCount; i++) {
+ blackhole.consume(fuzzySet.contains(items.get(random.nextInt(items.size()))) == FuzzySet.Result.MAYBE);
+ }
+ }
+
+ @Benchmark
+ public void contains_withRandomKeys(Blackhole blackhole) throws IOException {
+ for (int i = 0; i < searchKeyCount; i++) {
+ blackhole.consume(fuzzySet.contains(new BytesRef(UUIDs.base64UUID())));
+ }
+ }
+}
diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
index 4e07af452968b..3909a3f4eb8fc 100644
--- a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
+++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
@@ -21,7 +21,6 @@
import org.openjdk.jmh.infra.Blackhole;
import java.util.Random;
-import java.util.function.Supplier;
@Fork(value = 3)
@Warmup(iterations = 3, time = 1)
@@ -83,17 +82,17 @@ public static class Options {
"256" })
public Integer size;
- @Param({ "binary", "linear" })
+ @Param({ "binary", "linear", "btree" })
public String type;
@Param({ "uniform", "skewed_edge", "skewed_center" })
public String distribution;
public long[] queries;
- public Supplier<Roundable> supplier;
+ public RoundableSupplier supplier;
@Setup
- public void setup() {
+ public void setup() throws ClassNotFoundException {
Random random = new Random(size);
long[] values = new long[size];
for (int i = 1; i < values.length; i++) {
@@ -128,16 +127,7 @@ public void setup() {
throw new IllegalArgumentException("invalid distribution: " + distribution);
}
- switch (type) {
- case "binary":
- supplier = () -> new BinarySearcher(values, size);
- break;
- case "linear":
- supplier = () -> new BidirectionalLinearSearcher(values, size);
- break;
- default:
- throw new IllegalArgumentException("invalid type: " + type);
- }
+ supplier = new RoundableSupplier(type, values, size);
}
private static long nextPositiveLong(Random random) {
diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java
new file mode 100644
index 0000000000000..44ac42810996f
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import java.util.function.Supplier;
+
+public class RoundableSupplier implements Supplier<Roundable> {
+ private final Supplier<Roundable> delegate;
+
+ RoundableSupplier(String type, long[] values, int size) throws ClassNotFoundException {
+ switch (type) {
+ case "binary":
+ delegate = () -> new BinarySearcher(values, size);
+ break;
+ case "linear":
+ delegate = () -> new BidirectionalLinearSearcher(values, size);
+ break;
+ case "btree":
+ throw new ClassNotFoundException("BtreeSearcher is not supported below JDK 20");
+ default:
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+ }
+
+ @Override
+ public Roundable get() {
+ return delegate.get();
+ }
+}
diff --git a/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java
new file mode 100644
index 0000000000000..e81c1b137bd30
--- /dev/null
+++ b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java
@@ -0,0 +1,36 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import java.util.function.Supplier;
+
+public class RoundableSupplier implements Supplier<Roundable> {
+ private final Supplier<Roundable> delegate;
+
+ RoundableSupplier(String type, long[] values, int size) {
+ switch (type) {
+ case "binary":
+ delegate = () -> new BinarySearcher(values, size);
+ break;
+ case "linear":
+ delegate = () -> new BidirectionalLinearSearcher(values, size);
+ break;
+ case "btree":
+ delegate = () -> new BtreeSearcher(values, size);
+ break;
+ default:
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+ }
+
+ @Override
+ public Roundable get() {
+ return delegate.get();
+ }
+}
diff --git a/build.gradle b/build.gradle
index b1cd1d532bfeb..2aac4a1e893e9 100644
--- a/build.gradle
+++ b/build.gradle
@@ -54,8 +54,8 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
- id "com.diffplug.spotless" version "6.20.0" apply false
- id "org.gradle.test-retry" version "1.5.4" apply false
+ id "com.diffplug.spotless" version "6.25.0" apply false
+ id "org.gradle.test-retry" version "1.5.8" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
}
@@ -516,7 +516,6 @@ subprojects {
includeClasses.add("org.opensearch.remotestore.CreateRemoteIndexClusterDefaultDocRep")
includeClasses.add("org.opensearch.remotestore.CreateRemoteIndexIT")
includeClasses.add("org.opensearch.remotestore.CreateRemoteIndexTranslogDisabledIT")
- includeClasses.add("org.opensearch.remotestore.RemoteIndexPrimaryRelocationIT")
includeClasses.add("org.opensearch.remotestore.RemoteStoreBackpressureIT")
includeClasses.add("org.opensearch.remotestore.RemoteStoreIT")
includeClasses.add("org.opensearch.remotestore.RemoteStoreRefreshListenerIT")
@@ -545,6 +544,7 @@ subprojects {
includeClasses.add("org.opensearch.snapshots.SnapshotStatusApisIT")
includeClasses.add("org.opensearch.test.rest.ClientYamlTestSuiteIT")
includeClasses.add("org.opensearch.upgrade.DetectEsInstallationTaskTests")
+ includeClasses.add("org.opensearch.cluster.MinimumClusterManagerNodesIT")
}
}
}
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 0efa170250a7b..0562ecc6ee61b 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -106,19 +106,19 @@ dependencies {
api 'org.apache.commons:commons-compress:1.25.0'
api 'org.apache.ant:ant:1.10.14'
api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0'
- api 'com.netflix.nebula:nebula-publishing-plugin:20.3.0'
+ api 'com.netflix.nebula:nebula-publishing-plugin:21.0.0'
api 'com.netflix.nebula:gradle-info-plugin:12.1.6'
api 'org.apache.rat:apache-rat:0.15'
api 'commons-io:commons-io:2.15.1'
- api "net.java.dev.jna:jna:5.13.0"
+ api "net.java.dev.jna:jna:5.14.0"
api 'com.github.johnrengelman:shadow:8.1.1'
api 'org.jdom:jdom2:2.0.6.1'
api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}"
api 'de.thetaphi:forbiddenapis:3.6'
- api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.5'
+ api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6'
api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}"
api 'org.apache.maven:maven-model:3.9.6'
- api 'com.networknt:json-schema-validator:1.0.86'
+ api 'com.networknt:json-schema-validator:1.2.0'
api 'org.jruby.jcodings:jcodings:1.0.58'
api 'org.jruby.joni:joni:2.2.1'
api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
index 63b88f671c84c..0c901b9726992 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
@@ -94,7 +94,7 @@ public static void configureRepositories(Project project) {
String revision = matcher.group(1);
MavenArtifactRepository luceneRepo = repos.maven(repo -> {
repo.setName("lucene-snapshots");
- repo.setUrl("https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/");
+ repo.setUrl("https://ci.opensearch.org/ci/dbc/snapshots/lucene/");
});
repos.exclusiveContent(exclusiveRepo -> {
exclusiveRepo.filter(
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
index 1ad7e056b6ae6..bc44f81a81aff 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
@@ -77,9 +77,9 @@
import java.util.stream.Stream;
public class DistroTestPlugin implements Plugin<Project> {
- private static final String SYSTEM_JDK_VERSION = "17.0.9+9";
+ private static final String SYSTEM_JDK_VERSION = "21.0.2+13";
private static final String SYSTEM_JDK_VENDOR = "adoptium";
- private static final String GRADLE_JDK_VERSION = "17.0.9+9";
+ private static final String GRADLE_JDK_VERSION = "21.0.2+13";
private static final String GRADLE_JDK_VENDOR = "adoptium";
// all distributions used by distro tests. this is temporary until tests are per distribution
diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
index f24b61ef0d165..351b42e5bc921 100644
--- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
+++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
@@ -17,7 +17,7 @@ repositories {
}
dependencies {
- implementation "org.apache.logging.log4j:log4j-core:2.22.0"
+ implementation "org.apache.logging.log4j:log4j-core:2.22.1"
}
["0.0.1", "0.0.2"].forEach { v ->
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 74d655cfb1045..7fc74aff552be 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,14 +1,14 @@
opensearch = 3.0.0
-lucene = 9.8.0
+lucene = 9.10.0
bundled_jdk_vendor = adoptium
-bundled_jdk = 21.0.1+12
+bundled_jdk = 21.0.2+13
# optional dependencies
spatial4j = 0.7
jts = 1.15.0
-jackson = 2.16.0
-jackson_databind = 2.16.0
+jackson = 2.16.1
+jackson_databind = 2.16.1
snakeyaml = 2.1
icu4j = 70.1
supercsv = 2.4.0
@@ -26,12 +26,12 @@ jakarta_annotation = 1.3.5
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 5.13.0
-netty = 4.1.101.Final
+netty = 4.1.107.Final
joda = 2.12.2
# project reactor
-reactor_netty = 1.1.13
-reactor = 3.5.11
+reactor_netty = 1.1.15
+reactor = 3.5.14
# client dependencies
httpclient5 = 5.2.1
@@ -50,12 +50,12 @@ reactivestreams = 1.0.4
# when updating this version, you need to ensure compatibility with:
# - plugins/ingest-attachment (transitive dependency, check the upstream POM)
# - distribution/tools/plugin-cli
-bouncycastle=1.76
+bouncycastle=1.77
# test dependencies
randomizedrunner = 2.7.1
junit = 4.13.2
hamcrest = 2.1
-mockito = 5.5.0
+mockito = 5.10.0
objenesis = 3.2
bytebuddy = 1.14.7
@@ -70,5 +70,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final
# opentelemetry dependencies
-opentelemetry = 1.32.0
+opentelemetry = 1.35.0
opentelemetrysemconv = 1.23.1-alpha
diff --git a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
index adddb3bda725c..f609fae4e3c81 100644
--- a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
+++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
@@ -305,6 +305,7 @@ public void shutdown() {
}
}
+ @SuppressWarnings("removal")
static class SnifferThreadFactory implements ThreadFactory {
private final AtomicInteger threadNumber = new AtomicInteger(1);
private final String namePrefix;
diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
index 88f667549f3e8..faef1441d0a02 100644
--- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
+++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
@@ -51,6 +51,7 @@ protected Matcher<String> nodeNameMatcher() {
return is("integTest-0");
}
+ @SuppressWarnings("removal")
@Override
protected BufferedReader openReader(Path logFile) {
assumeFalse("Skipping test because it is being run against an external cluster.",
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index ededa7bff34d8..43c38c5ad0c67 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
*/
plugins {
- id "com.netflix.nebula.ospackage-base" version "11.6.0"
+ id "com.netflix.nebula.ospackage-base" version "11.8.0"
}
void addProcessFilesTask(String type, boolean jdk) {
diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml
index b7ab2e1c2309b..ebffdde0f3699 100644
--- a/distribution/src/config/opensearch.yml
+++ b/distribution/src/config/opensearch.yml
@@ -117,12 +117,6 @@ ${path.logs}
#opensearch.experimental.feature.extensions.enabled: false
#
#
-# Gates the concurrent segment search feature. This feature enables concurrent segment search in a separate
-# index searcher threadpool.
-#
-#opensearch.experimental.feature.concurrent_segment_search.enabled: false
-#
-#
# Gates the optimization of datetime formatters caching along with change in default datetime formatter
# Once there is no observed impact on performance, this feature flag can be removed.
#
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
index d269603656114..9ca42ac5f4ec1 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
@@ -78,15 +78,14 @@ private void printPlugin(Environment env, Terminal terminal, Path plugin, String
PluginInfo info = PluginInfo.readFromProperties(env.pluginsDir().resolve(plugin));
terminal.println(Terminal.Verbosity.SILENT, prefix + info.getName());
terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix));
- if (info.getOpenSearchVersion().equals(Version.CURRENT) == false) {
+ if (!PluginsService.isPluginVersionCompatible(info, Version.CURRENT)) {
terminal.errorPrintln(
"WARNING: plugin ["
+ info.getName()
+ "] was built for OpenSearch version "
- + info.getVersion()
- + " but version "
+ + info.getOpenSearchVersionRangesString()
+ + " and is not compatible with "
+ Version.CURRENT
- + " is required"
);
}
}
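The warning above now relies on a range-based compatibility check instead of exact version equality. A minimal sketch of what such a check can look like with the new SemverRange type follows; the helper name and the way ranges are passed in are illustrative only and do not mirror the actual PluginsService implementation.

```java
import org.opensearch.Version;
import org.opensearch.semver.SemverRange;

import java.util.List;

final class CompatibilityCheckSketch {
    // Hypothetical helper: the plugin is compatible when any declared range accepts the running core version.
    static boolean anyRangeSatisfiedBy(List<SemverRange> declaredRanges, Version coreVersion) {
        return declaredRanges.stream().anyMatch(range -> range.isSatisfiedBy(coreVersion));
    }

    public static void main(String[] args) {
        // "~2.13.0" accepts 2.13.x patch releases but rejects 2.14.0 and later.
        SemverRange range = SemverRange.fromString("~2.13.0");
        System.out.println(anyRangeSatisfiedBy(List.of(range), Version.fromString("2.13.1"))); // true
        System.out.println(anyRangeSatisfiedBy(List.of(range), Version.fromString("2.14.0"))); // false
    }
}
```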
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
index f4532f5f83cc4..c264788df20e8 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
@@ -70,8 +70,10 @@
import org.opensearch.core.util.FileSystemUtils;
import org.opensearch.env.Environment;
import org.opensearch.env.TestEnvironment;
+import org.opensearch.semver.SemverRange;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.PosixPermissionsResetter;
+import org.opensearch.test.VersionUtils;
import org.junit.After;
import org.junit.Before;
@@ -284,6 +286,35 @@ static void writePlugin(String name, Path structure, String... additionalProps)
writeJar(structure.resolve("plugin.jar"), className);
}
+ static void writePlugin(String name, Path structure, SemverRange opensearchVersionRange, String... additionalProps) throws IOException {
+ String[] properties = Stream.concat(
+ Stream.of(
+ "description",
+ "fake desc",
+ "name",
+ name,
+ "version",
+ "1.0",
+ "dependencies",
+ "{opensearch:\"" + opensearchVersionRange + "\"}",
+ "java.version",
+ System.getProperty("java.specification.version"),
+ "classname",
+ "FakePlugin"
+ ),
+ Arrays.stream(additionalProps)
+ ).toArray(String[]::new);
+ PluginTestUtil.writePluginProperties(structure, properties);
+ String className = name.substring(0, 1).toUpperCase(Locale.ENGLISH) + name.substring(1) + "Plugin";
+ writeJar(structure.resolve("plugin.jar"), className);
+ }
+
+ static Path createPlugin(String name, Path structure, SemverRange opensearchVersionRange, String... additionalProps)
+ throws IOException {
+ writePlugin(name, structure, opensearchVersionRange, additionalProps);
+ return writeZip(structure, null);
+ }
+
static void writePluginSecurityPolicy(Path pluginDir, String... permissions) throws IOException {
StringBuilder securityPolicyContent = new StringBuilder("grant {\n ");
for (String permission : permissions) {
@@ -867,6 +898,32 @@ public void testInstallMisspelledOfficialPlugins() throws Exception {
assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin"));
}
+ public void testInstallPluginWithCompatibleDependencies() throws Exception {
+ Tuple<Path, Environment> env = createEnv(fs, temp);
+ Path pluginDir = createPluginDir(temp);
+ String pluginZip = createPlugin("fake", pluginDir, SemverRange.fromString("~" + Version.CURRENT.toString())).toUri()
+ .toURL()
+ .toString();
+ skipJarHellCommand.execute(terminal, Collections.singletonList(pluginZip), false, env.v2());
+ assertThat(terminal.getOutput(), containsString("100%"));
+ }
+
+ public void testInstallPluginWithIncompatibleDependencies() throws Exception {
+ Tuple<Path, Environment> env = createEnv(fs, temp);
+ Path pluginDir = createPluginDir(temp);
+ // Core version is behind plugin version by one w.r.t patch, hence incompatible
+ Version coreVersion = Version.CURRENT;
+ Version pluginVersion = VersionUtils.getVersion(coreVersion.major, coreVersion.minor, (byte) (coreVersion.revision + 1));
+ String pluginZip = createPlugin("fake", pluginDir, SemverRange.fromString("~" + pluginVersion.toString())).toUri()
+ .toURL()
+ .toString();
+ IllegalArgumentException e = expectThrows(
+ IllegalArgumentException.class,
+ () -> skipJarHellCommand.execute(terminal, Collections.singletonList(pluginZip), false, env.v2())
+ );
+ assertThat(e.getMessage(), containsString("Plugin [fake] was built for OpenSearch version ~" + pluginVersion));
+ }
+
public void testBatchFlag() throws Exception {
MockTerminal terminal = new MockTerminal();
installPlugin(terminal, true);
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
index 7bbced38c7adb..6878efce4c804 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
@@ -278,7 +278,7 @@ public void testExistingIncompatiblePlugin() throws Exception {
buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2");
MockTerminal terminal = listPlugins(home);
- String message = "plugin [fake_plugin1] was built for OpenSearch version 1.0 but version " + Version.CURRENT + " is required";
+ String message = "plugin [fake_plugin1] was built for OpenSearch version 5.0.0 and is not compatible with " + Version.CURRENT;
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
assertEquals("WARNING: " + message + "\n", terminal.getErrorOutput());
@@ -286,4 +286,41 @@ public void testExistingIncompatiblePlugin() throws Exception {
terminal = listPlugins(home, params);
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
}
+
+ public void testPluginWithDependencies() throws Exception {
+ PluginTestUtil.writePluginProperties(
+ env.pluginsDir().resolve("fake_plugin1"),
+ "description",
+ "fake desc 1",
+ "name",
+ "fake_plugin1",
+ "version",
+ "1.0",
+ "dependencies",
+ "{opensearch:\"" + Version.CURRENT + "\"}",
+ "java.version",
+ System.getProperty("java.specification.version"),
+ "classname",
+ "org.fake1"
+ );
+ String[] params = { "-v" };
+ MockTerminal terminal = listPlugins(home, params);
+ assertEquals(
+ buildMultiline(
+ "Plugins directory: " + env.pluginsDir(),
+ "fake_plugin1",
+ "- Plugin information:",
+ "Name: fake_plugin1",
+ "Description: fake desc 1",
+ "Version: 1.0",
+ "OpenSearch Version: " + Version.CURRENT.toString(),
+ "Java Version: " + System.getProperty("java.specification.version"),
+ "Native Controller: false",
+ "Extended Plugins: []",
+ " * Classname: org.fake1",
+ "Folder name: null"
+ ),
+ terminal.getOutput()
+ );
+ }
}
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle
index dfb4ddba24113..3ca6b1fe84ea7 100644
--- a/gradle/code-coverage.gradle
+++ b/gradle/code-coverage.gradle
@@ -13,7 +13,7 @@ repositories {
gradlePluginPortal()
// TODO: Find the way to use the repositories from RepositoriesSetupPlugin
maven {
- url = "https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/"
+ url = "https://ci.opensearch.org/ci/dbc/snapshots/lucene/"
}
}
@@ -37,7 +37,7 @@ tasks.withType(JacocoReport).configureEach {
if (System.getProperty("tests.coverage")) {
reporting {
reports {
- testCodeCoverageReport(JacocoCoverageReport) {
+ testCodeCoverageReport(JacocoCoverageReport) {
testType = TestSuiteType.UNIT_TEST
}
}
@@ -45,6 +45,6 @@ if (System.getProperty("tests.coverage")) {
// Attach code coverage report task to Gradle check task
project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure {
- dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
+ dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
}
}
diff --git a/gradle/formatting.gradle b/gradle/formatting.gradle
index 93e1127c97a56..f3a4bf5cc765b 100644
--- a/gradle/formatting.gradle
+++ b/gradle/formatting.gradle
@@ -99,7 +99,9 @@ allprojects {
}
}
format 'misc', {
- target '*.md', '*.gradle', '**/*.yaml', '**/*.yml', '**/*.svg'
+ target '*.md', '*.gradle', '**/*.json', '**/*.yaml', '**/*.yml', '**/*.svg'
+
+ targetExclude '**/simple-bulk11.json', '**/simple-msearch5.json'
trimTrailingWhitespace()
endWithNewline()
diff --git a/gradle/ide.gradle b/gradle/ide.gradle
index bc442a081adf0..14d6b2982ccd0 100644
--- a/gradle/ide.gradle
+++ b/gradle/ide.gradle
@@ -82,6 +82,9 @@ if (System.getProperty('idea.active') == 'true') {
runConfigurations {
defaults(JUnit) {
vmParameters = '-ea -Djava.locale.providers=SPI,COMPAT'
+ if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_17) {
+ vmParameters += ' -Djava.security.manager=allow'
+ }
}
}
copyright {
diff --git a/gradle/run.gradle b/gradle/run.gradle
index 639479e97d28f..34651f1d94964 100644
--- a/gradle/run.gradle
+++ b/gradle/run.gradle
@@ -39,6 +39,12 @@ testClusters {
testDistribution = 'archive'
if (numZones > 1) numberOfZones = numZones
if (numNodes > 1) numberOfNodes = numNodes
+ if (findProperty("installedPlugins")) {
+ installedPlugins = Eval.me(installedPlugins)
+ for (String p : installedPlugins) {
+ plugin('plugins:'.concat(p))
+ }
+ }
}
}
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index f1d76d80bbfa3..82a4add334a7d 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b
+distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d
diff --git a/libs/common/build.gradle b/libs/common/build.gradle
index 4f89b81636420..60bf488833393 100644
--- a/libs/common/build.gradle
+++ b/libs/common/build.gradle
@@ -43,3 +43,64 @@ tasks.named('forbiddenApisMain').configure {
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to server
replaceSignatureFiles 'jdk-signatures'
}
+
+// Add support for incubator modules on supported Java versions.
+if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) {
+ sourceSets {
+ java20 {
+ java {
+ srcDirs = ['src/main/java20']
+ }
+ }
+ }
+
+ configurations {
+ java20Implementation.extendsFrom(implementation)
+ }
+
+ dependencies {
+ java20Implementation sourceSets.main.output
+ }
+
+ compileJava20Java {
+ targetCompatibility = JavaVersion.VERSION_20
+ options.compilerArgs += ['--add-modules', 'jdk.incubator.vector']
+ options.compilerArgs -= '-Werror' // use of incubator modules is reported as a warning
+ }
+
+ jar {
+ metaInf {
+ into 'versions/20'
+ from sourceSets.java20.output
+ }
+ manifest.attributes('Multi-Release': 'true')
+ }
+
+ tasks.withType(Test).configureEach {
+ // Relying on the convention for Test.classpath in custom Test tasks has been deprecated
+ // and scheduled to be removed in Gradle 9.0. Below lines are added from the migration guide:
+ // https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#test_task_default_classpath
+ testClassesDirs = testing.suites.test.sources.output.classesDirs
+ classpath = testing.suites.test.sources.runtimeClasspath
+
+ // Adds the multi-release JAR to the classpath when executing tests.
+ // This allows newer sources to be picked up at test runtime (if supported).
+ classpath += files(jar.archiveFile)
+ // Removes the "main" sources from the classpath to avoid JarHell problems as
+ // the multi-release JAR already contains those classes.
+ classpath -= sourceSets.main.output
+ }
+
+ tasks.register('roundableSimdTest', Test) {
+ group 'verification'
+ include '**/RoundableTests.class'
+ systemProperty 'opensearch.experimental.feature.simd.rounding.enabled', 'forced'
+ }
+
+ check.dependsOn(roundableSimdTest)
+
+ forbiddenApisJava20 {
+ failOnMissingClasses = false
+ ignoreSignaturesOfMissingClasses = true
+ }
+}
diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
index 1864aec4aa951..569f48a8465f3 100644
--- a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
+++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
@@ -113,7 +113,7 @@ private void process(ExecutableElement executable, Element enclosing) {
// The executable element should not be internal (unless constructor for injectable core component)
checkNotInternal(enclosing, executable);
- // Check this elements annotations
+ // Check this element's annotations
for (final AnnotationMirror annotation : executable.getAnnotationMirrors()) {
final Element element = annotation.getAnnotationType().asElement();
if (inspectable(element)) {
@@ -210,7 +210,7 @@ private void process(ExecutableElement executable, ReferenceType ref) {
}
}
- // Check this elements annotations
+ // Check this element's annotations
for (final AnnotationMirror annotation : ref.getAnnotationMirrors()) {
final Element element = annotation.getAnnotationType().asElement();
if (inspectable(element)) {
@@ -316,7 +316,7 @@ private void checkPublic(@Nullable Element referencedBy, final Element element)
reportFailureAs,
"The element "
+ element
- + " is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ + " is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "")
);
}
diff --git a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
index 0f289c09bbae2..60c0717a28f05 100644
--- a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
+++ b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
@@ -52,7 +52,7 @@ public static boolean isInetAddress(String ipString) {
return ipStringToBytes(ipString) != null;
}
- private static byte[] ipStringToBytes(String ipString) {
+ public static byte[] ipStringToBytes(String ipString) {
// Make a first pass to categorize the characters in this string.
boolean hasColon = false;
boolean hasDot = false;
diff --git a/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java
new file mode 100644
index 0000000000000..626fb6e6b810e
--- /dev/null
+++ b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java
@@ -0,0 +1,100 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+import jdk.incubator.vector.LongVector;
+import jdk.incubator.vector.Vector;
+import jdk.incubator.vector.VectorOperators;
+import jdk.incubator.vector.VectorSpecies;
+
+/**
+ * It uses vectorized B-tree search to find the round-down point.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+class BtreeSearcher implements Roundable {
+ private static final VectorSpecies<Long> LONG_VECTOR_SPECIES = LongVector.SPECIES_PREFERRED;
+ private static final int LANES = LONG_VECTOR_SPECIES.length();
+ private static final int SHIFT = log2(LANES);
+
+ private final long[] values;
+ private final long minValue;
+
+ BtreeSearcher(long[] values, int size) {
+ if (size <= 0) {
+ throw new IllegalArgumentException("at least one value must be present");
+ }
+
+ int blocks = (size + LANES - 1) / LANES; // number of blocks
+ int length = 1 + blocks * LANES; // size of the backing array (1-indexed)
+
+ this.minValue = values[0];
+ this.values = new long[length];
+ build(values, 0, size, this.values, 1);
+ }
+
+ /**
+ * Builds the B-tree memory layout.
+ * It builds the tree recursively, following an in-order traversal.
+ *
+ *
+ * Each block stores 'lanes' values at indices {@code i, i + 1, ..., i + lanes - 1} where {@code i} is the
+ * starting offset. The starting offset of the root block is 1. The branching factor is (1 + lanes) so each
+ * block can have these many children. Given the starting offset {@code i} of a block, the starting offset
+ * of its k-th child (ranging from {@code 0, 1, ..., k}) can be computed as {@code i + ((i + k) << shift)}.
+ *
+ * @param src is the sorted input array
+ * @param i is the index in the input array to read the value from
+ * @param size the number of values in the input array
+ * @param dst is the output array
+ * @param j is the index in the output array to write the value to
+ * @return the next index 'i'
+ */
+ private static int build(long[] src, int i, int size, long[] dst, int j) {
+ if (j < dst.length) {
+ for (int k = 0; k < LANES; k++) {
+ i = build(src, i, size, dst, j + ((j + k) << SHIFT));
+
+ // Fills the B-tree as a complete tree, i.e., all levels are completely filled,
+ // except the last level which is filled from left to right.
+ // The trick is to fill the destination array between indices 1...size (inclusive / 1-indexed)
+ // and pad the remaining array with +infinity.
+ dst[j + k] = (j + k <= size) ? src[i++] : Long.MAX_VALUE;
+ }
+ i = build(src, i, size, dst, j + ((j + LANES) << SHIFT));
+ }
+ return i;
+ }
+
+ @Override
+ public long floor(long key) {
+ Vector<Long> keyVector = LongVector.broadcast(LONG_VECTOR_SPECIES, key);
+ int i = 1, result = 1;
+
+ while (i < values.length) {
+ Vector<Long> valuesVector = LongVector.fromArray(LONG_VECTOR_SPECIES, values, i);
+ int j = i + valuesVector.compare(VectorOperators.GT, keyVector).firstTrue();
+ result = (j > i) ? j : result;
+ i += (j << SHIFT);
+ }
+
+ assert result > 1 : "key must be greater than or equal to " + minValue;
+ return values[result - 1];
+ }
+
+ private static int log2(int num) {
+ if ((num <= 0) || ((num & (num - 1)) != 0)) {
+ throw new IllegalArgumentException(num + " is not a positive power of 2");
+ }
+ return 32 - Integer.numberOfLeadingZeros(num - 1);
+ }
+}
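The offset formula in the Javadoc above is easiest to see with concrete numbers. The snippet below only illustrates the layout arithmetic; the lane count of 8 is an example, the real value comes from LongVector.SPECIES_PREFERRED on the running hardware.

```java
public class BtreeLayoutSketch {
    public static void main(String[] args) {
        int lanes = 8;                                            // illustrative lane count
        int shift = 32 - Integer.numberOfLeadingZeros(lanes - 1); // log2(lanes) = 3

        // Root block starts at offset 1; its k-th child (k = 0..lanes) starts at i + ((i + k) << shift).
        int root = 1;
        for (int k = 0; k <= lanes; k++) {
            int child = root + ((root + k) << shift);
            System.out.println("child " + k + " of root starts at offset " + child);
        }
        // Prints 9, 17, 25, ..., 73: the second level of blocks, matching the in-order build above.
    }
}
```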
diff --git a/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java
new file mode 100644
index 0000000000000..0709ed4374227
--- /dev/null
+++ b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java
@@ -0,0 +1,75 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Factory class to create and return the fastest implementation of {@link Roundable}.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+public final class RoundableFactory {
+ /**
+ * The maximum limit up to which linear search is used, otherwise binary or B-tree search is used.
+ * This is because linear search is much faster on small arrays.
+ * Benchmark results: PR #9727
+ */
+ private static final int LINEAR_SEARCH_MAX_SIZE = 64;
+
+ /**
+ * Indicates whether the vectorized (SIMD) B-tree search implementation is to be used.
+ * It is true when either:
+ * 1. The feature flag is set to "forced", or
+ * 2. The platform has a minimum of 4 long vector lanes and the feature flag is set to "true".
+ */
+ private static final boolean USE_BTREE_SEARCHER;
+
+ /**
+ * This class is initialized only when:
+ * - JDK-20+
+ * - jdk.incubator.vector.LongVector is available (--add-modules=jdk.incubator.vector is passed)
+ */
+ private static final class VectorCheck {
+ final static int SPECIES_PREFERRED = jdk.incubator.vector.LongVector.SPECIES_PREFERRED.length();
+ }
+
+ static {
+ String simdRoundingFeatureFlag = System.getProperty("opensearch.experimental.feature.simd.rounding.enabled");
+ boolean useBtreeSearcher = false;
+
+ try {
+ final Class<?> incubator = Class.forName("jdk.incubator.vector.LongVector");
+
+ useBtreeSearcher = "forced".equalsIgnoreCase(simdRoundingFeatureFlag)
+ || (VectorCheck.SPECIES_PREFERRED >= 4 && "true".equalsIgnoreCase(simdRoundingFeatureFlag));
+
+ } catch (final ClassNotFoundException ex) {
+ /* do not use BtreeSearcher */
+ }
+
+ USE_BTREE_SEARCHER = useBtreeSearcher;
+ }
+
+ private RoundableFactory() {}
+
+ /**
+ * Creates and returns the fastest implementation of {@link Roundable}.
+ */
+ public static Roundable create(long[] values, int size) {
+ if (size <= LINEAR_SEARCH_MAX_SIZE) {
+ return new BidirectionalLinearSearcher(values, size);
+ } else if (USE_BTREE_SEARCHER) {
+ return new BtreeSearcher(values, size);
+ } else {
+ return new BinarySearcher(values, size);
+ }
+ }
+}
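A minimal usage sketch of the factory introduced above, with an illustrative boundary array; the floor() contract is taken from the Roundable implementations and tests in this PR.

```java
import org.opensearch.common.round.Roundable;
import org.opensearch.common.round.RoundableFactory;

public class RoundableUsageSketch {
    public static void main(String[] args) {
        // Pre-sorted bucket boundaries, e.g. interval start times for date rounding.
        long[] boundaries = { 0, 10, 20, 30, 40, 50 };

        // With 6 values this stays at or below LINEAR_SEARCH_MAX_SIZE (64), so the factory picks
        // BidirectionalLinearSearcher; larger arrays get BinarySearcher or, when enabled, BtreeSearcher.
        Roundable roundable = RoundableFactory.create(boundaries, boundaries.length);

        System.out.println(roundable.floor(34)); // 30 (round-down point)
        System.out.println(roundable.floor(50)); // 50 (exact match floors to itself)
    }
}
```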
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
index df04709458b29..8d8a4c7895339 100644
--- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
@@ -35,7 +35,7 @@ public void testPublicApiMethodArgumentNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotated)"
)
)
@@ -56,7 +56,7 @@ public void testPublicApiMethodArgumentNotAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedGenerics)"
)
)
@@ -77,7 +77,7 @@ public void testPublicApiMethodThrowsNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodThrowsNotAnnotated)"
)
)
@@ -111,7 +111,7 @@ public void testPublicApiMethodArgumentNotAnnotatedPackagePrivate() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)"
)
)
@@ -209,7 +209,7 @@ public void testPublicApiMethodReturnNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotated)"
)
)
@@ -230,7 +230,7 @@ public void testPublicApiMethodReturnNotAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedGenerics)"
)
)
@@ -251,7 +251,7 @@ public void testPublicApiMethodReturnNotAnnotatedArray() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedArray)"
)
)
@@ -272,7 +272,7 @@ public void testPublicApiMethodReturnNotAnnotatedBoundedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedBoundedGenerics)"
)
)
@@ -297,7 +297,7 @@ public void testPublicApiMethodReturnNotAnnotatedAnnotation() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedAnnotation)"
)
)
@@ -388,7 +388,7 @@ public void testPublicApiMethodGenericsArgumentNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodGenericsArgumentNotAnnotated)"
)
)
@@ -453,7 +453,7 @@ public void testPublicApiMethodReturnAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnAnnotatedGenerics)"
)
)
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
index dcf8dd7945012..c8fdb3333a714 100644
--- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
@@ -43,6 +43,7 @@ default CompilerResult compile(String name, String... names) {
return compileWithPackage(ApiAnnotationProcessorTests.class.getPackageName(), name, names);
}
+ @SuppressWarnings("removal")
default CompilerResult compileWithPackage(String pck, String name, String... names) {
final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
final DiagnosticCollector<JavaFileObject> collector = new DiagnosticCollector<>();
diff --git a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
index ae9f629c59024..ad19f456b0df4 100644
--- a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
@@ -12,15 +12,31 @@
public class RoundableTests extends OpenSearchTestCase {
- public void testFloor() {
- int size = randomIntBetween(1, 256);
- long[] values = new long[size];
- for (int i = 1; i < values.length; i++) {
- values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1;
- }
+ public void testRoundingEmptyArray() {
+ Throwable throwable = assertThrows(IllegalArgumentException.class, () -> RoundableFactory.create(new long[0], 0));
+ assertEquals("at least one value must be present", throwable.getMessage());
+ }
+
+ public void testRoundingSmallArray() {
+ int size = randomIntBetween(1, 64);
+ long[] values = randomArrayOfSortedValues(size);
+ Roundable roundable = RoundableFactory.create(values, size);
+
+ assertEquals("BidirectionalLinearSearcher", roundable.getClass().getSimpleName());
+ assertRounding(roundable, values, size);
+ }
- Roundable[] impls = { new BinarySearcher(values, size), new BidirectionalLinearSearcher(values, size) };
+ public void testRoundingLargeArray() {
+ int size = randomIntBetween(65, 256);
+ long[] values = randomArrayOfSortedValues(size);
+ Roundable roundable = RoundableFactory.create(values, size);
+ boolean useBtreeSearcher = "forced".equalsIgnoreCase(System.getProperty("opensearch.experimental.feature.simd.rounding.enabled"));
+ assertEquals(useBtreeSearcher ? "BtreeSearcher" : "BinarySearcher", roundable.getClass().getSimpleName());
+ assertRounding(roundable, values, size);
+ }
+
+ private void assertRounding(Roundable roundable, long[] values, int size) {
for (int i = 0; i < 100000; i++) {
// Index of the expected round-down point.
int idx = randomIntBetween(0, size - 1);
@@ -35,23 +51,21 @@ public void testFloor() {
// round-down point, which will still floor to the same value.
long key = expected + (randomNonNegativeLong() % delta);
- for (Roundable roundable : impls) {
- assertEquals(expected, roundable.floor(key));
- }
+ assertEquals(expected, roundable.floor(key));
}
+
+ Throwable throwable = assertThrows(AssertionError.class, () -> roundable.floor(values[0] - 1));
+ assertEquals("key must be greater than or equal to " + values[0], throwable.getMessage());
}
- public void testFailureCases() {
- Throwable throwable;
+ private static long[] randomArrayOfSortedValues(int size) {
+ int capacity = size + randomInt(20); // May be slightly more than the size.
+ long[] values = new long[capacity];
- throwable = assertThrows(IllegalArgumentException.class, () -> new BinarySearcher(new long[0], 0));
- assertEquals("at least one value must be present", throwable.getMessage());
- throwable = assertThrows(IllegalArgumentException.class, () -> new BidirectionalLinearSearcher(new long[0], 0));
- assertEquals("at least one value must be present", throwable.getMessage());
+ for (int i = 1; i < size; i++) {
+ values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1;
+ }
- throwable = assertThrows(AssertionError.class, () -> new BinarySearcher(new long[] { 100 }, 1).floor(50));
- assertEquals("key must be greater than or equal to 100", throwable.getMessage());
- throwable = assertThrows(AssertionError.class, () -> new BidirectionalLinearSearcher(new long[] { 100 }, 1).floor(50));
- assertEquals("key must be greater than or equal to 100", throwable.getMessage());
+ return values;
}
}
diff --git a/libs/core/build.gradle b/libs/core/build.gradle
index 4850b5aea5c85..0cf2cd0bf92b6 100644
--- a/libs/core/build.gradle
+++ b/libs/core/build.gradle
@@ -36,45 +36,6 @@ base {
archivesName = 'opensearch-core'
}
-// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs
-if (!isEclipse) {
- sourceSets {
- java11 {
- java {
- srcDirs = ['src/main/java11']
- }
- }
- }
-
- configurations {
- java11Compile.extendsFrom(compile)
- }
-
- dependencies {
- java11Implementation sourceSets.main.output
- }
-
- compileJava11Java {
- sourceCompatibility = JavaVersion.VERSION_11
- targetCompatibility = JavaVersion.VERSION_11
- }
-
- forbiddenApisJava11 {
- if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) {
- targetCompatibility = JavaVersion.VERSION_11
- }
- replaceSignatureFiles 'jdk-signatures'
- }
-
- jar {
- metaInf {
- into 'versions/11'
- from sourceSets.java11.output
- }
- manifest.attributes('Multi-Release': 'true')
- }
-}
-
dependencies {
api project(':libs:opensearch-common')
diff --git a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 b/libs/core/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/core/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.16.1.jar.sha1 b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.10.0.jar.sha1 b/libs/core/licenses/lucene-core-9.10.0.jar.sha1
new file mode 100644
index 0000000000000..31b6bcd29f418
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.10.0.jar.sha1
@@ -0,0 +1 @@
+64e5624754d59386be5d9159c68f81ff96298704
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0.jar.sha1
deleted file mode 100644
index f9a3e2f3cbee6..0000000000000
--- a/libs/core/licenses/lucene-core-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e8421c5f8573bcf22e9265fc7e19469545a775a
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index d94be3f25b53d..1bc0583d5e7df 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -98,8 +98,10 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
- public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0);
- public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0);
+ public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_2);
+ public static final Version V_2_12_1 = new Version(2120199, org.apache.lucene.util.Version.LUCENE_9_9_2);
+ public static final Version V_2_13_0 = new Version(2130099, org.apache.lucene.util.Version.LUCENE_9_10_0);
+ public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version CURRENT = V_3_0_0;
public static Version fromId(int id) {
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
index 3e996bdee83a2..ea23b3d81a775 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
@@ -56,6 +56,7 @@
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
import org.opensearch.core.xcontent.MediaType;
import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.semver.SemverRange;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
@@ -750,6 +751,8 @@ public Object readGenericValue() throws IOException {
return readCollection(StreamInput::readGenericValue, HashSet::new, Collections.emptySet());
case 26:
return readBigInteger();
+ case 27:
+ return readSemverRange();
default:
throw new IOException("Can't read unknown type [" + type + "]");
}
@@ -1090,6 +1093,10 @@ public Version readVersion() throws IOException {
return Version.fromId(readVInt());
}
+ public SemverRange readSemverRange() throws IOException {
+ return SemverRange.fromString(readString());
+ }
+
/** Reads the {@link Version} from the input stream */
public Build readBuild() throws IOException {
// the following is new for opensearch: we write the distribution to support any "forks"
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
index 2d69e1c686df3..b7599265aece3 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
@@ -54,6 +54,7 @@
import org.opensearch.core.common.settings.SecureString;
import org.opensearch.core.common.text.Text;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
+import org.opensearch.semver.SemverRange;
import java.io.EOFException;
import java.io.FileNotFoundException;
@@ -784,6 +785,10 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep
o.writeByte((byte) 26);
o.writeString(v.toString());
});
+ writers.put(SemverRange.class, (o, v) -> {
+ o.writeByte((byte) 27);
+ o.writeSemverRange((SemverRange) v);
+ });
WRITERS = Collections.unmodifiableMap(writers);
}
@@ -1101,6 +1106,10 @@ public void writeVersion(final Version version) throws IOException {
writeVInt(version.id);
}
+ public void writeSemverRange(final SemverRange range) throws IOException {
+ writeString(range.toString());
+ }
+
/** Writes the OpenSearch {@link Build} informn to the output stream */
public void writeBuild(final Build build) throws IOException {
// the following is new for opensearch: we write the distribution name to support any "forks" of the code
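With the writer registered under type 27 above and the matching reader added to StreamInput, a SemverRange can round-trip through the generic value path. A small sketch, assuming the BytesStreamOutput in-memory stream commonly used in OpenSearch server tests is available:

```java
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.semver.SemverRange;

import java.io.IOException;

public class SemverRangeStreamSketch {
    public static void main(String[] args) throws IOException {
        SemverRange range = SemverRange.fromString("~2.13.0");

        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeGenericValue(range);                       // dispatches through the new type-27 writer

            try (StreamInput in = out.bytes().streamInput()) {
                SemverRange roundTripped = (SemverRange) in.readGenericValue();
                System.out.println(range.equals(roundTripped)); // true
            }
        }
    }
}
```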
diff --git a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
index c0abad7ed727f..1e48cf1f476da 100644
--- a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
+++ b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
@@ -32,6 +32,7 @@
package org.opensearch.core.index.shard;
+import org.apache.lucene.util.RamUsageEstimator;
import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -55,6 +56,8 @@ public class ShardId implements Comparable<ShardId>, ToXContentFragment, Writeable {
private final int shardId;
private final int hashCode;
+ private final static long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ShardId.class);
+
/**
* Constructs a new shard id.
* @param index the index name
@@ -88,6 +91,10 @@ public ShardId(StreamInput in) throws IOException {
hashCode = computeHashCode();
}
+ public long getBaseRamBytesUsed() {
+ return BASE_RAM_BYTES_USED;
+ }
+
/**
* Writes this shard id to a stream.
* @param out the stream to write to
diff --git a/libs/core/src/main/java/org/opensearch/semver/SemverRange.java b/libs/core/src/main/java/org/opensearch/semver/SemverRange.java
new file mode 100644
index 0000000000000..da87acc7124aa
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/SemverRange.java
@@ -0,0 +1,170 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver;
+
+import org.opensearch.Version;
+import org.opensearch.common.Nullable;
+import org.opensearch.core.xcontent.ToXContentFragment;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.semver.expr.Caret;
+import org.opensearch.semver.expr.Equal;
+import org.opensearch.semver.expr.Expression;
+import org.opensearch.semver.expr.Tilde;
+
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Optional;
+
+import static java.util.Arrays.stream;
+
+/**
+ * Represents a single semver range that allows for specifying which {@code org.opensearch.Version}s satisfy the range.
+ * It is composed of a range version and a range operator. Following are the supported operators:
+ * <ul>
+ *     <li>'=' Requires exact match with the range version. For example, =1.2.3 range would match only 1.2.3</li>
+ *     <li>'~' Allows for patch version variability starting from the range version. For example, ~1.2.3 range would match versions greater than or equal to 1.2.3 but less than 1.3.0</li>
+ *     <li>'^' Allows for patch and minor version variability starting from the range version. For example, ^1.2.3 range would match versions greater than or equal to 1.2.3 but less than 2.0.0</li>
+ * </ul>
+ */
+public class SemverRange implements ToXContentFragment {
+
+ private final Version rangeVersion;
+ private final RangeOperator rangeOperator;
+
+ public SemverRange(final Version rangeVersion, final RangeOperator rangeOperator) {
+ this.rangeVersion = rangeVersion;
+ this.rangeOperator = rangeOperator;
+ }
+
+ /**
+ * Constructs a {@code SemverRange} from its string representation.
+ * @param range given range
+ * @return a {@code SemverRange}
+ */
+ public static SemverRange fromString(final String range) {
+ RangeOperator rangeOperator = RangeOperator.fromRange(range);
+ String version = range.replaceFirst(rangeOperator.asEscapedString(), "");
+ if (!Version.stringHasLength(version)) {
+ throw new IllegalArgumentException("Version cannot be empty");
+ }
+ return new SemverRange(Version.fromString(version), rangeOperator);
+ }
+
+ /**
+ * Return the range operator for this range.
+ * @return range operator
+ */
+ public RangeOperator getRangeOperator() {
+ return rangeOperator;
+ }
+
+ /**
+ * Return the version for this range.
+ * @return the range version
+ */
+ public Version getRangeVersion() {
+ return rangeVersion;
+ }
+
+ /**
+ * Check if range is satisfied by given version string.
+ *
+ * @param versionToEvaluate version to check
+ * @return {@code true} if range is satisfied by version, {@code false} otherwise
+ */
+ public boolean isSatisfiedBy(final String versionToEvaluate) {
+ return isSatisfiedBy(Version.fromString(versionToEvaluate));
+ }
+
+ /**
+ * Check if range is satisfied by given version.
+ *
+ * @param versionToEvaluate version to check
+ * @return {@code true} if range is satisfied by version, {@code false} otherwise
+ * @see #isSatisfiedBy(String)
+ */
+ public boolean isSatisfiedBy(final Version versionToEvaluate) {
+ return this.rangeOperator.expression.evaluate(this.rangeVersion, versionToEvaluate);
+ }
+
+ @Override
+ public boolean equals(@Nullable final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ SemverRange range = (SemverRange) o;
+ return Objects.equals(rangeVersion, range.rangeVersion) && rangeOperator == range.rangeOperator;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(rangeVersion, rangeOperator);
+ }
+
+ @Override
+ public String toString() {
+ return rangeOperator.asString() + rangeVersion;
+ }
+
+ @Override
+ public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
+ return builder.value(toString());
+ }
+
+ /**
+ * A range operator.
+ */
+ public enum RangeOperator {
+
+ EQ("=", new Equal()),
+ TILDE("~", new Tilde()),
+ CARET("^", new Caret()),
+ DEFAULT("", new Equal());
+
+ private final String operator;
+ private final Expression expression;
+
+ RangeOperator(final String operator, final Expression expression) {
+ this.operator = operator;
+ this.expression = expression;
+ }
+
+ /**
+ * String representation of the range operator.
+ *
+ * @return range operator as string
+ */
+ public String asString() {
+ return operator;
+ }
+
+ /**
+ * Escaped string representation of the range operator,
+ * if operator is a regex character.
+ *
+ * @return range operator as escaped string, if operator is a regex character
+ */
+ public String asEscapedString() {
+ if (Objects.equals(operator, "^")) {
+ return "\\^";
+ }
+ return operator;
+ }
+
+ public static RangeOperator fromRange(final String range) {
+ Optional<RangeOperator> rangeOperator = stream(values()).filter(
+ operator -> operator != DEFAULT && range.startsWith(operator.asString())
+ ).findFirst();
+ return rangeOperator.orElse(DEFAULT);
+ }
+ }
+}
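
A minimal usage sketch of the class above (not part of this patch); the version strings mirror the examples in the javadoc.

import org.opensearch.Version;
import org.opensearch.semver.SemverRange;

class SemverRangeUsageSketch {
    static void demo() {
        // '~' allows patch variability: >= 1.2.3 and < 1.3.0
        SemverRange tilde = SemverRange.fromString("~1.2.3");
        assert tilde.isSatisfiedBy("1.2.9");
        assert tilde.isSatisfiedBy(Version.fromString("1.3.0")) == false;

        // '^' allows minor and patch variability: >= 1.2.3 and < 2.0.0
        SemverRange caret = SemverRange.fromString("^1.2.3");
        assert caret.isSatisfiedBy("1.9.9");
        assert caret.isSatisfiedBy("2.0.0") == false;

        // No operator implies an exact match.
        SemverRange exact = SemverRange.fromString("1.2.3");
        assert exact.getRangeOperator() == SemverRange.RangeOperator.DEFAULT;
        assert exact.isSatisfiedBy("1.2.3");
    }
}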
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java b/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java
new file mode 100644
index 0000000000000..ce2b74dde0865
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java
@@ -0,0 +1,32 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * Expression to evaluate version compatibility allowing for minor and patch version variability.
+ */
+public class Caret implements Expression {
+
+ /**
+ * Checks if the given version is compatible with the range version allowing for minor and
+ * patch version variability.
+ * Allows all versions starting from the rangeVersion up to the next major version (exclusive).
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return {@code true} if the versions are compatible, {@code false} otherwise
+ */
+ @Override
+ public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) {
+ Version lower = rangeVersion;
+ Version upper = Version.fromString((rangeVersion.major + 1) + ".0.0");
+ return versionToEvaluate.onOrAfter(lower) && versionToEvaluate.before(upper);
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java b/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java
new file mode 100644
index 0000000000000..d3e1d63060b77
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java
@@ -0,0 +1,29 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * Expression to evaluate equality of versions.
+ */
+public class Equal implements Expression {
+
+ /**
+ * Checks if a given version matches a certain range version.
+ *
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return {@code true} if the versions are equal, {@code false} otherwise
+ */
+ @Override
+ public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) {
+ return versionToEvaluate.equals(rangeVersion);
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java b/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java
new file mode 100644
index 0000000000000..68bb4e249836a
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java
@@ -0,0 +1,26 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * An evaluation expression.
+ */
+public interface Expression {
+
+ /**
+ * Evaluates an expression.
+ *
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return the result of the expression evaluation
+ */
+ boolean evaluate(final Version rangeVersion, final Version versionToEvaluate);
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java b/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java
new file mode 100644
index 0000000000000..5f62ffe62ddeb
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java
@@ -0,0 +1,31 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * Expression to evaluate version compatibility allowing patch version variability.
+ */
+public class Tilde implements Expression {
+
+ /**
+ * Checks if the given version is compatible with a range version allowing for patch version variability.
+ * Allows all versions starting from the rangeVersion up to the next minor version (exclusive).
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return {@code true} if the versions are compatible, {@code false} otherwise
+ */
+ @Override
+ public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) {
+ Version lower = rangeVersion;
+ Version upper = Version.fromString(rangeVersion.major + "." + (rangeVersion.minor + 1) + "." + 0);
+ return versionToEvaluate.onOrAfter(lower) && versionToEvaluate.before(upper);
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java b/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java
new file mode 100644
index 0000000000000..06cf9feaaaf8f
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java
@@ -0,0 +1,9 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+/** Expressions library module */
+package org.opensearch.semver.expr;
diff --git a/libs/core/src/main/java/org/opensearch/semver/package-info.java b/libs/core/src/main/java/org/opensearch/semver/package-info.java
new file mode 100644
index 0000000000000..ada935582d408
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/** Semver library module */
+package org.opensearch.semver;
diff --git a/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java b/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java
new file mode 100644
index 0000000000000..af1d95b2561b7
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java
@@ -0,0 +1,105 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver;
+
+import org.opensearch.test.OpenSearchTestCase;
+
+public class SemverRangeTests extends OpenSearchTestCase {
+
+ public void testRangeWithEqualsOperator() {
+ SemverRange range = SemverRange.fromString("=1.2.3");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.EQ);
+ assertTrue(range.isSatisfiedBy("1.2.3"));
+ assertFalse(range.isSatisfiedBy("1.2.4"));
+ assertFalse(range.isSatisfiedBy("1.3.3"));
+ assertFalse(range.isSatisfiedBy("2.2.3"));
+ }
+
+ public void testRangeWithDefaultOperator() {
+ SemverRange range = SemverRange.fromString("1.2.3");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.DEFAULT);
+ assertTrue(range.isSatisfiedBy("1.2.3"));
+ assertFalse(range.isSatisfiedBy("1.2.4"));
+ assertFalse(range.isSatisfiedBy("1.3.3"));
+ assertFalse(range.isSatisfiedBy("2.2.3"));
+ }
+
+ public void testRangeWithTildeOperator() {
+ SemverRange range = SemverRange.fromString("~2.3.4");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.TILDE);
+ assertTrue(range.isSatisfiedBy("2.3.4"));
+ assertTrue(range.isSatisfiedBy("2.3.5"));
+ assertTrue(range.isSatisfiedBy("2.3.12"));
+
+ assertFalse(range.isSatisfiedBy("2.3.0"));
+ assertFalse(range.isSatisfiedBy("2.3.3"));
+ assertFalse(range.isSatisfiedBy("2.4.0"));
+ assertFalse(range.isSatisfiedBy("3.0.0"));
+ }
+
+ public void testRangeWithCaretOperator() {
+ SemverRange range = SemverRange.fromString("^2.3.4");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.CARET);
+ assertTrue(range.isSatisfiedBy("2.3.4"));
+ assertTrue(range.isSatisfiedBy("2.3.5"));
+ assertTrue(range.isSatisfiedBy("2.4.12"));
+
+ assertFalse(range.isSatisfiedBy("2.3.3"));
+ assertFalse(range.isSatisfiedBy("3.0.0"));
+ }
+
+ public void testInvalidRanges() {
+ IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString(""));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("="));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~"));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^"));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ expectThrows(NumberFormatException.class, () -> SemverRange.fromString("$1.2.3"));
+ }
+}
diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java
new file mode 100644
index 0000000000000..3cb168d42cda0
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java
@@ -0,0 +1,30 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+import org.opensearch.test.OpenSearchTestCase;
+
+public class CaretTests extends OpenSearchTestCase {
+
+ public void testMinorAndPatchVersionVariability() {
+ Caret caretExpr = new Caret();
+ Version rangeVersion = Version.fromString("1.2.3");
+
+ // Compatible versions
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.3")));
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.4")));
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.3.3")));
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.9.9")));
+
+ // Incompatible versions
+ assertFalse(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.2")));
+ assertFalse(caretExpr.evaluate(rangeVersion, Version.fromString("2.0.0")));
+ }
+}
diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java
new file mode 100644
index 0000000000000..fb090865157ed
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java
@@ -0,0 +1,22 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+import org.opensearch.test.OpenSearchTestCase;
+
+public class EqualTests extends OpenSearchTestCase {
+
+ public void testEquality() {
+ Equal equalExpr = new Equal();
+ Version rangeVersion = Version.fromString("1.2.3");
+ assertTrue(equalExpr.evaluate(rangeVersion, Version.fromString("1.2.3")));
+ assertFalse(equalExpr.evaluate(rangeVersion, Version.fromString("1.2.4")));
+ }
+}
diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java
new file mode 100644
index 0000000000000..8666611645c3a
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java
@@ -0,0 +1,29 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+import org.opensearch.test.OpenSearchTestCase;
+
+public class TildeTests extends OpenSearchTestCase {
+
+ public void testPatchVersionVariability() {
+ Tilde tildeExpr = new Tilde();
+ Version rangeVersion = Version.fromString("1.2.3");
+
+ assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.3")));
+ assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.4")));
+ assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.9")));
+
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.0")));
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.2")));
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.3.0")));
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("2.0.0")));
+ }
+}
diff --git a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java
index 898ce7e4e913b..ab48cc2357e7f 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java
@@ -129,6 +129,7 @@ private void configureSocket(ServerSocket socket) throws IOException {
socket.setReuseAddress(config.tcpReuseAddress());
}
+ @SuppressWarnings("removal")
protected static SocketChannel accept(ServerSocketChannel serverSocketChannel) throws IOException {
try {
assert serverSocketChannel.isBlocking() == false;
diff --git a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
index 3df8e42fe4f14..530aa1d86afc7 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
@@ -388,6 +388,7 @@ private void configureSocket(Socket socket, boolean isConnectComplete) throws IO
}
}
+ @SuppressWarnings("removal")
private static void connect(SocketChannel socketChannel, InetSocketAddress remoteAddress) throws IOException {
try {
AccessController.doPrivileged((PrivilegedExceptionAction<Boolean>) () -> socketChannel.connect(remoteAddress));
diff --git a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java
index 4a200a5dfa9bd..969fa91b50538 100644
--- a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java
+++ b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java
@@ -65,6 +65,7 @@ protected Class<?> findClass(String name) throws ClassNotFoundException {
/**
* Return a new classloader across the parent and extended loaders.
*/
+ @SuppressWarnings("removal")
public static ExtendedPluginsClassLoader create(ClassLoader parent, List<ClassLoader> extendedLoaders) {
return AccessController.doPrivileged(
(PrivilegedAction<ExtendedPluginsClassLoader>) () -> new ExtendedPluginsClassLoader(parent, extendedLoaders)
diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java
index f41c49844997d..a2531f4a9156e 100644
--- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java
+++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java
@@ -76,6 +76,7 @@
* @see <a href="http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html">
* http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html</a>
*/
+@SuppressWarnings("removal")
public class SecureSM extends SecurityManager {
private final String[] classesThatCanExit;
diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java
index fe239fea8129e..3c8e78a902fcb 100644
--- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java
+++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java
@@ -18,6 +18,7 @@
import java.util.concurrent.ForkJoinPool.ForkJoinWorkerThreadFactory;
import java.util.concurrent.ForkJoinWorkerThread;
+@SuppressWarnings("removal")
public class SecuredForkJoinWorkerThreadFactory implements ForkJoinWorkerThreadFactory {
static AccessControlContext contextWithPermissions(Permission... perms) {
Permissions permissions = new Permissions();
diff --git a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java
index 026ffb080ee61..fd666c70cfebb 100644
--- a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java
+++ b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java
@@ -41,6 +41,7 @@
import junit.framework.TestCase;
/** Simple tests for SecureSM */
+@SuppressWarnings("removal")
public class SecureSMTests extends TestCase {
static {
// install a mock security policy:
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
index d57def9406b17..f38fdd6412d79 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
@@ -34,6 +34,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
return metricsTelemetry.createUpDownCounter(name, description, unit);
}
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return metricsTelemetry.createHistogram(name, description, unit);
+ }
+
@Override
public void close() throws IOException {
metricsTelemetry.close();
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java
new file mode 100644
index 0000000000000..95ada626e21ee
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * Histogram records the value for an existing metric.
+ * {@opensearch.experimental}
+ */
+@ExperimentalApi
+public interface Histogram {
+
+ /**
+ * record value.
+ * @param value value to be added.
+ */
+ void record(double value);
+
+ /**
+ * record value along with the attributes.
+ *
+ * @param value value to be added.
+ * @param tags attributes/dimensions of the metric.
+ */
+ void record(double value, Tags tags);
+
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
index 61b3df089928b..94d19bda31f34 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
@@ -36,4 +36,15 @@ public interface MetricsRegistry extends Closeable {
* @return counter.
*/
Counter createUpDownCounter(String name, String description, String unit);
+
+ /**
+ * Creates a histogram metric. The implementing telemetry framework takes care
+ * of the bucketing strategy.
+ *
+ * @param name name of the histogram.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @return histogram.
+ */
+ Histogram createHistogram(String name, String description, String unit);
}
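
A brief sketch (not part of this patch) of how a component might use the new histogram API; the metric name and the wiring of the MetricsRegistry instance are hypothetical.

import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;

class RequestLatencyMetricsSketch {
    private final Histogram latencyHistogram;

    RequestLatencyMetricsSketch(MetricsRegistry registry) {
        // The telemetry implementation decides the bucketing strategy.
        this.latencyHistogram = registry.createHistogram("request.latency", "Latency of incoming requests", "ms");
    }

    void onRequestCompleted(long tookMillis) {
        latencyHistogram.record((double) tookMillis);
    }
}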
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
new file mode 100644
index 0000000000000..20e72bccad899
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Histogram;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * No-op {@link Histogram}
+ * {@opensearch.internal}
+ */
+@InternalApi
+public class NoopHistogram implements Histogram {
+
+ /**
+ * No-op Histogram instance
+ */
+ public final static NoopHistogram INSTANCE = new NoopHistogram();
+
+ private NoopHistogram() {}
+
+ @Override
+ public void record(double value) {
+
+ }
+
+ @Override
+ public void record(double value, Tags tags) {
+
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 640c6842a8960..d3dda68cfae71 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -10,6 +10,7 @@
import org.opensearch.common.annotation.InternalApi;
import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;
import java.io.IOException;
@@ -38,6 +39,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
return NoopCounter.INSTANCE;
}
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return NoopHistogram.INSTANCE;
+ }
+
@Override
public void close() throws IOException {
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 6171641db5f07..02f126075845b 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -48,4 +48,15 @@ public void testUpDownCounter() {
assertSame(mockCounter, counter);
}
+ public void testHistogram() {
+ Histogram mockHistogram = mock(Histogram.class);
+ when(defaultMeterRegistry.createHistogram(any(String.class), any(String.class), any(String.class))).thenReturn(mockHistogram);
+ Histogram histogram = defaultMeterRegistry.createHistogram(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testHistogram",
+ "test up-down counter",
+ "ms"
+ );
+ assertSame(mockHistogram, histogram);
+ }
+
}
diff --git a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
deleted file mode 100644
index 8da478fc6013d..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-35e8b7bf4fc1d078766bb155103d433ed5bb1627
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..b4b781f604910
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
@@ -0,0 +1 @@
+1be7098dccc079171464dca7e386bd8df623b031
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
deleted file mode 100644
index 3e952ffe92418..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3c422d7f3901c9a1becf9df3cf41efc68a5ab95c
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..ad91e748ebe94
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
@@ -0,0 +1 @@
+c4ddbc5277670f2e56b1f5e44e83afa748bcb125
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
deleted file mode 100644
index d62b5874ab023..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2033e2c5f531785d17f3a2bc31842e3bbb7983b2
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..9b30e7bf921b2
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
@@ -0,0 +1 @@
+8e4f1923d73cd55f2b4c0d56ee4ed80419297354
\ No newline at end of file
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
new file mode 100644
index 0000000000000..2f4dada29780d
--- /dev/null
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.xcontent;
+
+import com.fasterxml.jackson.core.StreamReadConstraints;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Consolidates the XContent constraints (primarily reflecting Jackson's {@link StreamReadConstraints} constraints)
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+public interface XContentContraints {
+ final String DEFAULT_CODEPOINT_LIMIT_PROPERTY = "opensearch.xcontent.codepoint.max";
+ final String DEFAULT_MAX_STRING_LEN_PROPERTY = "opensearch.xcontent.string.length.max";
+ final String DEFAULT_MAX_NAME_LEN_PROPERTY = "opensearch.xcontent.name.length.max";
+ final String DEFAULT_MAX_DEPTH_PROPERTY = "opensearch.xcontent.depth.max";
+
+ final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(System.getProperty(DEFAULT_MAX_STRING_LEN_PROPERTY, "50000000" /* ~50 Mb */));
+
+ final int DEFAULT_MAX_NAME_LEN = Integer.parseInt(
+ System.getProperty(DEFAULT_MAX_NAME_LEN_PROPERTY, "50000" /* StreamReadConstraints.DEFAULT_MAX_NAME_LEN */)
+ );
+
+ final int DEFAULT_MAX_DEPTH = Integer.parseInt(
+ System.getProperty(DEFAULT_MAX_DEPTH_PROPERTY, "1000" /* StreamReadConstraints.DEFAULT_MAX_DEPTH */)
+ );
+
+ final int DEFAULT_CODEPOINT_LIMIT = Integer.parseInt(System.getProperty(DEFAULT_CODEPOINT_LIMIT_PROPERTY, "52428800" /* ~50 Mb */));
+}
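
The limits are resolved once from system properties when the interface is initialized, so they have to be set before JVM startup (for example via jvm.options). A small sketch (not part of this patch) that prints the effective values:

import org.opensearch.common.xcontent.XContentContraints;

public final class PrintXContentLimits {
    public static void main(String[] args) {
        // Defaults: depth 1000, name length 50000, string length ~50 MB, codepoint limit ~50 MB.
        System.out.println("max depth: " + XContentContraints.DEFAULT_MAX_DEPTH);
        System.out.println("max name length: " + XContentContraints.DEFAULT_MAX_NAME_LEN);
        System.out.println("max string length: " + XContentContraints.DEFAULT_MAX_STRING_LEN);
        System.out.println("codepoint limit: " + XContentContraints.DEFAULT_CODEPOINT_LIMIT);
    }
}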
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
index 81f8fe9b6366f..7e92f236213d4 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
@@ -37,8 +37,10 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.cbor.CBORFactory;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -58,11 +60,7 @@
/**
* A CBOR based content implementation using Jackson.
*/
-public class CborXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class CborXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(cborXContent);
}
@@ -76,7 +74,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method
cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- cborFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ cborFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ cborFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
cborFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
cborXContent = new CborXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
index 4bd7c4c99bb46..91f6bbeb4f786 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
@@ -38,7 +38,9 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -57,11 +59,7 @@
/**
* A JSON based content implementation using Jackson.
*/
-public class JsonXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class JsonXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(jsonXContent);
}
@@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method
jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- jsonFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ jsonFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ jsonFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
jsonFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
jsonXContent = new JsonXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
index e824d4e1ae991..c73e126102a80 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
@@ -37,9 +37,11 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -58,11 +60,7 @@
/**
* A Smile based content implementation using Jackson.
*/
-public class SmileXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class SmileXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(smileXContent);
}
@@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method
smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- smileFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ smileFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ smileFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
smileFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
smileXContent = new SmileXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
index 0ad3c44e0168a..0e69c6c33b923 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
@@ -36,8 +36,11 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactoryBuilder;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -53,14 +56,12 @@
import java.io.Reader;
import java.util.Set;
+import org.yaml.snakeyaml.LoaderOptions;
+
/**
* A YAML based content implementation using Jackson.
*/
-public class YamlXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class YamlXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(yamlXContent);
}
@@ -69,9 +70,18 @@ public static XContentBuilder contentBuilder() throws IOException {
public static final YamlXContent yamlXContent;
static {
- yamlFactory = new YAMLFactory();
+ final LoaderOptions loaderOptions = new LoaderOptions();
+ loaderOptions.setCodePointLimit(DEFAULT_CODEPOINT_LIMIT);
+ yamlFactory = new YAMLFactoryBuilder(new YAMLFactory()).loaderOptions(loaderOptions).build();
yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- yamlFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ yamlFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ yamlFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
yamlFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
yamlXContent = new YamlXContent();
}
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
index d3d9ea174cf1b..81a2b0e290121 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
@@ -40,6 +40,7 @@
import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.smile.SmileXContent;
+import org.opensearch.common.xcontent.yaml.YamlXContent;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParseException;
@@ -48,16 +49,20 @@
import org.opensearch.test.OpenSearchTestCase;
import java.io.IOException;
+import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
+import java.util.zip.GZIPInputStream;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
+import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -67,6 +72,7 @@
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assume.assumeThat;
import static org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage;
public class XContentParserTests extends OpenSearchTestCase {
@@ -79,7 +85,8 @@ public class XContentParserTests extends OpenSearchTestCase {
() -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_STRING_LEN),
/* YAML parser limitation */
XContentType.YAML,
- () -> randomAlphaOfLengthBetween(1, 3140000)
+ /* use 75% of the limit, difficult to get the exact size of the content right */
+ () -> randomRealisticUnicodeOfCodepointLengthBetween(1, (int) (YamlXContent.DEFAULT_CODEPOINT_LIMIT * 0.75))
);
private static final Map<XContentType, Supplier<String>> OFF_LIMIT_GENERATORS = Map.of(
@@ -91,7 +98,51 @@ public class XContentParserTests extends OpenSearchTestCase {
() -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_STRING_LEN + 1),
/* YAML parser limitation */
XContentType.YAML,
- () -> randomRealisticUnicodeOfCodepointLength(3145730)
+ () -> randomRealisticUnicodeOfCodepointLength(YamlXContent.DEFAULT_CODEPOINT_LIMIT + 1)
+ );
+
+ private static final Map<XContentType, Supplier<String>> FIELD_NAME_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomAlphaOfLengthBetween(1, JsonXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.CBOR,
+ () -> randomAlphaOfLengthBetween(1, CborXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.SMILE,
+ () -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.YAML,
+ () -> randomAlphaOfLengthBetween(1, YamlXContent.DEFAULT_MAX_NAME_LEN)
+ );
+
+ private static final Map<XContentType, Supplier<String>> FIELD_NAME_OFF_LIMIT_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomAlphaOfLength(JsonXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.CBOR,
+ () -> randomAlphaOfLength(CborXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.SMILE,
+ () -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.YAML,
+ () -> randomAlphaOfLength(YamlXContent.DEFAULT_MAX_NAME_LEN + 1)
+ );
+
+ private static final Map<XContentType, Supplier<Integer>> DEPTH_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomIntBetween(1, JsonXContent.DEFAULT_MAX_DEPTH),
+ XContentType.CBOR,
+ () -> randomIntBetween(1, CborXContent.DEFAULT_MAX_DEPTH),
+ XContentType.SMILE,
+ () -> randomIntBetween(1, SmileXContent.DEFAULT_MAX_DEPTH),
+ XContentType.YAML,
+ () -> randomIntBetween(1, YamlXContent.DEFAULT_MAX_DEPTH)
+ );
+
+ private static final Map<XContentType, Supplier<Integer>> OFF_LIMIT_DEPTH_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> JsonXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.CBOR,
+ () -> CborXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.SMILE,
+ () -> SmileXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.YAML,
+ () -> YamlXContent.DEFAULT_MAX_DEPTH + 1
);
public void testStringOffLimit() throws IOException {
@@ -155,6 +206,188 @@ public void testString() throws IOException {
}
}
+ public void testFieldNameOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = FIELD_NAME_OFF_LIMIT_GENERATORS.get(xContentType).get();
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ // See please https://github.com/FasterXML/jackson-dataformats-binary/issues/392, support
+ // for CBOR, Smile is coming
+ if (xContentType != XContentType.JSON) {
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ assertNull(parser.nextToken());
+ } else {
+ assertThrows(StreamConstraintsException.class, () -> parser.nextToken());
+ }
+ }
+ }
+ }
+
+ public void testFieldName() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = FIELD_NAME_GENERATORS.get(xContentType).get();
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ assertNull(parser.nextToken());
+ }
+ }
+ }
+
+ public void testWriteDepthOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ // Branching off YAML logic into separate test case testWriteDepthOffLimitYaml since it behaves differently
+ assumeThat(xContentType, not(XContentType.YAML));
+
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ // The behavior here is very interesting: the generator does write the new object tag (changing the internal state)
+ // BUT throws the exception after the fact, this is why we have to close the object at the end.
+ assertThrows(StreamConstraintsException.class, () -> builder.startObject());
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+
+ builder.endObject();
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.endObject();
+ }
+ }
+ }
+
+ public void testWriteDepthOffLimitYaml() throws IOException {
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ try (XContentBuilder builder = XContentBuilder.builder(XContentType.YAML.xContent())) {
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(XContentType.YAML).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ // The behavior here is very interesting: the generator does write the new object tag (changing the internal state)
+ // BUT throws the exception after the fact, this is why we have to close the object at the end.
+ assertThrows(StreamConstraintsException.class, () -> builder.startObject());
+ } catch (final IllegalStateException ex) {
+ // YAML parser is having really hard time recovering from StreamConstraintsException, the internal
+ // state seems to be completely messed up and the closing cleanly seems to be not feasible.
+ }
+ }
+
+ public void testReadDepthOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ // Since parser and generator use the same max depth constraints, we could not generate the content with off limits,
+ // using precreated test files instead.
+ try (
+ InputStream in = new GZIPInputStream(
+ getDataInputStream("depth-off-limit." + xContentType.name().toLowerCase(Locale.US) + ".gz")
+ )
+ ) {
+ try (XContentParser parser = createParser(xContentType.xContent(), in)) {
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ }
+
+ if (xContentType != XContentType.YAML) {
+ assertThrows(StreamConstraintsException.class, () -> parser.nextToken());
+ }
+ }
+ }
+ }
+
+ public void testDepth() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ final int maxDepth = DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.endObject();
+ }
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field + depth, parser.currentName());
+ }
+
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ }
+
+ assertNull(parser.nextToken());
+ }
+ }
+ }
+
public void testFloat() throws IOException {
final XContentType xContentType = randomFrom(XContentType.values());
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz
new file mode 100644
index 0000000000000..88de7e590e7f0
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz
new file mode 100644
index 0000000000000..76274910542ac
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz
new file mode 100644
index 0000000000000..e248778b37253
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz
new file mode 100644
index 0000000000000..3b36594482a68
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz differ
diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
index 71af708f2e1dc..648536f9136a8 100644
--- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
+++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
@@ -36,10 +36,9 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.query.Operator;
import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
import java.util.Arrays;
import java.util.Collection;
@@ -49,10 +48,10 @@
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
-public class QueryStringWithAnalyzersIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryStringWithAnalyzersIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
- public QueryStringWithAnalyzersIT(Settings dynamicSettings) {
- super(dynamicSettings);
+ public QueryStringWithAnalyzersIT(Settings staticSettings) {
+ super(staticSettings);
}
@ParametersFactory
@@ -63,11 +62,6 @@ public static Collection