diff --git a/.github/ISSUE_TEMPLATE/bug_template.yml b/.github/ISSUE_TEMPLATE/bug_template.yml
index 2cd1ee8a7e688..5f0798abe0f68 100644
--- a/.github/ISSUE_TEMPLATE/bug_template.yml
+++ b/.github/ISSUE_TEMPLATE/bug_template.yml
@@ -15,7 +15,7 @@ body:
description: Choose a specific OpenSearch component your bug belongs to. If you are unsure which to select or if the component is not present, select "Other".
multiple: false
options:
- - Other
+ - # Empty first option to force selection
- Build
- Clients
- Cluster Manager
@@ -24,6 +24,7 @@ body:
- Indexing:Replication
- Indexing
- Libraries
+ - Other
- Plugins
- Search:Aggregations
- Search:Performance
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index d93ac8b590706..0159e771f7f80 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -22,7 +22,7 @@ body:
description: Choose a specific OpenSearch component your feature request belongs to. If you are unsure of which component to select or if the component is not present, select "Other".
multiple: false
options:
- - Other
+ - # Empty first option to force selection
- Build
- Clients
- Cluster Manager
@@ -31,6 +31,7 @@ body:
- Indexing:Replication
- Indexing
- Libraries
+ - Other
- Plugins
- Search:Aggregations
- Search:Performance
diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml
index ca026f530b4af..61962c91b4903 100644
--- a/.github/workflows/links.yml
+++ b/.github/workflows/links.yml
@@ -13,7 +13,7 @@ jobs:
- uses: actions/checkout@v4
- name: lychee Link Checker
id: lychee
- uses: lycheeverse/lychee-action@v1.8.0
+ uses: lycheeverse/lychee-action@v1.9.1
with:
args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes
fail: true
diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml
index 2f87afd372d90..34e8f57cc1878 100644
--- a/.github/workflows/maintainer-approval.yml
+++ b/.github/workflows/maintainer-approval.yml
@@ -2,7 +2,6 @@ name: Maintainers approval
on:
pull_request_review:
- types: [submitted]
jobs:
maintainer-approved-check:
@@ -26,7 +25,7 @@ jobs:
return maintainersResponse.data.map(item => item.login).join(', ');
- - uses: peternied/required-approval@v1.2
+ - uses: peternied/required-approval@v1.3
with:
token: ${{ secrets.GITHUB_TOKEN }}
min-required: 1
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7bff7c6ac83cf..d74e319b07566 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -58,6 +58,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/))
- Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894))
- Switched to more reliable OpenSearch Lucene snapshot location([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728))
+- Added support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394))
### Deprecated
@@ -82,6 +83,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944))
- Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993))
- Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439))
+- Fix typo in API annotation check message ([#11836](https://github.com/opensearch-project/OpenSearch/pull/11836))
+- Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872))
### Security
@@ -100,6 +103,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255))
- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351))
- Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670))
+- Remove ingest processor supports excluding fields ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967))
+- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275))
+- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753))
- [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853))
- Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247))
- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248))
@@ -107,6 +113,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679))
- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
- [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672))
+- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210))
- Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024))
- Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650))
- Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040))
@@ -120,13 +127,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591))
- Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583))
- Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039))
+- Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870))
- Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170))
+- Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891))
- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626))
### Dependencies
- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
- Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
-- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560))
+- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796))
- Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298))
- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630))
- Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506))
@@ -160,6 +169,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559))
- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691))
- Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693))
+- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798))
+- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887))
+- Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421))
+- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886))
### Changed
- Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))
@@ -176,6 +189,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057))
- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023))
- Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083))
+- Apply the fast filter optimization to composite aggregation of date histogram source ([#11505](https://github.com/opensearch-project/OpenSearch/pull/11505))
- Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087))
- Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528))
- Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209))
@@ -185,6 +199,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512))
- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562))
- Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678))
+- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582))
+- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732))
+- Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526))
+- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890))
### Deprecated
@@ -207,10 +225,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167))
- Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425))
- Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238))
+- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485))
- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
- Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417))
- Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609))
- Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711))
+- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490))
+- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815))
### Security
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index f9936aad0cf8c..21adbb0305ab1 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -183,6 +183,12 @@ Run OpenSearch using `gradlew run`.
./gradlew run
```
+[Plugins](plugins/) may be installed by passing a `-PinstalledPlugins` property:
+
+```bash
+./gradlew run -PinstalledPlugins="['plugin1', 'plugin2']"
+```
+
That will build OpenSearch and start it, writing its log above Gradle's status message. We log a lot of stuff on startup, specifically these lines tell you that OpenSearch is ready.
```
@@ -578,7 +584,7 @@ explicitly marked by an annotation should not be extended by external implementa
any time. The `@DeprecatedApi` annotation could also be added to any classes annotated with `@PublicApi` (or documented as `@opensearch.api`) or their methods that
are either changed (with replacement) or planned to be removed across major versions.
-The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`)
+The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`)
annotation. The presence of this annotation signals that API may change at any time (major, minor or even patch releases). In general, the classes annotated with
`@PublicApi` may expose other classes or methods annotated with `@ExperimentalApi`, in such cases the backward compatibility guarantees would not apply to latter
(see please [Experimental Development](#experimental-development) for more details).
diff --git a/TRIAGING.md b/TRIAGING.md
new file mode 100644
index 0000000000000..47cb44a4f5ba2
--- /dev/null
+++ b/TRIAGING.md
@@ -0,0 +1,83 @@
+# Triaging
+
+The maintainers of the OpenSearch Repo seek to promote an inclusive and engaged community of contributors. In order to facilitate this, weekly triage meetings are open-to-all and attendance is encouraged for anyone who hopes to contribute, discuss an issue, or learn more about the project. To learn more about contributing to the OpenSearch Repo visit the [Contributing](./CONTRIBUTING.md) documentation.
+
+### Do I need to attend for my issue to be addressed/triaged?
+
+Attendance is not required for your issue to be triaged or addressed. If not accepted the issue will be updated with a comment for next steps. All new issues are triaged weekly.
+
+You can track if your issue was triaged by watching your GitHub notifications for updates.
+
+### What happens if my issue does not get covered this time?
+
+Each meeting we seek to address all new issues. However, should we run out of time before your issue is discussed, you are always welcome to attend the next meeting or to follow up on the issue post itself.
+
+### How do I join the Triage meeting?
+
+Meetings are hosted regularly at 10:00a - 10:55a Central Time every Wednesday and can be joined via [Chime](https://aws.amazon.com/chime/), with this [meeting link](https://chime.aws/1988437365).
+
+After joining the Chime meeting, you can enable your video / voice to join the discussion. If you do not have a webcam or microphone available, you can still join in via the text chat.
+
+If you have an issue you'd like to bring forth please prepare a link to the issue so it can be presented and viewed by everyone in the meeting.
+
+### Is there an agenda for each week?
+
+Yes, each 55-minute meeting follows this structure:
+
+
+1. **Initial Gathering:** Feel free to turn on your video and engage in informal conversation. Shortly, a volunteer triage [facilitator](#what-is-the-role-of-the-facilitator) will begin the meeting and share their screen.
+2. **Record Attendees:** The facilitator will request attendees to share their GitHub profile links. These links will be collected and assembled into a [tag](#how-do-triage-facilitator-tag-comments-during-the-triage-meeting) to annotate comments during the meeting.
+3. **Announcements:** Any announcements will be made at the beginning of the meeting.
+4. **Review of New Issues:** We start by reviewing all untriaged [issues](https://github.com/search?q=label%3Auntriaged+is%3Aopen++repo%3Aopensearch-project%2FOpenSearch+&type=issues&ref=advsearch&s=created&o=desc) for the OpenSearch repo.
+5. **Attendee Requests:** An opportunity for any meeting member to request consideration of an issue or pull request.
+6. **Open Discussion:** Attendees can bring up any topics not already covered by filed issues or pull requests.
+
+### What is the role of the facilitator?
+
+The facilitator is crucial in driving the meeting, ensuring a smooth flow of issues into OpenSearch for future contributions. They maintain the meeting's agenda, solicit input from attendees, and record outcomes using the triage tag as items are discussed.
+
+### Do I need to have already contributed to the project to attend a triage meeting?
+
+No prior contributions are required. All interested individuals are welcome and encouraged to attend. Triage meetings offer a fantastic opportunity for new contributors to understand the project and explore various contribution avenues.
+
+### What if I have an issue that is almost a duplicate, should I open a new one to be triaged?
+
+You can always open an [issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose) including one that you think may be a duplicate. If you believe your issue is similar but distinct from an existing one, you are encouraged to file it and explain the differences during the triage meeting.
+
+### What if I have follow-up questions on an issue?
+
+If you have an existing issue you would like to discuss, you can always comment on the issue itself. Alternatively, you are welcome to come to the triage meeting to discuss.
+
+### Is this meeting a good place to get help setting up features on my OpenSearch instance?
+
+While we are always happy to help the community, the best resource for implementation questions is [the OpenSearch forum](https://forum.opensearch.org/).
+
+There you can find answers to many common questions as well as speak with implementation experts.
+
+### What are the issue labels associated with triaging?
+
+There are several labels used to identify the 'state' of issues filed in OpenSearch.
+| Label | When Applied | Meaning |
+|---------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| `Untriaged` | When issues are created or re-opened. | Issues labeled as 'Untriaged' require the attention of the repository maintainers and may need to be prioritized for quicker resolution. It's crucial to keep the count of 'Untriaged' labels low to ensure all potential security issues are addressed in a timely manner. See [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) for more details on handling these issues. |
+| `Help Wanted` | Anytime. | Issues marked as 'Help Wanted' signal that they are actionable and not the current focus of the project maintainers. Community contributions are especially encouraged for these issues. |
+| `Good First Issue` | Anytime. | Issues labeled as 'Good First Issue' are small in scope and can be resolved with a single pull request. These are recommended starting points for newcomers looking to make their first contributions. |
+
+### What are the typical outcomes of a triaged issue?
+
+| Outcome | Label | Description | Canned Response |
+|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." |
+| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to re-open after addressing the reason." |
+| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up. | "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" |
+| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. |
+
+### Is this where I should bring up potential security vulnerabilities?
+
+Due to the sensitive nature of security vulnerabilities, please report all potential vulnerabilities directly by following the steps outlined on the [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) document.
+
+### How do triage facilitator tag comments during the triage meeting?
+
+During the triage meeting, facilitators should use the tag _[Triage - attendees [1](#Profile_link) [2](#Profile_link)]_ to indicate a collective decision. This ensures contributors know the decision came from the meeting rather than an individual and identifies participants for any follow-up queries.
+
+This tag should not be used outside triage meetings.
diff --git a/build.gradle b/build.gradle
index b1cd1d532bfeb..296c30391af09 100644
--- a/build.gradle
+++ b/build.gradle
@@ -54,7 +54,7 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
- id "com.diffplug.spotless" version "6.20.0" apply false
+ id "com.diffplug.spotless" version "6.23.2" apply false
id "org.gradle.test-retry" version "1.5.4" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index a42976fef572c..1cb21acd14af7 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -110,7 +110,7 @@ dependencies {
api 'com.netflix.nebula:gradle-info-plugin:12.1.6'
api 'org.apache.rat:apache-rat:0.15'
api 'commons-io:commons-io:2.15.1'
- api "net.java.dev.jna:jna:5.13.0"
+ api "net.java.dev.jna:jna:5.14.0"
api 'com.github.johnrengelman:shadow:8.1.1'
api 'org.jdom:jdom2:2.0.6.1'
api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}"
@@ -118,7 +118,7 @@ dependencies {
api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6'
api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}"
api 'org.apache.maven:maven-model:3.9.6'
- api 'com.networknt:json-schema-validator:1.0.86'
+ api 'com.networknt:json-schema-validator:1.1.0'
api 'org.jruby.jcodings:jcodings:1.0.58'
api 'org.jruby.joni:joni:2.2.1'
api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 58b54e7b77a1e..dd7f2e1eaabf0 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
opensearch = 3.0.0
-lucene = 9.8.0
+lucene = 9.9.1
bundled_jdk_vendor = adoptium
bundled_jdk = 21.0.1+12
@@ -7,8 +7,8 @@ bundled_jdk = 21.0.1+12
# optional dependencies
spatial4j = 0.7
jts = 1.15.0
-jackson = 2.16.0
-jackson_databind = 2.16.0
+jackson = 2.16.1
+jackson_databind = 2.16.1
snakeyaml = 2.1
icu4j = 70.1
supercsv = 2.4.0
@@ -70,5 +70,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final
# opentelemetry dependencies
-opentelemetry = 1.32.0
+opentelemetry = 1.34.1
opentelemetrysemconv = 1.23.1-alpha
diff --git a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/gradle/run.gradle b/gradle/run.gradle
index 639479e97d28f..34651f1d94964 100644
--- a/gradle/run.gradle
+++ b/gradle/run.gradle
@@ -39,6 +39,12 @@ testClusters {
testDistribution = 'archive'
if (numZones > 1) numberOfZones = numZones
if (numNodes > 1) numberOfNodes = numNodes
+ if (findProperty("installedPlugins")) {
+ installedPlugins = Eval.me(installedPlugins)
+ for (String p : installedPlugins) {
+ plugin('plugins:'.concat(p))
+ }
+ }
}
}
diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
index 1864aec4aa951..569f48a8465f3 100644
--- a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
+++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
@@ -113,7 +113,7 @@ private void process(ExecutableElement executable, Element enclosing) {
// The executable element should not be internal (unless constructor for injectable core component)
checkNotInternal(enclosing, executable);
- // Check this elements annotations
+ // Check this element's annotations
for (final AnnotationMirror annotation : executable.getAnnotationMirrors()) {
final Element element = annotation.getAnnotationType().asElement();
if (inspectable(element)) {
@@ -210,7 +210,7 @@ private void process(ExecutableElement executable, ReferenceType ref) {
}
}
- // Check this elements annotations
+ // Check this element's annotations
for (final AnnotationMirror annotation : ref.getAnnotationMirrors()) {
final Element element = annotation.getAnnotationType().asElement();
if (inspectable(element)) {
@@ -316,7 +316,7 @@ private void checkPublic(@Nullable Element referencedBy, final Element element)
reportFailureAs,
"The element "
+ element
- + " is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ + " is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "")
);
}
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
index df04709458b29..8d8a4c7895339 100644
--- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
@@ -35,7 +35,7 @@ public void testPublicApiMethodArgumentNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotated)"
)
)
@@ -56,7 +56,7 @@ public void testPublicApiMethodArgumentNotAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedGenerics)"
)
)
@@ -77,7 +77,7 @@ public void testPublicApiMethodThrowsNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodThrowsNotAnnotated)"
)
)
@@ -111,7 +111,7 @@ public void testPublicApiMethodArgumentNotAnnotatedPackagePrivate() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)"
)
)
@@ -209,7 +209,7 @@ public void testPublicApiMethodReturnNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotated)"
)
)
@@ -230,7 +230,7 @@ public void testPublicApiMethodReturnNotAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedGenerics)"
)
)
@@ -251,7 +251,7 @@ public void testPublicApiMethodReturnNotAnnotatedArray() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedArray)"
)
)
@@ -272,7 +272,7 @@ public void testPublicApiMethodReturnNotAnnotatedBoundedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedBoundedGenerics)"
)
)
@@ -297,7 +297,7 @@ public void testPublicApiMethodReturnNotAnnotatedAnnotation() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedAnnotation)"
)
)
@@ -388,7 +388,7 @@ public void testPublicApiMethodGenericsArgumentNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodGenericsArgumentNotAnnotated)"
)
)
@@ -453,7 +453,7 @@ public void testPublicApiMethodReturnAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnAnnotatedGenerics)"
)
)
diff --git a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 b/libs/core/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/core/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.16.1.jar.sha1 b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0.jar.sha1
deleted file mode 100644
index f9a3e2f3cbee6..0000000000000
--- a/libs/core/licenses/lucene-core-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e8421c5f8573bcf22e9265fc7e19469545a775a
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.9.1.jar.sha1 b/libs/core/licenses/lucene-core-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..ae596196d9e6a
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.9.1.jar.sha1
@@ -0,0 +1 @@
+55249fa9a0ed321adcf8283c6f3b649a6812b0a9
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index d94be3f25b53d..6a92993f5dd42 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -98,8 +98,8 @@ public class Version implements Comparable, ToXContentFragment {
public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
- public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0);
- public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0);
+ public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_1);
+ public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_1);
public static final Version CURRENT = V_3_0_0;
public static Version fromId(int id) {
diff --git a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
index c0abad7ed727f..1e48cf1f476da 100644
--- a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
+++ b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
@@ -32,6 +32,7 @@
package org.opensearch.core.index.shard;
+import org.apache.lucene.util.RamUsageEstimator;
import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -55,6 +56,8 @@ public class ShardId implements Comparable, ToXContentFragment, Writeab
private final int shardId;
private final int hashCode;
+ private final static long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ShardId.class);
+
/**
* Constructs a new shard id.
* @param index the index name
@@ -88,6 +91,10 @@ public ShardId(StreamInput in) throws IOException {
hashCode = computeHashCode();
}
+ public long getBaseRamBytesUsed() {
+ return BASE_RAM_BYTES_USED;
+ }
+
/**
* Writes this shard id to a stream.
* @param out the stream to write to
diff --git a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
deleted file mode 100644
index 8da478fc6013d..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-35e8b7bf4fc1d078766bb155103d433ed5bb1627
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..b4b781f604910
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
@@ -0,0 +1 @@
+1be7098dccc079171464dca7e386bd8df623b031
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
deleted file mode 100644
index 3e952ffe92418..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3c422d7f3901c9a1becf9df3cf41efc68a5ab95c
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..ad91e748ebe94
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
@@ -0,0 +1 @@
+c4ddbc5277670f2e56b1f5e44e83afa748bcb125
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
deleted file mode 100644
index d62b5874ab023..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2033e2c5f531785d17f3a2bc31842e3bbb7983b2
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..9b30e7bf921b2
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
@@ -0,0 +1 @@
+8e4f1923d73cd55f2b4c0d56ee4ed80419297354
\ No newline at end of file
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
new file mode 100644
index 0000000000000..4c05f0058f2ed
--- /dev/null
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.xcontent;
+
+import com.fasterxml.jackson.core.StreamReadConstraints;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Consolidates the XContent constraints (primarily reflecting Jackson's {@link StreamReadConstraints} constraints)
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+public interface XContentContraints {
+ final String DEFAULT_MAX_STRING_LEN_PROPERTY = "opensearch.xcontent.string.length.max";
+ final String DEFAULT_MAX_NAME_LEN_PROPERTY = "opensearch.xcontent.name.length.max";
+ final String DEFAULT_MAX_DEPTH_PROPERTY = "opensearch.xcontent.depth.max";
+
+ final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(System.getProperty(DEFAULT_MAX_STRING_LEN_PROPERTY, "50000000" /* ~50 Mb */));
+
+ final int DEFAULT_MAX_NAME_LEN = Integer.parseInt(
+ System.getProperty(DEFAULT_MAX_NAME_LEN_PROPERTY, "50000" /* StreamReadConstraints.DEFAULT_MAX_NAME_LEN */)
+ );
+
+ final int DEFAULT_MAX_DEPTH = Integer.parseInt(
+ System.getProperty(DEFAULT_MAX_DEPTH_PROPERTY, "1000" /* StreamReadConstraints.DEFAULT_MAX_DEPTH */)
+ );
+}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
index 81f8fe9b6366f..7e92f236213d4 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
@@ -37,8 +37,10 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.cbor.CBORFactory;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -58,11 +60,7 @@
/**
* A CBOR based content implementation using Jackson.
*/
-public class CborXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class CborXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(cborXContent);
}
@@ -76,7 +74,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method
cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- cborFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ cborFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ cborFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
cborFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
cborXContent = new CborXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
index 4bd7c4c99bb46..91f6bbeb4f786 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
@@ -38,7 +38,9 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -57,11 +59,7 @@
/**
* A JSON based content implementation using Jackson.
*/
-public class JsonXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class JsonXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(jsonXContent);
}
@@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method
jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- jsonFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ jsonFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ jsonFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
jsonFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
jsonXContent = new JsonXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
index e824d4e1ae991..c73e126102a80 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
@@ -37,9 +37,11 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -58,11 +60,7 @@
/**
* A Smile based content implementation using Jackson.
*/
-public class SmileXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class SmileXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(smileXContent);
}
@@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method
smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- smileFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ smileFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ smileFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
smileFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
smileXContent = new SmileXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
index 0ad3c44e0168a..3f6a4b3aeead7 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
@@ -36,8 +36,10 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -56,11 +58,7 @@
/**
* A YAML based content implementation using Jackson.
*/
-public class YamlXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class YamlXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(yamlXContent);
}
@@ -71,7 +69,14 @@ public static XContentBuilder contentBuilder() throws IOException {
static {
yamlFactory = new YAMLFactory();
yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- yamlFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ yamlFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ yamlFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
yamlFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
yamlXContent = new YamlXContent();
}
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
index d3d9ea174cf1b..0e431d8ea4277 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
@@ -40,6 +40,7 @@
import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.smile.SmileXContent;
+import org.opensearch.common.xcontent.yaml.YamlXContent;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParseException;
@@ -48,16 +49,20 @@
import org.opensearch.test.OpenSearchTestCase;
import java.io.IOException;
+import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
+import java.util.zip.GZIPInputStream;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
+import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -67,6 +72,7 @@
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assume.assumeThat;
import static org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage;
public class XContentParserTests extends OpenSearchTestCase {
@@ -94,6 +100,50 @@ public class XContentParserTests extends OpenSearchTestCase {
() -> randomRealisticUnicodeOfCodepointLength(3145730)
);
+ private static final Map> FIELD_NAME_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomAlphaOfLengthBetween(1, JsonXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.CBOR,
+ () -> randomAlphaOfLengthBetween(1, CborXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.SMILE,
+ () -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.YAML,
+ () -> randomAlphaOfLengthBetween(1, YamlXContent.DEFAULT_MAX_NAME_LEN)
+ );
+
+ private static final Map> FIELD_NAME_OFF_LIMIT_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomAlphaOfLength(JsonXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.CBOR,
+ () -> randomAlphaOfLength(CborXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.SMILE,
+ () -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.YAML,
+ () -> randomAlphaOfLength(YamlXContent.DEFAULT_MAX_NAME_LEN + 1)
+ );
+
+ private static final Map> DEPTH_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomIntBetween(1, JsonXContent.DEFAULT_MAX_DEPTH),
+ XContentType.CBOR,
+ () -> randomIntBetween(1, CborXContent.DEFAULT_MAX_DEPTH),
+ XContentType.SMILE,
+ () -> randomIntBetween(1, SmileXContent.DEFAULT_MAX_DEPTH),
+ XContentType.YAML,
+ () -> randomIntBetween(1, YamlXContent.DEFAULT_MAX_DEPTH)
+ );
+
+ private static final Map> OFF_LIMIT_DEPTH_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> JsonXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.CBOR,
+ () -> CborXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.SMILE,
+ () -> SmileXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.YAML,
+ () -> YamlXContent.DEFAULT_MAX_DEPTH + 1
+ );
+
public void testStringOffLimit() throws IOException {
final XContentType xContentType = randomFrom(XContentType.values());
@@ -155,6 +205,188 @@ public void testString() throws IOException {
}
}
+ public void testFieldNameOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = FIELD_NAME_OFF_LIMIT_GENERATORS.get(xContentType).get();
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ // See please https://github.com/FasterXML/jackson-dataformats-binary/issues/392, support
+ // for CBOR, Smile is coming
+ if (xContentType != XContentType.JSON) {
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ assertNull(parser.nextToken());
+ } else {
+ assertThrows(StreamConstraintsException.class, () -> parser.nextToken());
+ }
+ }
+ }
+ }
+
+ public void testFieldName() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = FIELD_NAME_GENERATORS.get(xContentType).get();
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ assertNull(parser.nextToken());
+ }
+ }
+ }
+
+ public void testWriteDepthOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ // Branching off YAML logic into separate test case testWriteDepthOffLimitYaml since it behaves differently
+ assumeThat(xContentType, not(XContentType.YAML));
+
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ // The behavior here is very interesting: the generator does write the new object tag (changing the internal state)
+ // BUT throws the exception after the fact, this is why we have to close the object at the end.
+ assertThrows(StreamConstraintsException.class, () -> builder.startObject());
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+
+ builder.endObject();
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.endObject();
+ }
+ }
+ }
+
+ public void testWriteDepthOffLimitYaml() throws IOException {
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ try (XContentBuilder builder = XContentBuilder.builder(XContentType.YAML.xContent())) {
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(XContentType.YAML).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ // The behavior here is very interesting: the generator does write the new object tag (changing the internal state)
+ // BUT throws the exception after the fact, this is why we have to close the object at the end.
+ assertThrows(StreamConstraintsException.class, () -> builder.startObject());
+ } catch (final IllegalStateException ex) {
+ // YAML parser is having really hard time recovering from StreamConstraintsException, the internal
+ // state seems to be completely messed up and the closing cleanly seems to be not feasible.
+ }
+ }
+
+ public void testReadDepthOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ // Since parser and generator use the same max depth constraints, we could not generate the content with off limits,
+ // using precreated test files instead.
+ try (
+ InputStream in = new GZIPInputStream(
+ getDataInputStream("depth-off-limit." + xContentType.name().toLowerCase(Locale.US) + ".gz")
+ )
+ ) {
+ try (XContentParser parser = createParser(xContentType.xContent(), in)) {
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ }
+
+ if (xContentType != XContentType.YAML) {
+ assertThrows(StreamConstraintsException.class, () -> parser.nextToken());
+ }
+ }
+ }
+ }
+
+ public void testDepth() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ final int maxDepth = DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.endObject();
+ }
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field + depth, parser.currentName());
+ }
+
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ }
+
+ assertNull(parser.nextToken());
+ }
+ }
+ }
+
public void testFloat() throws IOException {
final XContentType xContentType = randomFrom(XContentType.values());
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz
new file mode 100644
index 0000000000000..88de7e590e7f0
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz
new file mode 100644
index 0000000000000..76274910542ac
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz
new file mode 100644
index 0000000000000..e248778b37253
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz
new file mode 100644
index 0000000000000..3b36594482a68
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz differ
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java
new file mode 100644
index 0000000000000..dec69df275130
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java
@@ -0,0 +1,161 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.script.ScriptService;
+import org.opensearch.script.TemplateScript;
+
+import java.util.Map;
+
+public final class CopyProcessor extends AbstractProcessor {
+ public static final String TYPE = "copy";
+
+ private final TemplateScript.Factory sourceField;
+ private final TemplateScript.Factory targetField;
+
+ private final boolean ignoreMissing;
+
+ private final boolean removeSource;
+
+ private final boolean overrideTarget;
+
+ CopyProcessor(String tag, String description, TemplateScript.Factory sourceField, TemplateScript.Factory targetField) {
+ this(tag, description, sourceField, targetField, false, false, false);
+ }
+
+ CopyProcessor(
+ String tag,
+ String description,
+ TemplateScript.Factory sourceField,
+ TemplateScript.Factory targetField,
+ boolean ignoreMissing,
+ boolean removeSource,
+ boolean overrideTarget
+ ) {
+ super(tag, description);
+ this.sourceField = sourceField;
+ this.targetField = targetField;
+ this.ignoreMissing = ignoreMissing;
+ this.removeSource = removeSource;
+ this.overrideTarget = overrideTarget;
+ }
+
+ public TemplateScript.Factory getSourceField() {
+ return sourceField;
+ }
+
+ public TemplateScript.Factory getTargetField() {
+ return targetField;
+ }
+
+ public boolean isIgnoreMissing() {
+ return ignoreMissing;
+ }
+
+ public boolean isRemoveSource() {
+ return removeSource;
+ }
+
+ public boolean isOverrideTarget() {
+ return overrideTarget;
+ }
+
+ @Override
+ public IngestDocument execute(IngestDocument document) {
+ String source = document.renderTemplate(sourceField);
+ final boolean sourceFieldPathIsNullOrEmpty = Strings.isNullOrEmpty(source);
+ if (sourceFieldPathIsNullOrEmpty || document.hasField(source, true) == false) {
+ if (ignoreMissing) {
+ return document;
+ } else if (sourceFieldPathIsNullOrEmpty) {
+ throw new IllegalArgumentException("source field path cannot be null nor empty");
+ } else {
+ throw new IllegalArgumentException("source field [" + source + "] doesn't exist");
+ }
+ }
+
+ String target = document.renderTemplate(targetField);
+ if (Strings.isNullOrEmpty(target)) {
+ throw new IllegalArgumentException("target field path cannot be null nor empty");
+ }
+ if (source.equals(target)) {
+ throw new IllegalArgumentException("source field path and target field path cannot be same");
+ }
+
+ if (overrideTarget || document.hasField(target, true) == false || document.getFieldValue(target, Object.class) == null) {
+ Object sourceValue = document.getFieldValue(source, Object.class);
+ document.setFieldValue(target, IngestDocument.deepCopy(sourceValue));
+ } else {
+ throw new IllegalArgumentException("target field [" + target + "] already exists");
+ }
+
+ if (removeSource) {
+ document.removeField(source);
+ }
+
+ return document;
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static final class Factory implements Processor.Factory {
+
+ private final ScriptService scriptService;
+
+ public Factory(ScriptService scriptService) {
+ this.scriptService = scriptService;
+ }
+
+ @Override
+ public CopyProcessor create(
+ Map<String, Processor.Factory> registry,
+ String processorTag,
+ String description,
+ Map<String, Object> config
+ ) throws Exception {
+ String sourceField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "source_field");
+ TemplateScript.Factory sourceFieldTemplate = ConfigurationUtils.compileTemplate(
+ TYPE,
+ processorTag,
+ "source_field",
+ sourceField,
+ scriptService
+ );
+ String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field");
+ TemplateScript.Factory targetFieldTemplate = ConfigurationUtils.compileTemplate(
+ TYPE,
+ processorTag,
+ "target_field",
+ targetField,
+ scriptService
+ );
+ boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
+ boolean removeSource = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "remove_source", false);
+ boolean overrideTarget = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override_target", false);
+
+ return new CopyProcessor(
+ processorTag,
+ description,
+ sourceFieldTemplate,
+ targetFieldTemplate,
+ ignoreMissing,
+ removeSource,
+ overrideTarget
+ );
+ }
+ }
+}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
index a2a51d968e078..7c1b4841122b0 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
@@ -106,6 +106,7 @@ public Map getProcessors(Processor.Parameters paramet
processors.put(DropProcessor.TYPE, new DropProcessor.Factory());
processors.put(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory());
processors.put(CsvProcessor.TYPE, new CsvProcessor.Factory());
+ processors.put(CopyProcessor.TYPE, new CopyProcessor.Factory(parameters.scriptService));
return Collections.unmodifiableMap(processors);
}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java
index a48cfd87b78c3..d01dce02fca31 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java
@@ -32,6 +32,7 @@
package org.opensearch.ingest.common;
+import org.opensearch.common.Nullable;
import org.opensearch.core.common.Strings;
import org.opensearch.index.VersionType;
import org.opensearch.ingest.AbstractProcessor;
@@ -42,11 +43,15 @@
import org.opensearch.script.TemplateScript;
import java.util.ArrayList;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
import java.util.stream.Collectors;
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
/**
* Processor that removes existing fields. Nothing happens if the field is not present.
*/
@@ -55,11 +60,28 @@ public final class RemoveProcessor extends AbstractProcessor {
public static final String TYPE = "remove";
private final List fields;
+ private final List<TemplateScript.Factory> excludeFields;
private final boolean ignoreMissing;
- RemoveProcessor(String tag, String description, List fields, boolean ignoreMissing) {
+ RemoveProcessor(
+ String tag,
+ String description,
+ @Nullable List<TemplateScript.Factory> fields,
+ @Nullable List<TemplateScript.Factory> excludeFields,
+ boolean ignoreMissing
+ ) {
super(tag, description);
- this.fields = new ArrayList<>(fields);
+ if (fields == null && excludeFields == null || fields != null && excludeFields != null) {
+ throw new IllegalArgumentException("either fields or excludeFields must be set");
+ }
+ if (fields != null) {
+ this.fields = new ArrayList<>(fields);
+ this.excludeFields = null;
+ } else {
+ this.fields = null;
+ this.excludeFields = new ArrayList<>(excludeFields);
+ }
+
this.ignoreMissing = ignoreMissing;
}
@@ -67,42 +89,76 @@ public List getFields() {
return fields;
}
+ public List<TemplateScript.Factory> getExcludeFields() {
+ return excludeFields;
+ }
+
@Override
public IngestDocument execute(IngestDocument document) {
- fields.forEach(field -> {
- String path = document.renderTemplate(field);
- final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path);
- if (fieldPathIsNullOrEmpty || document.hasField(path) == false) {
- if (ignoreMissing) {
- return;
- } else if (fieldPathIsNullOrEmpty) {
- throw new IllegalArgumentException("field path cannot be null nor empty");
- } else {
- throw new IllegalArgumentException("field [" + path + "] doesn't exist");
+ if (fields != null && !fields.isEmpty()) {
+ fields.forEach(field -> {
+ String path = document.renderTemplate(field);
+ final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path);
+ if (fieldPathIsNullOrEmpty || document.hasField(path) == false) {
+ if (ignoreMissing) {
+ return;
+ } else if (fieldPathIsNullOrEmpty) {
+ throw new IllegalArgumentException("field path cannot be null nor empty");
+ } else {
+ throw new IllegalArgumentException("field [" + path + "] doesn't exist");
+ }
}
- }
- // cannot remove _index, _version and _version_type.
- if (path.equals(IngestDocument.Metadata.INDEX.getFieldName())
- || path.equals(IngestDocument.Metadata.VERSION.getFieldName())
- || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) {
- throw new IllegalArgumentException("cannot remove metadata field [" + path + "]");
- }
- // removing _id is disallowed when there's an external version specified in the request
- if (path.equals(IngestDocument.Metadata.ID.getFieldName())
- && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) {
- String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class);
- if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) {
- Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true);
- throw new IllegalArgumentException(
- "cannot remove metadata field [_id] when specifying external version for the document, version: "
- + version
- + ", version_type: "
- + versionType
- );
+
+ // cannot remove _index, _version and _version_type.
+ if (path.equals(IngestDocument.Metadata.INDEX.getFieldName())
+ || path.equals(IngestDocument.Metadata.VERSION.getFieldName())
+ || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) {
+ throw new IllegalArgumentException("cannot remove metadata field [" + path + "]");
}
+ // removing _id is disallowed when there's an external version specified in the request
+ if (path.equals(IngestDocument.Metadata.ID.getFieldName())
+ && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) {
+ String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class);
+ if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) {
+ Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true);
+ throw new IllegalArgumentException(
+ "cannot remove metadata field [_id] when specifying external version for the document, version: "
+ + version
+ + ", version_type: "
+ + versionType
+ );
+ }
+ }
+ document.removeField(path);
+ });
+ }
+
+ if (excludeFields != null && !excludeFields.isEmpty()) {
+ Set<String> excludeFieldSet = new HashSet<>();
+ excludeFields.forEach(field -> {
+ String path = document.renderTemplate(field);
+ // ignore the empty or null field path
+ if (!Strings.isNullOrEmpty(path)) {
+ excludeFieldSet.add(path);
+ }
+ });
+
+ if (!excludeFieldSet.isEmpty()) {
+ Set<String> existingFields = new HashSet<>(document.getSourceAndMetadata().keySet());
+ Set<String> metadataFields = document.getMetadata()
+ .keySet()
+ .stream()
+ .map(IngestDocument.Metadata::getFieldName)
+ .collect(Collectors.toSet());
+ existingFields.forEach(field -> {
+ // ignore metadata fields such as _index, _id, etc.
+ if (!metadataFields.contains(field) && !excludeFieldSet.contains(field)) {
+ document.removeField(field);
+ }
+ });
}
- document.removeField(path);
- });
+ }
+
return document;
}
@@ -127,20 +183,41 @@ public RemoveProcessor create(
Map config
) throws Exception {
final List fields = new ArrayList<>();
- final Object field = ConfigurationUtils.readObject(TYPE, processorTag, config, "field");
- if (field instanceof List) {
- @SuppressWarnings("unchecked")
- List stringList = (List) field;
- fields.addAll(stringList);
- } else {
- fields.add((String) field);
+ final List excludeFields = new ArrayList<>();
+ final Object field = ConfigurationUtils.readOptionalObject(config, "field");
+ final Object excludeField = ConfigurationUtils.readOptionalObject(config, "exclude_field");
+
+ if (field == null && excludeField == null || field != null && excludeField != null) {
+ throw newConfigurationException(TYPE, processorTag, "field", "ether field or exclude_field must be set");
}
- final List compiledTemplates = fields.stream()
- .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService))
- .collect(Collectors.toList());
boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
- return new RemoveProcessor(processorTag, description, compiledTemplates, ignoreMissing);
+
+ if (field != null) {
+ if (field instanceof List) {
+ @SuppressWarnings("unchecked")
+ List stringList = (List) field;
+ fields.addAll(stringList);
+ } else {
+ fields.add((String) field);
+ }
+ List fieldCompiledTemplates = fields.stream()
+ .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService))
+ .collect(Collectors.toList());
+ return new RemoveProcessor(processorTag, description, fieldCompiledTemplates, null, ignoreMissing);
+ } else {
+ if (excludeField instanceof List) {
+ @SuppressWarnings("unchecked")
+ List stringList = (List) excludeField;
+ excludeFields.addAll(stringList);
+ } else {
+ excludeFields.add((String) excludeField);
+ }
+ List excludeFieldCompiledTemplates = excludeFields.stream()
+ .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "exclude_field", f, scriptService))
+ .collect(Collectors.toList());
+ return new RemoveProcessor(processorTag, description, null, excludeFieldCompiledTemplates, ignoreMissing);
+ }
}
}
}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
index cc8889af27621..d1b4a0961b7bd 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
@@ -102,8 +102,11 @@ public IngestDocument execute(IngestDocument document) {
} else {
ingestScript = precompiledIngestScript;
}
- ingestScript.execute(document.getSourceAndMetadata());
- CollectionUtils.ensureNoSelfReferences(document.getSourceAndMetadata(), "ingest script");
+ IngestDocument mutableDocument = new IngestDocument(document);
+ ingestScript.execute(mutableDocument.getSourceAndMetadata());
+ CollectionUtils.ensureNoSelfReferences(mutableDocument.getSourceAndMetadata(), "ingest script");
+ document.getSourceAndMetadata().clear();
+ document.getSourceAndMetadata().putAll(mutableDocument.getSourceAndMetadata());
return document;
}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java
new file mode 100644
index 0000000000000..c1ca86a49e334
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java
@@ -0,0 +1,101 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.ingest.TestTemplateService;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class CopyProcessorFactoryTests extends OpenSearchTestCase {
+
+ private CopyProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new CopyProcessor.Factory(TestTemplateService.instance());
+ }
+
+ public void testCreate() throws Exception {
+ boolean ignoreMissing = randomBoolean();
+ boolean removeSource = randomBoolean();
+ boolean overrideTarget = randomBoolean();
+ Map config = new HashMap<>();
+ config.put("source_field", "source");
+ config.put("target_field", "target");
+ config.put("ignore_missing", ignoreMissing);
+ config.put("remove_source", removeSource);
+ config.put("override_target", overrideTarget);
+ String processorTag = randomAlphaOfLength(10);
+ CopyProcessor copyProcessor = factory.create(null, processorTag, null, config);
+ assertThat(copyProcessor.getTag(), equalTo(processorTag));
+ assertThat(copyProcessor.getSourceField().newInstance(Collections.emptyMap()).execute(), equalTo("source"));
+ assertThat(copyProcessor.getTargetField().newInstance(Collections.emptyMap()).execute(), equalTo("target"));
+ assertThat(copyProcessor.isIgnoreMissing(), equalTo(ignoreMissing));
+ assertThat(copyProcessor.isRemoveSource(), equalTo(removeSource));
+ assertThat(copyProcessor.isOverrideTarget(), equalTo(overrideTarget));
+ }
+
+ public void testCreateWithSourceField() throws Exception {
+ Map config = new HashMap<>();
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[source_field] required property is missing"));
+ }
+
+ config.put("source_field", null);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[source_field] required property is missing"));
+ }
+ }
+
+ public void testCreateWithTargetField() throws Exception {
+ Map config = new HashMap<>();
+ config.put("source_field", "source");
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
+ }
+
+ config.put("source_field", "source");
+ config.put("target_field", null);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
+ }
+ }
+
+ public void testInvalidMustacheTemplate() throws Exception {
+ CopyProcessor.Factory factory = new CopyProcessor.Factory(TestTemplateService.instance(true));
+ Map config = new HashMap<>();
+ config.put("source_field", "{{source}}");
+ config.put("target_field", "target");
+ String processorTag = randomAlphaOfLength(10);
+ OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config));
+ assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
+ assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
+ }
+
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java
new file mode 100644
index 0000000000000..f271bdd342d0b
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java
@@ -0,0 +1,125 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.ingest.RandomDocumentPicks;
+import org.opensearch.ingest.TestTemplateService;
+import org.opensearch.test.OpenSearchTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class CopyProcessorTests extends OpenSearchTestCase {
+
+ public void testCopyExistingField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String sourceFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ String targetFieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, false, false);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+ Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class);
+ assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue));
+ assertThat(ingestDocument.getFieldValue(sourceFieldName, Object.class), equalTo(sourceValue));
+
+ Processor processorWithEmptyTarget = createCopyProcessor(sourceFieldName, "", false, false, false);
+ assertThrows(
+ "target field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> processorWithEmptyTarget.execute(ingestDocument)
+ );
+
+ Processor processorWithSameSourceAndTarget = createCopyProcessor(sourceFieldName, sourceFieldName, false, false, false);
+ assertThrows(
+ "source field path and target field path cannot be same",
+ IllegalArgumentException.class,
+ () -> processorWithSameSourceAndTarget.execute(ingestDocument)
+ );
+ }
+
+ public void testCopyWithIgnoreMissing() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String targetFieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = createCopyProcessor("non-existing-field", targetFieldName, false, false, false);
+ assertThrows(
+ "source field [non-existing-field] doesn't exist",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+
+ Processor processorWithEmptyFieldName = createCopyProcessor("", targetFieldName, false, false, false);
+ assertThrows(
+ "source field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> processorWithEmptyFieldName.execute(ingestDocument)
+ );
+
+ Processor processorWithIgnoreMissing = createCopyProcessor("non-existing-field", targetFieldName, true, false, false);
+ processorWithIgnoreMissing.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldName), equalTo(false));
+ }
+
+ public void testCopyWithRemoveSource() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String sourceFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ String targetFieldName = RandomDocumentPicks.randomFieldName(random());
+ Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class);
+
+ Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, true, false);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+ assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue));
+ assertThat(ingestDocument.hasField(sourceFieldName), equalTo(false));
+ }
+
+ public void testCopyToExistingField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String targetFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ Object sourceValue = RandomDocumentPicks.randomFieldValue(random());
+ String sourceFieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, sourceValue);
+
+ Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, false, false);
+ assertThrows(
+ "target field [" + targetFieldName + "] already exists",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+
+ // if override_target is false but target field's value is null, copy can execute successfully
+ String targetFieldWithNullValue = RandomDocumentPicks.addRandomField(random(), ingestDocument, null);
+ Processor processorWithTargetNullValue = createCopyProcessor(sourceFieldName, targetFieldWithNullValue, false, false, false);
+ processorWithTargetNullValue.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldWithNullValue), equalTo(true));
+ assertThat(ingestDocument.getFieldValue(targetFieldWithNullValue, Object.class), equalTo(sourceValue));
+
+ Processor processorWithOverrideTargetIsTrue = createCopyProcessor(sourceFieldName, targetFieldName, false, false, true);
+ processorWithOverrideTargetIsTrue.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+ assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue));
+ }
+
+ private static Processor createCopyProcessor(
+ String sourceFieldName,
+ String targetFieldName,
+ boolean ignoreMissing,
+ boolean removeSource,
+ boolean overrideTarget
+ ) {
+ return new CopyProcessor(
+ randomAlphaOfLength(10),
+ null,
+ new TestTemplateService.MockTemplateScript.Factory(sourceFieldName),
+ new TestTemplateService.MockTemplateScript.Factory(targetFieldName),
+ ignoreMissing,
+ removeSource,
+ overrideTarget
+ );
+ }
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java
index 66ca888a0d39f..179aef2feac0c 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java
@@ -41,6 +41,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -79,16 +80,6 @@ public void testCreateMultipleFields() throws Exception {
);
}
- public void testCreateMissingField() throws Exception {
- Map config = new HashMap<>();
- try {
- factory.create(null, null, null, config);
- fail("factory create should have failed");
- } catch (OpenSearchParseException e) {
- assertThat(e.getMessage(), equalTo("[field] required property is missing"));
- }
- }
-
public void testInvalidMustacheTemplate() throws Exception {
RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true));
Map config = new HashMap<>();
@@ -98,4 +89,31 @@ public void testInvalidMustacheTemplate() throws Exception {
assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script"));
assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag));
}
+
+ public void testCreateWithExcludeField() throws Exception {
+ Map config = new HashMap<>();
+ String processorTag = randomAlphaOfLength(10);
+ OpenSearchException exception = expectThrows(
+ OpenSearchParseException.class,
+ () -> factory.create(null, processorTag, null, config)
+ );
+ assertThat(exception.getMessage(), equalTo("[field] ether field or exclude_field must be set"));
+
+ Map config2 = new HashMap<>();
+ config2.put("field", "field1");
+ config2.put("exclude_field", "field2");
+ exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2));
+ assertThat(exception.getMessage(), equalTo("[field] ether field or exclude_field must be set"));
+
+ Map config6 = new HashMap<>();
+ config6.put("exclude_field", "exclude_field");
+ RemoveProcessor removeProcessor = factory.create(null, processorTag, null, config6);
+ assertThat(
+ removeProcessor.getExcludeFields()
+ .stream()
+ .map(template -> template.newInstance(Collections.emptyMap()).execute())
+ .collect(Collectors.toList()),
+ equalTo(List.of("exclude_field"))
+ );
+ }
}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
index c138ad606d2e5..78a3d36124d45 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java
@@ -38,8 +38,10 @@
import org.opensearch.ingest.Processor;
import org.opensearch.ingest.RandomDocumentPicks;
import org.opensearch.ingest.TestTemplateService;
+import org.opensearch.script.TemplateScript;
import org.opensearch.test.OpenSearchTestCase;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@@ -57,12 +59,28 @@ public void testRemoveFields() throws Exception {
randomAlphaOfLength(10),
null,
Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field)),
+ null,
false
);
processor.execute(ingestDocument);
assertThat(ingestDocument.hasField(field), equalTo(false));
}
+ public void testRemoveByExcludeFields() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ ingestDocument.setFieldValue("foo_1", "value");
+ ingestDocument.setFieldValue("foo_2", "value");
+ ingestDocument.setFieldValue("foo_3", "value");
+ List excludeFields = new ArrayList<>();
+ excludeFields.add(new TestTemplateService.MockTemplateScript.Factory("foo_1"));
+ excludeFields.add(new TestTemplateService.MockTemplateScript.Factory("foo_2"));
+ Processor processor = new RemoveProcessor(randomAlphaOfLength(10), null, null, excludeFields, false);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField("foo_1"), equalTo(true));
+ assertThat(ingestDocument.hasField("foo_2"), equalTo(true));
+ assertThat(ingestDocument.hasField("foo_3"), equalTo(false));
+ }
+
public void testRemoveNonExistingField() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
String fieldName = RandomDocumentPicks.randomFieldName(random());
@@ -183,6 +201,34 @@ public void testRemoveMetadataField() throws Exception {
}
}
+ public void testCreateRemoveProcessorWithBothFieldsAndExcludeFields() throws Exception {
+ assertThrows(
+            "either fields and excludeFields must be set",
+ IllegalArgumentException.class,
+ () -> new RemoveProcessor(randomAlphaOfLength(10), null, null, null, false)
+ );
+
+ final List fields;
+ if (randomBoolean()) {
+ fields = new ArrayList<>();
+ } else {
+ fields = List.of(new TestTemplateService.MockTemplateScript.Factory("foo_1"));
+ }
+
+ final List excludeFields;
+ if (randomBoolean()) {
+ excludeFields = new ArrayList<>();
+ } else {
+ excludeFields = List.of(new TestTemplateService.MockTemplateScript.Factory("foo_2"));
+ }
+
+ assertThrows(
+            "either fields and excludeFields must be set",
+ IllegalArgumentException.class,
+ () -> new RemoveProcessor(randomAlphaOfLength(10), null, fields, excludeFields, false)
+ );
+ }
+
public void testRemoveDocumentId() throws Exception {
Map config = new HashMap<>();
config.put("field", IngestDocument.Metadata.ID.getFieldName());
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java
index 96d9be75c4ab7..e900458e361ce 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java
@@ -105,4 +105,16 @@ private void assertIngestDocument(IngestDocument ingestDocument) {
int bytesTotal = ingestDocument.getFieldValue("bytes_in", Integer.class) + ingestDocument.getFieldValue("bytes_out", Integer.class);
assertThat(ingestDocument.getSourceAndMetadata().get("bytes_total"), is(bytesTotal));
}
+
+ public void testScriptingWithSelfReferencingSourceMetadata() {
+ ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), null, script, null, scriptService);
+ IngestDocument originalIngestDocument = randomDocument();
+ String index = originalIngestDocument.getSourceAndMetadata().get(IngestDocument.Metadata.INDEX.getFieldName()).toString();
+ String id = originalIngestDocument.getSourceAndMetadata().get(IngestDocument.Metadata.ID.getFieldName()).toString();
+ Map sourceMetadata = originalIngestDocument.getSourceAndMetadata();
+ originalIngestDocument.getSourceAndMetadata().put("_source", sourceMetadata);
+ IngestDocument ingestDocument = new IngestDocument(index, id, null, null, null, originalIngestDocument.getSourceAndMetadata());
+ expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
+ }
+
}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
index f44cc1f9f9fcf..0719082c887f2 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
@@ -36,3 +36,20 @@
- contains: { nodes.$cluster_manager.ingest.processors: { type: split } }
- contains: { nodes.$cluster_manager.ingest.processors: { type: trim } }
- contains: { nodes.$cluster_manager.ingest.processors: { type: uppercase } }
+
+---
+"Copy processor exists":
+ - skip:
+ version: " - 2.11.99"
+ features: contains
+ reason: "copy processor was introduced in 2.12.0 and contains is a newly added assertion"
+ - do:
+ cluster.state: {}
+
+ # Get cluster-manager node id
+ - set: { cluster_manager_node: cluster_manager }
+
+ - do:
+ nodes.info: {}
+
+ - contains: { nodes.$cluster_manager.ingest.processors: { type: copy } }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
index 3230fb37b43f7..a66f02d6b6a6d 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
@@ -202,3 +202,79 @@ teardown:
id: 1
- match: { _source.source_field: "foo%20bar" }
- match: { _source.target_field: "foo bar" }
+
+---
+"Test self referencing source with ignore failure":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "script" : {
+ "lang": "painless",
+ "source" : "ctx.foo['foo']=ctx.foo;ctx['test-field']='test-value'",
+ "ignore_failure": true
+ }
+ },
+ {
+ "script" : {
+ "lang": "painless",
+ "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {source_field: "fooBar", foo: {foo: "bar"}}
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.source_field: "fooBar" }
+ - match: { _source.target_field: "FOOBAR"}
+ - match: { _source.test-field: null}
+
+---
+"Test self referencing source without ignoring failure":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "script" : {
+ "lang": "painless",
+ "source" : "ctx.foo['foo']=ctx.foo;ctx['test-field']='test-value'"
+ }
+ },
+ {
+ "script" : {
+ "lang": "painless",
+ "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: bad_request
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {source_field: "fooBar", foo: {foo: "bar"}}
+ - match: { error.root_cause.0.type: "illegal_argument_exception" }
+ - match: { error.root_cause.0.reason: "Iterable object is self-referencing itself (ingest script)" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
index 6668b468f8edc..e120a865052b0 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
@@ -319,3 +319,43 @@ teardown:
}
- match: { docs.0.error.type: "illegal_argument_exception" }
- match: { docs.0.error.reason: "cannot remove metadata field [_id] when specifying external version for the document, version: 1, version_type: external_gte" }
+
+# Related issue: https://github.com/opensearch-project/OpenSearch/issues/1578
+---
+"Test remove processor with exclude_field":
+ - skip:
+ version: " - 2.11.99"
+ reason: "exclude_field is introduced in 2.12"
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove" : {
+ "exclude_field": "bar"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ foo1: "bar",
+ foo2: "bar",
+ bar: "zoo",
+ zoo: "bar"
+ }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { bar: "zoo"}}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml
new file mode 100644
index 0000000000000..0203b62ba67d6
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml
@@ -0,0 +1,374 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "1"
+ ignore: 404
+
+---
+"Test create copy processor":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12"
+ - do:
+ catch: /\[target\_field\] required property is missing/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "source"
+ }
+ }
+ ]
+ }
+ - do:
+ catch: /\[source\_field\] required property is missing/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "target_field" : "target"
+ }
+ }
+ ]
+ }
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "source",
+ "target_field" : "target",
+ "ignore_missing" : true,
+ "remove_source" : true,
+ "override_target" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+---
+"Test copy processor with ignore_missing":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "unknown_field",
+ "target_field" : "bar"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /source field \[unknown\_field\] doesn\'t exist/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "hello"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "unknown_field",
+ "target_field" : "bar",
+ "ignore_missing" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "hello"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { foo: "hello" } }
+
+---
+"Test copy processor with remove_source":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "foo",
+ "target_field" : "bar"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "hello"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { foo: "hello", bar: "hello" } }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "foo",
+ "target_field" : "bar",
+ "remove_source" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "hello"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { bar: "hello" } }
+
+---
+"Test copy processor with override_target":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "foo",
+ "target_field" : "bar"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /target field \[bar\] already exists/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "hello",
+ bar: "world"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "foo",
+ "target_field" : "bar",
+ "override_target" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "hello",
+ bar: "world"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { foo: "hello", bar: "hello" } }
+
+---
+"Test copy processor with template snippets":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "{{source}}",
+ "target_field" : "{{target}}"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /source field path cannot be null nor empty/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ target: "bar",
+ foo: "hello",
+ bar: "world"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "{{source}}",
+ "target_field" : "{{target}}"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /target field path cannot be null nor empty/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "foo",
+ foo: "hello",
+ bar: "world"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "{{source}}",
+ "target_field" : "{{target}}"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /source field path and target field path cannot be same/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "foo",
+ target: "foo",
+ foo: "hello",
+ bar: "world"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "copy" : {
+ "source_field" : "{{source}}",
+ "target_field" : "{{target}}",
+ "override_target" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "foo",
+ target: "bar",
+ foo: "hello",
+ bar: "world"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { source: "foo", target: "bar", foo: "hello", bar: "hello" } }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
index 7c073739f6a1f..edd649a310d42 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
@@ -1113,3 +1113,48 @@ teardown:
- match: { status: 400 }
- match: { error.root_cause.0.type: "illegal_argument_exception" }
- match: { error.root_cause.0.reason: "Failed to parse parameter [_if_primary_term], only int or long is accepted" }
+
+---
+"Test simulate with pipeline with ignore failure and cyclic field assignments in script":
+ - do:
+ ingest.simulate:
+ verbose: true
+ body: >
+ {
+ "pipeline": {
+ "description": "_description",
+ "processors": [
+ {
+ "script" : {
+ "ignore_failure" : true,
+ "lang": "painless",
+ "source": "ctx.foo['foo']=ctx.foo;ctx.tag='recursive'"
+ }
+ },
+ {
+ "script" : {
+ "lang": "painless",
+ "source" : "ctx.target_field = Processors.uppercase(ctx.foo.foo)"
+ }
+ }
+ ]
+ },
+ "docs": [
+ {
+ "_source": {
+ "foo": {
+ "foo": "bar"
+ }
+ }
+ }
+ ]
+ }
+ - length: { docs: 1 }
+ - length: { docs.0.processor_results: 2 }
+ - match: { docs.0.processor_results.0.status: "error_ignored" }
+ - match: { docs.0.processor_results.0.ignored_error.error.type: "illegal_argument_exception" }
+ - match: { docs.0.processor_results.0.doc._source.tag: null }
+ - match: { docs.0.processor_results.1.doc._source.target_field: "BAR" }
+ - match: { docs.0.processor_results.1.doc._source.foo.foo: "bar" }
+ - match: { docs.0.processor_results.1.status: "success" }
+ - match: { docs.0.processor_results.1.processor_type: "script" }
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1
deleted file mode 100644
index 892865a017f48..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7725476acfcb9bdfeff1b813ce15c39c6b857dc2
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..402cc36ba3d68
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
@@ -0,0 +1 @@
+1782a69d0e83af9cc3c65db0dcd2e7e7c1e5f90e
\ No newline at end of file
diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
index 21b03b85d8edd..69b789dd2aa25 100644
--- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4
+++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
@@ -25,7 +25,7 @@ protected abstract boolean isSlashRegex();
}
WS: [ \t\n\r]+ -> skip;
-COMMENT: ( '//' .*? [\n\r] | '/*' .*? '*/' ) -> skip;
+COMMENT: ( '//' ~[\n\r]* | '/*' .*? '*/' ) -> skip;
LBRACK: '{';
RBRACK: '}';
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java
index fb33cf6e2a6f5..260a2fc0c062c 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java
@@ -435,7 +435,7 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) {
return true;
}
- public static final String _serializedATN = "\u0004\u0000U\u0278\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"
+ public static final String _serializedATN = "\u0004\u0000U\u0277\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"
+ "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"
+ "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"
+ "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"
@@ -459,381 +459,381 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) {
+ "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0001\u0000\u0004\u0000\u00ae\b\u0000"
+ "\u000b\u0000\f\u0000\u00af\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"
+ "\u0001\u0001\u0001\u0001\u0005\u0001\u00b8\b\u0001\n\u0001\f\u0001\u00bb"
- + "\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005"
- + "\u0001\u00c2\b\u0001\n\u0001\f\u0001\u00c5\t\u0001\u0001\u0001\u0001\u0001"
- + "\u0003\u0001\u00c9\b\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002"
- + "\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"
- + "\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001"
- + "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001"
- + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"
- + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001"
- + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001"
- + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"
- + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"
- + "\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001"
- + "\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"
- + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001"
- + "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"
- + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"
- + "\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"
- + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001"
- + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"
- + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001"
- + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"
- + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001"
- + "\"\u0001\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001"
- + "%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001"
- + ")\u0001)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001"
- + ",\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u0001"
- + "0\u00010\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00014\u0001"
- + "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001"
- + "7\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001"
- + ":\u0001;\u0001;\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001"
- + ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"
- + "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001"
- + "D\u0001E\u0001E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001F\u0001"
- + "G\u0001G\u0004G\u01b8\bG\u000bG\fG\u01b9\u0001G\u0003G\u01bd\bG\u0001"
- + "H\u0001H\u0001H\u0004H\u01c2\bH\u000bH\fH\u01c3\u0001H\u0003H\u01c7\b"
- + "H\u0001I\u0001I\u0001I\u0005I\u01cc\bI\nI\fI\u01cf\tI\u0003I\u01d1\bI"
- + "\u0001I\u0003I\u01d4\bI\u0001J\u0001J\u0001J\u0005J\u01d9\bJ\nJ\fJ\u01dc"
- + "\tJ\u0003J\u01de\bJ\u0001J\u0001J\u0004J\u01e2\bJ\u000bJ\fJ\u01e3\u0003"
- + "J\u01e6\bJ\u0001J\u0001J\u0003J\u01ea\bJ\u0001J\u0004J\u01ed\bJ\u000b"
- + "J\fJ\u01ee\u0003J\u01f1\bJ\u0001J\u0003J\u01f4\bJ\u0001K\u0001K\u0001"
- + "K\u0001K\u0001K\u0001K\u0005K\u01fc\bK\nK\fK\u01ff\tK\u0001K\u0001K\u0001"
- + "K\u0001K\u0001K\u0001K\u0001K\u0005K\u0208\bK\nK\fK\u020b\tK\u0001K\u0003"
- + "K\u020e\bK\u0001L\u0001L\u0001L\u0001L\u0004L\u0214\bL\u000bL\fL\u0215"
- + "\u0001L\u0001L\u0005L\u021a\bL\nL\fL\u021d\tL\u0001L\u0001L\u0001M\u0001"
- + "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001N\u0001"
- + "O\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001"
+ + "\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u00c1"
+ + "\b\u0001\n\u0001\f\u0001\u00c4\t\u0001\u0001\u0001\u0001\u0001\u0003\u0001"
+ + "\u00c8\b\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0003"
+ + "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006"
+ + "\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001"
+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\u000b\u0001\u000b"
+ + "\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"
+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f"
+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010"
+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012"
+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"
+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"
+ + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"
+ + "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"
+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"
+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"
+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"
+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a"
+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"
+ + "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"
+ + "\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f"
+ + "\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001"
+ + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001&\u0001"
+ + "&\u0001&\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001"
+ + "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"
+ + ",\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00010\u0001"
+ + "1\u00011\u00011\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u0001"
+ + "5\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u00018\u0001"
+ + "8\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001"
+ + ";\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001"
+ + "?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001"
+ + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001D\u0001E\u0001"
+ + "E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001F\u0001G\u0001G\u0004"
+ + "G\u01b7\bG\u000bG\fG\u01b8\u0001G\u0003G\u01bc\bG\u0001H\u0001H\u0001"
+ + "H\u0004H\u01c1\bH\u000bH\fH\u01c2\u0001H\u0003H\u01c6\bH\u0001I\u0001"
+ + "I\u0001I\u0005I\u01cb\bI\nI\fI\u01ce\tI\u0003I\u01d0\bI\u0001I\u0003I"
+ + "\u01d3\bI\u0001J\u0001J\u0001J\u0005J\u01d8\bJ\nJ\fJ\u01db\tJ\u0003J\u01dd"
+ + "\bJ\u0001J\u0001J\u0004J\u01e1\bJ\u000bJ\fJ\u01e2\u0003J\u01e5\bJ\u0001"
+ + "J\u0001J\u0003J\u01e9\bJ\u0001J\u0004J\u01ec\bJ\u000bJ\fJ\u01ed\u0003"
+ + "J\u01f0\bJ\u0001J\u0003J\u01f3\bJ\u0001K\u0001K\u0001K\u0001K\u0001K\u0001"
+ + "K\u0005K\u01fb\bK\nK\fK\u01fe\tK\u0001K\u0001K\u0001K\u0001K\u0001K\u0001"
+ + "K\u0001K\u0005K\u0207\bK\nK\fK\u020a\tK\u0001K\u0003K\u020d\bK\u0001L"
+ + "\u0001L\u0001L\u0001L\u0004L\u0213\bL\u000bL\fL\u0214\u0001L\u0001L\u0005"
+ + "L\u0219\bL\nL\fL\u021c\tL\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001"
+ + "M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001"
+ + "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001"
+ "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001"
+ "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001"
- + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001"
- + "P\u0001P\u0001P\u0003P\u0257\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001"
- + "R\u0005R\u025f\bR\nR\fR\u0262\tR\u0001S\u0001S\u0001S\u0005S\u0267\bS"
- + "\nS\fS\u026a\tS\u0003S\u026c\bS\u0001S\u0001S\u0001T\u0001T\u0005T\u0272"
- + "\bT\nT\fT\u0275\tT\u0001T\u0001T\u0005\u00b9\u00c3\u01fd\u0209\u0215\u0000"
- + "U\u0002\u0001\u0004\u0002\u0006\u0003\b\u0004\n\u0005\f\u0006\u000e\u0007"
- + "\u0010\b\u0012\t\u0014\n\u0016\u000b\u0018\f\u001a\r\u001c\u000e\u001e"
- + "\u000f \u0010\"\u0011$\u0012&\u0013(\u0014*\u0015,\u0016.\u00170\u0018"
- + "2\u00194\u001a6\u001b8\u001c:\u001d<\u001e>\u001f@ B!D\"F#H$J%L&N\'P("
- + "R)T*V+X,Z-\\.^/`0b1d2f3h4j5l6n7p8r9t:v;x~?\u0080@\u0082A\u0084B\u0086"
- + "C\u0088D\u008aE\u008cF\u008eG\u0090H\u0092I\u0094J\u0096K\u0098L\u009a"
- + "M\u009cN\u009eO\u00a0P\u00a2Q\u00a4R\u00a6S\u00a8T\u00aaU\u0002\u0000"
- + "\u0001\u0013\u0003\u0000\t\n\r\r \u0002\u0000\n\n\r\r\u0001\u000007\u0002"
- + "\u0000LLll\u0002\u0000XXxx\u0003\u000009AFaf\u0001\u000019\u0001\u0000"
- + "09\u0006\u0000DDFFLLddffll\u0002\u0000EEee\u0002\u0000++--\u0004\u0000"
- + "DDFFddff\u0002\u0000\"\"\\\\\u0002\u0000\'\'\\\\\u0001\u0000\n\n\u0002"
- + "\u0000\n\n//\u0007\u0000UUcciilmssuuxx\u0003\u0000AZ__az\u0004\u00000"
- + "9AZ__az\u029e\u0000\u0002\u0001\u0000\u0000\u0000\u0000\u0004\u0001\u0000"
- + "\u0000\u0000\u0000\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000"
- + "\u0000\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000"
- + "\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000"
- + "\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000"
- + "\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000"
- + "\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000"
- + "\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000"
- + "\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001"
- + "\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000"
- + "\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u0000"
- + "0\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001"
- + "\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000"
- + "\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000"
- + ">\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001"
- + "\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000"
- + "\u0000\u0000H\u0001\u0000\u0000\u0000\u0000J\u0001\u0000\u0000\u0000\u0000"
- + "L\u0001\u0000\u0000\u0000\u0000N\u0001\u0000\u0000\u0000\u0000P\u0001"
- + "\u0000\u0000\u0000\u0000R\u0001\u0000\u0000\u0000\u0000T\u0001\u0000\u0000"
- + "\u0000\u0000V\u0001\u0000\u0000\u0000\u0000X\u0001\u0000\u0000\u0000\u0000"
- + "Z\u0001\u0000\u0000\u0000\u0000\\\u0001\u0000\u0000\u0000\u0000^\u0001"
- + "\u0000\u0000\u0000\u0000`\u0001\u0000\u0000\u0000\u0000b\u0001\u0000\u0000"
- + "\u0000\u0000d\u0001\u0000\u0000\u0000\u0000f\u0001\u0000\u0000\u0000\u0000"
- + "h\u0001\u0000\u0000\u0000\u0000j\u0001\u0000\u0000\u0000\u0000l\u0001"
- + "\u0000\u0000\u0000\u0000n\u0001\u0000\u0000\u0000\u0000p\u0001\u0000\u0000"
- + "\u0000\u0000r\u0001\u0000\u0000\u0000\u0000t\u0001\u0000\u0000\u0000\u0000"
- + "v\u0001\u0000\u0000\u0000\u0000x\u0001\u0000\u0000\u0000\u0000z\u0001"
- + "\u0000\u0000\u0000\u0000|\u0001\u0000\u0000\u0000\u0000~\u0001\u0000\u0000"
- + "\u0000\u0000\u0080\u0001\u0000\u0000\u0000\u0000\u0082\u0001\u0000\u0000"
- + "\u0000\u0000\u0084\u0001\u0000\u0000\u0000\u0000\u0086\u0001\u0000\u0000"
- + "\u0000\u0000\u0088\u0001\u0000\u0000\u0000\u0000\u008a\u0001\u0000\u0000"
- + "\u0000\u0000\u008c\u0001\u0000\u0000\u0000\u0000\u008e\u0001\u0000\u0000"
- + "\u0000\u0000\u0090\u0001\u0000\u0000\u0000\u0000\u0092\u0001\u0000\u0000"
- + "\u0000\u0000\u0094\u0001\u0000\u0000\u0000\u0000\u0096\u0001\u0000\u0000"
- + "\u0000\u0000\u0098\u0001\u0000\u0000\u0000\u0000\u009a\u0001\u0000\u0000"
- + "\u0000\u0000\u009c\u0001\u0000\u0000\u0000\u0000\u009e\u0001\u0000\u0000"
- + "\u0000\u0000\u00a0\u0001\u0000\u0000\u0000\u0000\u00a2\u0001\u0000\u0000"
- + "\u0000\u0000\u00a4\u0001\u0000\u0000\u0000\u0000\u00a6\u0001\u0000\u0000"
- + "\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000"
- + "\u0000\u0002\u00ad\u0001\u0000\u0000\u0000\u0004\u00c8\u0001\u0000\u0000"
- + "\u0000\u0006\u00cc\u0001\u0000\u0000\u0000\b\u00ce\u0001\u0000\u0000\u0000"
- + "\n\u00d0\u0001\u0000\u0000\u0000\f\u00d2\u0001\u0000\u0000\u0000\u000e"
- + "\u00d4\u0001\u0000\u0000\u0000\u0010\u00d6\u0001\u0000\u0000\u0000\u0012"
- + "\u00d8\u0001\u0000\u0000\u0000\u0014\u00dc\u0001\u0000\u0000\u0000\u0016"
- + "\u00e1\u0001\u0000\u0000\u0000\u0018\u00e3\u0001\u0000\u0000\u0000\u001a"
- + "\u00e5\u0001\u0000\u0000\u0000\u001c\u00e8\u0001\u0000\u0000\u0000\u001e"
- + "\u00eb\u0001\u0000\u0000\u0000 \u00f0\u0001\u0000\u0000\u0000\"\u00f6"
- + "\u0001\u0000\u0000\u0000$\u00f9\u0001\u0000\u0000\u0000&\u00fd\u0001\u0000"
- + "\u0000\u0000(\u0106\u0001\u0000\u0000\u0000*\u010c\u0001\u0000\u0000\u0000"
- + ",\u0113\u0001\u0000\u0000\u0000.\u0117\u0001\u0000\u0000\u00000\u011b"
- + "\u0001\u0000\u0000\u00002\u0121\u0001\u0000\u0000\u00004\u0127\u0001\u0000"
- + "\u0000\u00006\u012c\u0001\u0000\u0000\u00008\u0137\u0001\u0000\u0000\u0000"
- + ":\u0139\u0001\u0000\u0000\u0000<\u013b\u0001\u0000\u0000\u0000>\u013d"
- + "\u0001\u0000\u0000\u0000@\u0140\u0001\u0000\u0000\u0000B\u0142\u0001\u0000"
- + "\u0000\u0000D\u0144\u0001\u0000\u0000\u0000F\u0146\u0001\u0000\u0000\u0000"
- + "H\u0149\u0001\u0000\u0000\u0000J\u014c\u0001\u0000\u0000\u0000L\u0150"
- + "\u0001\u0000\u0000\u0000N\u0152\u0001\u0000\u0000\u0000P\u0155\u0001\u0000"
- + "\u0000\u0000R\u0157\u0001\u0000\u0000\u0000T\u015a\u0001\u0000\u0000\u0000"
- + "V\u015d\u0001\u0000\u0000\u0000X\u0161\u0001\u0000\u0000\u0000Z\u0164"
- + "\u0001\u0000\u0000\u0000\\\u0168\u0001\u0000\u0000\u0000^\u016a\u0001"
- + "\u0000\u0000\u0000`\u016c\u0001\u0000\u0000\u0000b\u016e\u0001\u0000\u0000"
- + "\u0000d\u0171\u0001\u0000\u0000\u0000f\u0174\u0001\u0000\u0000\u0000h"
- + "\u0176\u0001\u0000\u0000\u0000j\u0178\u0001\u0000\u0000\u0000l\u017b\u0001"
- + "\u0000\u0000\u0000n\u017e\u0001\u0000\u0000\u0000p\u0181\u0001\u0000\u0000"
- + "\u0000r\u0184\u0001\u0000\u0000\u0000t\u0188\u0001\u0000\u0000\u0000v"
- + "\u018b\u0001\u0000\u0000\u0000x\u018e\u0001\u0000\u0000\u0000z\u0190\u0001"
- + "\u0000\u0000\u0000|\u0193\u0001\u0000\u0000\u0000~\u0196\u0001\u0000\u0000"
- + "\u0000\u0080\u0199\u0001\u0000\u0000\u0000\u0082\u019c\u0001\u0000\u0000"
- + "\u0000\u0084\u019f\u0001\u0000\u0000\u0000\u0086\u01a2\u0001\u0000\u0000"
- + "\u0000\u0088\u01a5\u0001\u0000\u0000\u0000\u008a\u01a8\u0001\u0000\u0000"
- + "\u0000\u008c\u01ac\u0001\u0000\u0000\u0000\u008e\u01b0\u0001\u0000\u0000"
- + "\u0000\u0090\u01b5\u0001\u0000\u0000\u0000\u0092\u01be\u0001\u0000\u0000"
- + "\u0000\u0094\u01d0\u0001\u0000\u0000\u0000\u0096\u01dd\u0001\u0000\u0000"
- + "\u0000\u0098\u020d\u0001\u0000\u0000\u0000\u009a\u020f\u0001\u0000\u0000"
- + "\u0000\u009c\u0220\u0001\u0000\u0000\u0000\u009e\u0225\u0001\u0000\u0000"
- + "\u0000\u00a0\u022b\u0001\u0000\u0000\u0000\u00a2\u0256\u0001\u0000\u0000"
- + "\u0000\u00a4\u0258\u0001\u0000\u0000\u0000\u00a6\u025c\u0001\u0000\u0000"
- + "\u0000\u00a8\u026b\u0001\u0000\u0000\u0000\u00aa\u026f\u0001\u0000\u0000"
- + "\u0000\u00ac\u00ae\u0007\u0000\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000"
- + "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000"
- + "\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000"
- + "\u0000\u00b1\u00b2\u0006\u0000\u0000\u0000\u00b2\u0003\u0001\u0000\u0000"
- + "\u0000\u00b3\u00b4\u0005/\u0000\u0000\u00b4\u00b5\u0005/\u0000\u0000\u00b5"
- + "\u00b9\u0001\u0000\u0000\u0000\u00b6\u00b8\t\u0000\u0000\u0000\u00b7\u00b6"
- + "\u0001\u0000\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00ba"
- + "\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00ba\u00bc"
- + "\u0001\u0000\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00c9"
- + "\u0007\u0001\u0000\u0000\u00bd\u00be\u0005/\u0000\u0000\u00be\u00bf\u0005"
- + "*\u0000\u0000\u00bf\u00c3\u0001\u0000\u0000\u0000\u00c0\u00c2\t\u0000"
- + "\u0000\u0000\u00c1\u00c0\u0001\u0000\u0000\u0000\u00c2\u00c5\u0001\u0000"
- + "\u0000\u0000\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000"
- + "\u0000\u0000\u00c4\u00c6\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000"
- + "\u0000\u0000\u00c6\u00c7\u0005*\u0000\u0000\u00c7\u00c9\u0005/\u0000\u0000"
- + "\u00c8\u00b3\u0001\u0000\u0000\u0000\u00c8\u00bd\u0001\u0000\u0000\u0000"
- + "\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0006\u0001\u0000\u0000"
- + "\u00cb\u0005\u0001\u0000\u0000\u0000\u00cc\u00cd\u0005{\u0000\u0000\u00cd"
- + "\u0007\u0001\u0000\u0000\u0000\u00ce\u00cf\u0005}\u0000\u0000\u00cf\t"
- + "\u0001\u0000\u0000\u0000\u00d0\u00d1\u0005[\u0000\u0000\u00d1\u000b\u0001"
- + "\u0000\u0000\u0000\u00d2\u00d3\u0005]\u0000\u0000\u00d3\r\u0001\u0000"
- + "\u0000\u0000\u00d4\u00d5\u0005(\u0000\u0000\u00d5\u000f\u0001\u0000\u0000"
- + "\u0000\u00d6\u00d7\u0005)\u0000\u0000\u00d7\u0011\u0001\u0000\u0000\u0000"
- + "\u00d8\u00d9\u0005.\u0000\u0000\u00d9\u00da\u0001\u0000\u0000\u0000\u00da"
- + "\u00db\u0006\b\u0001\u0000\u00db\u0013\u0001\u0000\u0000\u0000\u00dc\u00dd"
- + "\u0005?\u0000\u0000\u00dd\u00de\u0005.\u0000\u0000\u00de\u00df\u0001\u0000"
- + "\u0000\u0000\u00df\u00e0\u0006\t\u0001\u0000\u00e0\u0015\u0001\u0000\u0000"
- + "\u0000\u00e1\u00e2\u0005,\u0000\u0000\u00e2\u0017\u0001\u0000\u0000\u0000"
- + "\u00e3\u00e4\u0005;\u0000\u0000\u00e4\u0019\u0001\u0000\u0000\u0000\u00e5"
- + "\u00e6\u0005i\u0000\u0000\u00e6\u00e7\u0005f\u0000\u0000\u00e7\u001b\u0001"
- + "\u0000\u0000\u0000\u00e8\u00e9\u0005i\u0000\u0000\u00e9\u00ea\u0005n\u0000"
- + "\u0000\u00ea\u001d\u0001\u0000\u0000\u0000\u00eb\u00ec\u0005e\u0000\u0000"
- + "\u00ec\u00ed\u0005l\u0000\u0000\u00ed\u00ee\u0005s\u0000\u0000\u00ee\u00ef"
- + "\u0005e\u0000\u0000\u00ef\u001f\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005"
- + "w\u0000\u0000\u00f1\u00f2\u0005h\u0000\u0000\u00f2\u00f3\u0005i\u0000"
- + "\u0000\u00f3\u00f4\u0005l\u0000\u0000\u00f4\u00f5\u0005e\u0000\u0000\u00f5"
- + "!\u0001\u0000\u0000\u0000\u00f6\u00f7\u0005d\u0000\u0000\u00f7\u00f8\u0005"
- + "o\u0000\u0000\u00f8#\u0001\u0000\u0000\u0000\u00f9\u00fa\u0005f\u0000"
- + "\u0000\u00fa\u00fb\u0005o\u0000\u0000\u00fb\u00fc\u0005r\u0000\u0000\u00fc"
- + "%\u0001\u0000\u0000\u0000\u00fd\u00fe\u0005c\u0000\u0000\u00fe\u00ff\u0005"
- + "o\u0000\u0000\u00ff\u0100\u0005n\u0000\u0000\u0100\u0101\u0005t\u0000"
- + "\u0000\u0101\u0102\u0005i\u0000\u0000\u0102\u0103\u0005n\u0000\u0000\u0103"
- + "\u0104\u0005u\u0000\u0000\u0104\u0105\u0005e\u0000\u0000\u0105\'\u0001"
- + "\u0000\u0000\u0000\u0106\u0107\u0005b\u0000\u0000\u0107\u0108\u0005r\u0000"
- + "\u0000\u0108\u0109\u0005e\u0000\u0000\u0109\u010a\u0005a\u0000\u0000\u010a"
- + "\u010b\u0005k\u0000\u0000\u010b)\u0001\u0000\u0000\u0000\u010c\u010d\u0005"
- + "r\u0000\u0000\u010d\u010e\u0005e\u0000\u0000\u010e\u010f\u0005t\u0000"
- + "\u0000\u010f\u0110\u0005u\u0000\u0000\u0110\u0111\u0005r\u0000\u0000\u0111"
- + "\u0112\u0005n\u0000\u0000\u0112+\u0001\u0000\u0000\u0000\u0113\u0114\u0005"
- + "n\u0000\u0000\u0114\u0115\u0005e\u0000\u0000\u0115\u0116\u0005w\u0000"
- + "\u0000\u0116-\u0001\u0000\u0000\u0000\u0117\u0118\u0005t\u0000\u0000\u0118"
- + "\u0119\u0005r\u0000\u0000\u0119\u011a\u0005y\u0000\u0000\u011a/\u0001"
- + "\u0000\u0000\u0000\u011b\u011c\u0005c\u0000\u0000\u011c\u011d\u0005a\u0000"
- + "\u0000\u011d\u011e\u0005t\u0000\u0000\u011e\u011f\u0005c\u0000\u0000\u011f"
- + "\u0120\u0005h\u0000\u0000\u01201\u0001\u0000\u0000\u0000\u0121\u0122\u0005"
- + "t\u0000\u0000\u0122\u0123\u0005h\u0000\u0000\u0123\u0124\u0005r\u0000"
- + "\u0000\u0124\u0125\u0005o\u0000\u0000\u0125\u0126\u0005w\u0000\u0000\u0126"
- + "3\u0001\u0000\u0000\u0000\u0127\u0128\u0005t\u0000\u0000\u0128\u0129\u0005"
- + "h\u0000\u0000\u0129\u012a\u0005i\u0000\u0000\u012a\u012b\u0005s\u0000"
- + "\u0000\u012b5\u0001\u0000\u0000\u0000\u012c\u012d\u0005i\u0000\u0000\u012d"
- + "\u012e\u0005n\u0000\u0000\u012e\u012f\u0005s\u0000\u0000\u012f\u0130\u0005"
- + "t\u0000\u0000\u0130\u0131\u0005a\u0000\u0000\u0131\u0132\u0005n\u0000"
- + "\u0000\u0132\u0133\u0005c\u0000\u0000\u0133\u0134\u0005e\u0000\u0000\u0134"
- + "\u0135\u0005o\u0000\u0000\u0135\u0136\u0005f\u0000\u0000\u01367\u0001"
- + "\u0000\u0000\u0000\u0137\u0138\u0005!\u0000\u0000\u01389\u0001\u0000\u0000"
- + "\u0000\u0139\u013a\u0005~\u0000\u0000\u013a;\u0001\u0000\u0000\u0000\u013b"
- + "\u013c\u0005*\u0000\u0000\u013c=\u0001\u0000\u0000\u0000\u013d\u013e\u0005"
- + "/\u0000\u0000\u013e\u013f\u0004\u001e\u0000\u0000\u013f?\u0001\u0000\u0000"
- + "\u0000\u0140\u0141\u0005%\u0000\u0000\u0141A\u0001\u0000\u0000\u0000\u0142"
- + "\u0143\u0005+\u0000\u0000\u0143C\u0001\u0000\u0000\u0000\u0144\u0145\u0005"
- + "-\u0000\u0000\u0145E\u0001\u0000\u0000\u0000\u0146\u0147\u0005<\u0000"
- + "\u0000\u0147\u0148\u0005<\u0000\u0000\u0148G\u0001\u0000\u0000\u0000\u0149"
- + "\u014a\u0005>\u0000\u0000\u014a\u014b\u0005>\u0000\u0000\u014bI\u0001"
- + "\u0000\u0000\u0000\u014c\u014d\u0005>\u0000\u0000\u014d\u014e\u0005>\u0000"
- + "\u0000\u014e\u014f\u0005>\u0000\u0000\u014fK\u0001\u0000\u0000\u0000\u0150"
- + "\u0151\u0005<\u0000\u0000\u0151M\u0001\u0000\u0000\u0000\u0152\u0153\u0005"
- + "<\u0000\u0000\u0153\u0154\u0005=\u0000\u0000\u0154O\u0001\u0000\u0000"
- + "\u0000\u0155\u0156\u0005>\u0000\u0000\u0156Q\u0001\u0000\u0000\u0000\u0157"
- + "\u0158\u0005>\u0000\u0000\u0158\u0159\u0005=\u0000\u0000\u0159S\u0001"
- + "\u0000\u0000\u0000\u015a\u015b\u0005=\u0000\u0000\u015b\u015c\u0005=\u0000"
- + "\u0000\u015cU\u0001\u0000\u0000\u0000\u015d\u015e\u0005=\u0000\u0000\u015e"
- + "\u015f\u0005=\u0000\u0000\u015f\u0160\u0005=\u0000\u0000\u0160W\u0001"
- + "\u0000\u0000\u0000\u0161\u0162\u0005!\u0000\u0000\u0162\u0163\u0005=\u0000"
- + "\u0000\u0163Y\u0001\u0000\u0000\u0000\u0164\u0165\u0005!\u0000\u0000\u0165"
- + "\u0166\u0005=\u0000\u0000\u0166\u0167\u0005=\u0000\u0000\u0167[\u0001"
- + "\u0000\u0000\u0000\u0168\u0169\u0005&\u0000\u0000\u0169]\u0001\u0000\u0000"
- + "\u0000\u016a\u016b\u0005^\u0000\u0000\u016b_\u0001\u0000\u0000\u0000\u016c"
- + "\u016d\u0005|\u0000\u0000\u016da\u0001\u0000\u0000\u0000\u016e\u016f\u0005"
- + "&\u0000\u0000\u016f\u0170\u0005&\u0000\u0000\u0170c\u0001\u0000\u0000"
- + "\u0000\u0171\u0172\u0005|\u0000\u0000\u0172\u0173\u0005|\u0000\u0000\u0173"
- + "e\u0001\u0000\u0000\u0000\u0174\u0175\u0005?\u0000\u0000\u0175g\u0001"
- + "\u0000\u0000\u0000\u0176\u0177\u0005:\u0000\u0000\u0177i\u0001\u0000\u0000"
- + "\u0000\u0178\u0179\u0005?\u0000\u0000\u0179\u017a\u0005:\u0000\u0000\u017a"
- + "k\u0001\u0000\u0000\u0000\u017b\u017c\u0005:\u0000\u0000\u017c\u017d\u0005"
- + ":\u0000\u0000\u017dm\u0001\u0000\u0000\u0000\u017e\u017f\u0005-\u0000"
- + "\u0000\u017f\u0180\u0005>\u0000\u0000\u0180o\u0001\u0000\u0000\u0000\u0181"
- + "\u0182\u0005=\u0000\u0000\u0182\u0183\u0005~\u0000\u0000\u0183q\u0001"
- + "\u0000\u0000\u0000\u0184\u0185\u0005=\u0000\u0000\u0185\u0186\u0005=\u0000"
- + "\u0000\u0186\u0187\u0005~\u0000\u0000\u0187s\u0001\u0000\u0000\u0000\u0188"
- + "\u0189\u0005+\u0000\u0000\u0189\u018a\u0005+\u0000\u0000\u018au\u0001"
- + "\u0000\u0000\u0000\u018b\u018c\u0005-\u0000\u0000\u018c\u018d\u0005-\u0000"
- + "\u0000\u018dw\u0001\u0000\u0000\u0000\u018e\u018f\u0005=\u0000\u0000\u018f"
- + "y\u0001\u0000\u0000\u0000\u0190\u0191\u0005+\u0000\u0000\u0191\u0192\u0005"
- + "=\u0000\u0000\u0192{\u0001\u0000\u0000\u0000\u0193\u0194\u0005-\u0000"
- + "\u0000\u0194\u0195\u0005=\u0000\u0000\u0195}\u0001\u0000\u0000\u0000\u0196"
- + "\u0197\u0005*\u0000\u0000\u0197\u0198\u0005=\u0000\u0000\u0198\u007f\u0001"
- + "\u0000\u0000\u0000\u0199\u019a\u0005/\u0000\u0000\u019a\u019b\u0005=\u0000"
- + "\u0000\u019b\u0081\u0001\u0000\u0000\u0000\u019c\u019d\u0005%\u0000\u0000"
- + "\u019d\u019e\u0005=\u0000\u0000\u019e\u0083\u0001\u0000\u0000\u0000\u019f"
- + "\u01a0\u0005&\u0000\u0000\u01a0\u01a1\u0005=\u0000\u0000\u01a1\u0085\u0001"
- + "\u0000\u0000\u0000\u01a2\u01a3\u0005^\u0000\u0000\u01a3\u01a4\u0005=\u0000"
- + "\u0000\u01a4\u0087\u0001\u0000\u0000\u0000\u01a5\u01a6\u0005|\u0000\u0000"
- + "\u01a6\u01a7\u0005=\u0000\u0000\u01a7\u0089\u0001\u0000\u0000\u0000\u01a8"
- + "\u01a9\u0005<\u0000\u0000\u01a9\u01aa\u0005<\u0000\u0000\u01aa\u01ab\u0005"
- + "=\u0000\u0000\u01ab\u008b\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005>\u0000"
- + "\u0000\u01ad\u01ae\u0005>\u0000\u0000\u01ae\u01af\u0005=\u0000\u0000\u01af"
- + "\u008d\u0001\u0000\u0000\u0000\u01b0\u01b1\u0005>\u0000\u0000\u01b1\u01b2"
- + "\u0005>\u0000\u0000\u01b2\u01b3\u0005>\u0000\u0000\u01b3\u01b4\u0005="
- + "\u0000\u0000\u01b4\u008f\u0001\u0000\u0000\u0000\u01b5\u01b7\u00050\u0000"
- + "\u0000\u01b6\u01b8\u0007\u0002\u0000\u0000\u01b7\u01b6\u0001\u0000\u0000"
- + "\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9\u01b7\u0001\u0000\u0000"
- + "\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bc\u0001\u0000\u0000"
- + "\u0000\u01bb\u01bd\u0007\u0003\u0000\u0000\u01bc\u01bb\u0001\u0000\u0000"
- + "\u0000\u01bc\u01bd\u0001\u0000\u0000\u0000\u01bd\u0091\u0001\u0000\u0000"
- + "\u0000\u01be\u01bf\u00050\u0000\u0000\u01bf\u01c1\u0007\u0004\u0000\u0000"
- + "\u01c0\u01c2\u0007\u0005\u0000\u0000\u01c1\u01c0\u0001\u0000\u0000\u0000"
- + "\u01c2\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000"
- + "\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c6\u0001\u0000\u0000\u0000"
- + "\u01c5\u01c7\u0007\u0003\u0000\u0000\u01c6\u01c5\u0001\u0000\u0000\u0000"
- + "\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u0093\u0001\u0000\u0000\u0000"
- + "\u01c8\u01d1\u00050\u0000\u0000\u01c9\u01cd\u0007\u0006\u0000\u0000\u01ca"
- + "\u01cc\u0007\u0007\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cc"
- + "\u01cf\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01cd"
- + "\u01ce\u0001\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000\u0000\u0000\u01cf"
- + "\u01cd\u0001\u0000\u0000\u0000\u01d0\u01c8\u0001\u0000\u0000\u0000\u01d0"
- + "\u01c9\u0001\u0000\u0000\u0000\u01d1\u01d3\u0001\u0000\u0000\u0000\u01d2"
- + "\u01d4\u0007\b\u0000\u0000\u01d3\u01d2\u0001\u0000\u0000\u0000\u01d3\u01d4"
- + "\u0001\u0000\u0000\u0000\u01d4\u0095\u0001\u0000\u0000\u0000\u01d5\u01de"
- + "\u00050\u0000\u0000\u01d6\u01da\u0007\u0006\u0000\u0000\u01d7\u01d9\u0007"
- + "\u0007\u0000\u0000\u01d8\u01d7\u0001\u0000\u0000\u0000\u01d9\u01dc\u0001"
- + "\u0000\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01da\u01db\u0001"
- + "\u0000\u0000\u0000\u01db\u01de\u0001\u0000\u0000\u0000\u01dc\u01da\u0001"
- + "\u0000\u0000\u0000\u01dd\u01d5\u0001\u0000\u0000\u0000\u01dd\u01d6\u0001"
- + "\u0000\u0000\u0000\u01de\u01e5\u0001\u0000\u0000\u0000\u01df\u01e1\u0003"
- + "\u0012\b\u0000\u01e0\u01e2\u0007\u0007\u0000\u0000\u01e1\u01e0\u0001\u0000"
- + "\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000"
- + "\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e6\u0001\u0000"
- + "\u0000\u0000\u01e5\u01df\u0001\u0000\u0000\u0000\u01e5\u01e6\u0001\u0000"
- + "\u0000\u0000\u01e6\u01f0\u0001\u0000\u0000\u0000\u01e7\u01e9\u0007\t\u0000"
- + "\u0000\u01e8\u01ea\u0007\n\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"
- + "\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ec\u0001\u0000\u0000\u0000"
- + "\u01eb\u01ed\u0007\u0007\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000\u0000"
- + "\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ec\u0001\u0000\u0000\u0000"
- + "\u01ee\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f1\u0001\u0000\u0000\u0000"
- + "\u01f0\u01e7\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000"
- + "\u01f1\u01f3\u0001\u0000\u0000\u0000\u01f2\u01f4\u0007\u000b\u0000\u0000"
- + "\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000"
- + "\u01f4\u0097\u0001\u0000\u0000\u0000\u01f5\u01fd\u0005\"\u0000\u0000\u01f6"
- + "\u01f7\u0005\\\u0000\u0000\u01f7\u01fc\u0005\"\u0000\u0000\u01f8\u01f9"
- + "\u0005\\\u0000\u0000\u01f9\u01fc\u0005\\\u0000\u0000\u01fa\u01fc\b\f\u0000"
- + "\u0000\u01fb\u01f6\u0001\u0000\u0000\u0000\u01fb\u01f8\u0001\u0000\u0000"
- + "\u0000\u01fb\u01fa\u0001\u0000\u0000\u0000\u01fc\u01ff\u0001\u0000\u0000"
- + "\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000"
- + "\u0000\u01fe\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000"
- + "\u0000\u0200\u020e\u0005\"\u0000\u0000\u0201\u0209\u0005\'\u0000\u0000"
- + "\u0202\u0203\u0005\\\u0000\u0000\u0203\u0208\u0005\'\u0000\u0000\u0204"
- + "\u0205\u0005\\\u0000\u0000\u0205\u0208\u0005\\\u0000\u0000\u0206\u0208"
- + "\b\r\u0000\u0000\u0207\u0202\u0001\u0000\u0000\u0000\u0207\u0204\u0001"
- + "\u0000\u0000\u0000\u0207\u0206\u0001\u0000\u0000\u0000\u0208\u020b\u0001"
- + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u0209\u0207\u0001"
- + "\u0000\u0000\u0000\u020a\u020c\u0001\u0000\u0000\u0000\u020b\u0209\u0001"
- + "\u0000\u0000\u0000\u020c\u020e\u0005\'\u0000\u0000\u020d\u01f5\u0001\u0000"
- + "\u0000\u0000\u020d\u0201\u0001\u0000\u0000\u0000\u020e\u0099\u0001\u0000"
- + "\u0000\u0000\u020f\u0213\u0005/\u0000\u0000\u0210\u0211\u0005\\\u0000"
- + "\u0000\u0211\u0214\b\u000e\u0000\u0000\u0212\u0214\b\u000f\u0000\u0000"
- + "\u0213\u0210\u0001\u0000\u0000\u0000\u0213\u0212\u0001\u0000\u0000\u0000"
- + "\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000"
- + "\u0215\u0213\u0001\u0000\u0000\u0000\u0216\u0217\u0001\u0000\u0000\u0000"
- + "\u0217\u021b\u0005/\u0000\u0000\u0218\u021a\u0007\u0010\u0000\u0000\u0219"
- + "\u0218\u0001\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b"
- + "\u0219\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"
- + "\u021e\u0001\u0000\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e"
- + "\u021f\u0004L\u0001\u0000\u021f\u009b\u0001\u0000\u0000\u0000\u0220\u0221"
- + "\u0005t\u0000\u0000\u0221\u0222\u0005r\u0000\u0000\u0222\u0223\u0005u"
- + "\u0000\u0000\u0223\u0224\u0005e\u0000\u0000\u0224\u009d\u0001\u0000\u0000"
- + "\u0000\u0225\u0226\u0005f\u0000\u0000\u0226\u0227\u0005a\u0000\u0000\u0227"
- + "\u0228\u0005l\u0000\u0000\u0228\u0229\u0005s\u0000\u0000\u0229\u022a\u0005"
- + "e\u0000\u0000\u022a\u009f\u0001\u0000\u0000\u0000\u022b\u022c\u0005n\u0000"
- + "\u0000\u022c\u022d\u0005u\u0000\u0000\u022d\u022e\u0005l\u0000\u0000\u022e"
- + "\u022f\u0005l\u0000\u0000\u022f\u00a1\u0001\u0000\u0000\u0000\u0230\u0231"
- + "\u0005b\u0000\u0000\u0231\u0232\u0005o\u0000\u0000\u0232\u0233\u0005o"
- + "\u0000\u0000\u0233\u0234\u0005l\u0000\u0000\u0234\u0235\u0005e\u0000\u0000"
- + "\u0235\u0236\u0005a\u0000\u0000\u0236\u0257\u0005n\u0000\u0000\u0237\u0238"
- + "\u0005b\u0000\u0000\u0238\u0239\u0005y\u0000\u0000\u0239\u023a\u0005t"
- + "\u0000\u0000\u023a\u0257\u0005e\u0000\u0000\u023b\u023c\u0005s\u0000\u0000"
- + "\u023c\u023d\u0005h\u0000\u0000\u023d\u023e\u0005o\u0000\u0000\u023e\u023f"
- + "\u0005r\u0000\u0000\u023f\u0257\u0005t\u0000\u0000\u0240\u0241\u0005c"
- + "\u0000\u0000\u0241\u0242\u0005h\u0000\u0000\u0242\u0243\u0005a\u0000\u0000"
- + "\u0243\u0257\u0005r\u0000\u0000\u0244\u0245\u0005i\u0000\u0000\u0245\u0246"
- + "\u0005n\u0000\u0000\u0246\u0257\u0005t\u0000\u0000\u0247\u0248\u0005l"
- + "\u0000\u0000\u0248\u0249\u0005o\u0000\u0000\u0249\u024a\u0005n\u0000\u0000"
- + "\u024a\u0257\u0005g\u0000\u0000\u024b\u024c\u0005f\u0000\u0000\u024c\u024d"
- + "\u0005l\u0000\u0000\u024d\u024e\u0005o\u0000\u0000\u024e\u024f\u0005a"
- + "\u0000\u0000\u024f\u0257\u0005t\u0000\u0000\u0250\u0251\u0005d\u0000\u0000"
- + "\u0251\u0252\u0005o\u0000\u0000\u0252\u0253\u0005u\u0000\u0000\u0253\u0254"
- + "\u0005b\u0000\u0000\u0254\u0255\u0005l\u0000\u0000\u0255\u0257\u0005e"
- + "\u0000\u0000\u0256\u0230\u0001\u0000\u0000\u0000\u0256\u0237\u0001\u0000"
- + "\u0000\u0000\u0256\u023b\u0001\u0000\u0000\u0000\u0256\u0240\u0001\u0000"
- + "\u0000\u0000\u0256\u0244\u0001\u0000\u0000\u0000\u0256\u0247\u0001\u0000"
- + "\u0000\u0000\u0256\u024b\u0001\u0000\u0000\u0000\u0256\u0250\u0001\u0000"
- + "\u0000\u0000\u0257\u00a3\u0001\u0000\u0000\u0000\u0258\u0259\u0005d\u0000"
- + "\u0000\u0259\u025a\u0005e\u0000\u0000\u025a\u025b\u0005f\u0000\u0000\u025b"
- + "\u00a5\u0001\u0000\u0000\u0000\u025c\u0260\u0007\u0011\u0000\u0000\u025d"
- + "\u025f\u0007\u0012\u0000\u0000\u025e\u025d\u0001\u0000\u0000\u0000\u025f"
- + "\u0262\u0001\u0000\u0000\u0000\u0260\u025e\u0001\u0000\u0000\u0000\u0260"
- + "\u0261\u0001\u0000\u0000\u0000\u0261\u00a7\u0001\u0000\u0000\u0000\u0262"
- + "\u0260\u0001\u0000\u0000\u0000\u0263\u026c\u00050\u0000\u0000\u0264\u0268"
- + "\u0007\u0006\u0000\u0000\u0265\u0267\u0007\u0007\u0000\u0000\u0266\u0265"
- + "\u0001\u0000\u0000\u0000\u0267\u026a\u0001\u0000\u0000\u0000\u0268\u0266"
- + "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026c"
- + "\u0001\u0000\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026b\u0263"
- + "\u0001\u0000\u0000\u0000\u026b\u0264\u0001\u0000\u0000\u0000\u026c\u026d"
- + "\u0001\u0000\u0000\u0000\u026d\u026e\u0006S\u0002\u0000\u026e\u00a9\u0001"
- + "\u0000\u0000\u0000\u026f\u0273\u0007\u0011\u0000\u0000\u0270\u0272\u0007"
- + "\u0012\u0000\u0000\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001"
- + "\u0000\u0000\u0000\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001"
- + "\u0000\u0000\u0000\u0274\u0276\u0001\u0000\u0000\u0000\u0275\u0273\u0001"
- + "\u0000\u0000\u0000\u0276\u0277\u0006T\u0002\u0000\u0277\u00ab\u0001\u0000"
- + "\u0000\u0000\"\u0000\u0001\u00af\u00b9\u00c3\u00c8\u01b9\u01bc\u01c3\u01c6"
- + "\u01cd\u01d0\u01d3\u01da\u01dd\u01e3\u01e5\u01e9\u01ee\u01f0\u01f3\u01fb"
- + "\u01fd\u0207\u0209\u020d\u0213\u0215\u021b\u0256\u0260\u0268\u026b\u0273"
- + "\u0003\u0006\u0000\u0000\u0002\u0001\u0000\u0002\u0000\u0000";
+ + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0003"
+ + "P\u0256\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0005R\u025e\bR\n"
+ + "R\fR\u0261\tR\u0001S\u0001S\u0001S\u0005S\u0266\bS\nS\fS\u0269\tS\u0003"
+ + "S\u026b\bS\u0001S\u0001S\u0001T\u0001T\u0005T\u0271\bT\nT\fT\u0274\tT"
+ + "\u0001T\u0001T\u0004\u00c2\u01fc\u0208\u0214\u0000U\u0002\u0001\u0004"
+ + "\u0002\u0006\u0003\b\u0004\n\u0005\f\u0006\u000e\u0007\u0010\b\u0012\t"
+ + "\u0014\n\u0016\u000b\u0018\f\u001a\r\u001c\u000e\u001e\u000f \u0010\""
+ + "\u0011$\u0012&\u0013(\u0014*\u0015,\u0016.\u00170\u00182\u00194\u001a"
+ + "6\u001b8\u001c:\u001d<\u001e>\u001f@ B!D\"F#H$J%L&N\'P(R)T*V+X,Z-\\.^"
+ + "/`0b1d2f3h4j5l6n7p8r9t:v;x~?\u0080@\u0082A\u0084B\u0086C\u0088D\u008a"
+ + "E\u008cF\u008eG\u0090H\u0092I\u0094J\u0096K\u0098L\u009aM\u009cN\u009e"
+ + "O\u00a0P\u00a2Q\u00a4R\u00a6S\u00a8T\u00aaU\u0002\u0000\u0001\u0013\u0003"
+ + "\u0000\t\n\r\r \u0002\u0000\n\n\r\r\u0001\u000007\u0002\u0000LLll\u0002"
+ + "\u0000XXxx\u0003\u000009AFaf\u0001\u000019\u0001\u000009\u0006\u0000D"
+ + "DFFLLddffll\u0002\u0000EEee\u0002\u0000++--\u0004\u0000DDFFddff\u0002"
+ + "\u0000\"\"\\\\\u0002\u0000\'\'\\\\\u0001\u0000\n\n\u0002\u0000\n\n//\u0007"
+ + "\u0000UUcciilmssuuxx\u0003\u0000AZ__az\u0004\u000009AZ__az\u029d\u0000"
+ + "\u0002\u0001\u0000\u0000\u0000\u0000\u0004\u0001\u0000\u0000\u0000\u0000"
+ + "\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000\u0000\u0000\n"
+ + "\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001"
+ + "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"
+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"
+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"
+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"
+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"
+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"
+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"
+ + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"
+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"
+ + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:"
+ + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000"
+ + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000"
+ + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H"
+ + "\u0001\u0000\u0000\u0000\u0000J\u0001\u0000\u0000\u0000\u0000L\u0001\u0000"
+ + "\u0000\u0000\u0000N\u0001\u0000\u0000\u0000\u0000P\u0001\u0000\u0000\u0000"
+ + "\u0000R\u0001\u0000\u0000\u0000\u0000T\u0001\u0000\u0000\u0000\u0000V"
+ + "\u0001\u0000\u0000\u0000\u0000X\u0001\u0000\u0000\u0000\u0000Z\u0001\u0000"
+ + "\u0000\u0000\u0000\\\u0001\u0000\u0000\u0000\u0000^\u0001\u0000\u0000"
+ + "\u0000\u0000`\u0001\u0000\u0000\u0000\u0000b\u0001\u0000\u0000\u0000\u0000"
+ + "d\u0001\u0000\u0000\u0000\u0000f\u0001\u0000\u0000\u0000\u0000h\u0001"
+ + "\u0000\u0000\u0000\u0000j\u0001\u0000\u0000\u0000\u0000l\u0001\u0000\u0000"
+ + "\u0000\u0000n\u0001\u0000\u0000\u0000\u0000p\u0001\u0000\u0000\u0000\u0000"
+ + "r\u0001\u0000\u0000\u0000\u0000t\u0001\u0000\u0000\u0000\u0000v\u0001"
+ + "\u0000\u0000\u0000\u0000x\u0001\u0000\u0000\u0000\u0000z\u0001\u0000\u0000"
+ + "\u0000\u0000|\u0001\u0000\u0000\u0000\u0000~\u0001\u0000\u0000\u0000\u0000"
+ + "\u0080\u0001\u0000\u0000\u0000\u0000\u0082\u0001\u0000\u0000\u0000\u0000"
+ + "\u0084\u0001\u0000\u0000\u0000\u0000\u0086\u0001\u0000\u0000\u0000\u0000"
+ + "\u0088\u0001\u0000\u0000\u0000\u0000\u008a\u0001\u0000\u0000\u0000\u0000"
+ + "\u008c\u0001\u0000\u0000\u0000\u0000\u008e\u0001\u0000\u0000\u0000\u0000"
+ + "\u0090\u0001\u0000\u0000\u0000\u0000\u0092\u0001\u0000\u0000\u0000\u0000"
+ + "\u0094\u0001\u0000\u0000\u0000\u0000\u0096\u0001\u0000\u0000\u0000\u0000"
+ + "\u0098\u0001\u0000\u0000\u0000\u0000\u009a\u0001\u0000\u0000\u0000\u0000"
+ + "\u009c\u0001\u0000\u0000\u0000\u0000\u009e\u0001\u0000\u0000\u0000\u0000"
+ + "\u00a0\u0001\u0000\u0000\u0000\u0000\u00a2\u0001\u0000\u0000\u0000\u0000"
+ + "\u00a4\u0001\u0000\u0000\u0000\u0000\u00a6\u0001\u0000\u0000\u0000\u0001"
+ + "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0002"
+ + "\u00ad\u0001\u0000\u0000\u0000\u0004\u00c7\u0001\u0000\u0000\u0000\u0006"
+ + "\u00cb\u0001\u0000\u0000\u0000\b\u00cd\u0001\u0000\u0000\u0000\n\u00cf"
+ + "\u0001\u0000\u0000\u0000\f\u00d1\u0001\u0000\u0000\u0000\u000e\u00d3\u0001"
+ + "\u0000\u0000\u0000\u0010\u00d5\u0001\u0000\u0000\u0000\u0012\u00d7\u0001"
+ + "\u0000\u0000\u0000\u0014\u00db\u0001\u0000\u0000\u0000\u0016\u00e0\u0001"
+ + "\u0000\u0000\u0000\u0018\u00e2\u0001\u0000\u0000\u0000\u001a\u00e4\u0001"
+ + "\u0000\u0000\u0000\u001c\u00e7\u0001\u0000\u0000\u0000\u001e\u00ea\u0001"
+ + "\u0000\u0000\u0000 \u00ef\u0001\u0000\u0000\u0000\"\u00f5\u0001\u0000"
+ + "\u0000\u0000$\u00f8\u0001\u0000\u0000\u0000&\u00fc\u0001\u0000\u0000\u0000"
+ + "(\u0105\u0001\u0000\u0000\u0000*\u010b\u0001\u0000\u0000\u0000,\u0112"
+ + "\u0001\u0000\u0000\u0000.\u0116\u0001\u0000\u0000\u00000\u011a\u0001\u0000"
+ + "\u0000\u00002\u0120\u0001\u0000\u0000\u00004\u0126\u0001\u0000\u0000\u0000"
+ + "6\u012b\u0001\u0000\u0000\u00008\u0136\u0001\u0000\u0000\u0000:\u0138"
+ + "\u0001\u0000\u0000\u0000<\u013a\u0001\u0000\u0000\u0000>\u013c\u0001\u0000"
+ + "\u0000\u0000@\u013f\u0001\u0000\u0000\u0000B\u0141\u0001\u0000\u0000\u0000"
+ + "D\u0143\u0001\u0000\u0000\u0000F\u0145\u0001\u0000\u0000\u0000H\u0148"
+ + "\u0001\u0000\u0000\u0000J\u014b\u0001\u0000\u0000\u0000L\u014f\u0001\u0000"
+ + "\u0000\u0000N\u0151\u0001\u0000\u0000\u0000P\u0154\u0001\u0000\u0000\u0000"
+ + "R\u0156\u0001\u0000\u0000\u0000T\u0159\u0001\u0000\u0000\u0000V\u015c"
+ + "\u0001\u0000\u0000\u0000X\u0160\u0001\u0000\u0000\u0000Z\u0163\u0001\u0000"
+ + "\u0000\u0000\\\u0167\u0001\u0000\u0000\u0000^\u0169\u0001\u0000\u0000"
+ + "\u0000`\u016b\u0001\u0000\u0000\u0000b\u016d\u0001\u0000\u0000\u0000d"
+ + "\u0170\u0001\u0000\u0000\u0000f\u0173\u0001\u0000\u0000\u0000h\u0175\u0001"
+ + "\u0000\u0000\u0000j\u0177\u0001\u0000\u0000\u0000l\u017a\u0001\u0000\u0000"
+ + "\u0000n\u017d\u0001\u0000\u0000\u0000p\u0180\u0001\u0000\u0000\u0000r"
+ + "\u0183\u0001\u0000\u0000\u0000t\u0187\u0001\u0000\u0000\u0000v\u018a\u0001"
+ + "\u0000\u0000\u0000x\u018d\u0001\u0000\u0000\u0000z\u018f\u0001\u0000\u0000"
+ + "\u0000|\u0192\u0001\u0000\u0000\u0000~\u0195\u0001\u0000\u0000\u0000\u0080"
+ + "\u0198\u0001\u0000\u0000\u0000\u0082\u019b\u0001\u0000\u0000\u0000\u0084"
+ + "\u019e\u0001\u0000\u0000\u0000\u0086\u01a1\u0001\u0000\u0000\u0000\u0088"
+ + "\u01a4\u0001\u0000\u0000\u0000\u008a\u01a7\u0001\u0000\u0000\u0000\u008c"
+ + "\u01ab\u0001\u0000\u0000\u0000\u008e\u01af\u0001\u0000\u0000\u0000\u0090"
+ + "\u01b4\u0001\u0000\u0000\u0000\u0092\u01bd\u0001\u0000\u0000\u0000\u0094"
+ + "\u01cf\u0001\u0000\u0000\u0000\u0096\u01dc\u0001\u0000\u0000\u0000\u0098"
+ + "\u020c\u0001\u0000\u0000\u0000\u009a\u020e\u0001\u0000\u0000\u0000\u009c"
+ + "\u021f\u0001\u0000\u0000\u0000\u009e\u0224\u0001\u0000\u0000\u0000\u00a0"
+ + "\u022a\u0001\u0000\u0000\u0000\u00a2\u0255\u0001\u0000\u0000\u0000\u00a4"
+ + "\u0257\u0001\u0000\u0000\u0000\u00a6\u025b\u0001\u0000\u0000\u0000\u00a8"
+ + "\u026a\u0001\u0000\u0000\u0000\u00aa\u026e\u0001\u0000\u0000\u0000\u00ac"
+ + "\u00ae\u0007\u0000\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000\u0000\u00ae"
+ + "\u00af\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00af"
+ + "\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1"
+ + "\u00b2\u0006\u0000\u0000\u0000\u00b2\u0003\u0001\u0000\u0000\u0000\u00b3"
+ + "\u00b4\u0005/\u0000\u0000\u00b4\u00b5\u0005/\u0000\u0000\u00b5\u00b9\u0001"
+ + "\u0000\u0000\u0000\u00b6\u00b8\b\u0001\u0000\u0000\u00b7\u00b6\u0001\u0000"
+ + "\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000"
+ + "\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00c8\u0001\u0000"
+ + "\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00bd\u0005/\u0000"
+ + "\u0000\u00bd\u00be\u0005*\u0000\u0000\u00be\u00c2\u0001\u0000\u0000\u0000"
+ + "\u00bf\u00c1\t\u0000\u0000\u0000\u00c0\u00bf\u0001\u0000\u0000\u0000\u00c1"
+ + "\u00c4\u0001\u0000\u0000\u0000\u00c2\u00c3\u0001\u0000\u0000\u0000\u00c2"
+ + "\u00c0\u0001\u0000\u0000\u0000\u00c3\u00c5\u0001\u0000\u0000\u0000\u00c4"
+ + "\u00c2\u0001\u0000\u0000\u0000\u00c5\u00c6\u0005*\u0000\u0000\u00c6\u00c8"
+ + "\u0005/\u0000\u0000\u00c7\u00b3\u0001\u0000\u0000\u0000\u00c7\u00bc\u0001"
+ + "\u0000\u0000\u0000\u00c8\u00c9\u0001\u0000\u0000\u0000\u00c9\u00ca\u0006"
+ + "\u0001\u0000\u0000\u00ca\u0005\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"
+ + "{\u0000\u0000\u00cc\u0007\u0001\u0000\u0000\u0000\u00cd\u00ce\u0005}\u0000"
+ + "\u0000\u00ce\t\u0001\u0000\u0000\u0000\u00cf\u00d0\u0005[\u0000\u0000"
+ + "\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005]\u0000\u0000\u00d2"
+ + "\r\u0001\u0000\u0000\u0000\u00d3\u00d4\u0005(\u0000\u0000\u00d4\u000f"
+ + "\u0001\u0000\u0000\u0000\u00d5\u00d6\u0005)\u0000\u0000\u00d6\u0011\u0001"
+ + "\u0000\u0000\u0000\u00d7\u00d8\u0005.\u0000\u0000\u00d8\u00d9\u0001\u0000"
+ + "\u0000\u0000\u00d9\u00da\u0006\b\u0001\u0000\u00da\u0013\u0001\u0000\u0000"
+ + "\u0000\u00db\u00dc\u0005?\u0000\u0000\u00dc\u00dd\u0005.\u0000\u0000\u00dd"
+ + "\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0006\t\u0001\u0000\u00df\u0015"
+ + "\u0001\u0000\u0000\u0000\u00e0\u00e1\u0005,\u0000\u0000\u00e1\u0017\u0001"
+ + "\u0000\u0000\u0000\u00e2\u00e3\u0005;\u0000\u0000\u00e3\u0019\u0001\u0000"
+ + "\u0000\u0000\u00e4\u00e5\u0005i\u0000\u0000\u00e5\u00e6\u0005f\u0000\u0000"
+ + "\u00e6\u001b\u0001\u0000\u0000\u0000\u00e7\u00e8\u0005i\u0000\u0000\u00e8"
+ + "\u00e9\u0005n\u0000\u0000\u00e9\u001d\u0001\u0000\u0000\u0000\u00ea\u00eb"
+ + "\u0005e\u0000\u0000\u00eb\u00ec\u0005l\u0000\u0000\u00ec\u00ed\u0005s"
+ + "\u0000\u0000\u00ed\u00ee\u0005e\u0000\u0000\u00ee\u001f\u0001\u0000\u0000"
+ + "\u0000\u00ef\u00f0\u0005w\u0000\u0000\u00f0\u00f1\u0005h\u0000\u0000\u00f1"
+ + "\u00f2\u0005i\u0000\u0000\u00f2\u00f3\u0005l\u0000\u0000\u00f3\u00f4\u0005"
+ + "e\u0000\u0000\u00f4!\u0001\u0000\u0000\u0000\u00f5\u00f6\u0005d\u0000"
+ + "\u0000\u00f6\u00f7\u0005o\u0000\u0000\u00f7#\u0001\u0000\u0000\u0000\u00f8"
+ + "\u00f9\u0005f\u0000\u0000\u00f9\u00fa\u0005o\u0000\u0000\u00fa\u00fb\u0005"
+ + "r\u0000\u0000\u00fb%\u0001\u0000\u0000\u0000\u00fc\u00fd\u0005c\u0000"
+ + "\u0000\u00fd\u00fe\u0005o\u0000\u0000\u00fe\u00ff\u0005n\u0000\u0000\u00ff"
+ + "\u0100\u0005t\u0000\u0000\u0100\u0101\u0005i\u0000\u0000\u0101\u0102\u0005"
+ + "n\u0000\u0000\u0102\u0103\u0005u\u0000\u0000\u0103\u0104\u0005e\u0000"
+ + "\u0000\u0104\'\u0001\u0000\u0000\u0000\u0105\u0106\u0005b\u0000\u0000"
+ + "\u0106\u0107\u0005r\u0000\u0000\u0107\u0108\u0005e\u0000\u0000\u0108\u0109"
+ + "\u0005a\u0000\u0000\u0109\u010a\u0005k\u0000\u0000\u010a)\u0001\u0000"
+ + "\u0000\u0000\u010b\u010c\u0005r\u0000\u0000\u010c\u010d\u0005e\u0000\u0000"
+ + "\u010d\u010e\u0005t\u0000\u0000\u010e\u010f\u0005u\u0000\u0000\u010f\u0110"
+ + "\u0005r\u0000\u0000\u0110\u0111\u0005n\u0000\u0000\u0111+\u0001\u0000"
+ + "\u0000\u0000\u0112\u0113\u0005n\u0000\u0000\u0113\u0114\u0005e\u0000\u0000"
+ + "\u0114\u0115\u0005w\u0000\u0000\u0115-\u0001\u0000\u0000\u0000\u0116\u0117"
+ + "\u0005t\u0000\u0000\u0117\u0118\u0005r\u0000\u0000\u0118\u0119\u0005y"
+ + "\u0000\u0000\u0119/\u0001\u0000\u0000\u0000\u011a\u011b\u0005c\u0000\u0000"
+ + "\u011b\u011c\u0005a\u0000\u0000\u011c\u011d\u0005t\u0000\u0000\u011d\u011e"
+ + "\u0005c\u0000\u0000\u011e\u011f\u0005h\u0000\u0000\u011f1\u0001\u0000"
+ + "\u0000\u0000\u0120\u0121\u0005t\u0000\u0000\u0121\u0122\u0005h\u0000\u0000"
+ + "\u0122\u0123\u0005r\u0000\u0000\u0123\u0124\u0005o\u0000\u0000\u0124\u0125"
+ + "\u0005w\u0000\u0000\u01253\u0001\u0000\u0000\u0000\u0126\u0127\u0005t"
+ + "\u0000\u0000\u0127\u0128\u0005h\u0000\u0000\u0128\u0129\u0005i\u0000\u0000"
+ + "\u0129\u012a\u0005s\u0000\u0000\u012a5\u0001\u0000\u0000\u0000\u012b\u012c"
+ + "\u0005i\u0000\u0000\u012c\u012d\u0005n\u0000\u0000\u012d\u012e\u0005s"
+ + "\u0000\u0000\u012e\u012f\u0005t\u0000\u0000\u012f\u0130\u0005a\u0000\u0000"
+ + "\u0130\u0131\u0005n\u0000\u0000\u0131\u0132\u0005c\u0000\u0000\u0132\u0133"
+ + "\u0005e\u0000\u0000\u0133\u0134\u0005o\u0000\u0000\u0134\u0135\u0005f"
+ + "\u0000\u0000\u01357\u0001\u0000\u0000\u0000\u0136\u0137\u0005!\u0000\u0000"
+ + "\u01379\u0001\u0000\u0000\u0000\u0138\u0139\u0005~\u0000\u0000\u0139;"
+ + "\u0001\u0000\u0000\u0000\u013a\u013b\u0005*\u0000\u0000\u013b=\u0001\u0000"
+ + "\u0000\u0000\u013c\u013d\u0005/\u0000\u0000\u013d\u013e\u0004\u001e\u0000"
+ + "\u0000\u013e?\u0001\u0000\u0000\u0000\u013f\u0140\u0005%\u0000\u0000\u0140"
+ + "A\u0001\u0000\u0000\u0000\u0141\u0142\u0005+\u0000\u0000\u0142C\u0001"
+ + "\u0000\u0000\u0000\u0143\u0144\u0005-\u0000\u0000\u0144E\u0001\u0000\u0000"
+ + "\u0000\u0145\u0146\u0005<\u0000\u0000\u0146\u0147\u0005<\u0000\u0000\u0147"
+ + "G\u0001\u0000\u0000\u0000\u0148\u0149\u0005>\u0000\u0000\u0149\u014a\u0005"
+ + ">\u0000\u0000\u014aI\u0001\u0000\u0000\u0000\u014b\u014c\u0005>\u0000"
+ + "\u0000\u014c\u014d\u0005>\u0000\u0000\u014d\u014e\u0005>\u0000\u0000\u014e"
+ + "K\u0001\u0000\u0000\u0000\u014f\u0150\u0005<\u0000\u0000\u0150M\u0001"
+ + "\u0000\u0000\u0000\u0151\u0152\u0005<\u0000\u0000\u0152\u0153\u0005=\u0000"
+ + "\u0000\u0153O\u0001\u0000\u0000\u0000\u0154\u0155\u0005>\u0000\u0000\u0155"
+ + "Q\u0001\u0000\u0000\u0000\u0156\u0157\u0005>\u0000\u0000\u0157\u0158\u0005"
+ + "=\u0000\u0000\u0158S\u0001\u0000\u0000\u0000\u0159\u015a\u0005=\u0000"
+ + "\u0000\u015a\u015b\u0005=\u0000\u0000\u015bU\u0001\u0000\u0000\u0000\u015c"
+ + "\u015d\u0005=\u0000\u0000\u015d\u015e\u0005=\u0000\u0000\u015e\u015f\u0005"
+ + "=\u0000\u0000\u015fW\u0001\u0000\u0000\u0000\u0160\u0161\u0005!\u0000"
+ + "\u0000\u0161\u0162\u0005=\u0000\u0000\u0162Y\u0001\u0000\u0000\u0000\u0163"
+ + "\u0164\u0005!\u0000\u0000\u0164\u0165\u0005=\u0000\u0000\u0165\u0166\u0005"
+ + "=\u0000\u0000\u0166[\u0001\u0000\u0000\u0000\u0167\u0168\u0005&\u0000"
+ + "\u0000\u0168]\u0001\u0000\u0000\u0000\u0169\u016a\u0005^\u0000\u0000\u016a"
+ + "_\u0001\u0000\u0000\u0000\u016b\u016c\u0005|\u0000\u0000\u016ca\u0001"
+ + "\u0000\u0000\u0000\u016d\u016e\u0005&\u0000\u0000\u016e\u016f\u0005&\u0000"
+ + "\u0000\u016fc\u0001\u0000\u0000\u0000\u0170\u0171\u0005|\u0000\u0000\u0171"
+ + "\u0172\u0005|\u0000\u0000\u0172e\u0001\u0000\u0000\u0000\u0173\u0174\u0005"
+ + "?\u0000\u0000\u0174g\u0001\u0000\u0000\u0000\u0175\u0176\u0005:\u0000"
+ + "\u0000\u0176i\u0001\u0000\u0000\u0000\u0177\u0178\u0005?\u0000\u0000\u0178"
+ + "\u0179\u0005:\u0000\u0000\u0179k\u0001\u0000\u0000\u0000\u017a\u017b\u0005"
+ + ":\u0000\u0000\u017b\u017c\u0005:\u0000\u0000\u017cm\u0001\u0000\u0000"
+ + "\u0000\u017d\u017e\u0005-\u0000\u0000\u017e\u017f\u0005>\u0000\u0000\u017f"
+ + "o\u0001\u0000\u0000\u0000\u0180\u0181\u0005=\u0000\u0000\u0181\u0182\u0005"
+ + "~\u0000\u0000\u0182q\u0001\u0000\u0000\u0000\u0183\u0184\u0005=\u0000"
+ + "\u0000\u0184\u0185\u0005=\u0000\u0000\u0185\u0186\u0005~\u0000\u0000\u0186"
+ + "s\u0001\u0000\u0000\u0000\u0187\u0188\u0005+\u0000\u0000\u0188\u0189\u0005"
+ + "+\u0000\u0000\u0189u\u0001\u0000\u0000\u0000\u018a\u018b\u0005-\u0000"
+ + "\u0000\u018b\u018c\u0005-\u0000\u0000\u018cw\u0001\u0000\u0000\u0000\u018d"
+ + "\u018e\u0005=\u0000\u0000\u018ey\u0001\u0000\u0000\u0000\u018f\u0190\u0005"
+ + "+\u0000\u0000\u0190\u0191\u0005=\u0000\u0000\u0191{\u0001\u0000\u0000"
+ + "\u0000\u0192\u0193\u0005-\u0000\u0000\u0193\u0194\u0005=\u0000\u0000\u0194"
+ + "}\u0001\u0000\u0000\u0000\u0195\u0196\u0005*\u0000\u0000\u0196\u0197\u0005"
+ + "=\u0000\u0000\u0197\u007f\u0001\u0000\u0000\u0000\u0198\u0199\u0005/\u0000"
+ + "\u0000\u0199\u019a\u0005=\u0000\u0000\u019a\u0081\u0001\u0000\u0000\u0000"
+ + "\u019b\u019c\u0005%\u0000\u0000\u019c\u019d\u0005=\u0000\u0000\u019d\u0083"
+ + "\u0001\u0000\u0000\u0000\u019e\u019f\u0005&\u0000\u0000\u019f\u01a0\u0005"
+ + "=\u0000\u0000\u01a0\u0085\u0001\u0000\u0000\u0000\u01a1\u01a2\u0005^\u0000"
+ + "\u0000\u01a2\u01a3\u0005=\u0000\u0000\u01a3\u0087\u0001\u0000\u0000\u0000"
+ + "\u01a4\u01a5\u0005|\u0000\u0000\u01a5\u01a6\u0005=\u0000\u0000\u01a6\u0089"
+ + "\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005<\u0000\u0000\u01a8\u01a9\u0005"
+ + "<\u0000\u0000\u01a9\u01aa\u0005=\u0000\u0000\u01aa\u008b\u0001\u0000\u0000"
+ + "\u0000\u01ab\u01ac\u0005>\u0000\u0000\u01ac\u01ad\u0005>\u0000\u0000\u01ad"
+ + "\u01ae\u0005=\u0000\u0000\u01ae\u008d\u0001\u0000\u0000\u0000\u01af\u01b0"
+ + "\u0005>\u0000\u0000\u01b0\u01b1\u0005>\u0000\u0000\u01b1\u01b2\u0005>"
+ + "\u0000\u0000\u01b2\u01b3\u0005=\u0000\u0000\u01b3\u008f\u0001\u0000\u0000"
+ + "\u0000\u01b4\u01b6\u00050\u0000\u0000\u01b5\u01b7\u0007\u0002\u0000\u0000"
+ + "\u01b6\u01b5\u0001\u0000\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000"
+ + "\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000"
+ + "\u01b9\u01bb\u0001\u0000\u0000\u0000\u01ba\u01bc\u0007\u0003\u0000\u0000"
+ + "\u01bb\u01ba\u0001\u0000\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000"
+ + "\u01bc\u0091\u0001\u0000\u0000\u0000\u01bd\u01be\u00050\u0000\u0000\u01be"
+ + "\u01c0\u0007\u0004\u0000\u0000\u01bf\u01c1\u0007\u0005\u0000\u0000\u01c0"
+ + "\u01bf\u0001\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"
+ + "\u01c0\u0001\u0000\u0000\u0000\u01c2\u01c3\u0001\u0000\u0000\u0000\u01c3"
+ + "\u01c5\u0001\u0000\u0000\u0000\u01c4\u01c6\u0007\u0003\u0000\u0000\u01c5"
+ + "\u01c4\u0001\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6"
+ + "\u0093\u0001\u0000\u0000\u0000\u01c7\u01d0\u00050\u0000\u0000\u01c8\u01cc"
+ + "\u0007\u0006\u0000\u0000\u01c9\u01cb\u0007\u0007\u0000\u0000\u01ca\u01c9"
+ + "\u0001\u0000\u0000\u0000\u01cb\u01ce\u0001\u0000\u0000\u0000\u01cc\u01ca"
+ + "\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd\u01d0"
+ + "\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001\u0000\u0000\u0000\u01cf\u01c7"
+ + "\u0001\u0000\u0000\u0000\u01cf\u01c8\u0001\u0000\u0000\u0000\u01d0\u01d2"
+ + "\u0001\u0000\u0000\u0000\u01d1\u01d3\u0007\b\u0000\u0000\u01d2\u01d1\u0001"
+ + "\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u0095\u0001"
+ + "\u0000\u0000\u0000\u01d4\u01dd\u00050\u0000\u0000\u01d5\u01d9\u0007\u0006"
+ + "\u0000\u0000\u01d6\u01d8\u0007\u0007\u0000\u0000\u01d7\u01d6\u0001\u0000"
+ + "\u0000\u0000\u01d8\u01db\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000"
+ + "\u0000\u0000\u01d9\u01da\u0001\u0000\u0000\u0000\u01da\u01dd\u0001\u0000"
+ + "\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc\u01d4\u0001\u0000"
+ + "\u0000\u0000\u01dc\u01d5\u0001\u0000\u0000\u0000\u01dd\u01e4\u0001\u0000"
+ + "\u0000\u0000\u01de\u01e0\u0003\u0012\b\u0000\u01df\u01e1\u0007\u0007\u0000"
+ + "\u0000\u01e0\u01df\u0001\u0000\u0000\u0000\u01e1\u01e2\u0001\u0000\u0000"
+ + "\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000"
+ + "\u0000\u01e3\u01e5\u0001\u0000\u0000\u0000\u01e4\u01de\u0001\u0000\u0000"
+ + "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01ef\u0001\u0000\u0000"
+ + "\u0000\u01e6\u01e8\u0007\t\u0000\u0000\u01e7\u01e9\u0007\n\u0000\u0000"
+ + "\u01e8\u01e7\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000"
+ + "\u01e9\u01eb\u0001\u0000\u0000\u0000\u01ea\u01ec\u0007\u0007\u0000\u0000"
+ + "\u01eb\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000"
+ + "\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000"
+ + "\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01e6\u0001\u0000\u0000\u0000"
+ + "\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f2\u0001\u0000\u0000\u0000"
+ + "\u01f1\u01f3\u0007\u000b\u0000\u0000\u01f2\u01f1\u0001\u0000\u0000\u0000"
+ + "\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u0097\u0001\u0000\u0000\u0000"
+ + "\u01f4\u01fc\u0005\"\u0000\u0000\u01f5\u01f6\u0005\\\u0000\u0000\u01f6"
+ + "\u01fb\u0005\"\u0000\u0000\u01f7\u01f8\u0005\\\u0000\u0000\u01f8\u01fb"
+ + "\u0005\\\u0000\u0000\u01f9\u01fb\b\f\u0000\u0000\u01fa\u01f5\u0001\u0000"
+ + "\u0000\u0000\u01fa\u01f7\u0001\u0000\u0000\u0000\u01fa\u01f9\u0001\u0000"
+ + "\u0000\u0000\u01fb\u01fe\u0001\u0000\u0000\u0000\u01fc\u01fd\u0001\u0000"
+ + "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0001\u0000"
+ + "\u0000\u0000\u01fe\u01fc\u0001\u0000\u0000\u0000\u01ff\u020d\u0005\"\u0000"
+ + "\u0000\u0200\u0208\u0005\'\u0000\u0000\u0201\u0202\u0005\\\u0000\u0000"
+ + "\u0202\u0207\u0005\'\u0000\u0000\u0203\u0204\u0005\\\u0000\u0000\u0204"
+ + "\u0207\u0005\\\u0000\u0000\u0205\u0207\b\r\u0000\u0000\u0206\u0201\u0001"
+ + "\u0000\u0000\u0000\u0206\u0203\u0001\u0000\u0000\u0000\u0206\u0205\u0001"
+ + "\u0000\u0000\u0000\u0207\u020a\u0001\u0000\u0000\u0000\u0208\u0209\u0001"
+ + "\u0000\u0000\u0000\u0208\u0206\u0001\u0000\u0000\u0000\u0209\u020b\u0001"
+ + "\u0000\u0000\u0000\u020a\u0208\u0001\u0000\u0000\u0000\u020b\u020d\u0005"
+ + "\'\u0000\u0000\u020c\u01f4\u0001\u0000\u0000\u0000\u020c\u0200\u0001\u0000"
+ + "\u0000\u0000\u020d\u0099\u0001\u0000\u0000\u0000\u020e\u0212\u0005/\u0000"
+ + "\u0000\u020f\u0210\u0005\\\u0000\u0000\u0210\u0213\b\u000e\u0000\u0000"
+ + "\u0211\u0213\b\u000f\u0000\u0000\u0212\u020f\u0001\u0000\u0000\u0000\u0212"
+ + "\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000\u0214"
+ + "\u0215\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215"
+ + "\u0216\u0001\u0000\u0000\u0000\u0216\u021a\u0005/\u0000\u0000\u0217\u0219"
+ + "\u0007\u0010\u0000\u0000\u0218\u0217\u0001\u0000\u0000\u0000\u0219\u021c"
+ + "\u0001\u0000\u0000\u0000\u021a\u0218\u0001\u0000\u0000\u0000\u021a\u021b"
+ + "\u0001\u0000\u0000\u0000\u021b\u021d\u0001\u0000\u0000\u0000\u021c\u021a"
+ + "\u0001\u0000\u0000\u0000\u021d\u021e\u0004L\u0001\u0000\u021e\u009b\u0001"
+ + "\u0000\u0000\u0000\u021f\u0220\u0005t\u0000\u0000\u0220\u0221\u0005r\u0000"
+ + "\u0000\u0221\u0222\u0005u\u0000\u0000\u0222\u0223\u0005e\u0000\u0000\u0223"
+ + "\u009d\u0001\u0000\u0000\u0000\u0224\u0225\u0005f\u0000\u0000\u0225\u0226"
+ + "\u0005a\u0000\u0000\u0226\u0227\u0005l\u0000\u0000\u0227\u0228\u0005s"
+ + "\u0000\u0000\u0228\u0229\u0005e\u0000\u0000\u0229\u009f\u0001\u0000\u0000"
+ + "\u0000\u022a\u022b\u0005n\u0000\u0000\u022b\u022c\u0005u\u0000\u0000\u022c"
+ + "\u022d\u0005l\u0000\u0000\u022d\u022e\u0005l\u0000\u0000\u022e\u00a1\u0001"
+ + "\u0000\u0000\u0000\u022f\u0230\u0005b\u0000\u0000\u0230\u0231\u0005o\u0000"
+ + "\u0000\u0231\u0232\u0005o\u0000\u0000\u0232\u0233\u0005l\u0000\u0000\u0233"
+ + "\u0234\u0005e\u0000\u0000\u0234\u0235\u0005a\u0000\u0000\u0235\u0256\u0005"
+ + "n\u0000\u0000\u0236\u0237\u0005b\u0000\u0000\u0237\u0238\u0005y\u0000"
+ + "\u0000\u0238\u0239\u0005t\u0000\u0000\u0239\u0256\u0005e\u0000\u0000\u023a"
+ + "\u023b\u0005s\u0000\u0000\u023b\u023c\u0005h\u0000\u0000\u023c\u023d\u0005"
+ + "o\u0000\u0000\u023d\u023e\u0005r\u0000\u0000\u023e\u0256\u0005t\u0000"
+ + "\u0000\u023f\u0240\u0005c\u0000\u0000\u0240\u0241\u0005h\u0000\u0000\u0241"
+ + "\u0242\u0005a\u0000\u0000\u0242\u0256\u0005r\u0000\u0000\u0243\u0244\u0005"
+ + "i\u0000\u0000\u0244\u0245\u0005n\u0000\u0000\u0245\u0256\u0005t\u0000"
+ + "\u0000\u0246\u0247\u0005l\u0000\u0000\u0247\u0248\u0005o\u0000\u0000\u0248"
+ + "\u0249\u0005n\u0000\u0000\u0249\u0256\u0005g\u0000\u0000\u024a\u024b\u0005"
+ + "f\u0000\u0000\u024b\u024c\u0005l\u0000\u0000\u024c\u024d\u0005o\u0000"
+ + "\u0000\u024d\u024e\u0005a\u0000\u0000\u024e\u0256\u0005t\u0000\u0000\u024f"
+ + "\u0250\u0005d\u0000\u0000\u0250\u0251\u0005o\u0000\u0000\u0251\u0252\u0005"
+ + "u\u0000\u0000\u0252\u0253\u0005b\u0000\u0000\u0253\u0254\u0005l\u0000"
+ + "\u0000\u0254\u0256\u0005e\u0000\u0000\u0255\u022f\u0001\u0000\u0000\u0000"
+ + "\u0255\u0236\u0001\u0000\u0000\u0000\u0255\u023a\u0001\u0000\u0000\u0000"
+ + "\u0255\u023f\u0001\u0000\u0000\u0000\u0255\u0243\u0001\u0000\u0000\u0000"
+ + "\u0255\u0246\u0001\u0000\u0000\u0000\u0255\u024a\u0001\u0000\u0000\u0000"
+ + "\u0255\u024f\u0001\u0000\u0000\u0000\u0256\u00a3\u0001\u0000\u0000\u0000"
+ + "\u0257\u0258\u0005d\u0000\u0000\u0258\u0259\u0005e\u0000\u0000\u0259\u025a"
+ + "\u0005f\u0000\u0000\u025a\u00a5\u0001\u0000\u0000\u0000\u025b\u025f\u0007"
+ + "\u0011\u0000\u0000\u025c\u025e\u0007\u0012\u0000\u0000\u025d\u025c\u0001"
+ + "\u0000\u0000\u0000\u025e\u0261\u0001\u0000\u0000\u0000\u025f\u025d\u0001"
+ + "\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260\u00a7\u0001"
+ + "\u0000\u0000\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0262\u026b\u0005"
+ + "0\u0000\u0000\u0263\u0267\u0007\u0006\u0000\u0000\u0264\u0266\u0007\u0007"
+ + "\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000\u0266\u0269\u0001\u0000"
+ + "\u0000\u0000\u0267\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000"
+ + "\u0000\u0000\u0268\u026b\u0001\u0000\u0000\u0000\u0269\u0267\u0001\u0000"
+ + "\u0000\u0000\u026a\u0262\u0001\u0000\u0000\u0000\u026a\u0263\u0001\u0000"
+ + "\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026d\u0006S\u0002"
+ + "\u0000\u026d\u00a9\u0001\u0000\u0000\u0000\u026e\u0272\u0007\u0011\u0000"
+ + "\u0000\u026f\u0271\u0007\u0012\u0000\u0000\u0270\u026f\u0001\u0000\u0000"
+ + "\u0000\u0271\u0274\u0001\u0000\u0000\u0000\u0272\u0270\u0001\u0000\u0000"
+ + "\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0275\u0001\u0000\u0000"
+ + "\u0000\u0274\u0272\u0001\u0000\u0000\u0000\u0275\u0276\u0006T\u0002\u0000"
+ + "\u0276\u00ab\u0001\u0000\u0000\u0000\"\u0000\u0001\u00af\u00b9\u00c2\u00c7"
+ + "\u01b8\u01bb\u01c2\u01c5\u01cc\u01cf\u01d2\u01d9\u01dc\u01e2\u01e4\u01e8"
+ + "\u01ed\u01ef\u01f2\u01fa\u01fc\u0206\u0208\u020c\u0212\u0214\u021a\u0255"
+ + "\u025f\u0267\u026a\u0272\u0003\u0006\u0000\u0000\u0002\u0001\u0000\u0002"
+ + "\u0000\u0000";
public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java
new file mode 100644
index 0000000000000..dbba3226ba300
--- /dev/null
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java
@@ -0,0 +1,51 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.painless;
+
+public class CommentTests extends ScriptTestCase {
+
+ public void testSingleLineComments() {
+ assertEquals(5, exec("// comment\n return 5"));
+ assertEquals(5, exec("// comment\r return 5"));
+ assertEquals(5, exec("return 5 // comment no newline or return char"));
+ }
+
+ public void testOpenCloseComments() {
+ assertEquals(5, exec("/* single-line comment */ return 5"));
+ assertEquals(5, exec("/* multi-line \n */ return 5"));
+ assertEquals(5, exec("/* multi-line \r */ return 5"));
+ assertEquals(5, exec("/* multi-line \n\n\r\r */ return 5"));
+ assertEquals(5, exec("def five = 5; /* multi-line \r */ return five"));
+ assertEquals(5, exec("return 5 /* multi-line ignored code */"));
+ }
+}
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java
index d83811e6668eb..a653edbd05992 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java
@@ -135,35 +135,40 @@ public void testRangeQuery() throws IOException {
public void testRoundsUpperBoundCorrectly() {
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100);
Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(null, 0.095, true, false, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(null, 0.095, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(null, 0.105, true, false, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9223372036854775808 TO 7999]", getQueryString(scaledFloatQ));
}
public void testRoundsLowerBoundCorrectly() {
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100);
Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_QSC);
- assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(-0.1, null, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(-0.095, null, false, true, MOCK_QSC);
- assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(-0.095, null, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(-0.105, null, false, true, MOCK_QSC);
- assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ));
scaledFloatQ = ft.rangeQuery(-0.105, null, true, true, MOCK_QSC);
- assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString());
+ assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ));
+ }
+
+ private String getQueryString(Query query) {
+ assertTrue(query instanceof IndexOrDocValuesQuery);
+ return ((IndexOrDocValuesQuery) query).getIndexQuery().toString();
}
public void testValueForSearch() {
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java
index 743b51e979fb7..99d576bed01c7 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java
@@ -79,7 +79,7 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase {
@After
public void tearDown() throws Exception {
waitingRequests.keySet().forEach(this::finishRequest);
- shutdownExecutorService();
+ shutdownExecutorServices();
super.tearDown();
}
@@ -88,7 +88,7 @@ private CountDownLatch finishRequest(String url) {
return finishingRequests.get(url);
}
- private void shutdownExecutorService() throws InterruptedException {
+ private void shutdownExecutorServices() throws InterruptedException {
if (!handlerService.isShutdown()) {
handlerService.shutdown();
handlerService.awaitTermination(10, TimeUnit.SECONDS);
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1
deleted file mode 100644
index ef410899981ca..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7133d34e92770f59eb28686f4d511b9f3f32e970
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..dde9b7c100dc7
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1
@@ -0,0 +1 @@
+147cb42a90a29501d9ca6094ea0db1d213f3076a
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1
deleted file mode 100644
index 46b83c9e40b3a..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-be44282e1f6b91a0650fcceb558053d6bdd4863d
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..b70a22e9db096
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1
@@ -0,0 +1 @@
+b034dd3a975763e083c7e11b5d0f7d516ab72590
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java
index 2939711f6f7e1..76b109932e642 100644
--- a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java
+++ b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java
@@ -38,7 +38,7 @@
import org.apache.lucene.analysis.ja.JapaneseTokenizer;
import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode;
import org.apache.lucene.analysis.ja.dict.UserDictionary;
-import org.apache.lucene.analysis.ja.util.CSVUtil;
+import org.apache.lucene.analysis.util.CSVUtil;
import org.opensearch.OpenSearchException;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1
deleted file mode 100644
index 36664695a7818..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-bd1f80d33346f7e588685484ef29a304db5190e4
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..323f165c62790
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1
@@ -0,0 +1 @@
+c405f2f7d0fc127d88dfbadd753469b2028fdf52
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1
deleted file mode 100644
index 003ccdf8b0727..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b9ffdc7a52d2087ecb03318ec06305b480cdfe82
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..dd659ddf4de95
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1
@@ -0,0 +1 @@
+970e5775876c2d7e1b9af7421a4b17d96f63faf4
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1
deleted file mode 100644
index e22eaa474016f..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f73e2007b133fb699e517ef13b4952844f0150d8
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..ed0e81d8f1f75
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1
@@ -0,0 +1 @@
+2421e5238e9b8484929291744d709dd743c01da1
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1
deleted file mode 100644
index 1ebe42a2a2f56..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2c09cbc021a8f81a01600a1d2a999361e70f7aed
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..fd8e000088180
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1
@@ -0,0 +1 @@
+a23e7de4cd9ae7af285c89dc1c55e0ac3f157fd3
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1
deleted file mode 100644
index 3c4523d45c0f5..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b054f2c7b11fc7c5601b4c3cdf18aa7508612898
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..d0e7a3b0c751c
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1
@@ -0,0 +1 @@
+8d9bce1ea51db279878c51091dd9aefc7b335da4
\ No newline at end of file
diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java
index 5e2cb8bfbc03a..3fcc995fb4199 100644
--- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java
+++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java
@@ -9,10 +9,10 @@
package org.opensearch.plugin.correlation.core.index.codec;
import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene95.Lucene95Codec;
+import org.apache.lucene.codecs.lucene99.Lucene99Codec;
import org.opensearch.index.mapper.MapperService;
-import org.opensearch.plugin.correlation.core.index.codec.correlation950.CorrelationCodec;
-import org.opensearch.plugin.correlation.core.index.codec.correlation950.PerFieldCorrelationVectorsFormat;
+import org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec;
+import org.opensearch.plugin.correlation.core.index.codec.correlation990.PerFieldCorrelationVectorsFormat;
import java.util.Optional;
import java.util.function.BiFunction;
@@ -24,15 +24,15 @@
* @opensearch.internal
*/
public enum CorrelationCodecVersion {
- V_9_5_0(
+ V_9_9_0(
"CorrelationCodec",
- new Lucene95Codec(),
+ new Lucene99Codec(),
new PerFieldCorrelationVectorsFormat(Optional.empty()),
(userCodec, mapperService) -> new CorrelationCodec(userCodec, new PerFieldCorrelationVectorsFormat(Optional.of(mapperService))),
CorrelationCodec::new
);
- private static final CorrelationCodecVersion CURRENT = V_9_5_0;
+ private static final CorrelationCodecVersion CURRENT = V_9_9_0;
private final String codecName;
private final Codec defaultCodecDelegate;
private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat;
diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java
similarity index 97%
rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java
rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java
index f91ba429fbea9..022972e2e06c3 100644
--- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java
+++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java
@@ -6,7 +6,7 @@
* compatible open source license.
*/
-package org.opensearch.plugin.correlation.core.index.codec.correlation950;
+package org.opensearch.plugin.correlation.core.index.codec.correlation990;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FilterCodec;
@@ -19,7 +19,7 @@
* @opensearch.internal
*/
public class CorrelationCodec extends FilterCodec {
- private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_5_0;
+ private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_9_0;
private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat;
/**
diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java
similarity index 77%
rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java
rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java
index f6862ecc17736..89cc0b614a1a5 100644
--- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java
+++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java
@@ -6,9 +6,9 @@
* compatible open source license.
*/
-package org.opensearch.plugin.correlation.core.index.codec.correlation950;
+package org.opensearch.plugin.correlation.core.index.codec.correlation990;
-import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat;
+import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat;
@@ -26,10 +26,10 @@ public class PerFieldCorrelationVectorsFormat extends BasePerFieldCorrelationVec
public PerFieldCorrelationVectorsFormat(final Optional<MapperService> mapperService) {
super(
mapperService,
- Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN,
- Lucene95HnswVectorsFormat.DEFAULT_BEAM_WIDTH,
- Lucene95HnswVectorsFormat::new,
- Lucene95HnswVectorsFormat::new
+ Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN,
+ Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH,
+ Lucene99HnswVectorsFormat::new,
+ Lucene99HnswVectorsFormat::new
);
}
}
diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java
similarity index 96%
rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java
rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java
index b4dad34d2718e..fc2a9de58a73a 100644
--- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java
+++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java
@@ -9,4 +9,4 @@
/**
- * custom Lucene9.5 codec package for events-correlation-engine
+ * custom Lucene9.9 codec package for events-correlation-engine
 */
-package org.opensearch.plugin.correlation.core.index.codec.correlation950;
+package org.opensearch.plugin.correlation.core.index.codec.correlation990;
diff --git a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec
index 598a3b6af73c2..013c17e4a9736 100644
--- a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -1 +1 @@
-org.opensearch.plugin.correlation.core.index.codec.correlation950.CorrelationCodec
+org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec
diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java
similarity index 98%
rename from plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java
rename to plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java
index b93172537d419..7223b450a136c 100644
--- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java
+++ b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java
@@ -6,7 +6,7 @@
* compatible open source license.
*/
-package org.opensearch.plugin.correlation.core.index.codec.correlation950;
+package org.opensearch.plugin.correlation.core.index.codec.correlation990;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
@@ -32,7 +32,7 @@
import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_EF_CONSTRUCTION;
import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_M;
-import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_5_0;
+import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_9_0;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
@@ -56,7 +56,7 @@ public void testCorrelationVectorIndex() throws Exception {
Function<MapperService, PerFieldCorrelationVectorsFormat> perFieldCorrelationVectorsProvider =
mapperService -> new PerFieldCorrelationVectorsFormat(Optional.of(mapperService));
Function<PerFieldCorrelationVectorsFormat, CorrelationCodec> correlationCodecProvider = (correlationVectorsFormat -> new CorrelationCodec(
- V_9_5_0.getDefaultCodecDelegate(),
+ V_9_9_0.getDefaultCodecDelegate(),
correlationVectorsFormat
));
testCorrelationVectorIndex(correlationCodecProvider, perFieldCorrelationVectorsProvider);
diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1
deleted file mode 100644
index f0d165ff7cf82..0000000000000
--- a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f3cdb002e0f2f30ad9c5fd053d78b1a485511ab1
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..ad4e055d4f19a
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1
@@ -0,0 +1 @@
+d952ad30d3f2d1220f39db175618414b56d14638
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1
deleted file mode 100644
index 40379694f5ea5..0000000000000
--- a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-77e3a27823f795d928b897d8444744ddb044a5c3
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..4309dad93b2b6
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1
@@ -0,0 +1 @@
+36a418325c618e440e5ccb80b75c705d894f50bd
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 820d14b3df8e4..0000000000000
--- a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-684daae9ea45087c670b4f6511edcfdb19c3a695
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..5f54d0ac554e0
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+e9df364a2695e66eb8d2803d6725424842760125
\ No newline at end of file
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java
new file mode 100644
index 0000000000000..5002ab9a2e704
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java
@@ -0,0 +1,33 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.repositories.gcs;
+
+import com.google.auth.oauth2.GoogleCredentials;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+
+/**
+ * This class facilitates fetching Application Default Credentials.
+ * See <a href="https://cloud.google.com/docs/authentication/application-default-credentials">How Application Default Credentials works</a>.
+ */
+public class GoogleApplicationDefaultCredentials {
+ private static final Logger logger = LogManager.getLogger(GoogleApplicationDefaultCredentials.class);
+
+ public GoogleCredentials get() {
+ GoogleCredentials credentials = null;
+ try {
+ credentials = SocketAccess.doPrivilegedIOException(GoogleCredentials::getApplicationDefault);
+ } catch (IOException e) {
+ logger.error("Failed to retrieve \"Application Default Credentials\"", e);
+ }
+ return credentials;
+ }
+}
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java
index c9ebb3acaf3e5..83a4146c99b99 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java
@@ -36,6 +36,7 @@
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
+import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.ServiceOptions;
import com.google.cloud.http.HttpTransportOptions;
@@ -70,6 +71,16 @@ public class GoogleCloudStorageService {
*/
private volatile Map<String, Storage> clientCache = emptyMap();
+ final private GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials;
+
+ public GoogleCloudStorageService() {
+ this.googleApplicationDefaultCredentials = new GoogleApplicationDefaultCredentials();
+ }
+
+ public GoogleCloudStorageService(GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials) {
+ this.googleApplicationDefaultCredentials = googleApplicationDefaultCredentials;
+ }
+
/**
* Refreshes the client settings and clears the client cache. Subsequent calls to
* {@code GoogleCloudStorageService#client} will return new clients constructed
@@ -213,10 +224,11 @@ StorageOptions createStorageOptions(
storageOptionsBuilder.setProjectId(clientSettings.getProjectId());
}
if (clientSettings.getCredential() == null) {
- logger.warn(
- "\"Application Default Credentials\" are not supported out of the box."
- + " Additional file system permissions have to be granted to the plugin."
- );
+ logger.info("\"Application Default Credentials\" will be in use");
+ final GoogleCredentials credentials = googleApplicationDefaultCredentials.get();
+ if (credentials != null) {
+ storageOptionsBuilder.setCredentials(credentials);
+ }
} else {
ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential();
// override token server URI
diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java
index a531555debefb..58e412684ed5a 100644
--- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java
+++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java
@@ -33,8 +33,10 @@
package org.opensearch.repositories.gcs;
import com.google.auth.Credentials;
+import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.http.HttpTransportOptions;
import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
@@ -42,30 +44,38 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.test.OpenSearchTestCase;
+import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
+import java.io.IOException;
+import java.net.Proxy;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.util.Base64;
import java.util.Locale;
import java.util.UUID;
+import org.mockito.Mockito;
+
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class GoogleCloudStorageServiceTests extends OpenSearchTestCase {
+ final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000));
+ final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000));
+ final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+ final String endpoint = randomFrom("http://", "https://")
+ + randomFrom("www.opensearch.org", "www.googleapis.com", "localhost/api", "google.com/oauth")
+ + ":"
+ + randomIntBetween(1, 65535);
+ final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+
public void testClientInitializer() throws Exception {
final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
- final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000));
- final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000));
- final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
- final String endpoint = randomFrom("http://", "https://")
- + randomFrom("www.opensearch.org", "www.googleapis.com", "localhost/api", "google.com/oauth")
- + ":"
- + randomIntBetween(1, 65535);
- final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
final Settings settings = Settings.builder()
.put(
GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(),
@@ -82,31 +92,35 @@ public void testClientInitializer() throws Exception {
.put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint)
.put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName)
.build();
- final GoogleCloudStorageService service = new GoogleCloudStorageService();
+ GoogleCredentials mockGoogleCredentials = Mockito.mock(GoogleCredentials.class);
+ GoogleApplicationDefaultCredentials mockDefaultCredentials = Mockito.mock(GoogleApplicationDefaultCredentials.class);
+ Mockito.when(mockDefaultCredentials.get()).thenReturn(mockGoogleCredentials);
+
+ final GoogleCloudStorageService service = new GoogleCloudStorageService(mockDefaultCredentials);
service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings));
GoogleCloudStorageOperationsStats statsCollector = new GoogleCloudStorageOperationsStats("bucket");
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> service.client("another_client", "repo", statsCollector)
);
- assertThat(e.getMessage(), Matchers.startsWith("Unknown client name"));
+ MatcherAssert.assertThat(e.getMessage(), Matchers.startsWith("Unknown client name"));
assertSettingDeprecationsAndWarnings(
new Setting<?>[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) }
);
final Storage storage = service.client(clientName, "repo", statsCollector);
- assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName));
- assertThat(storage.getOptions().getHost(), Matchers.is(endpoint));
- assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName));
- assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class));
- assertThat(
+ MatcherAssert.assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName));
+ MatcherAssert.assertThat(storage.getOptions().getHost(), Matchers.is(endpoint));
+ MatcherAssert.assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName));
+ MatcherAssert.assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class));
+ MatcherAssert.assertThat(
((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(),
Matchers.is((int) connectTimeValue.millis())
);
- assertThat(
+ MatcherAssert.assertThat(
((HttpTransportOptions) storage.getOptions().getTransportOptions()).getReadTimeout(),
Matchers.is((int) readTimeValue.millis())
);
- assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class));
+ MatcherAssert.assertThat(storage.getOptions().getCredentials(), Matchers.instanceOf(Credentials.class));
}
public void testReinitClientSettings() throws Exception {
@@ -122,33 +136,33 @@ public void testReinitClientSettings() throws Exception {
final GoogleCloudStorageService storageService = plugin.storageService;
GoogleCloudStorageOperationsStats statsCollector = new GoogleCloudStorageOperationsStats("bucket");
final Storage client11 = storageService.client("gcs1", "repo1", statsCollector);
- assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11"));
+ MatcherAssert.assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11"));
final Storage client12 = storageService.client("gcs2", "repo2", statsCollector);
- assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12"));
+ MatcherAssert.assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12"));
// client 3 is missing
final IllegalArgumentException e1 = expectThrows(
IllegalArgumentException.class,
() -> storageService.client("gcs3", "repo3", statsCollector)
);
- assertThat(e1.getMessage(), containsString("Unknown client name [gcs3]."));
+ MatcherAssert.assertThat(e1.getMessage(), containsString("Unknown client name [gcs3]."));
// update client settings
plugin.reload(settings2);
// old client 1 not changed
- assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11"));
+ MatcherAssert.assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11"));
// new client 1 is changed
final Storage client21 = storageService.client("gcs1", "repo1", statsCollector);
- assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21"));
+ MatcherAssert.assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21"));
// old client 2 not changed
- assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12"));
+ MatcherAssert.assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12"));
// new client2 is gone
final IllegalArgumentException e2 = expectThrows(
IllegalArgumentException.class,
() -> storageService.client("gcs2", "repo2", statsCollector)
);
- assertThat(e2.getMessage(), containsString("Unknown client name [gcs2]."));
+ MatcherAssert.assertThat(e2.getMessage(), containsString("Unknown client name [gcs2]."));
// client 3 emerged
final Storage client23 = storageService.client("gcs3", "repo3", statsCollector);
- assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23"));
+ MatcherAssert.assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23"));
}
}
@@ -193,4 +207,72 @@ public void testToTimeout() {
assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue());
assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue());
}
+
+ /**
+ * The following method test the Google Application Default Credential instead of
+ * using service account file.
+ * Considered use of JUnit Mocking due to static method GoogleCredentials.getApplicationDefault
+ * and avoiding environment variables to set which later use GCE.
+ * @throws Exception
+ */
+ public void testApplicationDefaultCredential() throws Exception {
+ GoogleCloudStorageClientSettings settings = getGCSClientSettingsWithoutCredentials();
+ GoogleCredentials mockGoogleCredentials = Mockito.mock(GoogleCredentials.class);
+ HttpTransportOptions mockHttpTransportOptions = Mockito.mock(HttpTransportOptions.class);
+ GoogleApplicationDefaultCredentials mockDefaultCredentials = Mockito.mock(GoogleApplicationDefaultCredentials.class);
+ Mockito.when(mockDefaultCredentials.get()).thenReturn(mockGoogleCredentials);
+
+ GoogleCloudStorageService service = new GoogleCloudStorageService(mockDefaultCredentials);
+ StorageOptions storageOptions = service.createStorageOptions(settings, mockHttpTransportOptions);
+ assertNotNull(storageOptions);
+ assertEquals(storageOptions.getCredentials().toString(), mockGoogleCredentials.toString());
+ }
+
+ /**
+ * The application default credential throws exception when there are
+ * no Environment Variables provided or Google Compute Engine is not running
+ * @throws Exception
+ */
+ public void testApplicationDefaultCredentialsWhenNoSettingProvided() throws Exception {
+ GoogleCloudStorageClientSettings settings = getGCSClientSettingsWithoutCredentials();
+ HttpTransportOptions mockHttpTransportOptions = Mockito.mock(HttpTransportOptions.class);
+ GoogleCloudStorageService service = new GoogleCloudStorageService();
+ StorageOptions storageOptions = service.createStorageOptions(settings, mockHttpTransportOptions);
+
+ Exception exception = assertThrows(IOException.class, GoogleCredentials::getApplicationDefault);
+ assertNotNull(storageOptions);
+ assertNull(storageOptions.getCredentials());
+ MatcherAssert.assertThat(exception.getMessage(), containsString("The Application Default Credentials are not available"));
+ }
+
+ /**
+ * The application default credential throws IOException when it is
+ * used without GoogleCloudStorageService
+ */
+ public void testDefaultCredentialsThrowsExceptionWithoutGCStorageService() {
+ GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials = new GoogleApplicationDefaultCredentials();
+ GoogleCredentials credentials = googleApplicationDefaultCredentials.get();
+ assertNull(credentials);
+ Exception exception = assertThrows(IOException.class, GoogleCredentials::getApplicationDefault);
+ MatcherAssert.assertThat(exception.getMessage(), containsString("The Application Default Credentials are not available"));
+ }
+
+ /**
+ * This is a helper method to provide GCS Client settings without credentials
+ * @return GoogleCloudStorageClientSettings
+ * @throws URISyntaxException
+ */
+ private GoogleCloudStorageClientSettings getGCSClientSettingsWithoutCredentials() throws URISyntaxException {
+ return new GoogleCloudStorageClientSettings(
+ null,
+ endpoint,
+ projectIdName,
+ connectTimeValue,
+ readTimeValue,
+ applicationName,
+ new URI(""),
+ new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null)
+ );
+ }
+
}
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index f04d42a2155d6..36843e3bc8700 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -75,7 +75,7 @@ dependencies {
api 'commons-collections:commons-collections:3.2.2'
api "org.apache.commons:commons-compress:${versions.commonscompress}"
api 'org.apache.commons:commons-configuration2:2.9.0'
- api 'commons-io:commons-io:2.14.0'
+ api 'commons-io:commons-io:2.15.1'
api 'org.apache.commons:commons-lang3:3.14.0'
implementation 'com.google.re2j:re2j:1.7'
api 'javax.servlet:servlet-api:2.5'
diff --git a/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1
deleted file mode 100644
index 33c5cfe53e01d..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a4c6e1f6c196339473cd2e1b037f0eb97c62755b
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1
new file mode 100644
index 0000000000000..47c5d13812a36
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1
@@ -0,0 +1 @@
+f11560da189ab563a5c8e351941415430e9304ea
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1
deleted file mode 100644
index 2c038aad4b934..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a5c081d8f877225732efe13908f350029c811709
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..19f734ca17b79
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1
@@ -0,0 +1 @@
+b4aea155f6d6b1032eba85378564431cfd86f562
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1
deleted file mode 100644
index 3243f524432eb..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c5f8bb68084ea5709a27e935907b1bb49d0bd049
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..4c06d28cba199
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1
@@ -0,0 +1 @@
+3fcc87f3d810ce49d865ee54b40831559c5e129b
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1
deleted file mode 100644
index 1d7da47286ae0..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3643061da474061ffa7f2036a58a7a0d40212276
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..91a5c0f715d2b
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1
@@ -0,0 +1 @@
+19c9a3f52851a1333b648ed83c82d16eb4c64afd
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1
deleted file mode 100644
index 3fab0e47adcbe..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ab56c7223112fac13a66e3f667c5fc666f4a3707
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..6c05600ae3b08
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1
@@ -0,0 +1 @@
+b3e74d5b8cf5e60d9965042fa284085bbe081ce3
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1
deleted file mode 100644
index f93cf7a63bfad..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5752d171cd08ac84f9273258a315bc5f97e1187e
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..f54e6f6893050
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1
@@ -0,0 +1 @@
+af68f90f0410b7b3a1900d3e0a15ad51b10ffd5b
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1
deleted file mode 100644
index 2fc33b62aee54..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6b41cd66a385d513b58b6617f20b701435b64abd
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..49d40b36ba85b
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1
@@ -0,0 +1 @@
+4acab18052267e280d1f9de22c591a5c88bed3a6
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1
deleted file mode 100644
index 99f758b047aa2..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9346006cead763247a786b5cabf3e1ae3c88eadb
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..a01de2aa84c43
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1
@@ -0,0 +1 @@
+9f07e1764389e076a36fb7d9e5769e29f3dab950
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1
deleted file mode 100644
index 705a342a684c4..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fab56e187e3fb3c70c18223184d53a76500114ab
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1
new file mode 100644
index 0000000000000..a5fc8c2059104
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1
@@ -0,0 +1 @@
+9201e6a43a0a89515626f7516c7d1b2c349f76df
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1
deleted file mode 100644
index 31818695cc774..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-504de8cc7dc68e84c8c7c2757522d934e9c50d35
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..cd746f0756e46
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1
@@ -0,0 +1 @@
+ab49eb621d6d01f0ad2f016989d0352ef18ea9a2
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1
deleted file mode 100644
index 3cf3080a98bd9..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-454c7a6afab864de9f0c166246f28f16aaa824c1
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..740737dc13efc
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1
@@ -0,0 +1 @@
+01fcd8bad38d7b8987f6fc93bd7e933240eb727e
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1
deleted file mode 100644
index 41b0dca07556e..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b054760243906af0a327a8f5bd99adc2826ccd88
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..e6ff3dbafda22
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1
@@ -0,0 +1 @@
+abad9abc80dfe6118a60413afa161696bbf8dd43
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1
deleted file mode 100644
index 2f71fd5cc780a..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-bff24f085193e105d4e23e3db27bf81ccb3d830e
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..36ec960c4f7be
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1
@@ -0,0 +1 @@
+d88407ae475e5f4e859a81e4f61e362e939f7bc2
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1
deleted file mode 100644
index f0060b8a0f78f..0000000000000
--- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d80ad3210fa890a856a1d04379d134ab44a09501
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1
new file mode 100644
index 0000000000000..293b82f206c99
--- /dev/null
+++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1
@@ -0,0 +1 @@
+121a75c2ba9ed8b80f5ff131c2411a5d460f38d0
\ No newline at end of file
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java
index 46cf6ae708d1c..d0c0406bd7774 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java
@@ -80,7 +80,7 @@ public class NioHttpPipeliningHandlerTests extends OpenSearchTestCase {
@After
public void cleanup() throws Exception {
waitingRequests.keySet().forEach(this::finishRequest);
- shutdownExecutorService();
+ shutdownExecutorServices();
}
private CountDownLatch finishRequest(String url) {
@@ -88,7 +88,7 @@ private CountDownLatch finishRequest(String url) {
return finishingRequests.get(url);
}
- private void shutdownExecutorService() throws InterruptedException {
+ private void shutdownExecutorServices() throws InterruptedException {
if (!handlerService.isShutdown()) {
handlerService.shutdown();
handlerService.awaitTermination(10, TimeUnit.SECONDS);
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml
new file mode 100644
index 0000000000000..dd8c2a2deb721
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml
@@ -0,0 +1,55 @@
+---
+setup:
+
+ - do:
+ indices.create:
+ index: test1
+ wait_for_active_shards: all
+ body:
+ settings:
+ index.number_of_shards: 1
+ index.number_of_replicas: 1
+
+ - do:
+ index:
+ index: test1
+ id: 1
+ body: { "bar": "bar" }
+
+ - do:
+ indices.refresh: {}
+
+# Related issue: https://github.com/opensearch-project/OpenSearch/issues/9857
+---
+"Test noop_update_total metric can be updated by both update API and bulk API":
+ - skip:
+ version: " - 2.99.99" #TODO: change to 2.11.99 after the PR is backported to 2.x branch
+ reason: "fixed in 3.0"
+
+ - do:
+ update:
+ index: test1
+ id: 1
+ body: { "doc": { "bar": "bar" } }
+
+ - do:
+ indices.stats:
+ index: test1
+ metric: indexing
+
+ - match: { indices.test1.primaries.indexing.noop_update_total: 1 }
+ - match: { indices.test1.total.indexing.noop_update_total: 1 }
+
+ - do:
+ bulk:
+ body: |
+ {"update": {"_id": "1", "_index": "test1"}}
+ {"doc": {"bar": "bar"}}
+
+ - do:
+ indices.stats:
+ index: test1
+ metric: indexing
+
+ - match: { indices.test1.primaries.indexing.noop_update_total: 2 }
+ - match: { indices.test1.total.indexing.noop_update_total: 2 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml
new file mode 100644
index 0000000000000..b9457f0290897
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml
@@ -0,0 +1,141 @@
+---
+"Throw aliases missing exception when removing non-existing alias with setting must_exist to true":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ indices.exists_alias:
+ name: test_alias
+
+ - is_false: ''
+
+ - do:
+ catch: /aliases \[test_alias\] missing/
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ index: test_index
+ alias: test_alias
+ must_exist: true
+
+ - do:
+ catch: /aliases \[testAlias\*\] missing/
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ index: test_index
+ aliases: [ testAlias* ]
+ must_exist: true
+
+ - do:
+ catch: /\[aliases\] can't be empty/
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ index: test_index
+ aliases: []
+ must_exist: true
+
+---
+"Throw aliases missing exception when all of the specified aliases are non-existing":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ indices.exists_alias:
+ name: test_alias
+
+ - is_false: ''
+
+ - do:
+ catch: /aliases \[test\_alias\] missing/
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ index: test_index
+ alias: test_alias
+
+ - do:
+ catch: /aliases \[test\_alias\*\] missing/
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ indices: [ test_index ]
+ aliases: [ test_alias* ]
+
+---
+"Remove successfully when some specified aliases are non-existing":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ indices.exists_alias:
+ name: test_alias
+
+ - is_false: ''
+
+ - do:
+ indices.update_aliases:
+ body:
+ actions:
+ - add:
+ indices: [ test_index ]
+ aliases: [ test_alias ]
+
+ - do:
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ index: test_index
+ aliases: [test_alias, test_alias1, test_alias2]
+ must_exist: false
+
+ - match: { acknowledged: true }
+
+---
+"Remove silently when all of the specified aliases are non-existing and must_exist is false":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ indices.exists_alias:
+ name: test_alias
+
+ - is_false: ''
+
+ - do:
+ indices.update_aliases:
+ body:
+ actions:
+ - remove:
+ index: test_index
+ aliases: [test_alias, test_alias1, test_alias2]
+ must_exist: false
+
+ - match: { acknowledged: true }
diff --git a/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.8.0.jar.sha1
deleted file mode 100644
index 6ad304fa52c12..0000000000000
--- a/server/licenses/lucene-analysis-common-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-36f0363325ca7bf62c180160d1ed5165c7c37795
\ No newline at end of file
diff --git a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..c9e6120da7497
--- /dev/null
+++ b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1
@@ -0,0 +1 @@
+24c8401b530308f9568eb7b408c2029c63f564c6
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1
deleted file mode 100644
index f104c4207d390..0000000000000
--- a/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e98fb408028f40170e6d87c16422bfdc0bb2e392
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..69ecf6aa68200
--- /dev/null
+++ b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1
@@ -0,0 +1 @@
+11c46007366bb037be7d271ab0a5849b1d544662
\ No newline at end of file
diff --git a/server/licenses/lucene-core-9.8.0.jar.sha1 b/server/licenses/lucene-core-9.8.0.jar.sha1
deleted file mode 100644
index f9a3e2f3cbee6..0000000000000
--- a/server/licenses/lucene-core-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e8421c5f8573bcf22e9265fc7e19469545a775a
\ No newline at end of file
diff --git a/server/licenses/lucene-core-9.9.1.jar.sha1 b/server/licenses/lucene-core-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..ae596196d9e6a
--- /dev/null
+++ b/server/licenses/lucene-core-9.9.1.jar.sha1
@@ -0,0 +1 @@
+55249fa9a0ed321adcf8283c6f3b649a6812b0a9
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-9.8.0.jar.sha1 b/server/licenses/lucene-grouping-9.8.0.jar.sha1
deleted file mode 100644
index ab132121b2edc..0000000000000
--- a/server/licenses/lucene-grouping-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d39184518351178c404ed9669fc6cb6111f2288d
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-9.9.1.jar.sha1 b/server/licenses/lucene-grouping-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..e7df056400661
--- /dev/null
+++ b/server/licenses/lucene-grouping-9.9.1.jar.sha1
@@ -0,0 +1 @@
+2f2785e17c5c823cc8f41a7ddb4647aaca8ee773
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-9.8.0.jar.sha1 b/server/licenses/lucene-highlighter-9.8.0.jar.sha1
deleted file mode 100644
index c7cb678fb7b72..0000000000000
--- a/server/licenses/lucene-highlighter-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1ac38c8278dbd63dfab30744a41dd955a415a31c
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-9.9.1.jar.sha1 b/server/licenses/lucene-highlighter-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..828c7294aa586
--- /dev/null
+++ b/server/licenses/lucene-highlighter-9.9.1.jar.sha1
@@ -0,0 +1 @@
+30928513461bf79a5cb057e84da7d34a1e53227d
\ No newline at end of file
diff --git a/server/licenses/lucene-join-9.8.0.jar.sha1 b/server/licenses/lucene-join-9.8.0.jar.sha1
deleted file mode 100644
index 2b6cb8af4faf6..0000000000000
--- a/server/licenses/lucene-join-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3d64fc57bb6e718d906413a9f73c713e6d4d8bb0
\ No newline at end of file
diff --git a/server/licenses/lucene-join-9.9.1.jar.sha1 b/server/licenses/lucene-join-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..34b44ca8c6ad5
--- /dev/null
+++ b/server/licenses/lucene-join-9.9.1.jar.sha1
@@ -0,0 +1 @@
+b9c8cc99632280148f92b4c0a64111c482d5d0ac
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-9.8.0.jar.sha1 b/server/licenses/lucene-memory-9.8.0.jar.sha1
deleted file mode 100644
index 5fdfee401dd0a..0000000000000
--- a/server/licenses/lucene-memory-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5283ac71d6ccecb5e00c7b52df2faec012f2625a
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-9.9.1.jar.sha1 b/server/licenses/lucene-memory-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..b75fba4c331e9
--- /dev/null
+++ b/server/licenses/lucene-memory-9.9.1.jar.sha1
@@ -0,0 +1 @@
+49f820b1b321860fa42a4f7583e8ed8f77b9c1c2
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-9.8.0.jar.sha1 b/server/licenses/lucene-misc-9.8.0.jar.sha1
deleted file mode 100644
index cf815cba15862..0000000000000
--- a/server/licenses/lucene-misc-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9a57b049cf51a5e9c9c1909c420f645f1b6f9a54
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-9.9.1.jar.sha1 b/server/licenses/lucene-misc-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..f1e1e056004e9
--- /dev/null
+++ b/server/licenses/lucene-misc-9.9.1.jar.sha1
@@ -0,0 +1 @@
+db7c30217602dfcda394a4d0f0a9e68140d385a6
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-9.8.0.jar.sha1 b/server/licenses/lucene-queries-9.8.0.jar.sha1
deleted file mode 100644
index 09f369ef18e12..0000000000000
--- a/server/licenses/lucene-queries-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-628db4ef46f1c6a05145bdac1d1bc4ace6341b13
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-9.9.1.jar.sha1 b/server/licenses/lucene-queries-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..888b9b4a05ec8
--- /dev/null
+++ b/server/licenses/lucene-queries-9.9.1.jar.sha1
@@ -0,0 +1 @@
+d157547bd24edc8e9d9d59c273107dc3ac5fde5e
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-9.8.0.jar.sha1 b/server/licenses/lucene-queryparser-9.8.0.jar.sha1
deleted file mode 100644
index 2a42a8956b18b..0000000000000
--- a/server/licenses/lucene-queryparser-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-982faf2bfa55542bf57fbadef54c19ac00f57cae
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-9.9.1.jar.sha1 b/server/licenses/lucene-queryparser-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..1ce8a069a0f4e
--- /dev/null
+++ b/server/licenses/lucene-queryparser-9.9.1.jar.sha1
@@ -0,0 +1 @@
+12d844fe224f6f97c510ac20d68903ed7f626f6c
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-9.8.0.jar.sha1 b/server/licenses/lucene-sandbox-9.8.0.jar.sha1
deleted file mode 100644
index 64a0b07f72d29..0000000000000
--- a/server/licenses/lucene-sandbox-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-06493dbd14d02537716822254866a94458f4d842
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-9.9.1.jar.sha1 b/server/licenses/lucene-sandbox-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..14fd86dadc404
--- /dev/null
+++ b/server/licenses/lucene-sandbox-9.9.1.jar.sha1
@@ -0,0 +1 @@
+272e588fd3d8c0a401b28a1ac715f27044bf62ec
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1
deleted file mode 100644
index d1bcb0581435c..0000000000000
--- a/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9d9a731822ad6eefa1ba288a0c158d478522f165
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..0efd5a7595bfe
--- /dev/null
+++ b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1
@@ -0,0 +1 @@
+e066432e7ab02b2a4914f989bcd8c44adbf340ad
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.8.0.jar.sha1
deleted file mode 100644
index d17459cc569a9..0000000000000
--- a/server/licenses/lucene-spatial3d-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ce752a52b2d4eac90633c7df7982e29504f99e76
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..7f06466e4c721
--- /dev/null
+++ b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1
@@ -0,0 +1 @@
+fa54c9b962778e28ebc0efb9f75297781350361a
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-9.8.0.jar.sha1 b/server/licenses/lucene-suggest-9.8.0.jar.sha1
deleted file mode 100644
index ff47b87672d2c..0000000000000
--- a/server/licenses/lucene-suggest-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f977f96f2093b7fddea6b67caa2e1c5b10edebf6
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-9.9.1.jar.sha1 b/server/licenses/lucene-suggest-9.9.1.jar.sha1
new file mode 100644
index 0000000000000..06732480d1b6c
--- /dev/null
+++ b/server/licenses/lucene-suggest-9.9.1.jar.sha1
@@ -0,0 +1 @@
+9554de5b22ae7483b344b94a9a956960b7a5d49c
\ No newline at end of file
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
index d7fb632c847d1..e27c0c4786da8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
@@ -35,6 +35,8 @@
import org.opensearch.action.DocWriteRequest.OpType;
import org.opensearch.action.DocWriteResponse;
import org.opensearch.action.admin.indices.alias.Alias;
+import org.opensearch.action.admin.indices.stats.IndicesStatsRequest;
+import org.opensearch.action.admin.indices.stats.IndicesStatsResponse;
import org.opensearch.action.delete.DeleteRequest;
import org.opensearch.action.get.GetResponse;
import org.opensearch.action.index.IndexRequest;
@@ -738,6 +740,12 @@ public void testNoopUpdate() {
equalTo(2)
);
+ // test noop_update_total metric in stats changed
+ IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest().indices(indexName).indexing(true);
+ final IndicesStatsResponse indicesStatsResponse = client().admin().indices().stats(indicesStatsRequest).actionGet();
+ assertThat(indicesStatsResponse.getIndex(indexName).getTotal().indexing.getTotal().getNoopUpdateCount(), equalTo(1L));
+ assertThat(indicesStatsResponse.getIndex(indexName).getPrimaries().indexing.getTotal().getNoopUpdateCount(), equalTo(1L));
+
final BulkItemResponse notFoundUpdate = bulkResponse.getItems()[1];
assertNotNull(notFoundUpdate.getFailure());
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java
index c69718d982f8b..522d63b22a0da 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java
@@ -364,6 +364,7 @@ public void testAwarenessZonesIncrementalNodes() {
assertThat(counts.get(noZoneNode), equalTo(2));
}
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/5908")
public void testThreeZoneOneReplicaWithForceZoneValueAndLoadAwareness() throws Exception {
int nodeCountPerAZ = 5;
int numOfShards = 30;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
index 8291fef5d177b..f46f413f4d23f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
@@ -710,6 +710,7 @@ public void testPrimaryCorruptionDuringReplicationDoesNotFailReplicaShard() thro
final NodeStats primaryNode = dataNodeStats.get(0);
final NodeStats replicaNode = dataNodeStats.get(1);
+
assertAcked(
prepareCreate("test").setSettings(
Settings.builder()
@@ -795,6 +796,17 @@ public void testPrimaryCorruptionDuringReplicationDoesNotFailReplicaShard() thro
// Assert the cluster returns to green status because the replica will be promoted to primary
ensureGreen();
+
+ // After Lucene 9.9 check index will flag corruption with old (not the latest) commit points.
+ // For this test our previous corrupt commit "segments_2" will remain on the primary.
+ // This asserts this is the case, and then resets check index status.
+ assertEquals("Check index has a single failure", 1, checkIndexFailures.size());
+ assertTrue(
+ checkIndexFailures.get(0)
+ .getMessage()
+ .contains("could not read old (not latest) commit point segments file \"segments_2\" in directory")
+ );
+ resetCheckIndexStatus();
}
private int numShards(String... index) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
index 848f6eddbb0df..51dba07a8f9f8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
@@ -636,6 +636,45 @@ public void testProfileDisableCache() throws Exception {
}
}
+ public void testCacheWithInvalidation() throws Exception {
+ Client client = client();
+ assertAcked(
+ client.admin()
+ .indices()
+ .prepareCreate("index")
+ .setMapping("k", "type=keyword")
+ .setSettings(
+ Settings.builder()
+ .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ )
+ .get()
+ );
+ indexRandom(true, client.prepareIndex("index").setSource("k", "hello"));
+ ensureSearchable("index");
+ SearchResponse resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get();
+ assertSearchResponse(resp);
+ OpenSearchAssertions.assertAllSuccessful(resp);
+ assertThat(resp.getHits().getTotalHits().value, equalTo(1L));
+
+ assertCacheState(client, "index", 0, 1);
+ // Index but don't refresh
+ indexRandom(false, client.prepareIndex("index").setSource("k", "hello2"));
+ resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get();
+ assertSearchResponse(resp);
+ // Should expect hit as here as refresh didn't happen
+ assertCacheState(client, "index", 1, 1);
+
+ // Explicit refresh would invalidate cache
+ refresh();
+ // Hit same query again
+ resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get();
+ assertSearchResponse(resp);
+ // Should expect miss as key has changed due to change in IndexReader.CacheKey (due to refresh)
+ assertCacheState(client, "index", 1, 2);
+ }
+
private static void assertCacheState(Client client, String index, long expectedHits, long expectedMisses) {
RequestCacheStats requestCacheStats = client.admin()
.indices()
@@ -650,6 +689,7 @@ private static void assertCacheState(Client client, String index, long expectedH
Arrays.asList(expectedHits, expectedMisses, 0L),
Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount(), requestCacheStats.getEvictions())
);
+
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java
index aa1fe695ecc12..d1e66c19c28e2 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java
@@ -57,7 +57,7 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.opensearch.action.search.TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED_KEY;
+import static org.opensearch.action.search.SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED_KEY;
import static org.opensearch.search.aggregations.AggregationBuilders.terms;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java
index 145830f02ee56..7c7cc12888307 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java
@@ -86,6 +86,7 @@ public void testShardSizeEqualsSizeString() throws Exception {
terms("keys").field("key")
.size(3)
.shardSize(3)
+ .showTermDocCountError(true)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.count(false))
)
@@ -98,8 +99,11 @@ public void testShardSizeEqualsSizeString() throws Exception {
expected.put("1", 8L);
expected.put("3", 8L);
expected.put("2", 4L);
+ Long expectedDocCount;
for (Terms.Bucket bucket : buckets) {
- assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
+ expectedDocCount = expected.get(bucket.getKeyAsString());
+ // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680
+ assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount);
}
}
@@ -221,6 +225,7 @@ public void testShardSizeEqualsSizeLong() throws Exception {
terms("keys").field("key")
.size(3)
.shardSize(3)
+ .showTermDocCountError(true)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.count(false))
)
@@ -233,8 +238,11 @@ public void testShardSizeEqualsSizeLong() throws Exception {
expected.put(1, 8L);
expected.put(3, 8L);
expected.put(2, 4L);
+ Long expectedDocCount;
for (Terms.Bucket bucket : buckets) {
- assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
+ expectedDocCount = expected.get(bucket.getKeyAsNumber().intValue());
+ // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680
+ assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount);
}
}
@@ -355,6 +363,7 @@ public void testShardSizeEqualsSizeDouble() throws Exception {
terms("keys").field("key")
.size(3)
.shardSize(3)
+ .showTermDocCountError(true)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.count(false))
)
@@ -367,8 +376,11 @@ public void testShardSizeEqualsSizeDouble() throws Exception {
expected.put(1, 8L);
expected.put(3, 8L);
expected.put(2, 4L);
+ Long expectedDocCount;
for (Terms.Bucket bucket : buckets) {
- assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
+ expectedDocCount = expected.get(bucket.getKeyAsNumber().intValue());
+ // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680
+ assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount);
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
index b355ce6d7a8dd..343cea4b94c87 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
@@ -225,8 +225,16 @@ public void setupSuiteScopeCluster() throws Exception {
}
indexRandom(true, builders);
- indexRandomForMultipleSlices("idx");
ensureSearchable();
+
+ // Force merge each shard down to 1 segment to verify results are the same between concurrent and non-concurrent search paths, else
+ // for concurrent segment search there will be additional error introduced during the slice level reduce and thus different buckets,
+ // doc_counts, and doc_count_errors may be returned. This test serves to verify that the doc_count_error is the same between
+ // concurrent and non-concurrent search in the 1 slice case. TermsFixedDocCountErrorIT verifies that the doc count error is
+ // correctly calculated for concurrent segment search at the slice level.
+ // See https://github.com/opensearch-project/OpenSearch/issues/11680
+ forceMerge(1);
+ Thread.sleep(5000); // Sleep 5s to give the force merge time to complete
}
private void assertDocCountErrorWithinBounds(int size, SearchResponse accurateResponse, SearchResponse testResponse) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
new file mode 100644
index 0000000000000..5ad913e8c7086
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
@@ -0,0 +1,347 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.aggregations.bucket;
+
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.opensearch.action.admin.indices.segments.IndicesSegmentResponse;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
+import org.opensearch.search.aggregations.bucket.terms.Terms;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
+import static org.opensearch.search.aggregations.AggregationBuilders.terms;
+import static org.opensearch.test.OpenSearchIntegTestCase.Scope.TEST;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+
+@OpenSearchIntegTestCase.ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false)
+public class TermsFixedDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase {
+
+ private static final String STRING_FIELD_NAME = "s_value";
+
+ public TermsFixedDocCountErrorIT(Settings dynamicSettings) {
+ super(dynamicSettings);
+ }
+
+ @ParametersFactory
+ public static Collection